#!/usr/bin/env python3
#SenseHatLogger. Author: kurtd5105
from sense_hat import SenseHat
import argparse
import sys
import time
from datetime import datetime
from datetime import timedelta
from itertools import product
#Create a dictionary for the representation of the numbers in a pixel array
numbers = {
"0":
[
[0, 1, 1, 1],
[0, 1, 0, 1],
[0, 1, 0, 1],
[0, 1, 0, 1],
[0, 1, 1, 1]
],
"1":
[
[0, 0, 1, 0],
[0, 1, 1, 0],
[0, 0, 1, 0],
[0, 0, 1, 0],
[0, 1, 1, 1]
],
"2":
[
[0, 1, 1, 1],
[0, 0, 0, 1],
[0, 1, 1, 1],
[0, 1, 0, 0],
[0, 1, 1, 1]
],
"3":
[
[0, 1, 1, 1],
[0, 0, 0, 1],
[0, 1, 1, 1],
[0, 0, 0, 1],
[0, 1, 1, 1]
],
"4":
[
[0, 1, 0, 1],
[0, 1, 0, 1],
[0, 1, 1, 1],
[0, 0, 0, 1],
[0, 0, 0, 1]
],
"5":
[
[0, 1, 1, 1],
[0, 1, 0, 0],
[0, 1, 1, 1],
[0, 0, 0, 1],
[0, 1, 1, 1]
],
"6":
[
[0, 1, 1, 1],
[0, 1, 0, 0],
[0, 1, 1, 1],
[0, 1, 0, 1],
[0, 1, 1, 1]
],
"7":
[
[0, 1, 1, 1],
[0, 0, 0, 1],
[0, 0, 1, 0],
[0, 1, 0, 0],
[0, 1, 0, 0]
],
"8":
[
[0, 1, 1, 1],
[0, 1, 0, 1],
[0, 1, 1, 1],
[0, 1, 0, 1],
[0, 1, 1, 1]
],
"9":
[
[0, 1, 1, 1],
[0, 1, 0, 1],
[0, 1, 1, 1],
[0, 0, 0, 1],
[0, 1, 1, 1]
],
}
"""
generateNumberGroupings
Generates a grid of 0s and 1s (LED off/on) for each possible ordering of digits of a
given grouping length. The grid will be of size screenDimensions, [rows, cols]. Each
digit will be of size numberDimensions, [rows, cols]. A dictionary is returned,
with the digit string as the key and the display grid as its value.
"""
def generateNumberGroupings(numbers, groupingLength, screenDimensions, numberDimensions):
groupings = {}
#For every combination of numbers that are of groupingLength
for group in product(range(10), repeat=groupingLength):
#Create an empty screen
grouping = [[0 for col in range(screenDimensions[1])] for row in range(screenDimensions[0])]
#Copy each number onto the screen in the correct position
for i in range(groupingLength):
for row in range(numberDimensions[0]):
for col in range(numberDimensions[1]):
grouping[row][col + (i * numberDimensions[1])] = numbers[str(group[i])][row][col]
        #Key by the full digit string so the key matches any groupingLength, not just 2
        groupings["".join(str(d) for d in group)] = list(grouping)
return groupings
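#Illustrative sketch (not part of the original script): with the 2-digit call used
#below, generateNumberGroupings(numbers, 2, (5, 8), (5, 4)) returns 100 entries,
#one 5x8 on/off grid per key "00" through "99". For example:
#    groupings["42"][0] == [0, 1, 0, 1, 0, 1, 1, 1]   #top row of "4" followed by "2"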
"""
displayMetrics
Uses the Sense Hat to display the current temperature, as well as the hourly temperature
average, hourly pressure average, and hourly humidity average. currTemp is a float,
metric is in the standard
[time, [[avgT, minT, maxT], [avgP, minP, maxP], [avgH, minH, maxH]]] format, groupings have
the strings of all possible number combinations of int groupingLength as their key and
the display grid as the value. Each part will be displayed on screen for
approximately gap seconds (default 1 second). color is an RGB list, defaulting to green.
"""
def displayMetrics(sense, currTemp, metric, groupings, groupingLength, rotation, gap=1, color=[0, 255, 0]):
#X10 in the bottom 3 rows
extraDigit = [
[128, 128, 128], [0, 0, 0], [128, 128, 128], [255, 255, 255], [0, 0, 0],
[255, 255, 255], [255, 255, 255], [255, 255, 255],
[0, 0, 0], [128, 128, 128], [0, 0, 0], [255, 255, 255], [0, 0, 0],
[255, 255, 255], [0, 0, 0], [255, 255, 255],
[128, 128, 128], [0, 0, 0], [128, 128, 128], [255, 255, 255], [0, 0, 0],
[255, 255, 255], [255, 255, 255], [255, 255, 255]
]
#T in the bottom 3 rows
t = [
[0, 0, 0], [192, 192, 192], [192, 192, 192], [192, 192, 192], [0, 0, 0], [0, 0, 0],
[0, 0, 0], [0, 0, 0],
[0, 0, 0], [0, 0, 0], [192, 192, 192], [0, 0, 0], [0, 0, 0], [0, 0, 0],
[0, 0, 0], [0, 0, 0],
[0, 0, 0], [0, 0, 0], [192, 192, 192], [0, 0, 0], [0, 0, 0], [0, 0, 0],
[0, 0, 0], [0, 0, 0]
]
#P in the bottom 3 rows
p = [
[0, 0, 0], [192, 192, 192], [192, 192, 192], [0, 0, 0], [0, 0, 0], [0, 0, 0],
[0, 0, 0], [0, 0, 0],
[0, 0, 0], [192, 192, 192], [192, 192, 192], [0, 0, 0], [0, 0, 0], [0, 0, 0],
[0, 0, 0], [0, 0, 0],
[0, 0, 0], [192, 192, 192], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0],
[0, 0, 0], [0, 0, 0]
]
#H in the bottom 3 rows
h = [
[0, 0, 0], [192, 192, 192], [0, 0, 0], [192, 192, 192], [0, 0, 0], [0, 0, 0],
[0, 0, 0], [0, 0, 0],
[0, 0, 0], [192, 192, 192], [192, 192, 192], [192, 192, 192], [0, 0, 0], [0, 0, 0],
[0, 0, 0], [0, 0, 0],
[0, 0, 0], [192, 192, 192], [0, 0, 0], [192, 192, 192], [0, 0, 0], [0, 0, 0],
[0, 0, 0], [0, 0, 0]
]
sense.clear()
sense.set_rotation(rotation)
groups = []
    #Append the whole-number part and then the decimal part, each flagged with whether it is a decimal
groups.append([str(int(currTemp)), False])
groups.append([str(currTemp - int(currTemp))[2:], True])
#Add each metric to the display groups
for m in metric[1]:
groups.append([str(int(m[0])), False])
groups.append([str(m[0] - int(m[0]))[2:], True])
    #Drop the most significant digit of the pressure, since it is almost certainly 1
groups[4][0] = str(int(metric[1][1][0]) % 1000)
overflow = [False for x in range(len(groups))]
for i in range(8):
if groups[i][0] == '':
groups[i][0] = "00"
continue
#Check to see if any group overflows and set its overflow flag and shorten the group
if len(groups[i][0]) > groupingLength:
groups[i][0] = groups[i][0][0:groupingLength]
if i % 2 == 0:
overflow[i] = True
        #Add a 0 to the front of a non-decimal, or to the back of a decimal, if necessary
elif i % 2 == 0:
if len(groups[i][0]) == 1:
groups[i][0] = '0' + groups[i][0]
else:
if len(groups[i][0]) == 1:
groups[i][0] = groups[i][0] + '0'
for i in range(8):
sense.clear()
#Change color accordingly here
#Create a list of r, g, b values for each LED
displayList = [color if groupings[groups[i][0]][row][col] else [0, 0, 0] for row in range(5) for col in range(8)]
#If it's a decimal
if groups[i][1]:
displayList[32] = [255, 255, 255]
#If there is an overflow, add the overflow signal to the screen, and move the thp indicator to the side
if overflow[i]:
if i < 4:
displayList[0] = [255, 0, 0]
elif i < 6:
displayList[8] = [255, 255, 0]
else:
displayList[16] = [0, 0, 255]
displayList.extend(extraDigit)
#If there isn't an overflow, display the thp symbol on the bottom of the screen
else:
if i < 4:
displayList.extend(t)
elif i < 6:
displayList.extend(p)
else:
displayList.extend(h)
sense.set_pixels(displayList)
time.sleep(gap)
sense.clear()
sense.set_rotation(0)
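#Illustrative sketch (not part of the original script; the values are made up):
#displayMetrics is driven from the main loop below with the latest reading and the
#hourly metric, e.g.:
#    metric = ['2024-01-01 12:00:00', [[20.5, 19.0, 22.0], [1013.2, 1011.0, 1015.4], [45.1, 40.0, 50.2]]]
#    displayMetrics(sense, 21.37, metric, groupings, 2, rotation=0)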
"""
logData
Logs all the data to a data log file, given by the dataPath.
"""
def logData(dataPath, data):
with open(dataPath, 'a') as f:
for point in data:
f.write("{} Temperature: {}; Pressure: {}; Humidity: {};\n".format(*point))
"""
logMetric
Logs the given metric to the metric log file, given by the metricPath.
"""
def logMetric(metricPath, metric):
with open(metricPath, 'a') as f:
f.write("{} ".format(metric[0]))
metric1, metric2, metric3 = metric[1][0], metric[1][1], metric[1][2]
f.write("Temperature avg: {}; min: {}; max: {}; ".format(*metric1))
f.write("Pressure avg: {}; min: {}; max: {}; ".format(*metric2))
f.write("Humidity avg: {}; min: {}; max: {};\n".format(*metric3))
def offHour(timerange, curr):
if timerange[0] > timerange[1]:
if curr.hour >= timerange[0]:
return True
elif curr.hour < timerange[1]:
return True
else:
if curr.hour >= timerange[0] and curr.hour < timerange[1]:
return True
return False
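#Illustrative example (not part of the original script; the datetimes are made up):
#with the default off-range [23, 8] the matrix is blanked from 23:00 up to, but not
#including, 08:00:
#    offHour([23, 8], datetime(2024, 1, 1, 23, 30))   # True
#    offHour([23, 8], datetime(2024, 1, 1, 7, 59))    # True
#    offHour([23, 8], datetime(2024, 1, 1, 12, 0))    # False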
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="A Raspberry Pi Sense Hat sensor logger with LED and text file output.")
parser.add_argument("-t", "--timerange", nargs=2, type=int, help="Optional argument to change the time range the LED matrix should be off.")
parser.add_argument(
"-r", "--rotation", nargs=1, type=int,
help="Optional argument to change the LED matrix rotation in degrees. The screen will be rotated to the nearest 90 degree."
)
parser.add_argument("-b", "--bright", action="store_true", help="Optional argument to turn the LED matrix to full brightness instead of low.")
args = parser.parse_args()
t = []
#Setup custom timerange if there was a valid range provided
if args.timerange:
t = args.timerange
for i in t:
if i < 0 or i > 23:
print("Time out of range, setting to default.")
t = [23, 8]
break
else:
t = [23, 8]
rotation = 0
#Setup the rotation if it was provided
if args.rotation:
rotation = int(((round(args.rotation[0]/90, 0) % 4) * 90))
sense = SenseHat()
#Set the LED matrix to bright if the argument was provided
if args.bright:
sense.low_light = False
else:
sense.low_light = True
groupings = generateNumberGroupings(numbers, 2, (5, 8), (5, 4))
now = datetime.now()
target = datetime.now()
    #[time, [[avgT, minT, maxT], [avgP, minP, maxP], [avgH, minH, maxH]]]
metric = [0, [[20, 0, 0], [1000, 0, 0], [50, 0, 0]]]
while True:
data = []
        #Minutes 0 through 59 of the hour
for i in range(60):
start = datetime.now()
#Print the current time for debug purposes
print(start)
#Take measurements
data.append([
str(start),
round(sense.get_temperature(), 2),
round(sense.get_pressure(), 2),
round(sense.get_humidity(), 2)
])
#Display the current temperature and the current metrics every 2 minutes
if i % 2 == 0:
if not offHour(t, start):
displayMetrics(sense, data[-1][1], metric, groupings, 2, rotation)
            #Sleep for whatever is left of the 60-second interval that began at start
target = timedelta(seconds = 60) - (datetime.now() - start)
delay = target.total_seconds()
if delay < 0:
delay = 0
time.sleep(delay)
start = datetime.now()
metrics = [str(start)]
data.append([
str(start),
round(sense.get_temperature(), 2),
round(sense.get_pressure(), 2),
round(sense.get_humidity(), 2)
])
#Calculate metrics here
metric = [str(start), []]
for i in range(1, 4):
metricData = [d[i] for d in data]
metric[1].append([round(sum(metricData) / len(metricData), 2), min(metricData), max(metricData)])
print(metric)
#Log the data and metric to log files
logData(start.strftime("%d-%m-%Y") + "_data.log", data)
logMetric(start.strftime("%d-%m-%Y") + "_metric.log", metric)
target = timedelta(seconds = 60) - (datetime.now() - start)
delay = target.total_seconds()
if delay < 0:
delay = 0
time.sleep(delay)
from django.shortcuts import render_to_response
from django.core import serializers
from django.http import HttpResponse
from django.template import RequestContext
from django.core.urlresolvers import reverse
from django.conf import settings
from django import forms
from django.db.models import Avg, Max, Min, Count
from energyweb.graph.models import SensorGroup, SensorReading, Sensor, \
PowerAverage, SRProfile
import calendar, datetime, simplejson
# If a full graph has this many points or fewer, show the individual
# points. (Otherwise only draw the lines.)
GRAPH_SHOW_POINTS_THRESHOLD = 40
def _graph_max_points(start, end, res):
'''
Return the maximum number of points a graph for this date range
(starting at datetime start and ending at datetime end), and
using resolution res, might have. (The graph may have fewer
points if there are missing sensor readings in the date range.)
'''
delta = end - start
per_incr = PowerAverage.AVERAGE_TYPE_TIMEDELTAS[res]
return ((delta.days * 3600 * 24 + delta.seconds)
/ float(per_incr.days * 3600 * 24 + per_incr.seconds))
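# Illustrative example (not part of the original module): at the 'second*10'
# resolution (a ten-second increment, as used by dynamic_graph_data below), a
# one-day range yields (24 * 3600) / 10 = 8640.0 points -- well above
# StaticGraphForm.GRAPH_MAX_POINTS, so clean() would refuse that resolution if it
# were requested explicitly.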
class StaticGraphForm(forms.Form):
# TODO: GRAPH_MAX_POINTS is used to determine when to refuse data
# because the resolution is too fine (it would be too hard on the
# database). This functionality could be more robust.
GRAPH_MAX_POINTS = 2000
DATE_INPUT_FORMATS = (
'%Y-%m-%d', # '2006-10-25'
'%m/%d/%Y', # '10/25/2006'
'%m/%d/%y', # '10/25/06'
)
TIME_INPUT_FORMATS = (
'%H:%M', # '14:30'
'%I:%M %p', # '2:30 PM'
'%I%p', # '2PM'
)
DATE_FORMAT = '%Y-%m-%d'
TIME_FORMAT = '%l:%M %p'
DT_INPUT_SIZE = '10'
# TODO (last I checked, the entry for month in AVERAGE_TYPES was
# None... handle this less hackishly)
RES_LIST = [res for res in PowerAverage.AVERAGE_TYPES if res != 'month']
RES_CHOICES = (
[('auto', 'auto')]
+ [(res_choice, PowerAverage.AVERAGE_TYPE_DESCRIPTIONS[res_choice])
for res_choice in RES_LIST]
)
start = forms.SplitDateTimeField(input_date_formats=DATE_INPUT_FORMATS,
input_time_formats=TIME_INPUT_FORMATS,
widget=forms.SplitDateTimeWidget(attrs={'size': DT_INPUT_SIZE},
date_format=DATE_FORMAT,
time_format=TIME_FORMAT))
end = forms.SplitDateTimeField(input_date_formats=DATE_INPUT_FORMATS,
input_time_formats=TIME_INPUT_FORMATS,
widget=forms.SplitDateTimeWidget(attrs={'size': DT_INPUT_SIZE},
date_format=DATE_FORMAT,
time_format=TIME_FORMAT))
res = forms.ChoiceField(label='Resolution', choices=RES_CHOICES)
def clean(self):
'''
Ensure that:
* Start and end times constitute a valid range.
* The number of data points requested is reasonable.
Also set computed_res (a key in the cleaned data dictionary)
to the specified resolution, or if the specified resolution was
auto, set it to the actual resolution to be used.
'''
cleaned_data = self.cleaned_data
if not cleaned_data['start'] < cleaned_data['end']:
raise forms.ValidationError('Start and end times do not '
'constitute a valid range.')
delta = cleaned_data['end'] - cleaned_data['start']
if cleaned_data['res'] in self.RES_LIST:
per_incr = PowerAverage.AVERAGE_TYPE_TIMEDELTAS[
cleaned_data['res']]
max_points = ((delta.days * 3600 * 24 + delta.seconds)
/ float(per_incr.days * 3600 * 24 + per_incr.seconds))
if _graph_max_points(cleaned_data['start'],
cleaned_data['end'],
cleaned_data['res']) > self.GRAPH_MAX_POINTS:
raise forms.ValidationError('Too many points in graph '
'(resolution too fine).')
cleaned_data['computed_res'] = cleaned_data['res']
else:
if delta.days > 7*52*3: # 3 years
cleaned_data['computed_res'] = 'week'
elif delta.days > 7*8: # 8 weeks
cleaned_data['computed_res'] = 'day'
elif delta.days > 6:
cleaned_data['computed_res'] = 'hour'
elif delta.days > 0:
cleaned_data['computed_res'] = 'minute*10'
elif delta.seconds > 3600*3: # 3 hours
cleaned_data['computed_res'] = 'minute'
else:
cleaned_data['computed_res'] = 'second*10'
return cleaned_data
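# Illustrative example (not part of the original module): with res='auto', clean()
# derives computed_res from the range length, e.g. a 2-hour span -> 'second*10',
# a 2-day span -> 'minute*10', and a 90-day span -> 'day'.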
# TODO: this could probably be done in a more portable way.
def _get_sensor_groups():
'''
Return a list representing the sensors and sensor groups. (See the
code for a description of the structure of the list.)
'''
sensors = Sensor.objects.select_related().order_by('sensor_group__pk')
sensor_groups = []
sensor_ids = []
sensor_ids_by_group = {}
sg_id = None
for sensor in sensors:
sensor_ids.append(sensor.pk)
if sg_id == sensor.sensor_group.pk:
sensor_groups[-1][3].append([sensor.pk, sensor.name])
sensor_ids_by_group[sg_id].append(sensor.pk)
else:
sg_id = sensor.sensor_group.pk
sensor_groups.append([
sg_id,
sensor.sensor_group.name,
sensor.sensor_group.color,
[
[sensor.pk, sensor.name]
]
])
sensor_ids_by_group[sg_id] = [sensor.pk]
return (sensor_groups, sensor_ids, sensor_ids_by_group)
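# Illustrative example (not part of the original module; the values are made up):
# each entry of the returned sensor_groups list has the shape
#     [group_pk, group_name, group_color, [[sensor_pk, sensor_name], ...]]
# e.g. [3, 'North Dorm', '#ff0000', [[7, 'Panel A'], [8, 'Panel B']]].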
def data_interface(request):
'''
A mostly static-HTML view explaining the public data interface.
'''
return render_to_response('graph/data_interface.html',
{'interface_url_template':
'/graph/static/<start>/to/<end>/<res>/data.json',
'interface_start_placeholder': '<start>',
'interface_end_placeholder': '<end>',
'interface_res_placeholder': '<res>',
'interface_junk_suffix': '?junk=<junk>',
'interface_junk_placeholder': '<junk>',
'res_choices': StaticGraphForm.RES_CHOICES},
context_instance=RequestContext(request))
def dynamic_graph(request):
'''
A view returning the HTML for the dynamic (home-page) graph.
(This graph represents the last three hours and updates
automatically.)
'''
junk = str(calendar.timegm(datetime.datetime.now().timetuple()))
start_dt = datetime.datetime.now() - datetime.timedelta(0, 3600*3, 0)
data = str(int(calendar.timegm(start_dt.timetuple()) * 1000))
return render_to_response('graph/dynamic_graph.html',
{'sensor_groups': _get_sensor_groups()[0],
'data_url': reverse('energyweb.graph.views.dynamic_graph_data',
kwargs={'data': data}) + '?junk=' + junk},
context_instance=RequestContext(request))
def dynamic_graph_data(request, data):
'''
A view returning the JSON data used to populate the dynamic graph.
'''
from django.db import connection, transaction
cur = connection.cursor()
(sensor_groups, sensor_ids, sensor_ids_by_group) = _get_sensor_groups()
week_and_month_averages = dict(week={}, month={})
for average_type in ('week', 'month'):
trunc_reading_time = None
for average in PowerAverage.objects.filter(average_type=average_type
).order_by('-trunc_reading_time')[:len(sensor_ids)]:
if trunc_reading_time is None:
trunc_reading_time = average.trunc_reading_time
if average.trunc_reading_time == trunc_reading_time:
# Note that we limited the query by the number of sensors in
# the database. However, there may not be an average for
# every sensor for this time period. If this is the case,
# some of the results will be for an earlier time period and
# have an earlier trunc_reading_time .
week_and_month_averages[average_type][average.sensor_id] \
= average.watts / 1000.0
for sensor_id in sensor_ids:
            if sensor_id not in week_and_month_averages[average_type]:
# We didn't find an average for this sensor; set the entry
# to None.
week_and_month_averages[average_type][sensor_id] = None
week_averages = week_and_month_averages['week']
month_averages = week_and_month_averages['month']
# If the client has supplied data (a string of digits in the
# URL---representing UTC seconds since the epoch), then we only
# consider data since (and including) that timestamp.
# The max is here just in case a client accidentally calls this
# view with a weeks-old timestamp...
start_dt = max(datetime.datetime.utcfromtimestamp(int(data) / 1000),
datetime.datetime.now() - datetime.timedelta(0, 3600*3, 0))
PowerAverage.graph_data_execute(cur, 'second*10', start_dt)
# Also note, above, that if data was supplied then we selected
# everything since the provided timestamp's truncated date,
# including that date. We will always provide the client with
# a new copy of the latest record he received last time, since
# that last record may have changed (more sensors may have
# submitted measurements and added to it). The second to
# latest and older records, however, will never change.
# Now organize the query in a format amenable to the
# (javascript) client. (The grapher wants (x, y) pairs.)
sg_xy_pairs = dict([[sg[0], []] for sg in sensor_groups])
r = cur.fetchone()
if r is None:
d = {'no_results': True,
'week_averages': week_and_month_averages['week'],
'month_averages': week_and_month_averages['month']}
else:
per = r[2]
per_incr = datetime.timedelta(0, 10, 0)
# At the end of each outer loop, we increment per (the current
# ten-second period of time we're considering) by ten seconds.
while r is not None:
# Remember that the JavaScript client takes (and
# gives) UTC timestamps in ms
x = int(calendar.timegm(per.timetuple()) * 1000)
for sg in sensor_groups:
y = 0
for sid in sensor_ids_by_group[sg[0]]:
# If this sensor has a reading for the current per,
# update y. There are three ways the sensor might
# not have such a reading:
# 1. r is None, i.e. there are no more readings at
# all
# 2. r is not None and r[2] > per, i.e. there are
# more readings but not for this per
                    # 3. r is not None and r[2] <= per and r[1] != sid,
# i.e. there are more readings for this per,
# but none for this sensor
if r is not None and r[2] <= per and r[1] == sid:
# If y is None, leave it as such. Else, add
# this sensor reading to y. Afterwards, in
# either case, fetch a new row.
if y is not None:
y += float(r[0])
r = cur.fetchone()
else:
y = None
sg_xy_pairs[sg[0]].append((x, y))
per += per_incr
last_record = x
# desired_first_record lags by (3:00:00 - 0:00:10) = 2:59:50
desired_first_record = x - 1000*3600*3 + 1000*10
junk = str(calendar.timegm(datetime.datetime.now().timetuple()))
data_url = reverse('energyweb.graph.views.dynamic_graph_data',
kwargs={'data': str(last_record)}) + '?junk=' + junk
d = {'no_results': False,
'sg_xy_pairs': sg_xy_pairs,
'desired_first_record':
desired_first_record,
'week_averages': week_and_month_averages['week'],
'month_averages': week_and_month_averages['month'],
'sensor_groups': sensor_groups,
'data_url': data_url}
json_serializer = serializers.get_serializer("json")()
return HttpResponse(simplejson.dumps(d),
mimetype='application/json')
def static_graph(request):
'''
A view returning the HTML for the static (custom-time-period) graph.
'''
if (request.method == 'GET'
and 'start_0' in request.GET
and 'end_0' in request.GET
and 'res' in request.GET):
_get = request.GET.copy()
for field in ('start_0', 'start_1', 'end_0', 'end_1'):
if field in ('start_1', 'end_1'):
# Allow e.g. pm or p.m. instead of PM
_get[field] = _get[field].upper().replace('.', '')
# Allow surrounding whitespace
_get[field] = _get[field].strip()
form = StaticGraphForm(_get)
if form.is_valid():
start = form.cleaned_data['start']
end = form.cleaned_data['end']
res = form.cleaned_data['computed_res']
int_start = int(calendar.timegm(start.timetuple()))
int_end = int(calendar.timegm(end.timetuple()))
js_start = int_start * 1000
js_end = int_end * 1000
junk = str(calendar.timegm(datetime.datetime.now().timetuple()))
data_url = reverse('energyweb.graph.views.static_graph_data',
kwargs={'start': str(int_start),
'end': str(int_end),
'res': res}) + '?junk=' + junk
return render_to_response('graph/static_graph.html',
{'start': js_start,
'end': js_end,
'data_url': data_url,
'form': form,
'form_action': reverse('energyweb.graph.views.static_graph'),
'res': res},
context_instance=RequestContext(request))
return render_to_response('graph/static_graph_form.html',
{'form_action': reverse('energyweb.graph.views.static_graph'),
'form': form},
context_instance=RequestContext(request))
else:
now = datetime.datetime.now()
one_day_ago = now - datetime.timedelta(1)
form = StaticGraphForm(initial={
'start': one_day_ago,
'end': now
})
return render_to_response('graph/static_graph_form.html',
{'form_action': reverse('energyweb.graph.views.static_graph'),
'form': form},
context_instance=RequestContext(request))
def static_graph_data(request, start, end, res):
'''
A view returning the JSON data used to populate the static graph.
'''
from django.db import connection, transaction
cur = connection.cursor()
start_dt = datetime.datetime.utcfromtimestamp(int(start))
end_dt = datetime.datetime.utcfromtimestamp(int(end))
per_incr = PowerAverage.AVERAGE_TYPE_TIMEDELTAS[res]
(sensor_groups, sensor_ids, sensor_ids_by_group) = _get_sensor_groups()
PowerAverage.graph_data_execute(cur, res, start_dt, end_dt)
# Now organize the query in a format amenable to the
# (javascript) client. (The grapher wants (x, y) pairs.)
sg_xy_pairs = dict([[sg[0], []] for sg in sensor_groups])
r = cur.fetchone()
if r is None:
d = {'no_results': True,
'sensor_groups': sensor_groups}
else:
per = r[2]
# At the end of each outer loop, we increment per (the current
# ten-second period of time we're considering) by ten seconds.
while r is not None:
# Remember that the JavaScript client takes (and
# gives) UTC timestamps in ms
x = int(calendar.timegm(per.timetuple()) * 1000)
for sg in sensor_groups:
y = 0
for sid in sensor_ids_by_group[sg[0]]:
# If this sensor has a reading for the current per,
# update y. There are three ways the sensor might
# not have such a reading:
# 1. r is None, i.e. there are no more readings at
# all
# 2. r is not None and r[2] > per, i.e. there are
# more readings but not for this per
                    # 3. r is not None and r[2] <= per and r[1] != sid,
# i.e. there are more readings for this per,
# but none for this sensor
if r is not None and r[2] <= per and r[1] == sid:
# If y is None, leave it as such. Else, add
# this sensor reading to y. Afterwards, in
# either case, fetch a new row.
if y is not None:
y += float(r[0])
r = cur.fetchone()
else:
y = None
sg_xy_pairs[sg[0]].append((x, y))
per += per_incr
d = {'no_results': False,
'sg_xy_pairs': sg_xy_pairs,
'show_points': _graph_max_points(start_dt, end_dt, res)
<= GRAPH_SHOW_POINTS_THRESHOLD,
'sensor_groups': sensor_groups}
json_serializer = serializers.get_serializer("json")()
return HttpResponse(simplejson.dumps(d),
mimetype='application/json')
def mon_status_data(request):
junk = str(calendar.timegm(datetime.datetime.now().timetuple()))
(sensor_groups, sensor_ids, sensor_ids_by_group) = _get_sensor_groups()
sreadings = dict()
for s_id in sensor_ids:
sreadings[s_id] = [None, None, None, None]
try:
sr = SensorReading.objects.filter(sensor__id=s_id).latest('reading_time')
sreadings[s_id][0] = int(calendar.timegm(sr.reading_time.timetuple()) * 1000)
except SensorReading.DoesNotExist:
pass
# TODO: magic number
d = SRProfile.objects.filter(sensor_reading__sensor__id=s_id, sensor_reading__reading_time__gte=(datetime.datetime.now() - datetime.timedelta(1))).aggregate(
Avg('transaction_time'),
Min('transaction_time'),
Max('transaction_time'))
sreadings[s_id][1] = int(d['transaction_time__avg'])
sreadings[s_id][2] = d['transaction_time__min']
sreadings[s_id][3] = d['transaction_time__max']
return HttpResponse(simplejson.dumps({'sensor_readings': sreadings,
'sensor_groups': sensor_groups,
'data_url': reverse('energyweb.graph.views.mon_status_data')
+ '?junk=' + junk}),
mimetype='application/json')
def mon_status(request):
junk = str(calendar.timegm(datetime.datetime.now().timetuple()))
return render_to_response('graph/mon_status.html',
{'sensor_groups': _get_sensor_groups()[0],
'data_url': reverse('energyweb.graph.views.mon_status_data')
+ '?junk=' + junk},
context_instance=RequestContext(request))
if settings.DEBUG:
def _html_wrapper(view_name):
'''
Wrap a view in HTML. (Useful for using the debug toolbar with
JSON responses.)
'''
view = globals()[view_name]
def _view(*args, **kwargs):
response = view(*args, **kwargs)
return HttpResponse('''
<html>
<head><title>%s HTML</title></head>
<body>%s</body>
</html>
''' % (view_name, response.content),
mimetype='text/html')
return _view
dynamic_graph_data_html = _html_wrapper('dynamic_graph_data')
static_graph_data_html = _html_wrapper('static_graph_data')
from __future__ import absolute_import
from datafs.services.service import DataService
from datafs.core.data_archive import DataArchive
from datafs._compat import open_filelike
import hashlib
import fnmatch
import re
import fs.path
from fs.osfs import OSFS
try:
PermissionError
except NameError:
class PermissionError(NameError):
pass
_VALID_AUTHORITY_PATTERNS = r'[\w\-]+'
class DataAPI(object):
DefaultAuthorityName = None
_ArchiveConstructor = DataArchive
def __init__(self, default_versions=None, **kwargs):
if default_versions is None:
default_versions = {}
self.user_config = kwargs
self._manager = None
self._cache = None
self._authorities = {}
self.default_versions = default_versions
self._authorities_locked = False
self._manager_locked = False
def attach_authority(self, service_name, service):
if self._authorities_locked:
raise PermissionError('Authorities locked')
self._validate_authority_name(service_name)
self._authorities[service_name] = DataService(service)
def lock_authorities(self):
self._authorities_locked = True
def lock_manager(self):
self._manager_locked = True
def attach_cache(self, service):
if service in self._authorities.values():
raise ValueError('Cannot attach an authority as a cache')
else:
self._cache = DataService(service)
@property
def manager(self):
return self._manager
# set cache attr
@property
def cache(self):
return self._cache
    # get the default authority setting
@property
def default_authority_name(self):
if self.DefaultAuthorityName is not None:
return self.DefaultAuthorityName
if len(self._authorities) == 0:
raise ValueError(
'No authorities found. See attach_authority.')
if len(self._authorities) > 1:
raise ValueError(
'Authority ambiguous. Set authority or DefaultAuthorityName.')
# get the zeroth key
return list(self._authorities.keys())[0]
    # Do we want to provide a method for setting authorities?
@property
def default_authority(self):
return self._authorities[self.default_authority_name]
@property
def default_versions(self):
return self._default_versions
@default_versions.setter
def default_versions(self, default_versions):
'''
Set archive default read versions
Parameters
----------
default_versions: dict
Dictionary of archive_name, version pairs. On read/download,
archives in this dictionary will download the specified version
by default. Before assignment, archive_names are checked and
normalized.
'''
default_versions = {
self._normalize_archive_name(arch)[1]: v
for arch, v in default_versions.items()}
self._default_versions = default_versions
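    # Illustrative sketch (not part of the original module; the archive name is made
    # up): assigning
    #     api.default_versions = {'my_auth://project/data.csv': '1.1'}
    # stores the normalized key 'project/data.csv', so subsequent reads of that
    # archive default to version 1.1.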
def attach_manager(self, manager):
if self._manager_locked:
raise PermissionError('Manager locked')
self._manager = manager
def create(
self,
archive_name,
authority_name=None,
versioned=True,
raise_on_err=True,
metadata=None,
tags=None,
helper=False):
'''
Create a DataFS archive
Parameters
----------
archive_name: str
Name of the archive
authority_name: str
Name of the data service to use as the archive's data authority
versioned: bool
            If true, store all versions with explicit version numbers (default)
raise_on_err: bool
Raise an error if the archive already exists (default True)
metadata: dict
            Dictionary of additional archive metadata
        tags: list
            Tags to attach to the archive (optional)
        helper: bool
            If true, interactively prompt for required metadata (default False)
'''
authority_name, archive_name = self._normalize_archive_name(
archive_name, authority_name=authority_name)
if authority_name is None:
authority_name = self.default_authority_name
self._validate_archive_name(archive_name)
if metadata is None:
metadata = {}
res = self.manager.create_archive(
archive_name,
authority_name,
archive_path=archive_name,
versioned=versioned,
raise_on_err=raise_on_err,
metadata=metadata,
user_config=self.user_config,
tags=tags,
helper=helper)
return self._ArchiveConstructor(
api=self,
**res)
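    # Illustrative usage (not part of the original module; the name and metadata are
    # made up):
    #     archive = api.create('project/data.csv',
    #                          metadata={'description': 'raw survey data'},
    #                          tags=['survey'])
    # The archive is created on the default authority unless authority_name is given.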
def get_archive(self, archive_name, default_version=None):
'''
Retrieve a data archive
Parameters
----------
archive_name: str
Name of the archive to retrieve
default_version: version
            str or :py:class:`~distutils.version.StrictVersion` giving the default
version number to be used on read operations
Returns
-------
archive: object
New :py:class:`~datafs.core.data_archive.DataArchive` object
Raises
------
KeyError:
A KeyError is raised when the ``archive_name`` is not found
'''
auth, archive_name = self._normalize_archive_name(archive_name)
res = self.manager.get_archive(archive_name)
if default_version is None:
default_version = self._default_versions.get(archive_name, None)
if (auth is not None) and (auth != res['authority_name']):
raise ValueError(
'Archive "{}" not found on {}.'.format(archive_name, auth) +
' Did you mean "{}://{}"?'.format(
res['authority_name'], archive_name))
return self._ArchiveConstructor(
api=self,
default_version=default_version,
**res)
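    # Illustrative usage (not part of the original module; the archive name is made up):
    #     archive = api.get_archive('project/data.csv', default_version='1.0')
    # A bare name is resolved against the default authority; a prefixed name such as
    # 'my_auth://project/data.csv' pins the lookup to that authority and raises a
    # ValueError if the archive is actually stored elsewhere.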
def batch_get_archive(self, archive_names, default_versions=None):
'''
Batch version of :py:meth:`~DataAPI.get_archive`
Parameters
----------
archive_names: list
Iterable of archive names to retrieve
default_versions: str, object, or dict
Default versions to assign to each returned archive. May be a dict
with archive names as keys and versions as values, or may be a
version, in which case the same version is used for all archives.
Versions must be a strict version number string, a
:py:class:`~distutils.version.StrictVersion`, or a
:py:class:`~datafs.core.versions.BumpableVersion` object.
Returns
-------
        archives: dict
            Dictionary mapping archive names to
            :py:class:`~datafs.core.data_archive.DataArchive` objects.
            If an archive is not found, it is omitted (``batch_get_archive``
            does not raise a ``KeyError`` on invalid archive names).
'''
# toss prefixes and normalize names
archive_names = map(
lambda arch: self._normalize_archive_name(arch)[1],
archive_names)
responses = self.manager.batch_get_archive(archive_names)
archives = {}
if default_versions is None:
default_versions = {}
for res in responses:
            #Keep only the normalized name (drop the authority component)
            res['archive_name'] = self._normalize_archive_name(
                res['archive_name'])[1]
archive_name = res['archive_name']
if hasattr(default_versions, 'get'):
# Get version number from default_versions or
# self._default_versions if key not present.
default_version = default_versions.get(
archive_name,
self._default_versions.get(archive_name, None))
else:
default_version = default_versions
archive = self._ArchiveConstructor(
api=self,
default_version=default_version,
**res)
archives[archive_name] = archive
return archives
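    # Illustrative usage (not part of the original module; the names are made up):
    #     archives = api.batch_get_archive(['project/a.csv', 'project/b.csv'])
    #     # -> {'project/a.csv': <DataArchive>, 'project/b.csv': <DataArchive>}
    # Names that the manager cannot find are simply omitted from the result.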
def listdir(self, location, authority_name=None):
'''
List archive path components at a given location
.. Note ::
When using listdir on versioned archives, listdir will provide the
version numbers when a full archive path is supplied as the
location argument. This is because DataFS stores the archive path
as a directory and the versions as the actual files when versioning
is on.
Parameters
----------
location: str
Path of the "directory" to search
            `location` can be a path relative to the authority root (e.g.
            `/MyFiles/Data`) or can include the authority as a protocol (e.g.
`my_auth://MyFiles/Data`). If the authority is specified as a
protocol, the `authority_name` argument is ignored.
authority_name: str
Name of the authority to search (optional)
If no authority is specified, the default authority is used (if
only one authority is attached or if
:py:attr:`DefaultAuthorityName` is assigned).
Returns
-------
list
Archive path components that exist at the given "directory"
location on the specified authority
Raises
------
ValueError
A ValueError is raised if the authority is ambiguous or invalid
'''
authority_name, location = self._normalize_archive_name(
location,
authority_name=authority_name)
if authority_name is None:
authority_name = self.default_authority_name
return self._authorities[authority_name].fs.listdir(location)
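    # Illustrative usage (not part of the original module; the paths are made up):
    #     api.listdir('my_auth://project')
    # lists the path components under 'project' on the 'my_auth' authority; for a
    # versioned archive path, the entries are the stored version files themselves.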
def filter(self, pattern=None, engine='path', prefix=None):
'''
        Performs a filtered search on the entire universe of archives
        according to pattern or prefix.
        Parameters
        ----------
        prefix: str
            string matching the beginning characters of the archive or set of
            archives you are filtering on. Note that authority prefixes, e.g.
            ``local://my/archive.txt``, are not supported in prefix searches.
        pattern: str
            string matching the characters within the archive or set of
            archives you are filtering on. Note that authority prefixes, e.g.
            ``local://my/archive.txt``, are not supported in pattern searches.
        engine: str
            string of value 'str', 'path', or 'regex' indicating the type of
            pattern you are filtering on
Returns
-------
generator
'''
if pattern is not None:
pattern = fs.path.relpath(pattern)
if prefix is not None:
prefix = fs.path.relpath(prefix)
archives = self.manager.search(tuple([]), begins_with=prefix)
        if not pattern:
            for archive in archives:
                yield archive
            #Nothing left to match against once every archive has been yielded
            return
if engine == 'str':
for arch in archives:
if pattern in arch:
yield arch
elif engine == 'path':
# Change to generator version of fnmatch.filter
for arch in archives:
if fnmatch.fnmatch(arch, pattern):
yield arch
elif engine == 'regex':
for arch in archives:
if re.search(pattern, arch):
yield arch
else:
raise ValueError(
'search engine "{}" not recognized. '.format(engine) +
'choose "str", "fn", or "regex"')
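    # Illustrative usage (not part of the original module; the patterns are made up):
    #     api.filter(prefix='project/')                  # everything under project/
    #     api.filter(pattern='*.csv', engine='path')     # fnmatch-style glob
    #     api.filter(pattern=r'_v\d+', engine='regex')   # regular expression
    # Each call returns a generator of archive names.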
def search(self, *query, **kwargs):
'''
Searches based on tags specified by users
Parameters
        ----------
query: str
tags to search on. If multiple terms, provided in comma delimited
string format
prefix: str
start of archive name. Providing a start string improves search
speed.
'''
prefix = kwargs.get('prefix')
if prefix is not None:
prefix = fs.path.relpath(prefix)
return self.manager.search(query, begins_with=prefix)
def _validate_archive_name(self, archive_name):
'''
Utility function for creating and validating archive names
Parameters
----------
archive_name: str
Name of the archive from which to create a service path
Returns
-------
archive_path: str
Internal path used by services to reference archive data
'''
archive_name = fs.path.normpath(archive_name)
patterns = self.manager.required_archive_patterns
for pattern in patterns:
if not re.search(pattern, archive_name):
raise ValueError(
"archive name does not match pattern '{}'".format(pattern))
def delete_archive(self, archive_name):
'''
Delete an archive
Parameters
----------
archive_name: str
Name of the archive to delete
'''
archive = self.get_archive(archive_name)
archive.delete()
@staticmethod
def hash_file(f):
'''
Utility function for hashing file contents
Overload this function to change the file equality checking algorithm
Parameters
----------
f: file-like
File-like object or file path from which to compute checksum value
Returns
-------
checksum: dict
dictionary with {'algorithm': 'md5', 'checksum': hexdigest}
'''
md5 = hashlib.md5()
with open_filelike(f, 'rb') as f_obj:
for chunk in iter(lambda: f_obj.read(128 * md5.block_size), b''):
md5.update(chunk)
return {'algorithm': 'md5', 'checksum': md5.hexdigest()}
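    # Illustrative usage (not part of the original module; the file path is made up):
    #     DataAPI.hash_file('local_data.csv')
    #     # -> {'algorithm': 'md5', 'checksum': '<hexdigest>'}
    # Per the docstring above, f may be either a path or an open file-like object.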
def close(self):
for service in self._authorities:
self._authorities[service].fs.close()
if self.cache:
self.cache.fs.close()
@staticmethod
def _validate_authority_name(authority_name):
matched = re.match(
r'^{}$'.format(_VALID_AUTHORITY_PATTERNS),
authority_name)
if matched:
return
raise ValueError('"{}" not a valid authority name'.format(
authority_name))
@staticmethod
def _split_authority(archive_name):
matched = re.match(
r'^((?P<auth>{})\:\/\/)?(?P<archive>.*)$'.format(
_VALID_AUTHORITY_PATTERNS),
archive_name)
return matched.group('auth'), matched.group('archive')
def _normalize_archive_name(self, archive_name, authority_name=None):
full_archive_arg = archive_name
str_authority_name, archive_name = self._split_authority(archive_name)
if ((str_authority_name is not None)
and (authority_name is not None)
and (str_authority_name != authority_name)):
raise ValueError(
'authority name "{}" not found in archive: "{}"'.format(
authority_name, full_archive_arg))
relpath = fs.path.relpath(fs.path.normpath(archive_name))
if str_authority_name is None:
str_authority_name = authority_name
if str_authority_name is None:
try:
str_authority_name = self.default_authority_name
except ValueError:
pass
if str_authority_name is not None:
if str_authority_name not in self._authorities:
raise ValueError('Authority "{}" not found'.format(
str_authority_name))
self._authorities[str_authority_name].fs.validatepath(relpath)
# additional check - not all fs.validatepath functions do anything:
OSFS('').isvalidpath(relpath)
return str_authority_name, relpath
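    # Illustrative example (not part of the original module; the names are made up):
    # given an attached authority 'my_auth',
    #     api._normalize_archive_name('my_auth://project/data.csv')
    # returns ('my_auth', 'project/data.csv'); for a bare 'project/data.csv' the
    # authority component is resolved from DefaultAuthorityName or the single
    # attached authority, or left as None if it cannot be determined.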
import json
import pymongo
from datadiff.tools import assert_equal
from nose.tools import assert_is_none
from data_access import ReviewerDao, ReviewDao, MagazineDao
from mongo_utils import MongoUtils
from my_exceptions import InvalidData
from my_mongo_client import MyMongoClientReader
class DataAccessTestCase(object):
def __init__(self):
self.mongo_client = MyMongoClientReader('test')
self.mongo_utils = MongoUtils(self.mongo_client)
def setup(self):
self.mongo_utils.clear_collection('review')
self.mongo_utils.clear_collection('reviewer')
self.mongo_utils.clear_collection('magazine')
def _assert_equals_without_review_id(self, expected, actual):
reviews_without_id = []
for review in actual['review']:
review = {k: v for k, v in review.items() if k != '_id'}
reviews_without_id.append(review)
actual['review'] = reviews_without_id
assert_equal(expected, actual)
def _assert_equals(self, expected, actual):
reviews_without_id = []
for review in actual:
review = {k: v for k, v in review.items() if k != '_id'}
reviews_without_id.append(review)
assert_equal(expected, reviews_without_id)
def _assert_tuple_equal_without_id(self, expected, actual):
tuple_list = []
for first, second in actual:
second_content_without_id = []
for elem in second:
second_content_without_id.append({k: v for k, v in elem.items() if k != '_id'})
tuple_list.append((first, second_content_without_id))
assert_equal(expected, tuple_list)
class TestMagazineDao(DataAccessTestCase):
def __init__(self):
super(TestMagazineDao, self).__init__()
self.magazine_dao = MagazineDao(self.mongo_client)
def test_get_all_magazine_years(self):
self.mongo_utils.insertJsonFile('magazine', 'magazines.json', 'issueNumber')
magazine_years = self.magazine_dao.get_all_magazine_years()
assert_equal([2003, 2007], magazine_years)
def test_get_last_magazine(self):
self.mongo_utils.insertJsonFile('magazine', 'magazines.json', 'issueNumber')
magazine = self.magazine_dao.get_last_magazine()
assert_equal(273, magazine['issueNumber'])
def test_get_all_magazines_by_year(self):
self.mongo_utils.insertJsonFile('magazine', 'magazines.json', 'issueNumber')
magazines = self.magazine_dao.get_all_magazines_by_year(2007)
assert_equal(1, len(magazines))
assert_equal(273, magazines[0]['issueNumber'])
def test_get_magazine_by_issue_number(self):
self.mongo_utils.insertJsonFile('magazine', 'magazines.json', 'title')
self.mongo_utils.insertJsonFile('review', 'reviews.json', 'title')
actual_magazine = self.magazine_dao.get_magazine_by_issue_number(273)
with open('data/expected_magazine_273.json', 'rb') as expected_file:
data = expected_file.read()
expected_magazine = json.loads(data)
self._assert_equals_without_review_id(expected_magazine, actual_magazine)
def test_get_empty_magazines(self):
self.mongo_utils.insertJsonFile('magazine', 'magazines.json', 'issueNumber')
empty_magazines = self.magazine_dao.get_empty_magazines()
assert_equal([273, 200], empty_magazines)
def test_save_magazine(self):
magazine_data = {
'_id': '',
'issueNumber': '280',
'issueDate': '15/07/2013',
'coverUrl': 'http://www.canardpc.com/img/couv/couv_Canard_PC_280.jpg',
'coverTnUrl': 'http://www.canardpc.com/img/couv/couv_Canard_PC_280_169.jpg',
'title': 'Minecraft',
'subTitle': 'Le dossier qui vous laissera sur le cube',
}
self.magazine_dao.save_magazine(magazine_data)
added_magazine = self.magazine_dao.get_magazine_by_issue_number(280)
assert_equal('http://www.canardpc.com/img/couv/couv_Canard_PC_280.jpg', added_magazine['coverUrl'])
assert_equal('http://www.canardpc.com/img/couv/couv_Canard_PC_280_169.jpg', added_magazine['coverTnUrl'])
def test_get_all_magazines(self):
self.mongo_utils.insertJsonFile('magazine', 'magazines.json', 'issueNumber')
self.mongo_utils.insertJsonFile('magazine', 'others_magazines.json', 'issueNumber', False)
actual_magazines = self.magazine_dao.get_all_magazines()
assert_equal([(2003, [{"issueDate": "03/12/2003",
"coverUrl": "http://www.canardpc.com/img/couv/couv_Canard_PC_200.jpg",
"coverTnUrl": "http://www.canardpc.com/img/couv/couv_Canard_PC_200.jpg",
"issueNumber": 200},
{"issueDate": "03/05/2003",
"coverUrl": "http://www.canardpc.com/img/couv/couv_Canard_PC_200.jpg",
"coverTnUrl": "http://www.canardpc.com/img/couv/couv_Canard_PC_200.jpg",
"issueNumber": 180}]),
(2007, [{"issueDate": "26/11/2007",
"coverUrl": "http://www.canardpc.com/img/couv/couv_Canard_PC_273.jpg",
"coverTnUrl": "http://www.canardpc.com/img/couv/couv_Canard_PC_273.jpg",
"issueNumber": 273},
{"issueDate": "26/01/2007",
"coverUrl": "http://www.canardpc.com/img/couv/couv_Canard_PC_273.jpg",
"coverTnUrl": "http://www.canardpc.com/img/couv/couv_Canard_PC_273.jpg",
"issueNumber": 260}])], actual_magazines)
class TestReviewDao(DataAccessTestCase):
def __init__(self):
super(TestReviewDao, self).__init__()
self.review_dao = ReviewDao(self.mongo_client)
def test_get_all_review_by_title(self):
self.mongo_utils.insertJsonFile('review', 'reviews.json', 'title')
reviews = self.review_dao.get_all_review_by_title('bioshock')
assert_equal(2, len(reviews))
assert_equal('Bioshock Infinite', reviews[0]['title'])
assert_equal('http://www.canardpc.com/img/couv/couv_Canard_PC_273.jpg', reviews[0]['issue']['coverTnUrl'])
assert_equal('Bioshock', reviews[1]['title'])
assert_equal('http://www.canardpc.com/img/couv/couv_Canard_PC_200.jpg', reviews[1]['issue']['coverTnUrl'])
reviews = self.review_dao.get_all_review_by_title('Sim')
assert_equal('Guy Moquette', reviews[0]['reviewer']['name'])
def test_get_all_game_titles(self):
self.mongo_utils.insertJsonFile('review', 'reviews.json', 'title')
expected_list = [u'Bioshock', u'Bioshock Infinite', u'Doom & Destiny', u'Sim City',
u'Starcraft 2: Heart Of The Swarm']
actual_list = self.review_dao.get_all_game_titles()
assert_equal(expected_list, actual_list)
def test_get_studio_by_name(self):
self.mongo_utils.insertJsonFile('review', 'reviews.json', 'title')
actual_studio = self.review_dao.get_studio_by_name('Irrational Games')
with open('data/expected_studio_irrational.json', 'rb') as expected_file:
data = expected_file.read()
expected_studio = json.loads(data)
self._assert_equals_without_review_id(expected_studio, actual_studio)
def test_reviews_by_genre(self):
self.mongo_utils.insertJsonFile('review', 'reviews.json', 'title')
actual_genre = self.review_dao.get_reviews_by_genre('FPS')
with open('data/expected_genre_FPS.json', 'rb') as expected_file:
data = expected_file.read()
expected_genre = json.loads(data)
self._assert_equals_without_review_id(expected_genre, actual_genre)
def test_get_all_reviews(self):
self.mongo_utils.insertJsonFile('review', 'reviews.json', 'title')
reviews = self.review_dao.get_all_reviews()
assert_equal(5, len(reviews))
def test_save_review(self):
self.mongo_utils.insertJsonFile('reviewer', 'reviewer.json', 'name')
self.mongo_utils.insertJsonFile('magazine', 'magazines.json', 'issueNumber')
review_data_from_ui = {
'_id': '',
"issueNumber": 273,
"title": "Bioshock",
"subTitle": "Nouveau sous-titre",
"year": 2009,
"displayedGenre": "FPS",
"primaryGenre": "FPS",
"reviewer": "Threanor",
"score": 10,
"studioName": "Irrational Games",
"studio": "Irrational Games",
"publisher": "2K",
"coverTnUrl": "http://cpc.cx/7ol"
}
self.review_dao.save_review(review_data_from_ui)
updated_review = self.review_dao.get_review_by_title('Bioshock')
assert_equal('Nouveau sous-titre', updated_review['subTitle'])
assert_equal({'name': 'Threanor',
'reviewerTnUrl': 'http://www.canardpc.com/img/redac/threanor.png',
'function': u'Journaliste',
'isPresent': False}, updated_review['reviewer'])
assert_equal({'issueNumber': 273, 'coverTnUrl': 'http://www.canardpc.com/img/couv/couv_Canard_PC_273.jpg',
'issueDate': '26/11/2007'},
updated_review['issue'])
def test_save_review_with_unknown_reviewer_raise_an_exception(self):
self.mongo_utils.insertJsonFile('magazine', 'magazines.json', 'issueNumber')
review_data_from_ui = {
'_id': '',
"issueNumber": 273,
"title": "Bioshock",
"subTitle": "Nouveau sous-titre",
"year": 2009,
"displayedGenre": "FPS",
"primaryGenre": "FPS",
"reviewer": "coincoin",
"score": 10,
"studioName": "Irrational Games",
"studio": "Irrational Games",
"publisher": "2K",
"coverTnUrl": "http://cpc.cx/7ol"
}
try:
self.review_dao.save_review(review_data_from_ui)
raise ValueError('Should raise an exception')
except InvalidData as ex:
assert_equal(["Testeur 'coincoin' inconnu"], ex.errors)
assert_is_none(self.review_dao.get_review_by_title('Bioshock'))
def test_save_review_with_unknown_magazine_raise_an_exception(self):
self.mongo_utils.insertJsonFile('reviewer', 'reviewer.json', 'name')
review_data_from_ui = {
'_id': '',
"issueNumber": 1,
"title": "Bioshock",
"subTitle": "Nouveau sous-titre",
"year": 2009,
"displayedGenre": "FPS",
"primaryGenre": "FPS",
"reviewer": "Threanor",
"score": 10,
"studioName": "Irrational Games",
"studio": "Irrational Games",
"publisher": "2K",
"coverTnUrl": "http://cpc.cx/7ol"
}
try:
self.review_dao.save_review(review_data_from_ui)
raise ValueError('Should raise an exception')
except InvalidData as ex:
assert_equal(["Magazine '1' inconnu"], ex.errors)
assert_is_none(self.review_dao.get_review_by_title('Bioshock'))
def test_save_review_with_score_as_number_string(self):
self.mongo_utils.insertJsonFile('reviewer', 'reviewer.json', 'name')
self.mongo_utils.insertJsonFile('magazine', 'magazines.json', 'issueNumber')
review_data_from_ui = {
'_id': '',
"issueNumber": 273,
"title": "Bioshock",
"subTitle": "Nouveau sous-titre",
"year": 2009,
"displayedGenre": "FPS",
"primaryGenre": "FPS",
"reviewer": "Threanor",
"score": '10',
"studioName": "Irrational Games",
"studio": "Irrational Games",
"publisher": "2K",
"coverTnUrl": "http://cpc.cx/7ol"
}
self.review_dao.save_review(review_data_from_ui)
updated_review = self.review_dao.get_review_by_title('Bioshock')
assert_equal('Nouveau sous-titre', updated_review['subTitle'])
assert_equal({'name': 'Threanor',
'reviewerTnUrl': 'http://www.canardpc.com/img/redac/threanor.png',
'function': u'Journaliste',
'isPresent': False}, updated_review['reviewer'])
assert_equal({'issueNumber': 273, 'coverTnUrl': 'http://www.canardpc.com/img/couv/couv_Canard_PC_273.jpg',
'issueDate': '26/11/2007'},
updated_review['issue'])
assert_equal(10, updated_review['score'])
def test_save_review_with_score_as_not_number_string(self):
self.mongo_utils.insertJsonFile('reviewer', 'reviewer.json', 'name')
self.mongo_utils.insertJsonFile('magazine', 'magazines.json', 'issueNumber')
review_data_from_ui = {
'_id': '',
"issueNumber": 273,
"title": "Bioshock",
"subTitle": "Nouveau sous-titre",
"year": 2009,
"displayedGenre": "FPS",
"primaryGenre": "FPS",
"reviewer": "Threanor",
"score": 'ahhhh',
"studioName": "Irrational Games",
"studio": "Irrational Games",
"publisher": "2K",
"coverTnUrl": "http://cpc.cx/7ol"
}
self.review_dao.save_review(review_data_from_ui)
updated_review = self.review_dao.get_review_by_title('Bioshock')
assert_equal('Nouveau sous-titre', updated_review['subTitle'])
assert_equal({'name': 'Threanor',
'reviewerTnUrl': 'http://www.canardpc.com/img/redac/threanor.png',
'function': u'Journaliste',
'isPresent': False}, updated_review['reviewer'])
assert_equal({'issueNumber': 273, 'coverTnUrl': 'http://www.canardpc.com/img/couv/couv_Canard_PC_273.jpg',
'issueDate': '26/11/2007'},
updated_review['issue'])
assert_equal(-1, updated_review['score'])
assert_equal('ahhhh', updated_review['otherScore'])
def test_get_all_genres(self):
self.mongo_utils.insertJsonFile('review', 'reviews.json', 'title')
actual_genres = self.review_dao.get_all_genres()
self._assert_tuple_equal_without_id([('City Builder', [{'title': 'Sim City', 'primaryGenre': 'City Builder'}]),
('FPS', [{'title': 'Bioshock', 'primaryGenre': 'FPS'},
{'title': 'Bioshock Infinite', 'primaryGenre': 'FPS'}]),
('RPG', [{'title': 'Doom & Destiny', 'primaryGenre': 'RPG'}]),
('STR',
[{'title': 'Starcraft 2: Heart Of The Swarm', 'primaryGenre': 'STR'}])],
actual_genres)
def test_get_all_scores(self):
self.mongo_utils.insertJsonFile('review', 'reviews.json', 'title')
self.mongo_utils.insertData('review', [
{
"title": "Surgeon Simulator",
"year": 2013,
"score": 4,
"otherScore": "Scie"
}
], 'title', False)
self._assert_tuple_equal_without_id([(4, [{'score': 4, 'title': 'Sim City'},
{'score': 4, 'title': 'Surgeon Simulator'}]),
(6, [{'score': 6, 'title': 'Starcraft 2: Heart Of The Swarm'}]),
(8, [{'score': 8, 'title': 'Doom & Destiny'}]),
(9, [{'score': 9, 'title': 'Bioshock Infinite'}]),
(10, [{'score': 10, 'title': 'Bioshock'}])],
self.review_dao.get_all_scores())
def test_get_all_review_titles(self):
self.mongo_utils.insertJsonFile('review', 'reviews.json', 'title')
self._assert_tuple_equal_without_id([('B', [{'title': 'Bioshock'}, {'title': 'Bioshock Infinite'}]),
('D', [{'title': 'Doom & Destiny'}]),
('S',
[{'title': 'Sim City'}, {'title': 'Starcraft 2: Heart Of The Swarm'}])],
self.review_dao.get_all_review_titles())
def test_get_reviews_without_critic(self):
json_data = [{'issue': {'issueNumber': 273}, 'title': 'Bioshock Infinite', 'critic': ''},
{'issue': {'issueNumber': 155}, 'title': 'Bioshock', 'critic': 'youpi banane !'},
{'issue': {'issueNumber': 208}, 'title': 'Bioshock 2'}]
self.mongo_utils.insertData('review', json_data, 'title', True)
self._assert_equals(
[{'issue': {'issueNumber': 273}, 'title': 'Bioshock Infinite'}],
self.review_dao.get_reviews_without_critic())
def test_update_reviewer(self):
self.mongo_utils.insertJsonFile('review', 'reviews.json', 'title')
self.review_dao.update_reviewer(reviewer={
'name': 'Omar Boulon',
'reviewerTnUrl': 'http://monurl/avatar.jpg'
})
assert_equal([
{u'title': u'Bioshock',
u'reviewer': {u'name': u'Omar Boulon', u'reviewerTnUrl': u'http://monurl/avatar.jpg'}},
{u'title': u'Bioshock Infinite',
u'reviewer': {u'name': u'Omar Boulon', u'reviewerTnUrl': u'http://monurl/avatar.jpg'}},
{u'title': u'Doom & Destiny',
u'reviewer': {u'reviewerTnUrl': u'http://www.canardpc.com/img/redac/kalash.png',
u'name': u'Maria Kalash'}},
{u'title': u'Sim City',
u'reviewer': {u'reviewerTnUrl': u'http://www.canardpc.com/img/redac/moquette.png',
u'name': u'Guy Moquette'}},
{u'title': u'Starcraft 2: Heart Of The Swarm',
u'reviewer': {u'reviewerTnUrl': u'http://www.canardpc.com/img/redac/lfs.png',
u'name': u'Louis Ferdinand Sebum'}},
], list(self.mongo_client.database['review'].find({}, {'_id': False,
'title': True,
'reviewer': True}).sort([('title', pymongo.ASCENDING)])))
def test_update_magazine_issue(self):
self.mongo_utils.insertJsonFile('review', 'reviews.json', 'title')
self.review_dao.update_magazine_issue(magazine_data={
u'issueNumber': 273,
u'coverTnUrl': u'https://my_magazine_cover.png'
})
assert_equal([
{u'title': u'Bioshock',
u'issue': {
u'issueNumber': 200,
u'coverTnUrl': u'http://www.canardpc.com/img/couv/couv_Canard_PC_200.jpg'
}},
{u'title': u'Bioshock Infinite',
u'issue': {
u'issueNumber': 273,
u'coverTnUrl': u'https://my_magazine_cover.png'
}},
{u'title': u'Doom & Destiny',
u'issue': {
u'issueNumber': 273,
u'coverTnUrl': u'https://my_magazine_cover.png'
}},
{u'title': u'Sim City',
u'issue': {
u'issueNumber': 273,
u'coverTnUrl': u'https://my_magazine_cover.png'
}},
{u'title': u'Starcraft 2: Heart Of The Swarm',
u'issue': {
u'issueNumber': 273,
u'coverTnUrl': u'https://my_magazine_cover.png'
}},
], list(self.mongo_client.database['review'].find({},
{'_id': False,
'title': True,
'issue': True}).sort([('title', pymongo.ASCENDING)])))
class TestReviewerDao(DataAccessTestCase):
def __init__(self):
super(TestReviewerDao, self).__init__()
self.reviewer_dao = ReviewerDao(self.mongo_client)
def test_get_reviewer_by_name(self):
self.mongo_utils.insertJsonFile('reviewer', 'reviewer.json', 'name')
actual_reviewer = self.reviewer_dao.get_reviewer_by_name('Omar Boulon')
with open('data/expected_reviewer_boulon.json', 'rb') as expected_file:
data = expected_file.read()
expected_reviewer = json.loads(data)
assert_equal(expected_reviewer, actual_reviewer)
def test_get_review_number_by_score(self):
self.mongo_utils.insertJsonFile('review', 'a_lot_of_review.json', 'title')
assert_equal((3, [(0, 2), (5, 3), (8, 1)]),
self.reviewer_dao.get_reviewer_number_by_score('Ivan Le Fou'))
def test_get_review_number_by_genre(self):
self.mongo_utils.insertJsonFile('review', 'a_lot_of_review.json', 'title')
assert_equal((3, [('FPS', 3), ('RPG', 1), ('STR', 1), ('City Builder', 1)]),
self.reviewer_dao.get_reviewer_number_by_genre('Ivan Le Fou'))
#
# Copyright (c) 2013-2017 Kevin Steves <[email protected]>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
"""Interface to the WildFire API
The pan.wfapi module implements the PanWFapi class. It provides an
interface to the WildFire API on Palo Alto Networks' WildFire Cloud
and WildFire appliance.
"""
# XXX Using the requests module which uses urllib3 and has support
# for multipart form-data would make this much simpler/cleaner (main
# issue is support for Python 2.x and 3.x in one source). However I
# decided to not require non-default modules. That decision may need
# to be revisited as some parts of this are not clean.
from __future__ import print_function
import socket
import sys
import os
from io import BytesIO
import email
import email.errors
import email.utils
import logging
try:
# 3.2
from urllib.request import Request, \
build_opener, HTTPErrorProcessor, HTTPSHandler
from urllib.error import URLError
from urllib.parse import urlencode
from http.client import responses
_legacy_urllib = False
except ImportError:
# 2.7
from urllib2 import Request, URLError, \
build_opener, HTTPErrorProcessor, HTTPSHandler
from urllib import urlencode
from httplib import responses
_legacy_urllib = True
import xml.etree.ElementTree as etree
from . import __version__, DEBUG1, DEBUG2, DEBUG3
import pan.rc
try:
import ssl
except ImportError:
raise ValueError('SSL support not available')
try:
import certifi
_have_certifi = True
except ImportError:
_have_certifi = False
_cloud_server = 'wildfire.paloaltonetworks.com'
_encoding = 'utf-8'
_rfc2231_encode = False
_wildfire_responses = {
418: 'Unsupported File Type',
}
BENIGN = 0
MALWARE = 1
GRAYWARE = 2
PHISHING = 4
PENDING = -100
ERROR = -101
UNKNOWN = -102
INVALID = -103
VERDICTS = {
BENIGN: ('benign', None),
MALWARE: ('malware', None),
GRAYWARE: ('grayware', None),
PHISHING: ('phishing', None),
PENDING: ('pending', 'sample exists and verdict not known'),
ERROR: ('error', 'sample is in error state'),
UNKNOWN: ('unknown', 'sample does not exist'),
INVALID: ('invalid', 'hash is invalid'),
}
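# Illustrative helper (a sketch, not part of the original pan.wfapi API):
# resolves a verdict integer returned by the API to its name using the
# VERDICTS table above. The helper name is hypothetical.
def _verdict_name(code):
    # Each VERDICTS entry is a (name, description) tuple; fall back to a
    # placeholder for codes the table does not know about.
    name, _description = VERDICTS.get(code, ('unknown code', None))
    return name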
def _isunicode(s):
try:
if isinstance(s, unicode):
return True
return False
except NameError:
if isinstance(s, str):
return True
return False
def _isbytes(s):
try:
if isinstance(s, basestring) and isinstance(s, bytes):
return True
return False
except NameError:
if isinstance(s, bytes):
return True
return False
class PanWFapiError(Exception):
pass
class PanWFapi:
def __init__(self,
tag=None,
hostname=None,
api_key=None,
timeout=None,
http=False,
ssl_context=None):
self._log = logging.getLogger(__name__).log
self.tag = tag
self.hostname = hostname
self.api_key = None
self.timeout = timeout
self.ssl_context = ssl_context
self._log(DEBUG3, 'Python version: %s', sys.version)
self._log(DEBUG3, 'xml.etree.ElementTree version: %s', etree.VERSION)
self._log(DEBUG3, 'ssl: %s', ssl.OPENSSL_VERSION)
self._log(DEBUG3, 'pan-python version: %s', __version__)
if self.timeout is not None:
try:
self.timeout = int(self.timeout)
if not self.timeout > 0:
raise ValueError
except ValueError:
raise PanWFapiError('Invalid timeout: %s' % self.timeout)
if self.ssl_context is not None:
try:
ssl.SSLContext(ssl.PROTOCOL_SSLv23)
except AttributeError:
raise PanWFapiError('SSL module has no SSLContext()')
elif _have_certifi:
self.ssl_context = self._certifi_ssl_context()
# handle Python versions with no ssl.CertificateError
if hasattr(ssl, 'CertificateError'):
self._certificateerror = ssl.CertificateError
else:
self._certificateerror = NotImplementedError # XXX Can't happen
init_panrc = {} # .panrc args from constructor
if hostname is not None:
init_panrc['hostname'] = hostname
if api_key is not None:
init_panrc['api_key'] = api_key
try:
panrc = pan.rc.PanRc(tag=self.tag,
init_panrc=init_panrc)
except pan.rc.PanRcError as msg:
raise PanWFapiError(str(msg))
if 'api_key' in panrc.panrc:
self.api_key = panrc.panrc['api_key']
if 'hostname' in panrc.panrc:
self.hostname = panrc.panrc['hostname']
else:
self.hostname = _cloud_server
if self.api_key is None:
raise PanWFapiError('api_key required')
if http:
self.uri = 'http://%s' % self.hostname
else:
self.uri = 'https://%s' % self.hostname
if _legacy_urllib:
self._log(DEBUG2, 'using legacy urllib')
def __str__(self):
x = self.__dict__.copy()
for k in x:
if k in ['api_key'] and x[k] is not None:
x[k] = '*' * 6
return '\n'.join((': '.join((k, str(x[k]))))
for k in sorted(x))
def __clear_response(self):
# XXX naming
self._msg = None
self.http_code = None
self.http_reason = None
self.response_body = None
self.response_type = None
self.xml_element_root = None
self.attachment = None
def __set_response(self, response):
message_body = response.read()
content_type = self._message.get_content_type()
if not content_type:
if self._msg is None:
self._msg = 'no content-type response header'
return False
if content_type == 'application/octet-stream':
return self.__set_stream_response(response, message_body)
# XXX text/xml RFC 3023
elif (content_type == 'application/xml' or
content_type == 'text/xml'):
return self.__set_xml_response(message_body)
elif content_type == 'text/html':
return self.__set_html_response(message_body)
else:
msg = 'no handler for content-type: %s' % content_type
self._msg = msg
return False
def __set_stream_response(self, response, message_body):
filename = self._message.get_filename()
if not filename:
self._msg = 'no content-disposition response header'
return False
attachment = {}
attachment['filename'] = filename
attachment['content'] = message_body
self.attachment = attachment
return True
def __set_xml_response(self, message_body):
self._log(DEBUG2, '__set_xml_response: %s', repr(message_body))
self.response_type = 'xml'
_message_body = message_body.decode(_encoding)
if len(_message_body) == 0:
return True
self.response_body = _message_body
# ParseError: "XML or text declaration not at start of entity"
# fix: remove leading blank lines if exist
_message_body = message_body
while (_message_body[0:1] == b'\r' or
_message_body[0:1] == b'\n'):
_message_body = _message_body[1:]
if len(_message_body) == 0:
return True
try:
element = etree.fromstring(_message_body)
except etree.ParseError as msg:
self._msg = 'ElementTree.fromstring ParseError: %s' % msg
return False
self.xml_element_root = element
return True
def __set_html_response(self, message_body):
self._log(DEBUG2, '__set_html_response: %s', repr(message_body))
self.response_type = 'html'
_message_body = message_body.decode()
if len(_message_body) == 0:
return True
self.response_body = _message_body
return True
# XXX store tostring() results?
# XXX rework this
def xml_root(self):
if self.xml_element_root is None:
return None
s = etree.tostring(self.xml_element_root, encoding=_encoding)
if not s:
return None
self._log(DEBUG3, 'xml_root: %s', type(s))
self._log(DEBUG3, 'xml_root.decode(): %s', type(s.decode(_encoding)))
return s.decode(_encoding)
# XXX Unicode notes
# 2.7
# decode() str (bytes) -> unicode
# encode() unicode -> str (bytes)
# encode() of str will call decode()
# 3.x
# decode() bytes -> str (unicode)
# encode() str (unicode) -> bytes
# cannot encode() bytes
# cannot decode() str
def __api_request(self, request_uri, body, headers={}):
url = self.uri
url += request_uri
# body must be of type 'bytes' for 3.x
if _isunicode(body):
body = body.encode()
request = Request(url, body, headers)
self._log(DEBUG1, 'URL: %s', url)
self._log(DEBUG1, 'method: %s', request.get_method())
self._log(DEBUG1, 'headers: %s', request.header_items())
# XXX leaks apikey
# self._log(DEBUG3, 'body: %s', repr(body))
kwargs = {
'url': request,
}
if self.ssl_context is not None:
kwargs['context'] = self.ssl_context
if self.timeout is not None:
kwargs['timeout'] = self.timeout
try:
response = self._urlopen(**kwargs)
except self._certificateerror as e:
self._msg = 'ssl.CertificateError: %s' % e
return False
except (URLError, IOError) as e:
self._log(DEBUG2, 'urlopen() exception: %s', sys.exc_info())
self._msg = str(e)
return False
self.http_code = response.getcode()
if hasattr(response, 'reason'):
# 3.2
self.http_reason = response.reason
elif hasattr(response, 'msg'):
# 2.7
self.http_reason = response.msg
if self.http_reason == '':
if self.http_code in _wildfire_responses:
self.http_reason = _wildfire_responses[self.http_code]
elif self.http_code in responses:
self.http_reason = responses[self.http_code]
try:
self._message = email.message_from_string(str(response.info()))
except (TypeError, email.errors.MessageError) as e:
raise PanWFapiError('email.message_from_string() %s' % e)
self._log(DEBUG2, 'HTTP response code: %s', self.http_code)
self._log(DEBUG2, 'HTTP response reason: %s', self.http_reason)
self._log(DEBUG2, 'HTTP response headers:')
self._log(DEBUG2, '%s', self._message)
if not (200 <= self.http_code < 300):
self._msg = 'HTTP Error %s: %s' % (self.http_code,
self.http_reason)
self.__set_response(response)
return False
return response
def _read_file(self, path):
try:
f = open(path, 'rb')
except IOError as e:
msg = 'open: %s: %s' % (path, e)
self._msg = msg
return None
buf = f.read()
f.close()
self._log(DEBUG2, 'path: %s %d', type(path), len(path))
self._log(DEBUG2, 'path: %s size: %d', path, len(buf))
if logging.getLogger(__name__).getEffectiveLevel() == DEBUG3:
import hashlib
md5 = hashlib.md5()
md5.update(buf)
sha256 = hashlib.sha256()
sha256.update(buf)
self._log(DEBUG3, 'MD5: %s', md5.hexdigest())
self._log(DEBUG3, 'SHA256: %s', sha256.hexdigest())
return buf
def report(self,
hash=None,
format=None):
self.__clear_response()
request_uri = '/publicapi/get/report'
query = {}
query['apikey'] = self.api_key
if hash is not None:
query['hash'] = hash
if format is not None:
query['format'] = format
response = self.__api_request(request_uri=request_uri,
body=urlencode(query))
if not response:
raise PanWFapiError(self._msg)
if not self.__set_response(response):
raise PanWFapiError(self._msg)
def verdict(self,
hash=None):
self.__clear_response()
request_uri = '/publicapi/get/verdict'
query = {}
query['apikey'] = self.api_key
if hash is not None:
query['hash'] = hash
response = self.__api_request(request_uri=request_uri,
body=urlencode(query))
if not response:
raise PanWFapiError(self._msg)
if not self.__set_response(response):
raise PanWFapiError(self._msg)
def verdicts(self,
hashes=None):
self.__clear_response()
request_uri = '/publicapi/get/verdicts'
form = _MultiPartFormData()
form.add_field('apikey', self.api_key)
if hashes is not None:
form.add_field('file', '\n'.join(hashes))
headers = form.http_headers()
body = form.http_body()
response = self.__api_request(request_uri=request_uri,
body=body, headers=headers)
if not response:
raise PanWFapiError(self._msg)
if not self.__set_response(response):
raise PanWFapiError(self._msg)
def verdicts_changed(self,
date=None):
self.__clear_response()
request_uri = '/publicapi/get/verdicts/changed'
query = {}
query['apikey'] = self.api_key
if date is not None:
query['date'] = date
response = self.__api_request(request_uri=request_uri,
body=urlencode(query))
if not response:
raise PanWFapiError(self._msg)
if not self.__set_response(response):
raise PanWFapiError(self._msg)
def sample(self,
hash=None):
self.__clear_response()
request_uri = '/publicapi/get/sample'
query = {}
query['apikey'] = self.api_key
if hash is not None:
query['hash'] = hash
response = self.__api_request(request_uri=request_uri,
body=urlencode(query))
if not response:
raise PanWFapiError(self._msg)
if not self.__set_response(response):
raise PanWFapiError(self._msg)
def pcap(self,
hash=None,
platform=None):
self.__clear_response()
request_uri = '/publicapi/get/pcap'
query = {}
query['apikey'] = self.api_key
if hash is not None:
query['hash'] = hash
if platform is not None:
query['platform'] = platform
response = self.__api_request(request_uri=request_uri,
body=urlencode(query))
if not response:
raise PanWFapiError(self._msg)
if not self.__set_response(response):
raise PanWFapiError(self._msg)
def testfile(self):
self.__clear_response()
request_uri = '/publicapi/test/pe'
query = {}
response = self.__api_request(request_uri=request_uri,
body=urlencode(query))
if not response:
raise PanWFapiError(self._msg)
if not self.__set_response(response):
raise PanWFapiError(self._msg)
def submit(self,
file=None,
url=None,
links=None):
self.__clear_response()
if (sum(bool(x) for x in [file, url, links]) != 1):
raise PanWFapiError('must submit one of file, url or links')
if file is not None:
request_uri = '/publicapi/submit/file'
elif url is not None:
request_uri = '/publicapi/submit/url'
elif len(links) < 2:
request_uri = '/publicapi/submit/link'
elif len(links) > 1:
request_uri = '/publicapi/submit/links'
form = _MultiPartFormData()
form.add_field('apikey', self.api_key)
if file is not None:
buf = self._read_file(file)
if buf is None:
raise PanWFapiError(self._msg)
filename = os.path.basename(file)
form.add_file(filename, buf)
if url is not None:
form.add_field('url', url)
if links is not None:
if len(links) == 1:
form.add_field('link', links[0])
elif len(links) > 1:
magic = 'panlnk' # XXX should be optional in future
# XXX requires filename in Content-Disposition header
if links[0] == magic:
form.add_file(filename='pan',
body='\n'.join(links))
else:
form.add_file(filename='pan',
body=magic + '\n' + '\n'.join(links))
headers = form.http_headers()
body = form.http_body()
response = self.__api_request(request_uri=request_uri,
body=body, headers=headers)
if not response:
raise PanWFapiError(self._msg)
if not self.__set_response(response):
raise PanWFapiError(self._msg)
def change_request(self,
hash=None,
verdict=None,
email=None,
comment=None):
self.__clear_response()
request_uri = '/publicapi/submit/change-request'
form = _MultiPartFormData()
form.add_field('apikey', self.api_key)
if hash is not None:
form.add_field('hash', hash)
if verdict is not None:
form.add_field('verdict', verdict)
if email is not None:
form.add_field('email', email)
if comment is not None:
form.add_field('comment', comment)
headers = form.http_headers()
body = form.http_body()
response = self.__api_request(request_uri=request_uri,
body=body, headers=headers)
if not response:
raise PanWFapiError(self._msg)
if not self.__set_response(response):
raise PanWFapiError(self._msg)
# allow non-2XX error codes
# see http://bugs.python.org/issue18543 for why we can't just
# install a new HTTPErrorProcessor()
@staticmethod
def _urlopen(url, data=None,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
cafile=None, capath=None, cadefault=False,
context=None):
def http_response(request, response):
return response
http_error_processor = HTTPErrorProcessor()
http_error_processor.https_response = http_response
if context:
https_handler = HTTPSHandler(context=context)
opener = build_opener(https_handler, http_error_processor)
else:
opener = build_opener(http_error_processor)
return opener.open(url, data, timeout)
def _certifi_ssl_context(self):
if (sys.version_info.major == 2 and sys.hexversion >= 0x02070900 or
sys.version_info.major == 3 and sys.hexversion >= 0x03040300):
where = certifi.where()
self._log(DEBUG1, 'certifi %s: %s', certifi.__version__, where)
return ssl.create_default_context(
purpose=ssl.Purpose.SERVER_AUTH,
cafile=where)
else:
return None
#
# XXX USE OF cloud_ssl_context() IS DEPRECATED!
#
# If your operating system certificate store is out of date you can
# install certifi (https://pypi.python.org/pypi/certifi) and its CA
# bundle will be used for SSL server certificate verification when
# ssl_context is None.
#
def cloud_ssl_context():
# WildFire cloud cafile:
# https://certs.godaddy.com/anonymous/repository.pki
# Go Daddy Class 2 Certification Authority Root Certificate
# use:
# $ openssl x509 -in wfapi.py -text
# to view text form.
gd_class2_root_crt = b'''
-----BEGIN CERTIFICATE-----
MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
ReYNnyicsbkqWletNw+vHX/bvZ8=
-----END CERTIFICATE-----
'''
if (sys.version_info.major == 2 and sys.hexversion >= 0x02070900 or
sys.version_info.major == 3 and sys.hexversion >= 0x03040300):
# XXX python >= 2.7.9 needs cadata as Unicode, or we get:
# 'ssl.SSLError: nested asn1 error'
return ssl.create_default_context(
purpose=ssl.Purpose.SERVER_AUTH,
cadata=gd_class2_root_crt.decode())
else:
return None
# Minimal RFC 2388 implementation
# Content-Type: multipart/form-data; boundary=___XXX
#
# Content-Disposition: form-data; name="apikey"
#
# XXXkey
# --___XXX
# Content-Disposition: form-data; name="file"; filename="XXXname"
# Content-Type: application/octet-stream
#
# XXXfilecontents
# --___XXX--
class _MultiPartFormData:
def __init__(self):
self._log = logging.getLogger(__name__).log
self.parts = []
self.boundary = self._boundary()
def add_field(self, name, value):
part = _FormDataPart(name=name,
body=value)
self.parts.append(part)
def add_file(self, filename=None, body=None):
part = _FormDataPart(name='file')
if filename is not None:
part.append_header('filename', filename)
if body is not None:
part.add_header(b'Content-Type: application/octet-stream')
part.add_body(body)
self.parts.append(part)
def _boundary(self):
rand_bytes = 48
prefix_char = b'_'
prefix_len = 16
import base64
try:
import os
seq = os.urandom(rand_bytes)
self._log(DEBUG1, '_MultiPartFormData._boundary: %s',
'using os.urandom')
except NotImplementedError:
import random
self._log(DEBUG1, '_MultiPartFormData._boundary: %s',
'using random')
seq = bytearray()
[seq.append(random.randrange(256)) for i in range(rand_bytes)]
prefix = prefix_char * prefix_len
boundary = prefix + base64.b64encode(seq)
return boundary
def http_headers(self):
# headers cannot be bytes
boundary = self.boundary.decode('ascii')
headers = {
'Content-Type':
'multipart/form-data; boundary=' + boundary,
}
return headers
def http_body(self):
bio = BytesIO()
boundary = b'--' + self.boundary
for part in self.parts:
bio.write(boundary)
bio.write(b'\r\n')
bio.write(part.serialize())
bio.write(b'\r\n')
bio.write(boundary)
bio.write(b'--')
return bio.getvalue()
class _FormDataPart:
def __init__(self, name=None, body=None):
self._log = logging.getLogger(__name__).log
self.headers = []
self.add_header(b'Content-Disposition: form-data')
self.append_header('name', name)
self.body = None
if body is not None:
self.add_body(body)
def add_header(self, header):
self.headers.append(header)
self._log(DEBUG1, '_FormDataPart.add_header: %s', self.headers[-1])
def append_header(self, name, value):
self.headers[-1] += b'; ' + self._encode_field(name, value)
self._log(DEBUG1, '_FormDataPart.append_header: %s', self.headers[-1])
def _encode_field(self, name, value):
self._log(DEBUG1, '_FormDataPart._encode_field: %s %s',
type(name), type(value))
if not _rfc2231_encode:
s = '%s="%s"' % (name, value)
self._log(DEBUG1, '_FormDataPart._encode_field: %s %s',
type(s), s)
if _isunicode(s):
s = s.encode('utf-8')
self._log(DEBUG1, '_FormDataPart._encode_field: %s %s',
type(s), s)
return s
if not [ch for ch in '\r\n\\' if ch in value]:
try:
return ('%s="%s"' % (name, value)).encode('ascii')
except UnicodeEncodeError:
self._log(DEBUG1, 'UnicodeEncodeError 3.x')
except UnicodeDecodeError: # 2.x
self._log(DEBUG1, 'UnicodeDecodeError 2.x')
# RFC 2231
value = email.utils.encode_rfc2231(value, 'utf-8')
return ('%s*=%s' % (name, value)).encode('ascii')
def add_body(self, body):
if _isunicode(body):
body = body.encode('latin-1')
self.body = body
self._log(DEBUG1, '_FormDataPart.add_body: %s %d',
type(self.body), len(self.body))
def serialize(self):
bio = BytesIO()
bio.write(b'\r\n'.join(self.headers))
bio.write(b'\r\n\r\n')
if self.body is not None:
bio.write(self.body)
return bio.getvalue()
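# Illustrative sketch (not part of the original module): builds a small
# multipart/form-data body the same way verdicts() does, without contacting
# the WildFire API. The field values are placeholders.
def _demo_multipart_body():
    form = _MultiPartFormData()
    form.add_field('apikey', 'not-a-real-key')
    form.add_field('file', '\n'.join(['hash-one', 'hash-two']))
    # http_headers() carries the boundary; http_body() is the bytes payload
    # that __api_request() would send.
    return form.http_headers(), form.http_body()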
if __name__ == '__main__':
# python -m pan.wfapi [tag] [sha256]
import pan.wfapi
tag = None
sha256 = '5f31d8658a41aa138ada548b7fb2fc758219d40b557aaeab80681d314f739f92'
if len(sys.argv) > 1 and sys.argv[1]:
tag = sys.argv[1]
if len(sys.argv) > 2:
hash = sys.argv[2]
try:
wfapi = pan.wfapi.PanWFapi(tag=tag)
except pan.wfapi.PanWFapiError as msg:
print('pan.wfapi.PanWFapi:', msg, file=sys.stderr)
sys.exit(1)
try:
wfapi.report(hash=sha256)
except pan.wfapi.PanWFapiError as msg:
print('report: %s' % msg, file=sys.stderr)
sys.exit(1)
if (wfapi.response_body is not None):
print(wfapi.response_body)
|
|
"""
Copyright (c) 2017 SONATA-NFV and Paderborn University
ALL RIGHTS RESERVED.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Neither the name of the SONATA-NFV, Paderborn University
nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written
permission.
This work has been performed in the framework of the SONATA project,
funded by the European Commission under Grant number 671517 through
the Horizon 2020 and 5G-PPP programmes. The authors would like to
acknowledge the contributions of their colleagues of the SONATA
partner consortium (www.sonata-nfv.eu).
"""
import re
class Net:
def __init__(self, name):
self.name = name
self.id = None
self.subnet_name = None
self.subnet_id = None
self.subnet_creation_time = None
self.subnet_update_time = None
self.gateway_ip = None
self.segmentation_id = None # not set
self._cidr = None
self.start_end_dict = None
self._issued_ip_addresses = dict()
def get_short_id(self):
"""
Returns a shortened UUID, with only the first 6 characters.
:return: First 6 characters of the UUID
:rtype: ``str``
"""
return str(self.id)[:6]
def get_new_ip_address(self, port_name):
"""
Calculates the next unused IP Address which belongs to the subnet.
:param port_name: Specifies the port.
:type port_name: ``str``
:return: Returns an unused IP address or None if all are in use.
:rtype: ``str``
"""
if self.start_end_dict is None:
return None
int_start_ip = Net.ip_2_int(self.start_end_dict['start']) + 2 # First address as network address not usable
# Second one is for gateways only
int_end_ip = Net.ip_2_int(self.start_end_dict['end']) - 1 # Last address for broadcasts
while int_start_ip in self._issued_ip_addresses and int_start_ip <= int_end_ip:
int_start_ip += 1
if int_start_ip > int_end_ip:
return None
self._issued_ip_addresses[int_start_ip] = port_name
return Net.int_2_ip(int_start_ip) + '/' + self._cidr.rsplit('/', 1)[1]
def assign_ip_address(self, cidr, port_name):
"""
Assigns the IP address to the port if it is currently NOT used.
:param cidr: The cidr used by the port - e.g. 10.0.0.1/24
:type cidr: ``str``
:param port_name: The port name
:type port_name: ``str``
:return: * *False*: If the IP address is already issued or if it is not within this subnet mask.
* *True*: Else
"""
int_ip = Net.cidr_2_int(cidr)
if int_ip in self._issued_ip_addresses:
return False
int_start_ip = Net.ip_2_int(self.start_end_dict['start']) + 1 # First address as network address not usable
int_end_ip = Net.ip_2_int(self.start_end_dict['end']) - 1 # Last address for broadcasts
if int_ip < int_start_ip or int_ip > int_end_ip:
return False
self._issued_ip_addresses[int_ip] = port_name
return True
def is_my_ip(self, cidr, port_name):
"""
Checks if the IP is registered for this port name.
:param cidr: The cidr used by the port - e.g. 10.0.0.1/24
:type cidr: ``str``
:param port_name: The port name
:type port_name: ``str``
:return: Returns true if the IP address belongs to the port name. Else it returns false.
"""
int_ip = Net.cidr_2_int(cidr)
if not int_ip in self._issued_ip_addresses:
return False
if self._issued_ip_addresses[int_ip] == port_name:
return True
return False
def withdraw_ip_address(self, ip_address):
"""
Removes the IP address from the list of issued addresses so that other ports can use it.
:param ip_address: The issued IP address.
:type ip_address: ``str``
"""
if ip_address is None:
return
if "/" in ip_address:
address, suffix = ip_address.rsplit('/', 1)
else:
address = ip_address
int_ip_address = Net.ip_2_int(address)
if int_ip_address in self._issued_ip_addresses.keys():
del self._issued_ip_addresses[int_ip_address]
def reset_issued_ip_addresses(self):
"""
Resets all issued IP addresses.
"""
self._issued_ip_addresses = dict()
def update_port_name_for_ip_address(self, ip_address, port_name):
"""
Updates the port name of the issued IP address.
:param ip_address: The already issued IP address.
:type ip_address: ``str``
:param port_name: The new port name
:type port_name: ``str``
"""
address, suffix = ip_address.rsplit('/', 1)
int_ip_address = Net.ip_2_int(address)
self._issued_ip_addresses[int_ip_address] = port_name
def set_cidr(self, cidr):
"""
Sets the CIDR for the subnet. It first checks that the CIDR has the correct format.
:param cidr: The new CIDR for the subnet.
:type cidr: ``str``
:return: * *True*: When the new CIDR was set successfully.
* *False*: If the CIDR format was wrong.
:rtype: ``bool``
"""
if cidr is None:
if self._cidr is not None:
import emuvim.api.openstack.ip_handler as IP
IP.free_cidr(self._cidr, self.subnet_id)
self._cidr = None
self.reset_issued_ip_addresses()
self.start_end_dict = dict()
return True
if not Net.check_cidr_format(cidr):
return False
self.reset_issued_ip_addresses()
self.start_end_dict = Net.calculate_start_and_end_dict(cidr)
self._cidr = cidr
return True
def get_cidr(self):
"""
Gets the CIDR.
:return: The CIDR
:rtype: ``str``
"""
return self._cidr
def clear_cidr(self):
self._cidr = None
self.start_end_dict = dict()
self.reset_issued_ip_addresses()
def delete_subnet(self):
self.subnet_id = None
self.subnet_name = None
self.subnet_creation_time = None
self.subnet_update_time = None
self.set_cidr(None)
@staticmethod
def calculate_start_and_end_dict(cidr):
"""
Calculates the start and end IP address for the subnet.
:param cidr: The CIDR for the subnet.
:type cidr: ``str``
:return: Dict with start and end ip address
:rtype: ``dict``
"""
address, suffix = cidr.rsplit('/', 1)
int_suffix = int(suffix)
int_address = Net.ip_2_int(address)
address_space = 2 ** 32 - 1
for x in range(0, 31 - int_suffix):
address_space = ~(~address_space | (1 << x))
start = int_address & address_space
end = start + (2 ** (32 - int_suffix) - 1)
return {'start': Net.int_2_ip(start), 'end': Net.int_2_ip(end)}
@staticmethod
def cidr_2_int(cidr):
if cidr is None:
return None
ip = cidr.rsplit('/', 1)[0]
return Net.ip_2_int(ip)
@staticmethod
def ip_2_int(ip):
"""
Converts an IP address to an int.
:param ip: IP address
:type ip: ``str``
:return: IP address as int.
:rtype: ``int``
"""
o = list(map(int, ip.split('.')))  # list() keeps indexing valid on Python 3
res = (16777216 * o[0]) + (65536 * o[1]) + (256 * o[2]) + o[3]
return res
@staticmethod
def int_2_ip(int_ip):
"""
Converts an int IP address to a string.
:param int_ip: Int IP address.
:type int_ip: ``int``
:return: IP address
:rtype: ``str``
"""
o1 = int(int_ip / 16777216) % 256
o2 = int(int_ip / 65536) % 256
o3 = int(int_ip / 256) % 256
o4 = int(int_ip) % 256
return '%(o1)s.%(o2)s.%(o3)s.%(o4)s' % locals()
@staticmethod
def check_cidr_format(cidr):
"""
Checks the CIDR format. A valid example is: 192.168.0.0/29
:param cidr: CIDR to be checked.
:type cidr: ``str``
:return: * *True*: If the Format is correct.
* *False*: If it is not correct.
:rtype: ``bool``
"""
r = re.compile(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}/\d{2}')
if r.match(cidr):
return True
return False
def create_network_dict(self):
"""
Creates the network description dictionary.
:return: Network description.
:rtype: ``dict``
"""
network_dict = dict()
network_dict["status"] = "ACTIVE" # TODO do we support inactive networks?
if self.subnet_id is None:
network_dict["subnets"] = []
else:
network_dict["subnets"] = [self.subnet_id]
network_dict["name"] = self.name
network_dict["admin_state_up"] = True # TODO is it always true?
network_dict["tenant_id"] = "abcdefghijklmnopqrstuvwxyz123456" # TODO what should go in here
network_dict["id"] = self.id
network_dict["shared"] = False # TODO is it always false?
return network_dict
def create_subnet_dict(self):
"""
Creates the subnet description dictionary.
:return: Subnet description.
:rtype: ``dict``
"""
subnet_dict = dict()
subnet_dict["name"] = self.subnet_name
subnet_dict["network_id"] = self.id
subnet_dict["tenant_id"] = "abcdefghijklmnopqrstuvwxyz123456" # TODO what should go in here?
subnet_dict["created_at"] = self.subnet_creation_time
subnet_dict["dns_nameservers"] = []
subnet_dict["allocation_pools"] = [self.start_end_dict]
subnet_dict["host_routers"] = []
subnet_dict["gateway_ip"] = self.gateway_ip
subnet_dict["ip_version"] = "4"
subnet_dict["cidr"] = self.get_cidr()
subnet_dict["updated_at"] = self.subnet_update_time
subnet_dict["id"] = self.subnet_id
subnet_dict["enable_dhcp"] = False # TODO do we support DHCP?
return subnet_dict
def __eq__(self, other):
if self.name == other.name and self.subnet_name == other.subnet_name and \
self.gateway_ip == other.gateway_ip and \
self.segmentation_id == other.segmentation_id and \
self._cidr == other._cidr and \
self.start_end_dict == other.start_end_dict:
return True
return False
def __hash__(self):
return hash((self.name,
self.subnet_name,
self.gateway_ip,
self.segmentation_id,
self._cidr,
self.start_end_dict))
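# Illustrative usage sketch (not part of the original module): shows how Net
# hands out and tracks addresses for a subnet. Wrapped in a function so that
# importing the module stays side-effect free; names and values are made up.
def _demo_net_usage():
    net = Net("demo-net")
    net.set_cidr("10.0.0.0/29")                          # start/end bookkeeping
    first = net.get_new_ip_address("port-a")             # e.g. '10.0.0.2/29'
    ok = net.assign_ip_address("10.0.0.4/29", "port-b")  # True if the address is free
    return first, ok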
|
|
import os
import sys
import logging
import inspect
import pandas as pd
import numpy as np
from sklearn.externals import joblib
from drain import util, metrics
from drain.step import Step, Call
class FitPredict(Step):
"""
Step which can fit a scikit-learn estimator and make predictions.
"""
def __init__(self, inputs,
return_estimator=False,
return_feature_importances=True,
return_predictions=True,
prefit=False,
predict_train=False):
"""
Args:
return_estimator: whether to return the fitted estimator object
return_feature_importances: whether to return a DataFrame of feature importances
return_predictions: whether to return a DataFrame of predictions
prefit: whether the estimator input is already fitted
predict_train: whether to make predictions on training set
"""
Step.__init__(self, inputs=inputs, return_estimator=return_estimator,
return_feature_importances=return_feature_importances,
return_predictions=return_predictions, prefit=prefit,
predict_train=predict_train)
def run(self, estimator, X, y=None, train=None, test=None, aux=None, sample_weight=None,
feature_importances=None):
if not self.prefit:
if y is None:
raise ValueError("Need outcome data y for predictions")
if train is not None:
X_train, y_train = X[train], y[train]
else:
X_train, y_train = X, y
y_missing = y_train.isnull()
y_missing_count = y_missing.sum()
if y_missing.sum() > 0:
logging.info('Dropping %s training examples with missing outcomes'
% y_missing_count)
y_train = y_train[~y_missing]
X_train = X_train[~y_missing]
y_train = y_train.astype(bool)
logging.info('Fitting with %s examples, %s features' % X_train.shape)
if 'sample_weight' in inspect.getargspec(estimator.fit).args and\
sample_weight is not None:
logging.info('Using sample weight')
sample_weight = sample_weight.loc[y_train.index]
estimator.fit(X_train, y_train, sample_weight=sample_weight)
else:
estimator.fit(X_train, y_train)
result = {}
if self.return_estimator:
result['estimator'] = estimator
if self.return_feature_importances:
result['feature_importances'] = feature_importance(estimator, X)
if self.return_predictions:
if test is not None and not self.predict_train:
X_test, y_test = X[test], y[test]
else:
X_test, y_test = X, y
logging.info('Predicting %s examples' % len(X_test))
if y_test is not None:
y = pd.DataFrame({'test': y_test})
else:
y = pd.DataFrame(index=X_test.index)
y['score'] = y_score(estimator, X_test)
if self.predict_train:
y['train'] = train
if aux is not None:
y = y.join(aux, how='left')
result['y'] = y
return result
def dump(self):
result = self.result
if self.return_estimator:
filename = os.path.join(self._dump_dirname, 'estimator.pkl')
joblib.dump(result['estimator'], filename)
if self.return_feature_importances:
filename = os.path.join(self._dump_dirname, 'feature_importances.hdf')
result['feature_importances'].to_hdf(filename, 'df')
if self.return_predictions:
filename = os.path.join(self._dump_dirname, 'y.hdf')
result['y'].to_hdf(filename, 'df')
def load(self):
result = {}
if self.return_estimator:
filename = os.path.join(self._dump_dirname, 'estimator.pkl')
result['estimator'] = joblib.load(filename)
if self.return_feature_importances:
filename = os.path.join(self._dump_dirname, 'feature_importances.hdf')
result['feature_importances'] = pd.read_hdf(filename, 'df')
if self.return_predictions:
filename = os.path.join(self._dump_dirname, 'y.hdf')
result['y'] = pd.read_hdf(filename, 'df')
self.result = result
class Fit(FitPredict):
def __init__(self, inputs, return_estimator=True, return_feature_importances=False):
FitPredict.__init__(self, inputs=inputs, prefit=False,
return_estimator=return_estimator,
return_feature_importances=return_feature_importances,
return_predictions=False)
class Predict(FitPredict):
def __init__(self, inputs, return_estimator=False, return_feature_importances=False):
FitPredict.__init__(self, inputs=inputs,
return_feature_importances=return_feature_importances,
return_estimator=return_estimator,
return_predictions=True, prefit=True)
class PredictProduct(Step):
def run(self, **kwargs):
keys = list(kwargs.keys())
ys = [kwargs[k]['y'] for k in keys]
y = ys[0].copy()
y.rename(columns={'score': 'score_%s' % keys[0]}, inplace=True)
y['score_%s' % keys[1]] = ys[1].score
y['score'] = ys[0].score * ys[1].score
return {'y': y}
class InverseProbabilityWeights(Step):
def run(self, y, train=None, **kwargs):
if train is not None:
logging.info("Using training mask")
train = train[train].index
intersection = y.index.intersection(train)
if len(intersection) != len(train):
raise ValueError("Must provide scores for every training example.")
y = y.ix[intersection]
return {'sample_weight': y.score**-1}
def y_score(estimator, X):
"""
Score examples from a new matrix X
Args:
estimator: an sklearn estimator object
X: design matrix with the same features that the estimator was trained on
Returns: a vector of scores of the same length as X
Note that estimator.predict_proba is preferred but when unavailable
(e.g. SVM without probability calibration) decision_function is used.
"""
try:
y = estimator.predict_proba(X)
return y[:, 1]
except(AttributeError):
return estimator.decision_function(X)
def feature_importance(estimator, X):
if hasattr(estimator, 'coef_'):
i = estimator.coef_[0]
elif hasattr(estimator, 'feature_importances_'):
i = estimator.feature_importances_
else:
i = [np.nan]*X.shape[1]
features = X.columns if hasattr(X, 'columns') else range(X.shape[1])
return pd.DataFrame({'feature': features, 'importance': i}).\
sort_values('importance', ascending=False)
class LogisticRegression(object):
def __init__(self):
pass
def fit(self, X, y, **kwargs):
from statsmodels.discrete.discrete_model import Logit
self.model = Logit(y, X)
self.result = self.model.fit()
def predict_proba(self, X):
return self.result.predict(X)
def _proximity_parallel_helper(train_nodes, t, k):
d = (train_nodes == t).sum(axis=1)
n = d.argsort()[::-1][:k]
return d[n], n # distance, neighbors
def _proximity_helper(train_nodes, test_nodes, k):
from sklearn.externals.joblib import Parallel, delayed
results = Parallel(n_jobs=16, backend='threading')(
delayed(_proximity_parallel_helper)(train_nodes, t, k) for t in test_nodes)
distance, neighbors = zip(*results)
return np.array(distance), np.array(neighbors)
def apply_forest(run):
# store nodes in run
run['nodes'] = pd.DataFrame(run.estimator.apply(run['data'].X), index=run['data'].X.index)
def proximity(run, ix, k):
# look for nodes in training set proximal to the given nodes
if 'nodes' not in run:
apply_forest(run)
distance, neighbors = _proximity_helper(run['nodes'][run.y.train].values,
run['nodes'].loc[ix].values, k)
neighbors = run['nodes'][run.y.train].irow(neighbors.flatten()).index
neighbors = [neighbors[k*i:k*(i+1)] for i in range(len(ix))]
return distance, neighbors
def y_subset(y, query=None, aux=None, subset=None, dropna=False, outcome='true',
k=None, p=None, ascending=False, score='score', p_of='notnull'):
"""
Subset a model "y" dataframe
Args:
query: operates on y, or aux if present
subset: takes a dataframe or index thereof and subsets to that
dropna: whether to drop rows with missing outcomes
k, p: if specified, keep only the top k rows (count) or the top p proportion (by score)
p_of: specifies what the proportion is relative to
'notnull' means proportion is relative to labeled count
'true' means proportion is relative to positive count
'all' means proportion is relative to total count
"""
if query is not None:
if aux is None:
y = y.query(query)
else:
s = aux.ix[y.index]
if len(s) != len(y):
logging.warning('y not a subset of aux')
y = y.ix[s.query(query).index]
if subset is not None:
if hasattr(subset, 'index'):
subset = subset.index
y = y.ix[y.index.intersection(subset)]
if dropna:
y = y.dropna(subset=[outcome])
if k is not None and p is not None:
raise ValueError("Cannot specify both k and p")
elif k is not None:
k = k
elif p is not None:
if p_of == 'notnull':
k = int(p*y[outcome].notnull().sum())
elif p_of == 'true':
k = int(p*y[outcome].sum())
elif p_of == 'all':
k = int(p*len(y))
else:
raise ValueError('Invalid value for p_of: %s' % p_of)
else:
k = None
if k is not None:
y = y.sort_values(score, ascending=ascending).head(k)
return y
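# Illustrative sketch (not part of the original module): selecting the top 2
# scored rows with non-missing outcomes, the way precision-at-k style metrics
# use y_subset(). The data values are made up.
def _demo_y_subset():
    y = pd.DataFrame({'true': [1, 0, np.nan, 1],
                      'score': [0.9, 0.8, 0.7, 0.1]})
    # dropna removes the unlabeled row; k keeps the two highest scores
    return y_subset(y, dropna=True, k=2)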
# list of arguments to y_subset() for Metric above
Y_SUBSET_ARGS = inspect.getargspec(y_subset).args
def true_score(y, outcome='true', score='score', **subset_args):
y = y_subset(y, outcome=outcome, score=score, **subset_args)
return util.to_float(y[outcome], y[score])
def make_metric(function):
def metric(predict_step, **kwargs):
y = predict_step.result['y']
subset_args = [k for k in Y_SUBSET_ARGS if k in kwargs]
kwargs_subset = {k: kwargs[k] for k in subset_args}
y_true, y_score = true_score(y, **kwargs_subset)
kwargs_metric = {k: kwargs[k] for k in kwargs if k not in Y_SUBSET_ARGS}
r = function(y_true, y_score, **kwargs_metric)
return r
return metric
metric_functions = [o for o in inspect.getmembers(metrics)
if inspect.isfunction(o[1]) and not o[0].startswith('_')]
for name, function in metric_functions:
function = make_metric(function)
function.__name__ = name
setattr(sys.modules[__name__], name, function)
def lift(predict_step, **kwargs):
p = precision(predict_step, **kwargs) # noqa: F821
kwargs.pop('k', None)
kwargs.pop('p', None)
b = baseline(predict_step, **kwargs) # noqa: F821
return p/b
def lift_series(predict_step, **kwargs):
p = precision_series(predict_step, **kwargs) # noqa: F821
# pass everything except k or p to baseline
b_kwargs = {k: v for k, v in kwargs.items() if k not in ('k', 'p')}
b = baseline(predict_step, **b_kwargs) # noqa: F821
return p/b
def recall(predict_step, prop=True, **kwargs):
r = make_metric(metrics.recall)(predict_step, **kwargs)
if prop:
kwargs.pop('k', None)
kwargs.pop('p', None)
c = make_metric(metrics.recall)(predict_step, **kwargs)
return r/c
else:
return r
def recall_series(predict_step, prop=True, **kwargs):
r = make_metric(metrics.recall_series)(predict_step, **kwargs)
if prop:
kwargs.pop('k', None)
kwargs.pop('p', None)
c = make_metric(metrics.recall)(predict_step, **kwargs)
return r/c
else:
return r
def overlap(self, other, **kwargs):
y0 = self.result['y']
y0 = y_subset(y0, **kwargs)
y1 = other.result['y']
y1 = y_subset(y1, **kwargs)
return len(y0.index & y1.index)
def similarity(self, other, **kwargs):
y0 = self.result['y']
y0 = y_subset(y0, **kwargs)
y1 = other.result['y']
y1 = y_subset(y1, **kwargs)
return np.float32(len(y0.index & y1.index)) / \
len(y0.index | y1.index)
def rank(self, **kwargs):
y0 = self.result['y']
y0 = y_subset(y0, **kwargs)
return y0.score.rank(ascending=False)
def perturb(estimator, X, bins, columns=None):
"""
Predict on perturbations of a feature vector
estimator: a fitted sklearn estimator
X: DataFrame of example(s) whose feature values are perturbed
bins: a dictionary of column:bins arrays
columns: list of columns if bins doesn't cover all columns
TODO make this work when index is multiple rows
"""
if columns is None:
if len(bins) != X.shape[1]:
raise ValueError("Must specify columns when not perturbing all columns")
else:
columns = X.columns
n = np.concatenate(([0], np.cumsum([len(b) for b in bins])))
X_test = np.empty((n[-1]*X.shape[0], X.shape[1]))
r = pd.DataFrame(columns=['value', 'feature', 'index'], index=np.arange(n[-1]*X.shape[0]))
for j, index in enumerate(X.index):
X_test[j*n[-1]:(j+1)*n[-1], :] = X.values[j, :]
for i, c in enumerate(columns):
s = slice(j*n[-1] + n[i], j*n[-1] + n[i+1])
r['value'].values[s] = bins[i]
r['feature'].values[s] = c
r['index'].values[s] = [index]*(n[i+1]-n[i])
X_test[s, (X.columns == c).argmax()] = bins[i]
y = estimator.predict_proba(X_test)[:, 1]
r['y'] = y
return r
def forests(**kwargs):
steps = []
d = dict(criterion=['entropy', 'gini'], max_features=['sqrt', 'log2'],
n_jobs=[-1], **kwargs)
for estimator_args in util.dict_product(d):
steps.append(Call(
'sklearn.ensemble.RandomForestClassifier',
**estimator_args))
return steps
def logits(**kwargs):
steps = []
for estimator_args in util.dict_product(dict(
penalty=['l1', 'l2'], C=[.001, .01, .1, 1], **kwargs)):
steps.append(Call('sklearn.linear_model.LogisticRegression',
**estimator_args))
return steps
def svms(**kwargs):
steps = []
for estimator_args in util.dict_product(dict(
penalty=['l2'],
dual=[True, False], C=[.001, .01, .1, 1])) + \
util.dict_product(dict(
penalty=['l1'], dual=[False], C=[.001, .01, .1, 1])):
steps.append(Call('sklearn.svm.LinearSVC',
**estimator_args))
return steps
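# Illustrative sketch (not part of the original module): y_score() and
# feature_importance() with a plain scikit-learn estimator, independent of
# the drain Step machinery. The toy data is made up.
def _demo_score_and_importance():
    from sklearn.ensemble import RandomForestClassifier
    X = pd.DataFrame({'a': [0, 1, 0, 1], 'b': [1, 1, 0, 0]})
    y = pd.Series([0, 1, 0, 1])
    estimator = RandomForestClassifier(n_estimators=10, random_state=0)
    estimator.fit(X, y)
    scores = y_score(estimator, X)                   # P(class 1) per row
    importances = feature_importance(estimator, X)   # one row per feature
    return scores, importances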
|
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base class for clients that communicate with apps over a JSON RPC interface.
The JSON protocol expected by this module is:
Request:
{
"id": <monotonically increasing integer containing the ID of this request>
"method": <string containing the name of the method to execute>
"params": <JSON array containing the arguments to the method>
}
Response:
{
"id": <int id of request that this response maps to>,
"result": <Arbitrary JSON object containing the result of executing the
method. If the method could not be executed or returned void,
contains 'null'.>,
"error": <String containing the error thrown by executing the method.
If no error occurred, contains 'null'.>
"callback": <String that represents a callback ID used to identify events
associated with a particular CallbackHandler object.>
"""
from builtins import str
import json
import logging
import socket
import threading
from mobly.controllers.android_device_lib import callback_handler
# UID of the 'unknown' jsonrpc session. Will cause creation of a new session.
UNKNOWN_UID = -1
# Maximum time to wait for the socket to open on the device.
_SOCKET_CONNECTION_TIMEOUT = 60
# Maximum time to wait for a response message on the socket.
_SOCKET_READ_TIMEOUT = callback_handler.MAX_TIMEOUT
class Error(Exception):
pass
class AppStartError(Error):
"""Raised when the app is not able to be started."""
class AppRestoreConnectionError(Error):
"""Raised when failed to restore app from disconnection."""
class ApiError(Error):
"""Raised when remote API reports an error."""
class ProtocolError(Error):
"""Raised when there is some error in exchanging data with server."""
NO_RESPONSE_FROM_HANDSHAKE = 'No response from handshake.'
NO_RESPONSE_FROM_SERVER = 'No response from server.'
MISMATCHED_API_ID = 'Mismatched API id.'
class JsonRpcCommand(object):
"""Commands that can be invoked on all jsonrpc clients.
INIT: Initializes a new session.
CONTINUE: Creates a connection.
"""
INIT = 'initiate'
CONTINUE = 'continue'
class JsonRpcClientBase(object):
"""Base class for jsonrpc clients that connect to remote servers.
Connects to a remote device running a jsonrpc-compatible app. Before opening
a connection a port forward must be set up to go over USB. This can be done using
adb.forward([local, remote]). Once the port has been forwarded it can be
used in this object as the port of communication.
Attributes:
host_port: (int) The host port of this RPC client.
device_port: (int) The device port of this RPC client.
app_name: (str) The user-visible name of the app being communicated
with.
uid: (int) The uid of this session.
"""
def __init__(self, app_name, log=logging.getLogger()):
"""
Args:
app_name: (str) The user-visible name of the app being communicated
with.
log: (logging.Logger) logger to which to send log messages.
"""
self.host_port = None
self.device_port = None
self.app_name = app_name
self.log = log
self.uid = None
self._client = None # prevent close errors on connect failure
self._conn = None
self._counter = None
self._lock = threading.Lock()
self._event_client = None
def __del__(self):
self.disconnect()
# Methods to be implemented by subclasses.
def start_app_and_connect(self):
"""Starts the server app on the android device and connects to it.
After this, the self.host_port and self.device_port attributes must be
set.
Must be implemented by subclasses.
Raises:
AppStartError: When the app was not able to be started.
"""
raise NotImplementedError()
def stop_app(self):
"""Kills any running instance of the app.
Must be implemented by subclasses.
"""
raise NotImplementedError()
def restore_app_connection(self, port=None):
"""Reconnects to the app after device USB was disconnected.
Instead of creating a new instance of the client:
- Uses the given port (or finds a new available host_port if none is
given).
- Tries to connect to remote server with selected port.
Must be implemented by subclasses.
Args:
port: If given, this is the host port from which to connect to remote
device port. If not provided, find a new available port as host
port.
Raises:
AppRestoreConnectionError: When the app was not able to be
reconnected.
"""
raise NotImplementedError()
def _start_event_client(self):
"""Starts a separate JsonRpc client to the same session for propagating
events.
This is an optional function that should only be implemented if the client
utilizes the snippet event mechanism.
Returns:
A JsonRpc Client object that connects to the same session as the
one on which this function is called.
"""
raise NotImplementedError()
# Rest of the client methods.
def connect(self, uid=UNKNOWN_UID, cmd=JsonRpcCommand.INIT):
"""Opens a connection to a JSON RPC server.
Opens a connection to a remote client. The connection attempt will time
out if it takes longer than _SOCKET_CONNECTION_TIMEOUT seconds. Each
subsequent operation over this socket will time out after
_SOCKET_READ_TIMEOUT seconds as well.
Args:
uid: int, The uid of the session to join, or UNKNOWN_UID to start a
new session.
cmd: JsonRpcCommand, The command to use for creating the connection.
Raises:
IOError: Raised when the socket times out from io error
socket.timeout: Raised when the socket waits too long for the connection.
ProtocolError: Raised when there is an error in the protocol.
"""
self._counter = self._id_counter()
self._conn = socket.create_connection(('localhost', self.host_port),
_SOCKET_CONNECTION_TIMEOUT)
self._conn.settimeout(_SOCKET_READ_TIMEOUT)
self._client = self._conn.makefile(mode='brw')
resp = self._cmd(cmd, uid)
if not resp:
raise ProtocolError(ProtocolError.NO_RESPONSE_FROM_HANDSHAKE)
result = json.loads(str(resp, encoding='utf8'))
if result['status']:
self.uid = result['uid']
else:
self.uid = UNKNOWN_UID
def disconnect(self):
"""Close the connection to the remote client."""
if self._conn:
self._conn.close()
self._conn = None
def _cmd(self, command, uid=None):
"""Send a command to the server.
Args:
command: str, The name of the command to execute.
uid: int, the uid of the session to send the command to.
Returns:
The line that was written back.
"""
if not uid:
uid = self.uid
self._client.write(
json.dumps({
'cmd': command,
'uid': uid
}).encode("utf8") + b'\n')
self._client.flush()
return self._client.readline()
def _rpc(self, method, *args):
"""Sends an rpc to the app.
Args:
method: str, The name of the method to execute.
args: any, The args of the method.
Returns:
The result of the rpc.
Raises:
ProtocolError: Something went wrong with the protocol.
ApiError: The rpc went through, however executed with errors.
"""
with self._lock:
apiid = next(self._counter)
data = {'id': apiid, 'method': method, 'params': args}
request = json.dumps(data)
self._client.write(request.encode("utf8") + b'\n')
self._client.flush()
response = self._client.readline()
if not response:
raise ProtocolError(ProtocolError.NO_RESPONSE_FROM_SERVER)
result = json.loads(str(response, encoding="utf8"))
if result['error']:
raise ApiError(result['error'])
if result['id'] != apiid:
raise ProtocolError(ProtocolError.MISMATCHED_API_ID)
if result.get('callback') is not None:
if self._event_client is None:
self._event_client = self._start_event_client()
return callback_handler.CallbackHandler(
callback_id=result['callback'],
event_client=self._event_client,
ret_value=result['result'],
method_name=method)
return result['result']
def __getattr__(self, name):
"""Wrapper for python magic to turn method calls into RPC calls."""
def rpc_call(*args):
return self._rpc(name, *args)
return rpc_call
def _id_counter(self):
i = 0
while True:
yield i
i += 1
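# Illustrative sketch (not part of the original module): the wire format that
# _rpc() writes for a single call, without opening a real socket. The method
# name and arguments are placeholders.
def _demo_wire_format():
    request = json.dumps({'id': 0, 'method': 'makeToast', 'params': ['hello']})
    # One JSON object per line, utf8-encoded and newline-terminated.
    return request.encode('utf8') + b'\n'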
|
|
from datetime import datetime
from datetime import timedelta
import json
import unittest
from bson.objectid import ObjectId
from flask import url_for
from flask.ext import testing
from flask.ext import pymongo
import logcas.bootstrap
import db
class RequestShowTestCase(testing.TestCase):
col = db.logs
controller = '_request_show'
template = 'request_show.html'
def create_app(self):
app = logcas.bootstrap.app
app.config['TESTING'] = True
app.config['CSRF_ENABLED'] = False
return app
@classmethod
def setUpClass(cls):
now = datetime.today()
cls.now = now
onesecond = timedelta(0, 1)
for i in range(0, 20):
for level in logcas.bootstrap.LEVELMAP.keys():
cls.col.save({
"time": now,
"created": int(now.strftime("%s")),
"message": "This is a message",
"hostname": "localhost",
"levelno": level,
"levelname": logcas.bootstrap.LEVELMAP[level],
"binary": "nova-compute",
"extra": {
"request_id": str(i),
"remote_address": "127.0.0.1",
"project_name": "testproject",
"user_name": "testuser",
"user_id": "xxxxxxxx",
}
})
now = now + onesecond
@classmethod
def tearDownClass(cls):
cls.col.drop()
# no param
def test_without_params(self):
response = self.client.get(url_for(self.controller, request_id="10"))
self.assert200(response)
self.assertTemplateUsed(self.template)
# page
def test_with_page_(self):
response = self.client.get(url_for(self.controller,
request_id="10", page=""))
self.assert400(response)
def test_with_page_abc(self):
response = self.client.get(url_for(self.controller,
request_id="10", page="abc"))
self.assert400(response)
def test_with_page_0(self):
response = self.client.get(url_for(self.controller,
request_id="10", page="0"))
self.assert400(response)
def test_with_page_1(self):
response = self.client.get(url_for(self.controller,
request_id="10", page="1"))
self.assert200(response)
self.assertTemplateUsed(self.template)
def test_with_page_100(self):
response = self.client.get(url_for(self.controller,
request_id="10", page="100"))
self.assert200(response)
self.assertTemplateUsed(self.template)
# limit
def test_with_limit_(self):
response = self.client.get(url_for(self.controller,
request_id="10", limit=""))
self.assert400(response)
def test_with_limit_abc(self):
response = self.client.get(url_for(self.controller,
request_id="10", limit="abc"))
self.assert400(response)
def test_with_limit_9(self):
response = self.client.get(url_for(self.controller,
request_id="10", limit="9"))
self.assert400(response)
def test_with_limit_10(self):
response = self.client.get(url_for(self.controller,
request_id="10", limit="10"))
self.assert200(response)
self.assertTemplateUsed(self.template)
def test_with_limit_200(self):
response = self.client.get(url_for(self.controller,
request_id="10", limit="200"))
self.assert200(response)
self.assertTemplateUsed(self.template)
def test_with_limit_201(self):
response = self.client.get(url_for(self.controller,
request_id="10", limit="201"))
self.assert400(response)
# levelno
def test_with_levelno_(self):
response = self.client.get(url_for(self.controller,
request_id="10", levelno=""))
self.assert400(response)
def test_with_levelno_abc(self):
response = self.client.get(url_for(self.controller,
request_id="10", levelno="abc"))
self.assert400(response)
def test_with_levelno_0(self):
response = self.client.get(url_for(self.controller,
request_id="10", levelno="0"))
self.assert400(response)
def test_with_levelno_10(self):
response = self.client.get(url_for(self.controller,
request_id="10", levelno="10"))
self.assert200(response)
self.assertTemplateUsed(self.template)
def test_with_levelno_20(self):
response = self.client.get(url_for(self.controller,
request_id="10", levelno="20"))
self.assert200(response)
self.assertTemplateUsed(self.template)
def test_with_levelno_21(self):
response = self.client.get(url_for(self.controller,
request_id="10", levelno="21"))
self.assert200(response)
self.assertTemplateUsed(self.template)
def test_with_levelno_30(self):
response = self.client.get(url_for(self.controller,
request_id="10", levelno="30"))
self.assert200(response)
self.assertTemplateUsed(self.template)
def test_with_levelno_40(self):
response = self.client.get(url_for(self.controller,
request_id="10", levelno="40"))
self.assert200(response)
self.assertTemplateUsed(self.template)
def test_with_levelno_50(self):
response = self.client.get(url_for(self.controller,
request_id="10", levelno="50"))
self.assert200(response)
self.assertTemplateUsed(self.template)
def test_with_levelno_60(self):
response = self.client.get(url_for(self.controller,
request_id="10", levelno="60"))
self.assert400(response)
# style
def test_with_style_(self):
response = self.client.get(url_for(self.controller,
request_id="10", style=""))
self.assert400(response)
def test_with_style_abc(self):
response = self.client.get(url_for(self.controller,
request_id="10", style="abc"))
self.assert400(response)
def test_with_style_default(self):
response = self.client.get(url_for(self.controller,
request_id="10", style="default"))
self.assert200(response)
self.assertTemplateUsed(self.template)
def test_with_style_dark(self):
response = self.client.get(url_for(self.controller,
request_id="10", style="dark"))
self.assert200(response)
self.assertTemplateUsed(self.template)
# host
def test_with_host_(self):
response = self.client.get(url_for(self.controller,
request_id="10", host=""))
self.assert200(response)
self.assertTemplateUsed(self.template)
def test_with_host_20_characters(self):
response = self.client.get(url_for(self.controller,
request_id="10", host="a" * 20))
self.assert200(response)
self.assertTemplateUsed(self.template)
def test_with_host_21_characters(self):
response = self.client.get(url_for(self.controller,
request_id="10", host="a" * 21))
self.assert400(response)
class ArchivedRequestShowTestCase(RequestShowTestCase):
col = db.archived_logs
controller = '_archived_request_show'
template = 'archived_request_show.html'
if __name__ == '__main__':
unittest.main()
|
|
#!/usr/bin/env python
"""
@package ion.services.mi.test.test_port_agent_client
@file ion/services/mi/test/test_port_agent_client.py
@author David Everett
@brief Some unit tests for R2 port agent client
"""
__author__ = 'David Everett'
__license__ = 'Apache 2.0'
# Ensure the test class is monkey patched for gevent
from gevent import monkey; monkey.patch_all()
import gevent
import logging
import unittest
import re
import time
import datetime
import array
import struct
import ctypes
from nose.plugins.attrib import attr
from mock import Mock
from ion.agents.port.port_agent_process import PortAgentProcess
from ion.agents.port.port_agent_process import PortAgentProcessType
from mi.core.tcp_client import TcpClient
from mi.core.port_agent_simulator import TCPSimulatorServer
from mi.core.unit_test import MiUnitTest
from mi.core.unit_test import MiIntTestCase
from mi.idk.unit_test import InstrumentDriverTestCase
from mi.idk.unit_test import InstrumentDriverUnitTestCase
from mi.idk.unit_test import InstrumentDriverIntegrationTestCase
from mi.core.instrument.port_agent_client import PortAgentClient, PortAgentPacket, Listener
from mi.core.instrument.port_agent_client import HEADER_SIZE
from mi.core.instrument.instrument_driver import DriverConnectionState
from mi.core.instrument.instrument_driver import DriverProtocolState
from mi.core.exceptions import InstrumentConnectionException
from mi.instrument.seabird.sbe37smb.ooicore.driver import SBE37Driver
# MI logger
from mi.core.log import get_logger ; log = get_logger()
SYSTEM_EPOCH = datetime.date(*time.gmtime(0)[0:3])
NTP_EPOCH = datetime.date(1900, 1, 1)
NTP_DELTA = (SYSTEM_EPOCH - NTP_EPOCH).days * 24 * 3600
## Initialize the test parameters
## Use the SBE37 here because this is a generic port_agent_client test not
## necessarily associated with any driver.
InstrumentDriverTestCase.initialize(
driver_module='mi.instrument.seabird.sbe37smb.ooicore.driver',
driver_class="SBE37Driver",
instrument_agent_resource_id = '123xyz',
instrument_agent_preload_id = 'IA2',
instrument_agent_name = 'Agent007',
driver_startup_config = {}
)
@attr('UNIT', group='mi')
class PAClientUnitTestCase(InstrumentDriverUnitTestCase):
def setUp(self):
self.ipaddr = "localhost"
self.cmd_port = 9001
self.data_port = 9002
self.device_port = 9003
def resetTestVars(self):
self.rawCallbackCalled = False
self.dataCallbackCalled = False
self.errorCallbackCalled = False
self.listenerCallbackCalled = False
def myGotData(self, paPacket):
self.dataCallbackCalled = True
if paPacket.is_valid():
validity = "valid"
else:
validity = "invalid"
log.info("Got %s port agent data packet with data length %d: %s", validity, paPacket.get_data_length(), paPacket.get_data())
def myGotRaw(self, paPacket):
self.rawCallbackCalled = True
if paPacket.is_valid():
validity = "valid"
else:
validity = "invalid"
log.info("Got %s port agent raw packet with data length %d: %s", validity, paPacket.get_data_length(), paPacket.get_data())
def myGotError(self, errorString = "No error string passed in."):
self.errorCallbackCalled = True
log.info("Got error: %s", errorString)
def myGotListenerError(self, exception):
self.listenerCallbackCalled = True
log.info("Got listener exception: %s", exception)
def raiseException(self, packet):
raise Exception("Boom")
def test_handle_packet(self):
"""
Test that a default PortAgentPacket creates a DATA_FROM_DRIVER packet,
and that the handle_packet method invokes the raw callback
"""
paListener = Listener(None, None, 0, 0, 5, self.myGotData, self.myGotRaw, self.myGotListenerError, self.myGotError)
test_data = "This is a great big test"
self.resetTestVars()
paPacket = PortAgentPacket()
paPacket.attach_data(test_data)
paPacket.pack_header()
paPacket.verify_checksum()
paListener.handle_packet(paPacket)
self.assertTrue(self.rawCallbackCalled)
###
# Test DATA_FROM_INSTRUMENT; handle_packet should invoke data and raw
# callbacks.
###
self.resetTestVars()
paPacket = PortAgentPacket(PortAgentPacket.DATA_FROM_INSTRUMENT)
paPacket.attach_data(test_data)
paPacket.pack_header()
paPacket.verify_checksum()
paListener.handle_packet(paPacket)
self.assertTrue(self.rawCallbackCalled)
self.assertTrue(self.dataCallbackCalled)
self.assertFalse(self.errorCallbackCalled)
self.assertFalse(self.listenerCallbackCalled)
###
# Test PORT_AGENT_COMMAND; handle_packet should invoke raw callback.
###
self.resetTestVars()
paPacket = PortAgentPacket(PortAgentPacket.PORT_AGENT_COMMAND)
paPacket.attach_data(test_data)
paPacket.pack_header()
paPacket.verify_checksum()
paListener.handle_packet(paPacket)
self.assertTrue(self.rawCallbackCalled)
self.assertFalse(self.dataCallbackCalled)
self.assertFalse(self.errorCallbackCalled)
self.assertFalse(self.listenerCallbackCalled)
###
# Test PORT_AGENT_STATUS; handle_packet should invoke raw callback.
###
self.resetTestVars()
paPacket = PortAgentPacket(PortAgentPacket.PORT_AGENT_STATUS)
paPacket.attach_data(test_data)
paPacket.pack_header()
paPacket.verify_checksum()
paListener.handle_packet(paPacket)
self.assertTrue(self.rawCallbackCalled)
self.assertFalse(self.dataCallbackCalled)
self.assertFalse(self.errorCallbackCalled)
self.assertFalse(self.listenerCallbackCalled)
###
# Test PORT_AGENT_FAULT; handle_packet should invoke raw callback.
###
self.resetTestVars()
paPacket = PortAgentPacket(PortAgentPacket.PORT_AGENT_FAULT)
paPacket.attach_data(test_data)
paPacket.pack_header()
paPacket.verify_checksum()
paListener.handle_packet(paPacket)
self.assertTrue(self.rawCallbackCalled)
self.assertFalse(self.dataCallbackCalled)
self.assertFalse(self.errorCallbackCalled)
self.assertFalse(self.listenerCallbackCalled)
###
# Test INSTRUMENT_COMMAND; handle_packet should invoke raw callback.
###
self.resetTestVars()
paPacket = PortAgentPacket(PortAgentPacket.INSTRUMENT_COMMAND)
paPacket.attach_data(test_data)
paPacket.pack_header()
paPacket.verify_checksum()
paListener.handle_packet(paPacket)
self.assertTrue(self.rawCallbackCalled)
self.assertFalse(self.dataCallbackCalled)
self.assertFalse(self.errorCallbackCalled)
self.assertFalse(self.listenerCallbackCalled)
###
# Test HEARTBEAT; handle_packet should not invoke any callback.
###
self.resetTestVars()
paPacket = PortAgentPacket(PortAgentPacket.HEARTBEAT)
paPacket.attach_data(test_data)
paPacket.pack_header()
paPacket.verify_checksum()
paListener.handle_packet(paPacket)
self.assertFalse(self.rawCallbackCalled)
self.assertFalse(self.dataCallbackCalled)
self.assertFalse(self.errorCallbackCalled)
self.assertFalse(self.listenerCallbackCalled)
def test_heartbeat_timeout(self):
"""
Initialize the Listener with a heartbeat value, then
start the heartbeat. Wait long enough for the heartbeat
to timeout MAX_MISSED_HEARTBEATS times, and then assert
that the error_callback was called.
"""
self.resetTestVars()
test_recovery_attempts = 1
test_heartbeat = 1
test_max_missed_heartbeats = 5
paListener = Listener(None, test_recovery_attempts, None, test_heartbeat, test_max_missed_heartbeats,
self.myGotData, self.myGotRaw, self.myGotListenerError, None, self.myGotError)
paListener.start_heartbeat_timer()
gevent.sleep((test_max_missed_heartbeats * paListener.heartbeat) + 4)
self.assertFalse(self.rawCallbackCalled)
self.assertFalse(self.dataCallbackCalled)
self.assertTrue(self.errorCallbackCalled)
self.assertFalse(self.listenerCallbackCalled)
def test_set_heartbeat(self):
"""
        Test the set_heartbeat method; make sure it returns False when
        passed invalid values and True when valid. Also make sure it
        adds the HEARTBEAT_FUDGE to non-zero heartbeat values.
"""
self.resetTestVars()
test_recovery_attempts = 1
test_heartbeat = 0
test_max_missed_heartbeats = 5
paListener = Listener(None, test_recovery_attempts, None, test_heartbeat, test_max_missed_heartbeats,
self.myGotData, self.myGotRaw, self.myGotListenerError, None, self.myGotError)
###
# Test valid values
###
test_heartbeat = 1
retValue = paListener.set_heartbeat(test_heartbeat)
self.assertTrue(retValue)
self.assertTrue(paListener.heartbeat == test_heartbeat + paListener.HEARTBEAT_FUDGE)
test_heartbeat = paListener.MAX_HEARTBEAT_INTERVAL
retValue = paListener.set_heartbeat(test_heartbeat)
self.assertTrue(retValue)
self.assertTrue(paListener.heartbeat == test_heartbeat + paListener.HEARTBEAT_FUDGE)
###
# Test that a heartbeat value of zero results in the listener.heartbeat being zero
# (and doesn't include HEARTBEAT_FUDGE)
###
test_heartbeat = 0
retValue = paListener.set_heartbeat(test_heartbeat)
self.assertTrue(retValue)
self.assertTrue(paListener.heartbeat == test_heartbeat)
###
# Test invalid values
###
test_heartbeat = -1
retValue = paListener.set_heartbeat(test_heartbeat)
self.assertFalse(retValue)
test_heartbeat = paListener.MAX_HEARTBEAT_INTERVAL + 1
retValue = paListener.set_heartbeat(test_heartbeat)
self.assertFalse(retValue)
def test_connect_failure(self):
"""
        Test that when the port agent client cannot initially connect, it
        raises an InstrumentConnectionException.
"""
exceptionRaised = False
driver = SBE37Driver(self._got_data_event_callback)
current_state = driver.get_resource_state()
self.assertEqual(current_state, DriverConnectionState.UNCONFIGURED)
config = {'addr' : self.ipaddr, 'port' : self.data_port, 'cmd_port' : self.cmd_port}
driver.configure(config = config)
current_state = driver.get_resource_state()
self.assertEqual(current_state, DriverConnectionState.DISCONNECTED)
"""
        Try to connect: it should fail because there is no port agent running.
The state should remain DISCONNECTED, and an
InstrumentConnectionException should be caught.
"""
try:
driver.connect()
current_state = driver.get_resource_state()
self.assertEqual(current_state, DriverConnectionState.DISCONNECTED)
except InstrumentConnectionException as e:
exceptionRaised = True
"""
Give it some time to retry
"""
time.sleep(4)
self.assertTrue(exceptionRaised)
@attr('UNIT', group='mi')
class PAClientTestPortAgentPacket(MiUnitTest):
# time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.localtime(time.time()))
#
@staticmethod
def ntp_to_system_time(date):
"""convert a NTP time to system time"""
return date - NTP_DELTA
@staticmethod
def system_to_ntp_time(date):
"""convert a system time to a NTP time"""
return date + NTP_DELTA
def setUp(self):
self.pap = PortAgentPacket()
#self.test_time = time.time()
#self.ntp_time = self.system_to_ntp_time(self.test_time)
#self.pap.set_timestamp(self.ntp_time)
def test_pack_header(self):
test_data = "Only the length of this matters?"
test_data_length = len(test_data)
self.pap.attach_data(test_data)
self.pap.pack_header()
header = self.pap.get_header()
self.assertEqual(self.pap.get_data_length(), test_data_length)
def test_get_length(self):
test_length = 100
self.pap.set_data_length(test_length)
got_length = self.pap.get_data_length()
self.assertEqual(got_length, test_length)
def test_checksum(self):
"""
This tests the checksum algorithm; if somebody changes the algorithm
this test should catch it. Had to jump through some hoops to do this;
needed to add set_data_length and set_header because we're building our
own header here (the one in PortAgentPacket includes the timestamp
so the checksum is not consistent).
"""
test_data = "This tests the checksum algorithm."
test_length = len(test_data)
self.pap.attach_data(test_data)
"""
Now build a header
"""
variable_tuple = (0xa3, 0x9d, 0x7a, self.pap.DATA_FROM_DRIVER,
test_length + HEADER_SIZE, 0x0000,
0)
self.pap.set_data_length(test_length)
format = '>BBBBHHd'
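        # Header layout ('>BBBBHHd', big-endian): three sync bytes (0xA3 0x9D 0x7A),
        # a one-byte packet type, a two-byte packet size, a two-byte checksum and an
        # eight-byte NTP timestamp.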
size = struct.calcsize(format)
temp_header = ctypes.create_string_buffer(size)
struct.pack_into(format, temp_header, 0, *variable_tuple)
"""
Now set the header member in PortAgentPacket to the header
we built
"""
self.pap.set_header(temp_header.raw)
"""
Now get the checksum and verify it is what we expect it to be.
"""
checksum = self.pap.calculate_checksum()
self.assertEqual(checksum, 2)
def test_unpack_header(self):
self.pap = PortAgentPacket()
data_length = 32
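        # Raw header bytes: sync (163, 157, 122 == 0xA3 0x9D 0x7A), packet type 2
        # (asserted below to be DATA_FROM_DRIVER), two packet-size bytes, two
        # checksum bytes (14, 145 == 3729) and eight NTP timestamp bytes.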
data = self.pap.unpack_header(array.array('B', [163, 157, 122, 2, 0, data_length + HEADER_SIZE, 14, 145, 65, 234, 142, 154, 23, 155, 51, 51]))
got_timestamp = self.pap.get_timestamp()
self.assertEqual(self.pap.get_header_type(), self.pap.DATA_FROM_DRIVER)
self.assertEqual(self.pap.get_data_length(), data_length)
# FIXME -- This broke with October 2013 timestamp fix...update this!
#self.assertEqual(got_timestamp, 1105890970.110589)
self.assertEqual(self.pap.get_header_recv_checksum(), 3729)
@attr('INT', group='mi')
class PAClientIntTestCase(InstrumentDriverTestCase):
def initialize(cls, *args, **kwargs):
log.debug("initialize")
def setUp(self):
#InstrumentDriverIntegrationTestCase.setUp(self)
#self.ipaddr = "69.196.56.192"
self.ipaddr = "localhost"
self.cmd_port = 9001
self.data_port = 9002
self.device_port = 9003
self.rawCallbackCalled = False
self.dataCallbackCalled = False
self.errorCallbackCalled = False
self.paPacket = None
def tearDown(self):
"""
@brief Test teardown
"""
        log.debug("PAClientIntTestCase tearDown")
InstrumentDriverTestCase.tearDown(self)
def startPortAgent(self):
pa_port = self.init_port_agent()
log.debug("port_agent started on port: %d" % (pa_port))
time.sleep(2) # give it a chance to start responding
def resetTestVars(self):
log.debug("Resetting test variables...")
self.rawCallbackCalled = False
self.dataCallbackCalled = False
self.errorCallbackCalled = False
self.listenerCallbackCalled = False
def myGotData(self, paPacket):
self.dataCallbackCalled = True
self.paPacket = paPacket
if paPacket.is_valid():
validity = "valid"
else:
validity = "invalid"
log.debug("Got %s port agent data packet with data length %s: %s", validity, paPacket.get_data_length(), paPacket.get_data())
def myGotRaw(self, paPacket):
self.rawCallbackCalled = True
if paPacket.is_valid():
validity = "valid"
else:
validity = "invalid"
log.debug("Got %s port agent raw packet with data length %s: %s", validity, paPacket.get_data_length(), paPacket.get_data())
def myGotListenerError(self, exception):
self.listenerCallbackCalled = True
log.info("Got listener exception: %s", exception)
def myGotError(self, errorString = "No error string passed in."):
self.errorCallbackCalled = True
log.info("myGotError got error: %s", errorString)
def init_instrument_simulator(self):
"""
Startup a TCP server that we can use as an instrument simulator
"""
self._instrument_simulator = TCPSimulatorServer()
self.addCleanup(self._instrument_simulator.close)
# Wait for the simulator to bind to a port
timeout = time.time() + 10
while (timeout > time.time()):
if (self._instrument_simulator.port > 0):
log.debug("Instrument simulator initialized on port %s" % self._instrument_simulator.port)
return
log.debug("waiting for simulator to bind. sleeping")
time.sleep(1)
raise IDKException("Timeout waiting for simulator to bind")
def init_port_agent(self):
"""
        @brief Launch the port agent process. This is used in the
        integration and qualification tests. The port agent abstracts the physical
        interface with the instrument.
        @retval the data port that the port agent is listening on
"""
if (self.port_agent):
log.error("Port agent already initialized")
return
log.debug("Startup Port Agent")
#comm_config = self.get_comm_config()
config = self.port_agent_config()
log.debug("port agent config: %s" % config)
port_agent = PortAgentProcess.launch_process(config, timeout = 60, test_mode = True)
port = port_agent.get_data_port()
pid = port_agent.get_pid()
log.info('Started port agent pid %s listening at port %s' % (pid, port))
self.addCleanup(self.stop_port_agent)
self.port_agent = port_agent
return port
def port_agent_config(self):
"""
Overload the default port agent configuration so that
it connects to a simulated TCP connection.
"""
config = {
'device_addr' : self.ipaddr,
'device_port' : self.device_port,
'command_port': self.cmd_port,
'data_port': self.data_port,
'process_type': PortAgentProcessType.UNIX,
'log_level': 5,
'heartbeat_interval': 3
}
# Override the instrument connection information.
config['device_addr'] = 'localhost'
config['device_port'] = self._instrument_simulator.port
return config
def test_paClient_retry(self):
"""
Test that the port agent client will not continually try to recover
when the port agent closes the connection gracefully because it has
another client connected.
"""
exceptionRaised = False
self.resetTestVars()
self.init_instrument_simulator()
self.startPortAgent()
time.sleep(2)
"""
Start a TCP client that will connect to the data port; this sets up the
situation where the Port Agent will immediately close the connection
because it already has one
"""
self.tcp_client = TcpClient("localhost", self.data_port)
time.sleep(2)
paClient = PortAgentClient(self.ipaddr, self.data_port, self.cmd_port)
try:
paClient.init_comms(self.myGotData, self.myGotRaw, self.myGotListenerError, self.myGotError)
except InstrumentConnectionException as e:
exceptionRaised = True
"""
Give it some time to retry
"""
time.sleep(4)
self.assertTrue(exceptionRaised)
def test_paClient_rx_heartbeat(self):
"""
        Test that heartbeats sent by the port agent (the port_agent_config()
        method above sets the port agent's heartbeat interval) are handled by a
        paClient with a heartbeat_interval of 0 without invoking the error callback.
"""
self.resetTestVars()
self.init_instrument_simulator()
self.startPortAgent()
time.sleep(5)
paClient = PortAgentClient(self.ipaddr, self.data_port, self.cmd_port)
paClient.init_comms(self.myGotData, self.myGotRaw, self.myGotListenerError, self.myGotError)
time.sleep(10)
self.assertFalse(self.errorCallbackCalled)
def test_start_paClient_no_port_agent(self):
self.resetTestVars()
paClient = PortAgentClient(self.ipaddr, self.data_port, self.cmd_port)
self.assertRaises(InstrumentConnectionException,
paClient.init_comms,
self.myGotData, self.myGotRaw,
self.myGotListenerError, self.myGotError)
self.assertFalse(self.errorCallbackCalled)
def test_start_paClient_with_port_agent(self):
self.resetTestVars()
self.init_instrument_simulator()
self.startPortAgent()
paClient = PortAgentClient(self.ipaddr, self.data_port, self.cmd_port)
try:
paClient.init_comms(self.myGotData, self.myGotRaw, self.myGotListenerError, self.myGotError)
except InstrumentConnectionException as e:
log.error("Exception caught: %r" % (e))
exceptionCaught = True
else:
exceptionCaught = False
data = "this is a great big test"
paClient.send(data)
time.sleep(1)
self._instrument_simulator.send(data)
time.sleep(5)
paClient.stop_comms()
"""
Assert that the error_callback was not called, that an exception was not
caught, and that the data and raw callbacks were called.
"""
self.assertFalse(self.errorCallbackCalled)
self.assertFalse(exceptionCaught)
self.assertTrue(self.rawCallbackCalled)
self.assertTrue(self.dataCallbackCalled)
def test_start_paClient_no_port_agent_big_data(self):
self.resetTestVars()
logging.getLogger('mi.core.instrument.port_agent_client').setLevel(logging.DEBUG)
# I put this in here because PortAgentPacket cannot make a new packet
# with a valid checksum.
def makepacket(msgtype, timestamp, data):
from struct import Struct
SYNC = (0xA3, 0x9D, 0x7A)
HEADER_FORMAT = "!BBBBHHd"
header_struct = Struct(HEADER_FORMAT)
HEADER_SIZE = header_struct.size
def calculateChecksum(data, seed=0):
n = seed
for datum in data:
n ^= datum
return n
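            # The port agent checksum is a byte-wise XOR over the entire packet; it is
            # computed with the header's checksum field zeroed and then packed back in
            # (see the two pack_header calls below).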
def pack_header(buf, msgtype, pktsize, checksum, timestamp):
sync1, sync2, sync3 = SYNC
header_struct.pack_into(buf, 0, sync1, sync2, sync3, msgtype, pktsize,
checksum, timestamp)
pktsize = HEADER_SIZE + len(data)
pkt = bytearray(pktsize)
pack_header(pkt, msgtype, pktsize, 0, timestamp)
pkt[HEADER_SIZE:] = data
checksum = calculateChecksum(pkt)
pack_header(pkt, msgtype, pktsize, checksum, timestamp)
return pkt
# Make a BIG packet
data = "A" * (2**16 - HEADER_SIZE - 1)
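        # The packet-size header field is an unsigned 16-bit value ('H'), so this is
        # the largest payload for which the total packet size (65535) still fits.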
txpkt = makepacket(PortAgentPacket.DATA_FROM_INSTRUMENT, 0.0, data)
def handle(sock, addr):
# Send it in pieces
sock.sendall(txpkt[:1500])
time.sleep(1)
sock.sendall(txpkt[1500:])
time.sleep(10)
import gevent.server
dataserver = gevent.server.StreamServer((self.ipaddr, self.data_port), handle)
cmdserver = gevent.server.StreamServer((self.ipaddr, self.cmd_port), lambda x, y: None)
paClient = PortAgentClient(self.ipaddr, self.data_port, self.cmd_port)
try:
dataserver.start()
cmdserver.start()
paClient.init_comms(self.myGotData, self.myGotRaw, self.myGotListenerError, self.myGotError)
except InstrumentConnectionException as e:
log.error("Exception caught: %r" % (e))
raise
else:
time.sleep(5)
finally:
paClient.stop_comms()
dataserver.kill()
cmdserver.kill()
"""
Assert that the error_callback was not called, that an exception was not
caught, and that the data and raw callbacks were called.
"""
self.assertFalse(self.errorCallbackCalled)
self.assertTrue(self.rawCallbackCalled)
self.assertTrue(self.dataCallbackCalled)
self.assertEquals(self.paPacket.get_data_length(), len(data))
self.assertEquals(len(self.paPacket.get_data()), len(data))
# don't use assertEquals b/c it will print 64kb
self.assert_(self.paPacket.get_data() == data)
def test_start_paClient_lost_port_agent_tx_rx(self):
"""
This test starts the port agent and the instrument_simulator and
tests that data is sent and received first; then it stops the port
agent and tests that the error_callback was called.
"""
self.resetTestVars()
self.init_instrument_simulator()
self.startPortAgent()
paClient = PortAgentClient(self.ipaddr, self.data_port, self.cmd_port)
paClient.init_comms(self.myGotData, self.myGotRaw, self.myGotListenerError, self.myGotError)
"""
Now send some data; there should be no errors.
"""
try:
data = "this is a great big test"
paClient.send(data)
time.sleep(1)
self._instrument_simulator.send(data)
except InstrumentConnectionException as e:
log.error("Exception caught: %r" % (e))
exceptionCaught = True
else:
exceptionCaught = False
time.sleep(1)
"""
Assert that the error_callback was NOT called, that an exception was NOT
caught, and that the data and raw callbacks WERE called.
"""
self.assertFalse(self.errorCallbackCalled)
self.assertFalse(exceptionCaught)
self.assertTrue(self.rawCallbackCalled)
self.assertTrue(self.dataCallbackCalled)
"""
        Now reset the test variables and try again, this time after stopping
        the port agent; there should be errors.
"""
self.resetTestVars()
try:
self.stop_port_agent()
log.debug("Port agent stopped")
data = "this is another great big test"
paClient.send(data)
time.sleep(1)
log.debug("Sending from simulator")
self._instrument_simulator.send(data)
except InstrumentConnectionException as e:
log.error("Exception caught: %r" % (e))
exceptionCaught = True
else:
exceptionCaught = False
time.sleep(5)
"""
        Assert that the error_callback WAS called. The listener usually
        sees the error first, and that path does not raise an exception, so
        only assert that the error callback was called.
"""
self.assertTrue(self.errorCallbackCalled)
def test_start_paClient_lost_port_agent_rx(self):
"""
This test starts the port agent and then stops the port agent and
verifies that the error callback was called (because the listener
is the only one that will see the error, since there is no send
operation).
"""
self.resetTestVars()
self.init_instrument_simulator()
self.startPortAgent()
paClient = PortAgentClient(self.ipaddr, self.data_port, self.cmd_port)
paClient.init_comms(self.myGotData, self.myGotRaw, self.myGotListenerError, self.myGotError)
try:
self.stop_port_agent()
except InstrumentConnectionException as e:
log.error("Exception caught: %r" % (e))
exceptionCaught = True
else:
exceptionCaught = False
time.sleep(5)
"""
        Assert that the error_callback was called. At this moment the listener
        sees the error first, and that path does not raise an exception, so
        don't test for an exception yet.
"""
self.assertTrue(self.errorCallbackCalled)
@unittest.skip('Skip; this test does not work consistently.')
def test_start_paClient_lost_port_agent_tx(self):
"""
This test starts the port agent and then starts the port agent client
in a special way that will not start the listener thread. This will
guarantee that the send context is the one the sees the error.
"""
self.resetTestVars()
self.init_instrument_simulator()
self.startPortAgent()
paClient = PortAgentClient(self.ipaddr, self.data_port, self.cmd_port)
"""
Give the port agent time to initialize
"""
time.sleep(5)
paClient.init_comms(self.myGotData, self.myGotRaw, self.myGotError, self.myGotListenerError, start_listener = False)
try:
self.stop_port_agent()
data = "this big ol' test should cause send context to fail"
paClient.send(data)
time.sleep(1)
except InstrumentConnectionException as e:
log.error("Exception caught: %r" % (e))
exceptionCaught = True
else:
exceptionCaught = False
time.sleep(5)
"""
Assert that the error_callback was called. For this test the listener
should not be running, so the send context should see the error, and that
should throw an exception. Assert that the callback WAS called and that
an exception WAS thrown.
"""
self.assertTrue(self.errorCallbackCalled)
self.assertTrue(exceptionCaught)
|
|
import datetime
import json
import requests
import sys
import csv
import os
import us
from .managers import PriorityDepartmentsManager
from django.conf import settings
from django.contrib.gis.db import models
from django.contrib.gis.geos import Point, MultiPolygon
from django.core.validators import MaxValueValidator
from django.utils.text import slugify
from firecares.firecares_core.models import RecentlyUpdatedMixin
from django.core.urlresolvers import reverse
from django.db.transaction import rollback
from django.db.utils import IntegrityError
from django.utils.functional import cached_property
from firecares.firecares_core.models import Address
from phonenumber_field.modelfields import PhoneNumberField
from firecares.firecares_core.models import Country
from genericm2m.models import RelatedObjectsDescriptor
class USGSStructureData(models.Model):
"""
Models structure data from the USGS National Map.
Schema from: http://services.nationalmap.gov/arcgis/rest/services/structures/MapServer/1?f=json
"""
DATA_SECURITY_CHOICES = [(0, 'Unknown'),
(1, 'Top Secret'),
(2, 'Secret'),
(3, 'Confidential'),
(4, 'Restricted'),
(5, 'Unclassified'),
(6, 'Sensitive')]
DISTRIBUTION_POLICY_CHOICES = [('A1', 'Emergency Service Provider - Internal Use Only'),
('A2', 'Emergency Service Provider - Bitmap Display Via Web'),
('A3', 'Emergency Service Provider - Free Distribution to Third Parties'),
('A4', 'Emergency Service Provider - Free Distribution to Third Parties Via'
' Internet'),
('B1', 'Government Agencies or Their Delegated Agents - Internal Use Only'),
('B2', 'Government Agencies or Their Delegated Agents - Bitmap Display Via Web'),
('B3', 'Government Agencies or Their Delegated Agents - Free Distribution to Third'
' Parties'),
('B4', 'Government Agencies or Their Delegated Agents - Free Distribution to Third'
' Parties Via Internet'),
('C1', 'Other Public or Educational Institutions - Internal Use Only'),
('C2', 'Other Public or Educational Institutions - Bitmap Display Via Web'),
('C3', 'Other Public or Educational Institutions - Free Distribution to Third'
' Parties'),
('C4', 'Other Public or Educational Institutions - Free Distribution to Third'
' Parties Via Internet'),
('D1', 'Data Contributors - Internal Use Only'), ('D2', 'Data Contributors - '
'Bitmap Display Via Web'),
('D3', 'Data Contributors - Free Distribution to Third Parties'),
('D4', 'Data Contributors - Free Distribution to Third Parties Via Internet'),
('E1', 'Public Domain - Internal Use Only'), ('E2', 'Public Domain - Bitmap'
' Display Via Web'),
('E3', 'Public Domain - Free Distribution to Third Parties'),
('E4', 'Public Domain - Free Distribution to Third Parties Via Internet')]
FCODE_CHOICES = [(81000, 'Transportation Facility'),
(81006, 'Airport Terminal'),
(81008, 'Air Support / Maintenance Facility'),
(81010, 'Air Traffic Control Center / Command Center'),
(81011, 'Boat Ramp / Dock'),
(81012, 'Bridge'),
(81014, 'Bridge: Light Rail / Subway'),
(81016, 'Bridge: Railroad'),
(81018, 'Bridge: Road'),
(81020, 'Border Crossing / Port of Entry'),
(81022, 'Bus Station / Dispatch Facility'),
(81024, 'Ferry Terminal / Dispatch Facility'),
(81025, 'Harbor / Marina'),
(81026, 'Helipad / Heliport / Helispot'),
(81028, 'Launch Facility'),
(81030, 'Launch Pad'),
(81032, 'Light Rail Power Substation'),
(81034, 'Light Rail Station'),
(81036, 'Park and Ride / Commuter Lot'),
(81038, 'Parking Lot Structure / Garage'),
(81040, 'Pier / Wharf / Quay / Mole'),
(81042, 'Port Facility'),
(81044, 'Port Facility: Commercial Port'),
(81046, 'Port Facility: Crane'),
(81048, 'Port Facility: Maintenance and Fuel Facility'),
(81050, 'Port Facility: Modal Transfer Facility'),
(81052, 'Port Facility: Passenger Terminal'),
(81054, 'Port Facility: Warehouse Storage / Container Yard'),
(81056, 'Railroad Facility'),
(81058, 'Railroad Command / Control Facility'),
(81060, 'Railroad Freight Loading Facility'),
(81062, 'Railroad Maintenance / Fuel Facility'),
(81064, 'Railroad Roundhouse / Turntable'),
(81066, 'Railroad Station'),
(81068, 'Railroad Yard'),
(81070, 'Rest Stop / Roadside Park'),
(81072, 'Seaplane Anchorage / Base'),
(81073, 'Snowshed'),
(81074, 'Subway Station'),
(81076, 'Toll Booth / Plaza'),
(81078, 'Truck Stop'),
(81080, 'Tunnel'),
(81082, 'Tunnel: Light Rail / Subway'),
(81084, 'Tunnel: Road'),
(81086, 'Tunnel: Railroad'),
(81088, 'Weigh Station / Inspection Station')]
ISLANDMARK_CHOICES = [(1, 'Yes'),
(2, 'No'),
(0, 'Unknown')]
POINTLOCATIONTYPE_CHOICES = [(0, 'Unknown'),
(1, 'Centroid'),
(2, 'Egress or Entrance'),
(3, 'Turn-off location'),
(4, 'Approximate')]
ADMINTYPE_CHOICES = [(0, 'Unknown'),
(1, 'Federal'),
(2, 'Tribal'),
(3, 'State'),
(4, 'Regional'),
(5, 'County'),
(6, 'Municipal'),
(7, 'Private')]
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
objectid = models.IntegerField(unique=True, null=True, blank=True)
permanent_identifier = models.CharField(max_length=40, null=True, blank=True)
source_featureid = models.CharField(max_length=40, null=True, blank=True)
source_datasetid = models.CharField(max_length=40, null=True, blank=True)
source_datadesc = models.CharField(max_length=100, null=True, blank=True)
source_originator = models.CharField(max_length=130, null=True, blank=True)
data_security = models.IntegerField(blank=True, null=True, choices=DATA_SECURITY_CHOICES)
distribution_policy = models.CharField(max_length=4, choices=DISTRIBUTION_POLICY_CHOICES, null=True, blank=True)
loaddate = models.DateTimeField(null=True, blank=True)
ftype = models.CharField(blank=True, null=True, max_length=50)
fcode = models.IntegerField(blank=True, null=True, choices=FCODE_CHOICES)
name = models.CharField(max_length=100, null=True, blank=True)
islandmark = models.IntegerField(null=True, blank=True, choices=ISLANDMARK_CHOICES, verbose_name='Landmark')
pointlocationtype = models.IntegerField(null=True, blank=True, choices=POINTLOCATIONTYPE_CHOICES,
verbose_name='Point Type')
admintype = models.IntegerField(null=True, blank=True, choices=ADMINTYPE_CHOICES)
addressbuildingname = models.CharField(max_length=60, null=True, blank=True, verbose_name='Building Name')
address = models.CharField(max_length=75, null=True, blank=True)
city = models.CharField(max_length=40, null=True, blank=True)
state = models.CharField(max_length=2, null=True, blank=True)
zipcode = models.CharField(max_length=10, null=True, blank=True)
gnis_id = models.CharField(max_length=10, null=True, blank=True)
foot_id = models.CharField(max_length=40, null=True, blank=True)
complex_id = models.CharField(max_length=40, null=True, blank=True)
globalid = models.CharField(max_length=38, null=True, blank=True)
geom = models.PointField()
objects = models.GeoManager()
def __unicode__(self):
return u'{state}, {city}, {name}'.format(name=self.name, state=self.state, city=self.city)
def full_address(self):
return u'{address}, {city}, {state}, {zipcode}'.format(address=self.address, city=self.city, state=self.state,
zipcode=self.zipcode)
class Meta:
ordering = ('state', 'city', 'name')
@classmethod
def count_differential(cls):
"""
Reports the count differential between the upstream service and this table.
"""
url = 'http://services.nationalmap.gov/arcgis/rest/services/govunits/MapServer/{0}/query?' \
'where=1%3D1&text=&objectIds=&time=&geometry=&geometryType=esriGeometryEnvelope&inSR=&' \
'spatialRel=esriSpatialRelIntersects&relationParam=&outFields=&returnGeometry=true' \
'&maxAllowableOffset=&geometryPrecision=&outSR=&returnIdsOnly=false&returnCountOnly=true&orderByFields=' \
'&groupByFieldsForStatistics=&outStatistics=&returnZ=false&returnM=false&gdbVersion=&' \
'returnDistinctValues=false&f=pjson'
response = requests.get(url.format(cls.service_id))
if response.ok:
response_js = json.loads(response.content)
upstream_count = response_js.get('count')
if upstream_count:
local_count = cls.objects.all().count()
print 'The upstream service has: {0} features.'.format(upstream_count)
print 'The local model {1} has: {0} features.'.format(local_count, cls.__name__)
return local_count - upstream_count
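# NOTE: validate_choice is referenced by FireDepartment.set_region() below but is not
# defined or imported in this file as captured here. A minimal sketch of the assumed
# behaviour (reject values that are not in the given choices list) is provided so the
# reference resolves; the real helper may differ.
def validate_choice(choices):
    valid_values = [choice[0] for choice in choices]

    def validate(value):
        if value not in valid_values:
            raise ValueError('{0} is not a valid choice; expected one of {1}'.format(value, valid_values))
        return value
    return validate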
class FireDepartment(RecentlyUpdatedMixin, models.Model):
"""
Models Fire Departments.
"""
DEPARTMENT_TYPE_CHOICES = [
('Volunteer', 'Volunteer'),
('Mostly Volunteer', 'Mostly Volunteer'),
('Career', 'Career'),
('Mostly Career', 'Mostly Career'),
]
REGION_CHOICES = [
('Northeast', 'Northeast'),
('West', 'West'),
('South', 'South'),
('Midwest', 'Midwest'),
(None, '')
]
    created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
fdid = models.CharField(max_length=10)
name = models.CharField(max_length=100)
headquarters_address = models.ForeignKey(Address, null=True, blank=True, related_name='firedepartment_headquarters')
mail_address = models.ForeignKey(Address, null=True, blank=True)
headquarters_phone = PhoneNumberField(null=True, blank=True)
headquarters_fax = PhoneNumberField(null=True, blank=True)
department_type = models.CharField(max_length=20, choices=DEPARTMENT_TYPE_CHOICES, null=True, blank=True)
organization_type = models.CharField(max_length=75, null=True, blank=True)
website = models.URLField(null=True, blank=True)
state = models.CharField(max_length=2)
region = models.CharField(max_length=20, choices=REGION_CHOICES, null=True, blank=True)
geom = models.MultiPolygonField(null=True, blank=True)
objects = models.GeoManager()
priority_departments = PriorityDepartmentsManager()
dist_model_score = models.FloatField(null=True, blank=True, editable=False, db_index=True)
government_unit = RelatedObjectsDescriptor()
population = models.IntegerField(null=True, blank=True)
featured = models.BooleanField(default=False, db_index=True)
class Meta:
ordering = ('name',)
index_together = [
['population', 'id', 'region'],
['population', 'region']
]
@property
def government_unit_objects(self):
"""
Memoize the government_unit generic key lookup.
"""
if not getattr(self, '_government_unit_objects', None):
self._government_unit_objects = self.government_unit.all().generic_objects()
return self._government_unit_objects
@property
def fips(self):
objs = self.government_unit_objects
if objs:
return [obj.fips for obj in objs if hasattr(obj, 'fips')]
return []
@property
def geom_area(self):
"""
        Project the department's geometry into the North America Lambert Conformal
        Conic projection (ESRI:102009) and return the area in km2.
"""
if self.geom:
try:
return self.geom.transform(102009, clone=True).area / 1000000
except:
return
@property
def similar_departments(self, ignore_regions_min=1000000):
"""
Identifies similar departments based on the protected population size and region.
"""
params = {}
if self.population >= 250000:
params['population__gte'] = 250000
elif self.population < 2500:
params['population__lt'] = 2500
else:
community_sizes = [
(100000, 249999),
(50000, 99999),
(25000, 49999),
(10000, 24999),
(5000, 9999),
(2500, 4999)]
for lower_bound, upper_bound in community_sizes:
if lower_bound <= self.population <= upper_bound:
params['population__lte'] = upper_bound
params['population__gte'] = lower_bound
break
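        # Order candidate departments by how close their population is to this
        # department's population (smallest absolute difference first).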
similar = FireDepartment.objects.filter(**params)\
.exclude(id=self.id)\
.extra(select={'difference': "abs(population - %s)"}, select_params=[self.population])\
.extra(order_by=['difference'])
# Large departments may not have similar departments in their region.
if self.population < ignore_regions_min:
similar = similar.filter(region=self.region)
return similar
@property
def thumbnail_name(self):
return slugify(' '.join(['us', self.state, self.name])) + '.jpg'
@property
def thumbnail_name_no_marker(self):
return slugify(' '.join(['us', self.state, self.name, 'no marker'])) + '.jpg'
@property
def thumbnail(self):
return 'https://s3.amazonaws.com/firecares-static/department-thumbnails/{0}'.format(self.thumbnail_name)
@property
def thumbnail_no_marker(self):
return 'https://s3.amazonaws.com/firecares-static/department-thumbnails/{0}' \
.format(self.thumbnail_name_no_marker)
    def generate_thumbnail(self, marker=True):
        geom = None
        # Build the marker fragment in a separate variable so the `marker` flag
        # passed by the caller is not clobbered.
        marker_fragment = ''
        if self.geom:
            geom = self.geom.centroid
        elif self.headquarters_address and self.headquarters_address.geom:
            geom = self.headquarters_address.geom
        else:
            return '/static/firestation/theme/assets/images/content/property-1.jpg'
        if marker:
            marker_fragment = 'pin-l-embassy+0074D9({geom.x},{geom.y})/'.format(geom=geom)
        return 'http://api.tiles.mapbox.com/v4/garnertb.mmlochkh/{marker}' \
               '{geom.x},{geom.y},8/500x300.png?access_token={access_token}'.format(marker=marker_fragment,
                                                                                    geom=geom,
                                                                                    access_token=getattr(settings, 'MAPBOX_ACCESS_TOKEN', ''))
def set_geometry_from_government_unit(self):
objs = self.government_unit_objects
if objs:
self.geom = MultiPolygon([obj.geom for obj in objs if getattr(obj, 'geom', None)])
self.save()
def set_population_from_government_unit(self):
"""
Stores the population of government units on the FD object to speed up querying.
"""
objs = self.government_unit_objects
if objs:
for gov_unit in objs:
pop = getattr(gov_unit, 'population', None)
if pop is not None:
if self.population is None:
self.population = 0
self.population += pop
else:
self.population = None
self.save()
def set_region(self, region):
validate_choice(FireDepartment.REGION_CHOICES)(region)
self.region = region
self.save()
@cached_property
def description(self):
"""
A text description of the department used for displaying on the client side.
"""
name = self.name
if not self.name.lower().endswith('department') and not self.name.lower().endswith('district'):
name += ' fire department'
return "The {name} is a {department_type} department located in the {object.region} NFPA region and headquartered in " \
"{object.headquarters_address.city}, {object.headquarters_address.state_province}."\
.format(name=name,
department_type=self.department_type.lower(),
object=self).strip()
def residential_structure_fire_counts(self):
return self.nfirsstatistic_set.filter(metric='residential_structure_fires')\
.extra(select={
'year_max': 'SELECT MAX(COUNT) FROM firestation_nfirsstatistic b WHERE b.year = firestation_nfirsstatistic.year and b.metric=firestation_nfirsstatistic.metric'
})\
.extra(select={
'year_min': 'SELECT MIN(COUNT) FROM firestation_nfirsstatistic b WHERE b.year = firestation_nfirsstatistic.year and b.metric=firestation_nfirsstatistic.metric'
})
@classmethod
def load_from_usfa_csv(cls):
"""
Loads Fire Departments from http://apps.usfa.fema.gov/census-download.
"""
us, _ = Country.objects.get_or_create(name='United States of America', iso_code='US')
with open(os.path.join(os.path.dirname(__file__), 'scripts/usfa-census-national.csv'), 'r') as csvfile:
# This only runs once, since there isn't a good key to identify duplicates
if not cls.objects.all().count():
reader = csv.DictReader(csvfile)
counter = 0
for row in reader:
# only run once.
hq_address_params = {}
hq_address_params['address_line1'] = row.get('HQ Addr1')
hq_address_params['address_line2'] = row.get('HQ Addr2')
hq_address_params['city'] = row.get('HQ City')
hq_address_params['state_province'] = row.get('HQ State')
hq_address_params['postal_code'] = row.get('HQ Zip')
hq_address_params['country'] = us
headquarters_address, _ = Address.objects.get_or_create(**hq_address_params)
headquarters_address.save()
mail_address_params = {}
mail_address_params['address_line1'] = row.get('Mail Addr1')
mail_address_params['address_line2'] = row.get('Mail Addr2') or row.get('Mail PO Box')
mail_address_params['city'] = row.get('Mail City')
mail_address_params['state_province'] = row.get('Mail State')
mail_address_params['postal_code'] = row.get('Mail Zip')
mail_address_params['country'] = us
mail_address, _ = Address.objects.get_or_create(**mail_address_params)
mail_address.save()
params = {}
params['fdid'] = row.get('FDID')
params['name'] = row.get('Fire Dept Name')
params['headquarters_phone'] = row.get('HQ Phone')
params['headquarters_fax'] = row.get('HQ Fax')
params['department_type'] = row.get('Dept Type')
params['organization_type'] = row.get('Organization Type')
params['website'] = row.get('Website')
params['headquarters_address'] = headquarters_address
params['mail_address'] = mail_address
params['state'] = row.get('HQ State')
cls.objects.create(**params)
counter += 1
assert counter == cls.objects.all().count()
@cached_property
def slug(self):
return slugify(self.name)
def get_absolute_url(self):
return reverse('firedepartment_detail_slug', kwargs=dict(pk=self.id, slug=self.slug))
def find_jurisdiction(self):
from firecares.usgs.models import CountyorEquivalent, IncorporatedPlace, UnincorporatedPlace
counties = CountyorEquivalent.objects.filter(state_name='Virginia')
for county in counties:
incorporated = IncorporatedPlace.objects.filter(geom__intersects=county.geom)
            unincorporated = UnincorporatedPlace.objects.filter(geom__intersects=county.geom)
station = FireStation.objects.filter(geom__intersects=county.geom)
print 'County', county.name
print 'Incorporated Place', incorporated.count()
            print 'Unincorporated Place', unincorporated.count()
print 'Stations:', station
def __unicode__(self):
return self.name
class FireStation(USGSStructureData):
"""
Fire Stations.
"""
service_id = 7
fdid = models.CharField(max_length=10, null=True, blank=True)
department = models.ForeignKey(FireDepartment, null=True, blank=True, on_delete=models.SET_NULL)
station_number = models.IntegerField(null=True, blank=True)
station_address = models.ForeignKey(Address, null=True, blank=True)
district = models.MultiPolygonField(null=True, blank=True)
objects = models.GeoManager()
@classmethod
def populate_address(cls):
us, _ = Country.objects.get_or_create(iso_code='US')
for obj in cls.objects.filter(station_address__isnull=True, address__isnull=False, zipcode__isnull=False):
try:
addr, _ = Address.objects.get_or_create(address_line1=obj.address, city=obj.city,
state_province=obj.state, postal_code=obj.zipcode,
country=us, defaults=dict(geom=obj.geom))
            except Address.MultipleObjectsReturned:
                # More than one matching address already exists; use the first match
                # rather than dropping into an interactive debugger.
                addr = Address.objects.filter(address_line1=obj.address, city=obj.city, state_province=obj.state,
                                              postal_code=obj.zipcode, country=us).first()
obj.station_address = addr
obj.save()
@property
def origin_uri(self):
"""
This object's URI (from the national map).
"""
return 'http://services.nationalmap.gov/arcgis/rest/services/structures/MapServer/7/{0}?f=json' \
.format(self.objectid)
@classmethod
def load_data(cls):
objects = requests.get('http://services.nationalmap.gov/arcgis/rest/services/structures/MapServer/7/query?'
'where=1%3D1&text=&objectIds=&time=&geometry=&geometryType=esriGeometryEnvelope&inSR=&'
'spatialRel=esriSpatialRelIntersects&relationParam=&outFields=&returnGeometry=true&'
'maxAllowableOffset=&geometryPrecision=&outSR=&returnIdsOnly=true&returnCountOnly=false&'
'orderByFields=&groupByFieldsForStatistics=&outStatistics=&returnZ=false&returnM=false&'
'gdbVersion=&returnDistinctValues=false&f=json')
current_ids = set(FireStation.objects.all().values_list('objectid', flat=True))
object_ids = set(json.loads(objects.content)['objectIds']) - current_ids
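        # The query above uses returnIdsOnly=true, so only object ids come back;
        # skip any ids that have already been loaded locally.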
url = 'http://services.nationalmap.gov/arcgis/rest/services/structures/MapServer/7/{0}?f=json'
us, _ = Country.objects.get_or_create(iso_code='US')
for object in object_ids:
try:
if FireStation.objects.filter(objectid=object):
continue
obj = requests.get(url.format(object))
obj = json.loads(obj.content)
data = dict((k.lower(), v) for k, v in obj['feature']['attributes'].iteritems())
if obj['feature'].get('geometry'):
data['geom'] = Point(obj['feature']['geometry']['x'], obj['feature']['geometry']['y'])
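                # The ArcGIS REST service reports 'loaddate' as epoch milliseconds,
                # hence the division by 1000 before building a datetime.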
data['loaddate'] = datetime.datetime.fromtimestamp(data['loaddate']/1000.0)
feat = cls.objects.create(**data)
feat.save()
print 'Saved object: {0}'.format(data.get('name'))
print '{0} Firestations loaded.'.format(FireStation.objects.all().count())
except KeyError:
print '{0} failed.'.format(object)
print url.format(object)
except IntegrityError:
print '{0} failed.'.format(object)
print url.format(object)
print sys.exc_info()
try:
rollback()
except:
pass
except:
print '{0} failed.'.format(object)
print url.format(object)
print sys.exc_info()
@property
def district_area(self):
"""
        Project the district's geometry into the North America Lambert Conformal
        Conic projection (ESRI:102009) and return the area in km2.
"""
if self.district:
try:
return self.district.transform(102009, clone=True).area / 1000000
except:
return
def get_absolute_url(self):
return reverse('firestation_detail', kwargs=dict(pk=self.id))
class Meta:
verbose_name = 'Fire Station'
class Staffing(models.Model):
"""
Models response capabilities (apparatus and responders).
"""
APPARATUS_CHOICES = [('Engine', 'Engine'),
('Ladder/Truck/Aerial', 'Ladder/Truck/Aerial'),
('Quint', 'Quint'),
('Ambulance/ALS', 'Ambulance/ALS'),
('Ambulance/BLS', 'Ambulance/BLS'),
('Heavy Rescue', 'Heavy Rescue'),
('Boat', 'Boat'),
('Hazmat', 'Hazmat'),
('Chief', 'Chief'),
('Other', 'Other')]
int_field_defaults = dict(null=True, blank=True, default=0, validators=[MaxValueValidator(99)])
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
firestation = models.ForeignKey(FireStation)
apparatus = models.CharField(choices=APPARATUS_CHOICES, max_length=20, default='Engine')
firefighter = models.PositiveIntegerField(**int_field_defaults)
firefighter_emt = models.PositiveIntegerField(verbose_name='Firefighter EMT', **int_field_defaults)
firefighter_paramedic = models.PositiveIntegerField(verbose_name='Firefighter Paramedic', **int_field_defaults)
ems_emt = models.PositiveIntegerField(verbose_name='EMS-Only EMT', **int_field_defaults)
ems_paramedic = models.PositiveIntegerField(verbose_name='EMS-Only Paramedic', **int_field_defaults)
officer = models.PositiveIntegerField(verbose_name='Company/Unit Officer', **int_field_defaults)
officer_paramedic = models.PositiveIntegerField(verbose_name='Company/Unit Officer Paramedic', **int_field_defaults)
ems_supervisor = models.PositiveIntegerField(verbose_name='EMS Supervisor', **int_field_defaults)
    chief_officer = models.PositiveIntegerField(verbose_name='Chief Officer', **int_field_defaults)
def __unicode__(self):
return '{0} response capability for {1}'.format(self.apparatus, self.firestation)
class Meta:
verbose_name_plural = 'Response Capabilities'
class NFIRSStatistic(models.Model):
"""
Caches NFIRS stats.
"""
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
fire_department = models.ForeignKey(FireDepartment)
metric = models.CharField(max_length=50, db_index=True)
year = models.PositiveSmallIntegerField(db_index=True)
count = models.PositiveSmallIntegerField(db_index=True, null=True)
class Meta:
unique_together = ['fire_department', 'year', 'metric']
ordering = ['-year',]
|
|
import sys
import pytest
import numpy as np
from datetime import date
import dateutil
import pandas as pd
import pandas.util.testing as tm
from pandas.compat import lrange
from pandas import (DatetimeIndex, Index, date_range, DataFrame,
Timestamp, offsets)
from pandas.util.testing import assert_almost_equal
randn = np.random.randn
class TestDatetimeIndex(object):
def test_roundtrip_pickle_with_tz(self):
# GH 8367
# round-trip of timezone
index = date_range('20130101', periods=3, tz='US/Eastern', name='foo')
unpickled = tm.round_trip_pickle(index)
tm.assert_index_equal(index, unpickled)
def test_reindex_preserves_tz_if_target_is_empty_list_or_array(self):
# GH7774
index = date_range('20130101', periods=3, tz='US/Eastern')
assert str(index.reindex([])[0].tz) == 'US/Eastern'
assert str(index.reindex(np.array([]))[0].tz) == 'US/Eastern'
def test_time_loc(self): # GH8667
from datetime import time
from pandas._libs.index import _SIZE_CUTOFF
ns = _SIZE_CUTOFF + np.array([-100, 100], dtype=np.int64)
key = time(15, 11, 30)
start = key.hour * 3600 + key.minute * 60 + key.second
step = 24 * 3600
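        # With a 1-second frequency starting at midnight, entries whose time-of-day
        # equals `key` occur at position `start` and then every 86400 rows after that.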
for n in ns:
idx = pd.date_range('2014-11-26', periods=n, freq='S')
ts = pd.Series(np.random.randn(n), index=idx)
i = np.arange(start, n, step)
tm.assert_numpy_array_equal(ts.index.get_loc(key), i,
check_dtype=False)
tm.assert_series_equal(ts[key], ts.iloc[i])
left, right = ts.copy(), ts.copy()
left[key] *= -10
right.iloc[i] *= -10
tm.assert_series_equal(left, right)
def test_time_overflow_for_32bit_machines(self):
# GH8943. On some machines NumPy defaults to np.int32 (for example,
# 32-bit Linux machines). In the function _generate_regular_range
# found in tseries/index.py, `periods` gets multiplied by `strides`
# (which has value 1e9) and since the max value for np.int32 is ~2e9,
# and since those machines won't promote np.int32 to np.int64, we get
# overflow.
periods = np.int_(1000)
idx1 = pd.date_range(start='2000', periods=periods, freq='S')
assert len(idx1) == periods
idx2 = pd.date_range(end='2000', periods=periods, freq='S')
assert len(idx2) == periods
def test_nat(self):
assert DatetimeIndex([np.nan])[0] is pd.NaT
def test_week_of_month_frequency(self):
# GH 5348: "ValueError: Could not evaluate WOM-1SUN" shouldn't raise
d1 = date(2002, 9, 1)
d2 = date(2013, 10, 27)
d3 = date(2012, 9, 30)
idx1 = DatetimeIndex([d1, d2])
idx2 = DatetimeIndex([d3])
result_append = idx1.append(idx2)
expected = DatetimeIndex([d1, d2, d3])
tm.assert_index_equal(result_append, expected)
result_union = idx1.union(idx2)
expected = DatetimeIndex([d1, d3, d2])
tm.assert_index_equal(result_union, expected)
# GH 5115
result = date_range("2013-1-1", periods=4, freq='WOM-1SAT')
dates = ['2013-01-05', '2013-02-02', '2013-03-02', '2013-04-06']
expected = DatetimeIndex(dates, freq='WOM-1SAT')
tm.assert_index_equal(result, expected)
def test_hash_error(self):
index = date_range('20010101', periods=10)
with tm.assert_raises_regex(TypeError, "unhashable type: %r" %
type(index).__name__):
hash(index)
def test_stringified_slice_with_tz(self):
# GH2658
import datetime
start = datetime.datetime.now()
idx = DatetimeIndex(start=start, freq="1d", periods=10)
df = DataFrame(lrange(10), index=idx)
df["2013-01-14 23:44:34.437768-05:00":] # no exception here
def test_append_join_nondatetimeindex(self):
rng = date_range('1/1/2000', periods=10)
idx = Index(['a', 'b', 'c', 'd'])
result = rng.append(idx)
assert isinstance(result[0], Timestamp)
# it works
rng.join(idx, how='outer')
def test_map(self):
rng = date_range('1/1/2000', periods=10)
f = lambda x: x.strftime('%Y%m%d')
result = rng.map(f)
exp = Index([f(x) for x in rng], dtype='<U8')
tm.assert_index_equal(result, exp)
@tm.capture_stderr
def test_map_fallthrough(self):
# GH#22067, check we don't get warnings about silently ignored errors
dti = date_range('2017-01-01', '2018-01-01', freq='B')
dti.map(lambda x: pd.Period(year=x.year, month=x.month, freq='M'))
cv = sys.stderr.getvalue()
assert cv == ''
def test_iteration_preserves_tz(self):
# see gh-8890
index = date_range("2012-01-01", periods=3, freq='H', tz='US/Eastern')
for i, ts in enumerate(index):
result = ts
expected = index[i]
assert result == expected
index = date_range("2012-01-01", periods=3, freq='H',
tz=dateutil.tz.tzoffset(None, -28800))
for i, ts in enumerate(index):
result = ts
expected = index[i]
assert result._repr_base == expected._repr_base
assert result == expected
        # see gh-9100
index = pd.DatetimeIndex(['2014-12-01 03:32:39.987000-08:00',
'2014-12-01 04:12:34.987000-08:00'])
for i, ts in enumerate(index):
result = ts
expected = index[i]
assert result._repr_base == expected._repr_base
assert result == expected
@pytest.mark.parametrize('periods', [0, 9999, 10000, 10001])
def test_iteration_over_chunksize(self, periods):
# GH21012
index = date_range('2000-01-01 00:00:00', periods=periods, freq='min')
num = 0
for stamp in index:
assert index[num] == stamp
num += 1
assert num == len(index)
def test_misc_coverage(self):
rng = date_range('1/1/2000', periods=5)
result = rng.groupby(rng.day)
assert isinstance(list(result.values())[0][0], Timestamp)
idx = DatetimeIndex(['2000-01-03', '2000-01-01', '2000-01-02'])
assert not idx.equals(list(idx))
non_datetime = Index(list('abc'))
assert not idx.equals(list(non_datetime))
def test_string_index_series_name_converted(self):
# #1644
df = DataFrame(np.random.randn(10, 4),
index=date_range('1/1/2000', periods=10))
result = df.loc['1/3/2000']
assert result.name == df.index[2]
result = df.T['1/3/2000']
assert result.name == df.index[2]
def test_get_duplicates(self):
idx = DatetimeIndex(['2000-01-01', '2000-01-02', '2000-01-02',
'2000-01-03', '2000-01-03', '2000-01-04'])
with tm.assert_produces_warning(FutureWarning):
# Deprecated - see GH20239
result = idx.get_duplicates()
ex = DatetimeIndex(['2000-01-02', '2000-01-03'])
tm.assert_index_equal(result, ex)
def test_argmin_argmax(self):
idx = DatetimeIndex(['2000-01-04', '2000-01-01', '2000-01-02'])
assert idx.argmin() == 1
assert idx.argmax() == 0
def test_sort_values(self):
idx = DatetimeIndex(['2000-01-04', '2000-01-01', '2000-01-02'])
ordered = idx.sort_values()
assert ordered.is_monotonic
ordered = idx.sort_values(ascending=False)
assert ordered[::-1].is_monotonic
ordered, dexer = idx.sort_values(return_indexer=True)
assert ordered.is_monotonic
tm.assert_numpy_array_equal(dexer, np.array([1, 2, 0], dtype=np.intp))
ordered, dexer = idx.sort_values(return_indexer=True, ascending=False)
assert ordered[::-1].is_monotonic
tm.assert_numpy_array_equal(dexer, np.array([0, 2, 1], dtype=np.intp))
def test_map_bug_1677(self):
index = DatetimeIndex(['2012-04-25 09:30:00.393000'])
f = index.asof
result = index.map(f)
expected = Index([f(index[0])])
tm.assert_index_equal(result, expected)
def test_groupby_function_tuple_1677(self):
df = DataFrame(np.random.rand(100),
index=date_range("1/1/2000", periods=100))
monthly_group = df.groupby(lambda x: (x.year, x.month))
result = monthly_group.mean()
assert isinstance(result.index[0], tuple)
def test_append_numpy_bug_1681(self):
# another datetime64 bug
dr = date_range('2011/1/1', '2012/1/1', freq='W-FRI')
a = DataFrame()
c = DataFrame({'A': 'foo', 'B': dr}, index=dr)
result = a.append(c)
assert (result['B'] == dr).all()
def test_isin(self):
index = tm.makeDateIndex(4)
result = index.isin(index)
assert result.all()
result = index.isin(list(index))
assert result.all()
assert_almost_equal(index.isin([index[2], 5]),
np.array([False, False, True, False]))
def test_does_not_convert_mixed_integer(self):
df = tm.makeCustomDataframe(10, 10,
data_gen_f=lambda *args, **kwargs: randn(),
r_idx_type='i', c_idx_type='dt')
cols = df.columns.join(df.index, how='outer')
joined = cols.join(df.columns)
assert cols.dtype == np.dtype('O')
assert cols.dtype == joined.dtype
tm.assert_numpy_array_equal(cols.values, joined.values)
def test_join_self(self, join_type):
index = date_range('1/1/2000', periods=10)
joined = index.join(index, how=join_type)
assert index is joined
def assert_index_parameters(self, index):
assert index.freq == '40960N'
assert index.inferred_freq == '40960N'
def test_ns_index(self):
nsamples = 400
ns = int(1e9 / 24414)
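        # 1e9 / 24414 truncates to 40960 ns per sample (roughly a 24414 Hz sample
        # rate), matching the '40960N' frequency checked in assert_index_parameters.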
dtstart = np.datetime64('2012-09-20T00:00:00')
dt = dtstart + np.arange(nsamples) * np.timedelta64(ns, 'ns')
freq = ns * offsets.Nano()
index = pd.DatetimeIndex(dt, freq=freq, name='time')
self.assert_index_parameters(index)
new_index = pd.DatetimeIndex(start=index[0], end=index[-1],
freq=index.freq)
self.assert_index_parameters(new_index)
def test_join_with_period_index(self, join_type):
df = tm.makeCustomDataframe(
10, 10, data_gen_f=lambda *args: np.random.randint(2),
c_idx_type='p', r_idx_type='dt')
s = df.iloc[:5, 0]
with tm.assert_raises_regex(ValueError,
'can only call with other '
'PeriodIndex-ed objects'):
df.columns.join(s.index, how=join_type)
def test_factorize(self):
idx1 = DatetimeIndex(['2014-01', '2014-01', '2014-02', '2014-02',
'2014-03', '2014-03'])
exp_arr = np.array([0, 0, 1, 1, 2, 2], dtype=np.intp)
exp_idx = DatetimeIndex(['2014-01', '2014-02', '2014-03'])
arr, idx = idx1.factorize()
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, exp_idx)
arr, idx = idx1.factorize(sort=True)
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, exp_idx)
# tz must be preserved
idx1 = idx1.tz_localize('Asia/Tokyo')
exp_idx = exp_idx.tz_localize('Asia/Tokyo')
arr, idx = idx1.factorize()
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, exp_idx)
idx2 = pd.DatetimeIndex(['2014-03', '2014-03', '2014-02', '2014-01',
'2014-03', '2014-01'])
exp_arr = np.array([2, 2, 1, 0, 2, 0], dtype=np.intp)
exp_idx = DatetimeIndex(['2014-01', '2014-02', '2014-03'])
arr, idx = idx2.factorize(sort=True)
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, exp_idx)
exp_arr = np.array([0, 0, 1, 2, 0, 2], dtype=np.intp)
exp_idx = DatetimeIndex(['2014-03', '2014-02', '2014-01'])
arr, idx = idx2.factorize()
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, exp_idx)
# freq must be preserved
idx3 = date_range('2000-01', periods=4, freq='M', tz='Asia/Tokyo')
exp_arr = np.array([0, 1, 2, 3], dtype=np.intp)
arr, idx = idx3.factorize()
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, idx3)
def test_factorize_tz(self, tz_naive_fixture):
tz = tz_naive_fixture
# GH#13750
base = pd.date_range('2016-11-05', freq='H', periods=100, tz=tz)
idx = base.repeat(5)
exp_arr = np.arange(100, dtype=np.intp).repeat(5)
for obj in [idx, pd.Series(idx)]:
arr, res = obj.factorize()
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(res, base)
def test_factorize_dst(self):
# GH 13750
idx = pd.date_range('2016-11-06', freq='H', periods=12,
tz='US/Eastern')
for obj in [idx, pd.Series(idx)]:
arr, res = obj.factorize()
tm.assert_numpy_array_equal(arr, np.arange(12, dtype=np.intp))
tm.assert_index_equal(res, idx)
idx = pd.date_range('2016-06-13', freq='H', periods=12,
tz='US/Eastern')
for obj in [idx, pd.Series(idx)]:
arr, res = obj.factorize()
tm.assert_numpy_array_equal(arr, np.arange(12, dtype=np.intp))
tm.assert_index_equal(res, idx)
@pytest.mark.parametrize('arr, expected', [
(pd.DatetimeIndex(['2017', '2017']), pd.DatetimeIndex(['2017'])),
(pd.DatetimeIndex(['2017', '2017'], tz='US/Eastern'),
pd.DatetimeIndex(['2017'], tz='US/Eastern')),
])
def test_unique(self, arr, expected):
result = arr.unique()
tm.assert_index_equal(result, expected)
# GH 21737
# Ensure the underlying data is consistent
assert result[0] == expected[0]
|
|
#!/usr/bin/python
#
# Copyright (c) 2019 Zim Kalinowski, (@zikalino)
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_gallery
version_added: '2.9'
short_description: Manage Azure Shared Image Gallery instance.
description:
- 'Create, update and delete instance of Azure Shared Image Gallery (SIG).'
options:
resource_group:
description:
- The name of the resource group.
required: true
type: str
name:
description:
- >-
The name of the Shared Image Gallery.
Valid names consist of less than 80 alphanumeric characters, underscores and periods.
required: true
type: str
location:
description:
- Resource location
type: str
description:
description:
- >-
The description of this Shared Image Gallery resource. This property is
updatable.
type: str
state:
description:
- Assert the state of the Gallery.
- >-
        Use C(present) to create or update a Gallery and C(absent) to delete
it.
default: present
type: str
choices:
- absent
- present
extends_documentation_fragment:
- azure
- azure_tags
author:
- Zim Kalinowski (@zikalino)
'''
EXAMPLES = '''
- name: Create or update a simple gallery.
azure_rm_gallery:
resource_group: myResourceGroup
name: myGallery1283
location: West US
description: This is the gallery description.
'''
RETURN = '''
id:
description:
- Resource Id
returned: always
type: str
sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myGallery1283"
'''
import time
import json
import re
from ansible.module_utils.azure_rm_common_ext import AzureRMModuleBaseExt
from ansible.module_utils.azure_rm_common_rest import GenericRestClient
from copy import deepcopy
try:
from msrestazure.azure_exceptions import CloudError
except ImportError:
# This is handled in azure_rm_common
pass
class Actions:
NoAction, Create, Update, Delete = range(4)
class AzureRMGalleries(AzureRMModuleBaseExt):
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(
type='str',
updatable=False,
disposition='resourceGroupName',
required=True
),
name=dict(
type='str',
updatable=False,
disposition='galleryName',
required=True
),
location=dict(
type='str',
updatable=False,
disposition='/'
),
description=dict(
type='str',
disposition='/properties/*'
),
state=dict(
type='str',
default='present',
choices=['present', 'absent']
)
)
self.resource_group = None
self.name = None
self.gallery = None
self.results = dict(changed=False)
self.mgmt_client = None
self.state = None
self.url = None
self.status_code = [200, 201, 202]
self.to_do = Actions.NoAction
self.body = {}
self.query_parameters = {}
self.query_parameters['api-version'] = '2019-07-01'
self.header_parameters = {}
self.header_parameters['Content-Type'] = 'application/json; charset=utf-8'
super(AzureRMGalleries, self).__init__(derived_arg_spec=self.module_arg_spec,
supports_check_mode=True,
supports_tags=True)
def exec_module(self, **kwargs):
for key in list(self.module_arg_spec.keys()):
if hasattr(self, key):
setattr(self, key, kwargs[key])
elif kwargs[key] is not None:
self.body[key] = kwargs[key]
self.inflate_parameters(self.module_arg_spec, self.body, 0)
old_response = None
response = None
self.mgmt_client = self.get_mgmt_svc_client(GenericRestClient,
base_url=self._cloud_environment.endpoints.resource_manager)
resource_group = self.get_resource_group(self.resource_group)
if 'location' not in self.body:
self.body['location'] = resource_group.location
self.url = ('/subscriptions' +
'/{{ subscription_id }}' +
'/resourceGroups' +
'/{{ resource_group }}' +
'/providers' +
'/Microsoft.Compute' +
'/galleries' +
'/{{ gallery_name }}')
self.url = self.url.replace('{{ subscription_id }}', self.subscription_id)
self.url = self.url.replace('{{ resource_group }}', self.resource_group)
self.url = self.url.replace('{{ gallery_name }}', self.name)
old_response = self.get_resource()
if not old_response:
self.log("Gallery instance doesn't exist")
if self.state == 'absent':
self.log("Old instance didn't exist")
else:
self.to_do = Actions.Create
else:
self.log('Gallery instance already exists')
if self.state == 'absent':
self.to_do = Actions.Delete
else:
modifiers = {}
self.create_compare_modifiers(self.module_arg_spec, '', modifiers)
self.results['modifiers'] = modifiers
self.results['compare'] = []
if not self.default_compare(modifiers, self.body, old_response, '', self.results):
self.to_do = Actions.Update
self.body['properties'].pop('identifier', None)
if (self.to_do == Actions.Create) or (self.to_do == Actions.Update):
self.log('Need to Create / Update the Gallery instance')
if self.check_mode:
self.results['changed'] = True
return self.results
response = self.create_update_resource()
# if not old_response:
self.results['changed'] = True
# else:
# self.results['changed'] = old_response.__ne__(response)
self.log('Creation / Update done')
elif self.to_do == Actions.Delete:
self.log('Gallery instance deleted')
self.results['changed'] = True
if self.check_mode:
return self.results
self.delete_resource()
            # Make sure the instance is actually deleted; some Azure resources hang
            # around for a while after deletion -- ideally this would be fixed on the Azure side.
while self.get_resource():
time.sleep(20)
else:
self.log('Gallery instance unchanged')
self.results['changed'] = False
response = old_response
if response:
self.results["id"] = response["id"]
return self.results
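    # Illustrative note (an informal reading of exec_module above, not part of
    # the original module): after template substitution, self.url resolves to
    # a path of the form
    #   /subscriptions/<subscription_id>/resourceGroups/<resource_group>/providers/Microsoft.Compute/galleries/<gallery_name>
    # and the helpers below issue GET/PUT/DELETE requests against it with the
    # api-version query parameter configured in __init__.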
def create_update_resource(self):
# self.log('Creating / Updating the Gallery instance {0}'.format(self.))
try:
response = self.mgmt_client.query(self.url,
'PUT',
self.query_parameters,
self.header_parameters,
self.body,
self.status_code,
600,
30)
except CloudError as exc:
self.log('Error attempting to create the Gallery instance.')
self.fail('Error creating the Gallery instance: {0}'.format(str(exc)))
try:
response = json.loads(response.text)
except Exception:
response = {'text': response.text}
return response
def delete_resource(self):
# self.log('Deleting the Gallery instance {0}'.format(self.))
try:
response = self.mgmt_client.query(self.url,
'DELETE',
self.query_parameters,
self.header_parameters,
None,
self.status_code,
600,
30)
except CloudError as e:
self.log('Error attempting to delete the Gallery instance.')
self.fail('Error deleting the Gallery instance: {0}'.format(str(e)))
return True
def get_resource(self):
# self.log('Checking if the Gallery instance {0} is present'.format(self.))
found = False
try:
response = self.mgmt_client.query(self.url,
'GET',
self.query_parameters,
self.header_parameters,
None,
self.status_code,
600,
30)
response = json.loads(response.text)
found = True
self.log("Response : {0}".format(response))
# self.log("AzureFirewall instance : {0} found".format(response.name))
except CloudError as e:
self.log('Did not find the AzureFirewall instance.')
if found is True:
return response
return False
def main():
AzureRMGalleries()
if __name__ == '__main__':
main()
|
|
# -*- coding: utf-8 -*-
from functools import partial
from json import dumps
from logging import getLogger
from time import sleep
from urllib import quote
from rfc6266 import build_header
from urlparse import urlparse, parse_qs
from base64 import b64encode
from cornice.resource import resource
from cornice.util import json_error
from couchdb.http import ResourceConflict
from openprocurement.api.models import Revision, get_now
from openprocurement.api.utils import (update_logging_context, context_unpack, get_revision_changes,
                                        apply_data_patch, generate_id, DOCUMENT_BLACKLISTED_FIELDS, get_filename)
from openprocurement.planning.api.models import Plan
from openprocurement.planning.api.traversal import factory
from schematics.exceptions import ModelValidationError
from pkg_resources import get_distribution
PKG = get_distribution(__package__)
LOGGER = getLogger(PKG.project_name)
def generate_plan_id(ctime, db, server_id=''):
""" Generate ID for new plan in format "UA-P-YYYY-MM-DD-NNNNNN" + ["-server_id"]
YYYY - year, MM - month (start with 1), DD - day, NNNNNN - sequence number per 1 day
and save plans count per day in database document with _id = "planID" as { key, value } = { "2015-12-03": 2 }
:param ctime: system date-time
:param db: couchdb database object
:param server_id: server mark (for claster mode)
:return: planID in "UA-2015-05-08-000005"
"""
key = ctime.date().isoformat() # key (YYYY-MM-DD)
plan_id_doc = 'planID_' + server_id if server_id else 'planID' # document _id
index = 1
while True:
try:
plan_id = db.get(plan_id_doc, {'_id': plan_id_doc}) # find root document
index = plan_id.get(key, 1)
            plan_id[key] = index + 1  # bump the stored per-day plan counter
db.save(plan_id)
except ResourceConflict: # pragma: no cover
pass
except Exception: # pragma: no cover
sleep(1)
else:
break
return 'UA-P-{:04}-{:02}-{:02}-{:06}{}'.format(ctime.year, ctime.month, ctime.day, index,
server_id and '-' + server_id)
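# Illustrative example (hypothetical values, not from the original module):
# the first plan registered on 2015-12-03 with no server_id would get
#
#   generate_plan_id(datetime(2015, 12, 3), db)  # -> 'UA-P-2015-12-03-000001'
#
# and the couchdb 'planID' document would then hold {'2015-12-03': 2}.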
def plan_serialize(request, plan_data, fields):
plan = request.plan_from_data(plan_data, raise_error=False)
return dict([(i, j) for i, j in plan.serialize("view").items() if i in fields])
def save_plan(request):
""" Save plan object to database
:param request:
:return: True if Ok
"""
plan = request.validated['plan']
patch = get_revision_changes(plan.serialize("plain"), request.validated['plan_src'])
if patch:
plan.revisions.append(Revision({'author': request.authenticated_userid, 'changes': patch, 'rev': plan.rev}))
old_date_modified = plan.dateModified
plan.dateModified = get_now()
try:
plan.store(request.registry.db)
except ModelValidationError, e: # pragma: no cover
for i in e.message:
request.errors.add('body', i, e.message[i])
request.errors.status = 422
except Exception, e: # pragma: no cover
request.errors.add('body', 'data', str(e))
else:
LOGGER.info('Saved plan {}: dateModified {} -> {}'.format(plan.id,
old_date_modified and old_date_modified.isoformat(),
plan.dateModified.isoformat()),
extra=context_unpack(request, {'MESSAGE_ID': 'save_plan'}, {'PLAN_REV': plan.rev}))
return True
def apply_patch(request, data=None, save=True, src=None):
data = request.validated['data'] if data is None else data
patch = data and apply_data_patch(src or request.context.serialize(), data)
if patch:
request.context.import_data(patch)
if save:
return save_plan(request)
def error_handler(errors, request_params=True):
params = {
'ERROR_STATUS': errors.status
}
if request_params:
params['ROLE'] = str(errors.request.authenticated_role)
if errors.request.params:
params['PARAMS'] = str(dict(errors.request.params))
if errors.request.matchdict:
for x, j in errors.request.matchdict.items():
params[x.upper()] = j
LOGGER.info('Error on processing request "{}"'.format(dumps(errors, indent=4)),
extra=context_unpack(errors.request, {'MESSAGE_ID': 'error_handler'}, params))
return json_error(errors)
opresource = partial(resource, error_handler=error_handler, factory=factory)
class APIResource(object):
def __init__(self, request, context):
self.context = context
self.request = request
self.db = request.registry.db
self.server_id = request.registry.server_id
self.server = request.registry.couchdb_server
self.LOGGER = getLogger(type(self).__module__)
def set_logging_context(event):
request = event.request
params = {}
if 'plan' in request.validated:
params['PLAN_REV'] = request.validated['plan'].rev
params['PLANID'] = request.validated['plan'].planID
update_logging_context(request, params)
def extract_plan_adapter(request, plan_id):
db = request.registry.db
doc = db.get(plan_id)
if doc is None:
request.errors.add('url', 'plan_id', 'Not Found')
request.errors.status = 404
raise error_handler(request.errors)
return request.plan_from_data(doc)
def extract_plan(request):
plan_id = request.matchdict['plan_id']
return extract_plan_adapter(request, plan_id)
def plan_from_data(request, data, raise_error=True, create=True):
if create:
return Plan(data)
return Plan
def upload_file(request, blacklisted_fields=DOCUMENT_BLACKLISTED_FIELDS):
first_document = request.validated['documents'][0] if 'documents' in request.validated and request.validated['documents'] else None
if request.content_type == 'multipart/form-data':
data = request.validated['file']
filename = get_filename(data)
content_type = data.type
in_file = data.file
else:
filename = first_document.title
content_type = request.content_type
in_file = request.body_file
if hasattr(request.context, "documents"):
# upload new document
model = type(request.context).documents.model_class
else:
# update document
model = type(request.context)
document = model({'title': filename, 'format': content_type})
document.__parent__ = request.context
if 'document_id' in request.validated:
document.id = request.validated['document_id']
if first_document:
for attr_name in type(first_document)._fields:
if attr_name not in blacklisted_fields:
setattr(document, attr_name, getattr(first_document, attr_name))
key = generate_id()
document_route = request.matched_route.name.replace("collection_", "")
document_path = request.current_route_path(_route_name=document_route, document_id=document.id, _query={'download': key})
document.url = '/' + '/'.join(document_path.split('/')[3:])
conn = getattr(request.registry, 's3_connection', None)
if conn:
bucket = conn.get_bucket(request.registry.bucket_name)
filename = "{}/{}/{}".format(request.validated['plan_id'], document.id, key)
key = bucket.new_key(filename)
key.set_metadata('Content-Type', document.format)
key.set_metadata("Content-Disposition", build_header(document.title, filename_compat=quote(document.title.encode('utf-8'))))
key.set_contents_from_file(in_file)
key.set_acl('private')
else:
filename = "{}_{}".format(document.id, key)
request.validated['plan']['_attachments'][filename] = {
"content_type": document.format,
"data": b64encode(in_file.read())
}
update_logging_context(request, {'file_size': in_file.tell()})
return document
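# Informal summary of upload_file() above (derived from the code, kept here as
# documentation): when an s3_connection is registered, the file body is pushed
# to the bucket under "<plan_id>/<document_id>/<key>" with Content-Type and
# Content-Disposition metadata; otherwise it is base64-embedded into the plan
# document's couchdb '_attachments' under "<document_id>_<key>".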
def update_file_content_type(request):
conn = getattr(request.registry, 's3_connection', None)
if conn:
document = request.validated['document']
key = parse_qs(urlparse(document.url).query).get('download').pop()
bucket = conn.get_bucket(request.registry.bucket_name)
filename = "{}/{}/{}".format(request.validated['plan_id'], document.id, key)
key = bucket.get_key(filename)
if key.content_type != document.format:
key.set_remote_metadata({'Content-Type': document.format}, {}, True)
def get_file(request):
plan_id = request.validated['plan_id']
document = request.validated['document']
key = request.params.get('download')
conn = getattr(request.registry, 's3_connection', None)
filename = "{}_{}".format(document.id, key)
if conn and filename not in request.validated['plan']['_attachments']:
filename = "{}/{}/{}".format(plan_id, document.id, key)
url = conn.generate_url(method='GET', bucket=request.registry.bucket_name, key=filename, expires_in=300)
request.response.content_type = document.format.encode('utf-8')
request.response.content_disposition = build_header(document.title, filename_compat=quote(document.title.encode('utf-8')))
request.response.status = '302 Moved Temporarily'
request.response.location = url
return url
else:
filename = "{}_{}".format(document.id, key)
data = request.registry.db.get_attachment(plan_id, filename)
if data:
request.response.content_type = document.format.encode('utf-8')
request.response.content_disposition = build_header(document.title, filename_compat=quote(document.title.encode('utf-8')))
request.response.body_file = data
return request.response
request.errors.add('url', 'download', 'Not Found')
request.errors.status = 404
|
|
from __future__ import print_function
import sys
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import os
import time
# Options for mode 'lower_level'
MODE = 'S-4mu_WigD'
label_size = 28
####################################################################################################
mpl.rc('font', family='serif', size=34, serif="Times New Roman")
#mpl.rcParams['text.usetex'] = True
#mpl.rcParams['text.latex.preamble'] = [r'\boldmath']
mpl.rcParams['legend.fontsize'] = "medium"
mpl.rc('savefig', format ="pdf")
mpl.rcParams['xtick.labelsize'] = label_size
mpl.rcParams['ytick.labelsize'] = label_size
mpl.rcParams['figure.figsize'] = 8, 6
mpl.rcParams['lines.linewidth'] = 3
def binomial_error(l1):
err_list = []
for item in l1:
if item==1. or item==0.: err_list.append(np.sqrt(100./101.*(1.-100./101.)/101.))
else: err_list.append(np.sqrt(item*(1.-item)/100.))
return err_list
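# Worked example (illustrative, assuming 100 pseudo-experiments as used in the
# p-value files below): a rejection fraction of 0.5 gets the usual binomial
# standard error sqrt(0.5 * 0.5 / 100) = 0.05, while fractions of exactly 0 or
# 1 fall back to the regularised sqrt((100/101) * (1/101) / 101) so the error
# bar never collapses to zero.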
####################################################################################################
# S - 4 mu WigD
####################################################################################################
if MODE == 'S-4mu_WigD':
#param_list = [0.1,0.08,0.06,0.04,0.02,0.0]
param_list = [0.1,0.08,0.06,0.04]
param_list = [0.0,0.01,0.02,0.03,0.04,0.05,0.06,0.07,0.08,0.09,0.1]
param_list = [0.05,0.1,0.3,0.5,0.7,1.]
ml_classifiers = ['nn','bdt']
ml_classifiers_colors = ['green','magenta','cyan']
ml_classifiers_bin = 5
chi2_color = 'red'
chi2_splits = [1,2,3,4,5,6,7,8,9,10]
#chi2_splits = [8]
ml_folder_name = "S-4mu_WigD/evaluation_S-VV-4mu_WigD_updated10"
chi2_folder_name = "S-4mu_WigD"
#chi2_folder_name = "event_shapes_lower_level_without_Mult"
ml_file_name = "{1}_S-VV-4mu_WigD_updated10_{0}_syst_0_01__chi2scoring_5_p_values"
chi2_file_name = "S-4mu_WigD_updated10_{0}D_chi2_{1}_splits_p_values"
#chi2_file_name = "event_shapes_lower_level_syst_0_01_attempt4_without_Mult__{0}D_chi2_{1}_splits_p_values"
chi2_1D_file_name = "S-4mu_WigD_updated10_1D_{0}D_chi2_{1}_splits_p_values"
chi2_m1D_file_name = "S-4mu_WigD_updated10_m1D_{0}D_chi2_{1}_splits_p_values"
title = "S-4mu"
name = "S-4mu"
CL = 0.95
ml_classifiers_dict={}
chi2_splits_dict={}
chi2_1D_splits_dict={}
chi2_m1D_splits_dict={}
#xwidth = [0.5]*len(param_list)
xwidth = np.subtract(param_list[1:],param_list[:-1])/2.
xwidth_left = np.append(xwidth[0] , xwidth)
xwidth_right = np.append(xwidth,xwidth[-1])
print("xwidth : ", xwidth)
fig = plt.figure()
ax = fig.add_axes([0.2,0.15,0.75,0.8])
if False:
for ml_classifier_index, ml_classifier in enumerate(ml_classifiers):
ml_classifiers_dict[ml_classifier]= []
for param in param_list:
p_values = np.loadtxt(os.environ['learningml']+"/GoF/optimisation_and_evaluation/"+ml_folder_name+"/"+ml_classifier+"/"+ml_file_name.format(param,ml_classifier,ml_classifiers_bin)).tolist()
p_values_in_CL = sum(i < (1-CL) for i in p_values)
ml_classifiers_dict[ml_classifier].append(p_values_in_CL)
ml_classifiers_dict[ml_classifier]= np.divide(ml_classifiers_dict[ml_classifier],100.)
ax.errorbar(param_list,ml_classifiers_dict['nn'], yerr=binomial_error(ml_classifiers_dict['nn']), linestyle='-', marker='s', markeredgewidth=0.0, markersize=12, color=ml_classifiers_colors[0], label=r'$ANN$',clip_on=False)
print("bdt : ", ml_classifiers_dict['bdt'])
ax.errorbar(param_list,ml_classifiers_dict['bdt'], yerr=binomial_error(ml_classifiers_dict['bdt']), linestyle='-', marker='o', markeredgewidth=0.0, markersize=12, color=ml_classifiers_colors[1], label=r'$BDT$', clip_on=False)
for chi2_split_index, chi2_split in enumerate(chi2_splits):
chi2_splits_dict[str(chi2_split)]=[]
chi2_best = []
for param in param_list:
chi2_best_dim = []
for chi2_split_index, chi2_split in enumerate(chi2_splits):
p_values = np.loadtxt(os.environ['learningml']+"/GoF/chi2/"+chi2_folder_name+"/"+chi2_file_name.format(param,chi2_split)).tolist()
p_values_in_CL = sum(i < (1-CL) for i in p_values)
temp = float(p_values_in_CL) /100.
chi2_splits_dict[str(chi2_split)].append(temp)
chi2_best_dim.append(temp)
temp_best = np.max(chi2_best_dim)
#print(str(dim)+"D chi2_best_dim : ", chi2_best_dim)
#print(str(dim)+"D temp_best : ",np.max(temp_best))
chi2_best.append(temp_best)
#print("chi2_best : ",chi2_best)
for chi2_split_index, chi2_split in enumerate(chi2_splits):
chi2_1D_splits_dict[str(chi2_split)]=[]
chi2_1D_best = []
for param in param_list:
chi2_1D_best_dim = []
for chi2_split_index, chi2_split in enumerate(chi2_splits):
p_values = np.loadtxt(os.environ['learningml']+"/GoF/chi2/"+chi2_folder_name+"/"+chi2_1D_file_name.format(param,chi2_split)).tolist()
p_values_in_CL = sum(i < (1-CL) for i in p_values)
temp = float(p_values_in_CL) /100.
chi2_1D_splits_dict[str(chi2_split)].append(temp)
chi2_1D_best_dim.append(temp)
temp_best = np.max(chi2_1D_best_dim)
#print(str(dim)+"D chi2_best_dim : ", chi2_best_dim)
#print(str(dim)+"D temp_best : ",np.max(temp_best))
chi2_1D_best.append(temp_best)
#print("chi2_best : ",chi2_best)
for chi2_split_index, chi2_split in enumerate(chi2_splits):
chi2_m1D_splits_dict[str(chi2_split)]=[]
chi2_m1D_best = []
for param in param_list:
chi2_m1D_best_dim = []
for chi2_split_index, chi2_split in enumerate(chi2_splits):
p_values = np.loadtxt(os.environ['learningml']+"/GoF/chi2/"+chi2_folder_name+"/"+chi2_m1D_file_name.format(param,chi2_split)).tolist()
p_values_in_CL = sum(i < (1-CL) for i in p_values)
temp = float(p_values_in_CL) /100.
chi2_m1D_splits_dict[str(chi2_split)].append(temp)
chi2_m1D_best_dim.append(temp)
temp_best = np.max(chi2_m1D_best_dim)
#print(str(dim)+"D chi2_best_dim : ", chi2_best_dim)
#print(str(dim)+"D temp_best : ",np.max(temp_best))
chi2_m1D_best.append(temp_best)
#print("chi2_best : ",chi2_best)
print("param_list : ",param_list)
print("chi2_best : ", chi2_best)
print("chi2_splits_dict : ", chi2_splits_dict)
ax.errorbar(param_list,chi2_best, yerr=binomial_error(chi2_best), linestyle='--', marker='$\chi$', markeredgecolor='none', markersize=18, color='black', label=r'$\chi^2 w/\_mass$', clip_on=False)
ax.errorbar(param_list,chi2_1D_best, yerr=binomial_error(chi2_1D_best), linestyle='--', marker='$\chi$', markeredgecolor='none', markersize=18, color='blue', label=r'$\chi^2 only\_mass$', clip_on=False)
ax.errorbar(param_list,chi2_m1D_best, yerr=binomial_error(chi2_m1D_best), linestyle='--', marker='$\chi$', markeredgecolor='none', markersize=18, color='red', label=r'$\chi^2 w/o\_mass$', clip_on=False)
print("ml_classifiers_dict : ",ml_classifiers_dict)
print("chi2_best : ", chi2_best)
#ax.plot((0.1365,0.1365),(0.,1.),c="grey",linestyle="--")
ax.set_xlim([0.,1.])
#ax.set_xlim([0.129,0.1405])
ax.set_ylim([0.,1.])
ax.set_xlabel(r"$p_{signal}$")
ax.set_ylabel("Fraction rejected")
plt.legend(frameon=False, numpoints=1)
#a, b, c = [0.130,0.133], [0.1365],[0.14]
#ax.set_xticks(a+b+c)
#xx, locs = plt.xticks()
#ll = ['%.3f' % y for y in a] + ['%.4f' % y for y in b] + ['%.3f' % y for y in c]
#plt.xticks(xx, ll)
#ax.legend(loc='lower left', frameon=False, numpoints=1)
fig_leg = plt.figure(figsize=(8,2.7))
ax_leg = fig_leg.add_axes([0.0,0.0,1.0,1.0])
    plt.tick_params(axis='x', which='both', bottom='off', top='off', labelbottom='off')
    plt.tick_params(axis='y', which='both', left='off', right='off', labelleft='off')
ax_leg.yaxis.set_ticks_position('none')
ax_leg.set_frame_on(False)
plt.figlegend(*ax.get_legend_handles_labels(), loc = 'upper left',frameon=False, numpoints=1,ncol=2)
fig_leg.savefig("S-4mu_WigD_updated10_analysis_legend.pdf")
#fig_name=name+"_alphaSvalue_analysis"
fig_name="S-4mu_WigD_updated10_analysis"
fig.savefig(fig_name+".pdf")
fig.savefig(fig_name+"_"+time.strftime("%b_%d_%Y")+".pdf")
print("Saved the figure as" , fig_name+".pdf")
|
|
# Copyright (c) 2006-2013 Regents of the University of Minnesota.
# For licensing terms, see the file LICENSE.
import hashlib
from lxml import etree
import conf
import g
from grax.access_level import Access_Level
from gwis.query_filters import Query_Filters
from item import item_base
from item import item_versioned
from item import item_user_watching
from item import link_value
from item.util import revision
from item.util.item_type import Item_Type
from util_ import gml
from util_ import misc
__all__ = ['One', 'Many']
log = g.log.getLogger('geofeature')
class One(item_user_watching.One):
item_type_id = Item_Type.GEOFEATURE
item_type_table = 'geofeature'
item_gwis_abbrev = 'ft'
# This is a little coupled: all this class's derived classes' item_types.
child_item_types = (
Item_Type.GEOFEATURE,
Item_Type.BYWAY,
Item_Type.REGION,
Item_Type.TERRAIN,
Item_Type.WAYPOINT,
Item_Type.ROUTE,
Item_Type.TRACK,
# Item_Type.BRANCH,
)
item_save_order = 3
# BUG nnnn: routed resources: only make item_base class attrs for those
# values we care about. and do we need geometry for route_step?
local_defns = [
# py/psql name, deft, send?, pkey?, pytyp, reqv, abbrev
('geometry', None, False, False, str,),
('geometry_wkt', None, False, None, str,),
('geometry_svg', None, False, None, str,),
# BUG nnnn: Restrict z to 130 to 138, inclusive, for byways, since
# tilecache does the same amount of work for each zoom level
# (so maybe restrict z to just, e.g., five values instead).
# RENAME: 'z' is hard to search usages on, since a) it's a single char,
# and b) it gets confused with other things, like 'zoom level'.
# 'z' should be called 'bridge_level'.
('z', None, True, False, int, 1),
('geofeature_layer_id', None, True, False, int, 1, 'gflid'),
('st_line_locate_point', None, False), # for route finder
('annotation_cnt', None, True, None, int, None, 'nann'),
('discussion_cnt', None, True, None, int, None, 'ndis'),
# The is_disconnected bool is just for byway, but it's here because the
# db column is not null.
('is_disconnected', False, False, True, bool, None, 'dcnn'),
      # EXPLAIN: split_from_stack_id doesn't need to be persisted? We just
# need it to clone the split-into byways and their link_values
# from the split-from byway, right? But, whatever, it's stored
# in the table nonetheless.
('split_from_stack_id', None, False, False, int, 0, 'splt'),
]
attr_defns = item_user_watching.One.attr_defns + local_defns
psql_defns = item_user_watching.One.psql_defns
gwis_defns = item_base.One.attr_defns_reduce_for_gwis(attr_defns)
#
private_defns = item_user_watching.One.psql_defns + local_defns
#
cols_copy_nok = item_user_watching.One.cols_copy_nok + (
[
      # copy_from is used for split-from and split-into byways. For the
      # latter, the geometry is technically the same (we're just deleting the
      # split-from byways), so we copy it: split-into byways inherit the
      # split-from byway's geometry, but we'll soon replace it.
# NO: 'geometry',
# NO: 'geometry_wkt',
# NO: 'geometry_svg',
# NO: 'z',
'st_line_locate_point',
'annotation_cnt',
'discussion_cnt',
])
__slots__ = [
'geometry_changed',
] + [attr_defn[0] for attr_defn in local_defns]
# *** Constructor
def __init__(self, qb=None, row=None, req=None, copy_from=None):
item_user_watching.One.__init__(self, qb, row, req, copy_from)
# *** Built-in Function definitions
def __str_verbose__(self):
return ('%s, attrs [%s], tags [%s], lvals [%s]'
% (item_user_watching.One.__str_verbose__(self),
self.attrs,
self.tagged,
getattr(self, 'link_values', None)
))
# *** GML/XML Processing
#
# Note that elem is the outer container, to which item_base will append new.
def append_gml(self, elem, need_digest, new=None, extra_attrs=None,
include_input_only_attrs=False):
# We're called by item_base.Many for each record it found. It created
# a master XML document, and it wants us to create a child document to
# contain the geofeature.
g.assurt(not new)
if new is None:
# NOTE This code CxPx from item_versioned.append_gml
g.assurt(self.item_type_id != '')
new = etree.Element(Item_Type.id_to_str(self.item_type_id))
if need_digest:
# NOTE The GML elem 'dg' is the _d_igest for _g_eometry
# EXPLAIN: How is the digest used?
# FIXME: Can I use any of the geometries?? Probably not...
g.assurt(self.geometry_svg)
# MAYBE: Depending on the geometry representation, it's a diff digest.
geometry = self.geometry_svg or self.geometry_wkt
# or self.geometry_raw
misc.xa_set(new, 'dg', hashlib.md5(geometry).hexdigest())
if self.attrs:
attrs = etree.Element('attrs')
for attr_name, attr_val in self.attrs.iteritems():
attr = etree.Element('a')
misc.xa_set(attr, 'k', attr_name)
# FIXME: Does this value need to be encoded?
# Test with </t> in a tag name. or <t>
misc.xa_set(attr, 'v', attr_val)
attrs.append(attr)
new.append(attrs)
if self.tagged:
# We can't just use a comma-separated list because some tags include
# commas. It's easiest just to make another subdocument.
# NO: misc.xa_set(new, 'tags', ', '.join(self.tagged))
tags = etree.Element('tags')
for tag_name in self.tagged:
tag = etree.Element('t')
tag.text = tag_name
tags.append(tag)
new.append(tags)
self.append_gml_geometry(new)
return item_user_watching.One.append_gml(self, elem, need_digest, new,
extra_attrs, include_input_only_attrs)
#
def append_gml_geometry(self, new):
# NOTE: Derived classes do not call this fcn; they only override it.
# This is called when a client calls checkout with the intermediate item
# type, Geofeature, with some geofeature's stack ID and wants to lazy-
# load remaining item details, like annotations and discussions counts.
# So client already has item geometry and/or doesn't care about geometry.
#
# DEVS: Here's an interesting cmd.:
# ./ccp.py -r -t geofeature -I 1400013 -G \
# -f include_item_stack 1 \
# -f do_load_lval_counts 1 \
# -f dont_load_feat_attcs 1
pass
#
def from_gml(self, qb, elem):
item_user_watching.One.from_gml(self, qb, elem)
# FIXME: Derived classes should verify self.geofeature_layer_id.
#
def set_geometry_wkt(self, geometry_wkt, is_changed=None):
# NOTE: There's one other place where geometry_wkt is set: when we
# consume a 'row' on __init__. Search for the SQL using:
# ST_AsSVG ... AS geometry_svg and ST_AsText ... AS geometry_wkt.
self.geometry_wkt = geometry_wkt
# If we read the geometry from GML, we don't know if it's changed or not
# (we have to read from the database first); but if the geometry is set
# locally, we can let the caller tell us so.
if is_changed is not None:
self.geometry_changed = is_changed
else:
# Set to None so we know we don't know.
self.geometry_changed = None
# *** Validating Saving
#
def validize(self, qb, is_new_item, dirty_reason, ref_item):
item_user_watching.One.validize(self, qb, is_new_item, dirty_reason,
ref_item)
self.validize_geom(qb, is_new_item, ref_item)
#
def validize_geom(self, qb, is_new_item, ref_item):
# The byway class needs to know if the geometry changes so it can decide
# whether to update node_byway and node_endpoint.
if is_new_item:
g.assurt(self.geometry or self.geometry_wkt)
try:
g.assurt(self.geometry_changed is not False)
except AttributeError:
pass
self.geometry_changed = True
else:
g.assurt(self.geometry_wkt)
try:
self.geometry_changed
except AttributeError:
# This happens when cloning an item prior to marking it deleted.
g.assurt(ref_item is not None)
g.assurt(self.geometry == ref_item.geometry)
g.assurt(self.geometry_wkt == ref_item.geometry_wkt)
self.geometry_changed = False
if self.geometry_changed is None:
g.assurt(not qb.db.dont_fetchall)
if self.branch_id == ref_item.branch_id:
# 2012.08.14: The item hasn't had version_finalize_and_increment
# called yet.
g.assurt(self.version == ref_item.version)
# MAYBE: Do we always prefix the SRID like we should?
# SRID=26915;POINT(468503.87 4964887.96)
# If not, postgis will complain.
# MAYBE: Put this in geometry.py? See also: db_glue.py and
# geofeature.py.
self_geom = ('SRID=%s;%s'
% (conf.default_srid, self.geometry_wkt,))
rows = qb.db.sql(
"SELECT ST_GeomFromEWKT(%s) = %s AS is_equal",
(self_geom, ref_item.geometry,))
g.assurt(len(rows) == 1)
self.geometry_changed = not rows[0]['is_equal']
else:
self.geometry_changed = False
log.verbose('validize_geom: geom_changed: %s'
% (self.geometry_changed,))
# *** Saving to the Database
#
def load_all_link_values(self, qb):
# The base class shouldn't import link_value, so send it one, er, many.
links = link_value.Many()
self.load_all_link_values_(qb, links, lhs=False, rhs=True, heavywt=True)
#
def save(self, qb, rid):
item_user_watching.One.save(self, qb, rid)
self.geometry_changed = False
#
def save_core(self, qb):
g.assurt(self.z >= 0)
item_user_watching.One.save_core(self, qb)
self.save_insert(qb, One.item_type_table, One.private_defns)
#
def save_insert(self, qb, table, psql_defns, do_update=False):
# If self.geometry is set, it's the raw geometry loaded from the
# database. Otherwise, we've got WKT geometry.
      # NOTE: We always prefer the WKT geometry! We have code to manipulate
      # (parse, play around with, etc.) the WKT format, but not so much the raw
      # PostGIS format (though [lb] thinks there's probably a way, but it
      # probably just converts the hexadecimal format to WKT... I mean, it's
      # not like GDAL makes working with raw geometry easy, does it?).
if self.geometry_wkt:
if not self.geometry_wkt.startswith('SRID='):
            # Always prefer the WKT format, since that's what we edit.
self.geometry = 'SRID=%s;%s' % (conf.default_srid,
self.geometry_wkt,)
else:
self.geometry = self.geometry_wkt
else:
# else, self.geometry is set from an existing database record and
# we're just copying that.
# There's a not-null constraint on geometry so might as well check
# here, too.
if not do_update:
g.assurt(self.geometry)
# Insert to the database.
item_user_watching.One.save_insert(self, qb, table, psql_defns,
do_update)
#
if self.geometry_wkt:
self.geometry = None
#
def save_update(self, qb):
g.assurt(False) # Not impl. for geofeature.
item_user_watching.One.save_update(self, qb)
self.save_insert(qb, One.item_type_table, One.private_defns,
do_update=True)
# ***
#
def diff_compare(self, other):
different = item_user_watching.One.diff_compare(self, other)
if not different:
if ((self.attrs != other.attrs)
or (self.tagged != other.tagged)):
different = True
return different
# ***
#
def ensure_zed(self):
# Derived classes should override this.
if not self.z:
self.z = self.gfl_types.Z_DEFAULT
#
@staticmethod
def as_insert_expression(qb, item):
item.ensure_zed()
insert_expr = (
"(%d, %d, %d, %d, %d, %d, %s, %s, '%s'::GEOMETRY)"
% (item.system_id,
#? qb.branch_hier[0][0],
# or:
item.branch_id,
item.stack_id,
item.version,
item.geofeature_layer_id,
#item.control_of_access,
item.z,
#item.one_way,
item.split_from_stack_id or "NULL",
#item.beg_node_id,
#item.fin_node_id,
item.is_disconnected or "FALSE",
#item.geometry,
item.geometry_wkt,
))
return insert_expr
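   # Illustrative note (all values hypothetical): for an item with
   # system_id=123, branch_id=2500677, stack_id=987, version=1,
   # geofeature_layer_id=11, z=134, no split_from_stack_id, not disconnected,
   # and a WKT geometry string, the expression above renders roughly as
   #   (123, 2500677, 987, 1, 11, 134, NULL, FALSE, '<wkt>'::GEOMETRY)
   # and Many.bulk_insert_rows() below joins such rows with commas into a
   # single INSERT statement.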
# ***
# ***
class Many(item_user_watching.Many):
one_class = One
# Modify the SQL clauses for getting everything about an item
sql_clauses_cols_all = item_user_watching.Many.sql_clauses_cols_all.clone()
sql_clauses_cols_all.inner.shared += (
"""
, gf.geometry
, gf.z
, gf.geofeature_layer_id
""")
# EXPLAIN/MAYBE: Why are we always joining geofeature_layer?
sql_clauses_cols_all.inner.join += (
"""
JOIN geofeature AS gf
ON (gia.item_id = gf.system_id)
JOIN geofeature_layer AS gfl
ON (gf.geofeature_layer_id = gfl.id)
""")
sql_clauses_cols_all.outer.shared += (
"""
, group_item.z
, group_item.geofeature_layer_id
""")
# We wait to add the geometry columns until we've filtered by branch_id,
# etc., so we don't call Postgis a lot on data we don't care about.
g.assurt(not sql_clauses_cols_all.outer.enabled)
sql_clauses_cols_all.outer.enabled = True
sql_clauses_cols_all.outer.geometry_needed = True
# No one should enable the outer select unless they have to, since it has
# the potential to slow down the query significantly (depending on
   # pagination, etc.).
g.assurt(not sql_clauses_cols_all.outer.group_by_enable)
sql_clauses_cols_all.outer.group_by += (
"""
, group_item.geometry
""")
# Modify the SQL clauses for getting the names of items
# NOTE Cloning the SQL for getting just an item's name (so we'll get
# just its name and also its geometry)
sql_clauses_cols_geom = (
item_user_watching.Many.sql_clauses_cols_name.clone())
sql_clauses_cols_geom.inner.shared += (
"""
, gf.geometry
""")
sql_clauses_cols_geom.inner.join += (
"""
JOIN geofeature AS gf
ON (gia.item_id = gf.system_id)
""")
# Note that we're only setting the outer clause "just in case."
g.assurt(not sql_clauses_cols_geom.outer.enabled)
sql_clauses_cols_geom.outer.shared += (
"""
, group_item.geometry
""")
# *** Constructor
__slots__ = ()
def __init__(self):
item_user_watching.Many.__init__(self)
# *** Query Builder routines
# Here we convert the binary geometry from the database into an SVG object
# if it's being sent to the client, or a WKT object if we want to use it
# internally.
# EXPLAIN: Why do we scale the geometry?
def sql_outer_select_extra(self, qb):
extra_select = item_user_watching.Many.sql_outer_select_extra(self, qb)
if qb.sql_clauses.outer.enabled and qb.sql_clauses.outer.geometry_needed:
if not qb.filters.skip_geometry_raw:
extra_select += (
"""
, group_item.geometry AS geometry
""")
if not qb.filters.skip_geometry_svg:
extra_select += (
"""
, ST_AsSVG(ST_Scale(group_item.geometry, 1, -1, 1), 0, %d)
AS geometry_svg
""" % (conf.db_fetch_precision,))
if not qb.filters.make_geometry_ewkt:
# ST_AsText doesn't include SRID=, which we want, since you
# cannot insert geometry back into the db with srid information.
# , ST_AsText(group_item.geometry) AS geometry_wkt
extra_select += (
"""
, ST_AsEWKT(group_item.geometry) AS geometry_wkt
""")
elif not qb.filters.skip_geometry_wkt:
extra_select += (
"""
               , ST_AsText(group_item.geometry) AS geometry_wkt
""")
return extra_select
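   # Note on the ST_Scale(..., 1, -1, 1) above (an assumption, not stated in
   # the original source): SVG's y-axis points down while the projected map
   # coordinates' y-axis points up, so flipping y lets the client render the
   # returned path data without applying another transform.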
# Called by revision_get
def sql_geometry_by_item_name(self, qb, item_name):
g.assurt(False) # This fcn. not called.
qb.sql_clauses = self.sql_clauses_cols_geom.clone()
# FIXME Should this be LIKE or ~?
qb.sql_clauses.where += (
"AND gia.name LIKE %s" % qb.db.quoted((item_name,)))
qb.use_filters_and_viewport = False # FIXME: Is this right?
return self.search_get_sql(qb)
# FIXME: Instead of LIKE, use tilde-operator ~ ?
# Called by revision_get
def sql_geometry_by_items_watched(self, qb):
g.assurt(False) # This fcn. not called.
g.assurt(qb.username != conf.anonymous_username)
qb.sql_clauses = self.sql_clauses_cols_geom.clone()
qb.sql_clauses.join += (
"""
FIXME: deprecated:
JOIN item_watcher AS iw
ON (gia.stack_id = iw.stack_id
AND gia.branch_id = iw.branch_id
AND u.username = iw.username)
""")
# AND iw.enable_email = TRUE)
qb.sql_clauses.where += "AND iw.enable_email = TRUE"
qb.use_filters_and_viewport = False # FIXME: Is this right?
return self.search_get_sql(qb)
#
def sql_apply_query_filters(self, qb, where_clause="", conjunction=""):
g.assurt((not conjunction) or (conjunction == "AND"))
return item_user_watching.Many.sql_apply_query_filters(
self, qb, where_clause, conjunction)
#
def sql_apply_query_viewport(self, qb, geo_table_name=None):
where_c = item_user_watching.Many.sql_apply_query_viewport(
self, qb, "gf")
return where_c
# ***
#
def search_for_items_clever(self, *args, **kwargs):
# From ccp.py and checkout.py, we're called without a dont_fetchall.
# We optimize the search on their behalf, getting link_values for the
# geofeatures. This saves time in the long run.
qb = self.query_builderer(*args, **kwargs)
if ((qb.filters.dont_load_feat_attcs)
and (not qb.filters.do_load_lval_counts)):
item_user_watching.Many.search_for_items_clever(self, qb)
else:
g.assurt(not qb.db.dont_fetchall)
# don't need: qb.sql_clauses = self.sql_clauses_cols_all.clone()
if (isinstance(qb.revision, revision.Current)
or isinstance(qb.revision, revision.Historic)):
qb.item_mgr.load_feats_and_attcs(qb, self,
feat_search_fcn='search_for_items',
processing_fcn=self.search_get_items_add_item_cb,
prog_log=None, heavyweight=False, fetch_size=0,
keep_running=None, diff_group=None)
elif isinstance(qb.revision, revision.Updated):
# This is only allowed via ccp.py, for testing.
g.assurt(qb.request_is_local and qb.request_is_script)
qb.item_mgr.update_feats_and_attcs(qb, self,
feat_search_fcn='search_for_items',
processing_fcn=self.search_get_items_add_item_cb,
prog_log=None, heavyweight=False, fetch_size=0,
keep_running=None)
elif isinstance(qb.revision, revision.Diff):
self.search_for_items_diff(qb)
# I don't think we should definalize, in case the caller wants to use the
# query_builder again with the same filters.
#qb.definalize()
qb.db.curs_recycle()
#
def search_for_items_diff_search(self, qb, diff_group):
qb.item_mgr.load_feats_and_attcs(qb, self,
feat_search_fcn='search_for_items',
processing_fcn=self.search_get_items_by_group_consume,
prog_log=None, heavyweight=False, fetch_size=0,
keep_running=None, diff_group=diff_group)
#
def search_get_item_counterparts_search(self, qb, diff_group):
qb.item_mgr.load_feats_and_attcs(qb, self,
feat_search_fcn='search_for_items',
processing_fcn=self.search_get_item_counterparts_consume,
prog_log=None, heavyweight=False, fetch_size=0,
keep_running=None, diff_group=diff_group)
# ***
# For the route finder, to geocode endpoints.
def search_by_distance(self, qb, point_sql, radius):
g.assurt(not qb.sql_clauses)
      g.assurt(qb.finalized) # Weird that 'clauses' isn't.
g.assurt(not qb.db.dont_fetchall)
      # finalize() has been called, but qb doesn't have any geom filters;
# rather, we're about to add one to the where clause.
g.assurt(not qb.confirm_leafiness)
if len(qb.branch_hier) > 1:
qb.confirm_leafiness = True
# We want to find the nearest geofeature to another geofeature (for the
# route finder, when we geocode, we're looking for the nearest byway to
# a point).
#
# But we can't just call search_get_items: if we're on a leafy branch, we
# need to let item_user_access confirm_leafiness, so we can't order-by
# closest distance until after searching for items.
self.sql_clauses_cols_setup(qb)
if radius is not None:
# Pre-Postgis vSomething: " AND gf.geometry && ST_Expand(%s, %g) "
qb.sql_clauses.inner.where += (
"""
AND ST_DWithin(gf.geometry, %s, %g)
AND gf.is_disconnected IS FALSE
""" % (point_sql, radius,))
g.assurt(qb.sql_clauses.outer.enabled)
qb.sql_clauses.outer.select += (
"""
, ST_line_locate_point(group_item.geometry, %s)
""" % (point_sql,))
g.assurt(not qb.sql_clauses.outer.order_by_enable)
qb.sql_clauses.outer.order_by_enable = True
g.assurt(not qb.sql_clauses.outer.order_by)
qb.sql_clauses.outer.order_by = (
"""
ST_Distance(group_item.geometry, %s) ASC
""" % (point_sql,))
self.search_get_items(qb)
qb.sql_clauses = None
# ***
#
@staticmethod
def bulk_insert_rows(qb, gf_rows_to_insert):
g.assurt(qb.request_is_local)
g.assurt(qb.request_is_script)
g.assurt(qb.cp_maint_lock_owner or ('revision' in qb.db.locked_tables))
if gf_rows_to_insert:
insert_sql = (
"""
INSERT INTO %s.%s (
system_id
, branch_id
, stack_id
, version
, geofeature_layer_id
--, control_of_access
, z
--, one_way
, split_from_stack_id
--, beg_node_id
--, fin_node_id
, is_disconnected
, geometry
) VALUES
%s
""" % (conf.instance_name,
One.item_type_table,
','.join(gf_rows_to_insert),))
qb.db.sql(insert_sql)
# ***
# ***
|
|
'''
Test cases for class AnnotateCommand
The construction of the test cases is driven by the fact that the target cigar
only has three types of regions: M(M_0), D(D_0), and I(I_0). For a region mapped
to a target genome, its start and end position will always be in M_0 or I_0,
because only M_0 and I_0 have target positions. Thus, D_0 will be sandwiched by
M_0 and I_0.
There are nine types of sandwiches:
M, MDM, MDI, (M)I, (M)IDM, (M)IDI, (D)I, (D)IDM, (D)IDI
A read can start at the beginning or the middle of an M/I region, while
it can end at the middle or end of an M/I region.
The tests generally verify: the new cigar, the new start and end in
reference coordinates, and the numbers of SNPs, insertions, and deletions
of the target being observed.
Created on Oct 3, 2012
@author: Shunping Huang
'''
import unittest
import StringIO
import tempfile
import os
import pysam
from lapels import annotator as annot
from lapels import cigarutils
from modtools import mod
polish = lambda x: cigarutils.toString(cigarutils.simplify(x))
class Read:
'''Class for simulating reads from a bam file'''
def __init__(self, start, end, cigar=None, qlen=None):
self.qname = 'unknown'
self.pos = start
self.aend = end #one base after the actual end
self.tags = dict()
if cigar is None:
self.cigar = [(0, self.aend - self.pos)]
else:
self.cigar = cigar
if qlen is None:
self.qlen = 0
for op,length in self.cigar:
if op == 0 or op == 7 or op == 8 or op == 1:
self.qlen += length
else:
self.qlen = qlen
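# Illustrative sketch (not part of the original tests): with the cigar used in
# TestGetReadOffset below, the simulated read spans target positions 10..49
# and its query length counts only M (op 0) and I (op 1) segments:
#
#   r = Read(10, 50, [(0, 10), (1, 5), (0, 10), (2, 10), (0, 10)])
#   # r.qlen == 10 + 5 + 10 + 10 == 35; the 10-base deletion adds nothing.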
class TestGetReadOffset(unittest.TestCase):
'''Test class for getReadOffset() method '''
def setUp(self):
pass
def test1(self):
r = Read(10, 50, [(0,10),(1,5),(0,10),(2,10),(0,10)])
self.assertRaisesRegexp(ValueError, 'underflows', annot.getReadOffset, r, 1)
self.assertEquals(annot.getReadOffset(r, 10), 0)
self.assertEquals(annot.getReadOffset(r, 19), 9)
self.assertEquals(annot.getReadOffset(r, 20), 15)
self.assertEquals(annot.getReadOffset(r, 29), 24)
self.assertRaisesRegexp(ValueError, 'deletion', annot.getReadOffset, r, 30)
self.assertRaisesRegexp(ValueError, 'deletion', annot.getReadOffset, r, 39)
self.assertEquals(annot.getReadOffset(r, 40), 25)
self.assertEquals(annot.getReadOffset(r, 49), 34)
self.assertRaisesRegexp(ValueError, 'overflows', annot.getReadOffset, r, 50)
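    # Informal reading of the assertions above: getReadOffset() maps a target
    # position to an offset within the read, so the first M block covers
    # positions 10..19 -> offsets 0..9, the 5-base insertion shifts the next M
    # block to offsets 15..24, positions inside the deletion (30..39) raise
    # 'deletion', and positions outside the aligned span raise
    # 'underflows'/'overflows'.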
def test2(self):
# qlen is set wrongly on purpose
r = Read(10, 50, [(0,10),(1,5),(0,10),(2,10),(0,10)], 30)
self.assertRaisesRegexp(ValueError, 'underflows', annot.getReadOffset, r, 1)
self.assertEquals(annot.getReadOffset(r, 10), 0)
self.assertEquals(annot.getReadOffset(r, 19), 9)
self.assertEquals(annot.getReadOffset(r, 20), 15)
self.assertEquals(annot.getReadOffset(r, 29), 24)
self.assertRaisesRegexp(ValueError, 'deletion', annot.getReadOffset, r, 30)
self.assertRaisesRegexp(ValueError, 'deletion', annot.getReadOffset, r, 39)
self.assertEquals(annot.getReadOffset(r, 40), 25)
self.assertEquals(annot.getReadOffset(r, 44), 29)
self.assertRaisesRegexp(ValueError, 'conflict', annot.getReadOffset, r, 45)
self.assertRaisesRegexp(ValueError, 'conflict', annot.getReadOffset, r, 49)
self.assertRaisesRegexp(ValueError, 'conflict', annot.getReadOffset, r, 50)
class TestAnnotator(unittest.TestCase):
''' Test class for Annotator '''
def setUp(self):
annot.TESTING = 1
annot.VERBOSITY = 1
def batchTestHelper(self, modFile, pool, refLens):
tmpName = tempfile.mkstemp('.tsv')[1]
tmpfp = open(tmpName, 'wb')
for line in modFile:
tmpfp.write(line)
tmpfp.close()
pysam.tabix_index(tmpName, force=True, seq_col=1, start_col=2, end_col=2,
meta_char='#', zerobased=True)
tmpName += '.gz'
modFile.close()
self.chromoID = '1'
self.modobj = mod.Mod(tmpName)
self.modobj.load(self.chromoID)
for tup in pool:
bamIter=[Read(tup[0], tup[1]+1, tup[2]) for tup in pool]
a = annot.Annotator(self.chromoID, refLens[self.chromoID],
self.modobj, bamIter)
results = a.execute()
for i,res in enumerate(results):
self.assertEqual(polish(res[0]),pool[i][3])
self.assertEqual(res[1], pool[i][4])
self.assertEqual(res[2], pool[i][5])
self.assertEqual(res[3], pool[i][6])
self.assertEqual(res[4], pool[i][7])
os.remove(tmpName)
os.remove(tmpName+'.tbi')
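    # Layout of each 'pool' tuple below, as inferred from the assertions in
    # batchTestHelper (treat this as an informal reading, not original
    # documentation): (target_start, target_end, read_cigar_or_None,
    # expected_new_cigar, expected_reference_start, expected_snp_count,
    # expected_insertion_count, expected_deletion_count).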
def test1(self):
'''Test case for (D)I, (D)ID*M, and (M)ID*M
10M | 5D | 10I | 10D | 10M | 5I | 10D | 10M
Ref : 0-9 | 10-14 | -14 | 15-24 | 25-34 | -34 | 35-44 | 45-54
Tgt : 0-9 | -9 | 10-19 | -19 | 20-29 | 30-34 | -34 | 35-44
read0 : ==
read1 : ==
read2 : =======
read3 : ================
read4 : =======================
read5 : ========================
read6 : ===============================
read7 : =============================================================
read8 : ==***************==***********===========
read9 : ==...............==...........============
read10: ==**********==***************====
'''
annot.LOG = 1
refLens = {'1':55}
modFile = StringIO.StringIO('''d\t1\t10\t1
d\t1\t11\t2
d\t1\t12\t3
d\t1\t13\t4
d\t1\t14\t5
i\t1\t14\tabcdefghij
d\t1\t15\t1
d\t1\t16\t2
d\t1\t17\t3
d\t1\t18\t4
d\t1\t19\t5
d\t1\t20\t6
d\t1\t21\t7
d\t1\t22\t8
d\t1\t23\t9
d\t1\t24\t0
i\t1\t34\tabcde
d\t1\t35\t1
d\t1\t36\t2
d\t1\t37\t3
d\t1\t38\t4
d\t1\t39\t5
d\t1\t40\t6
d\t1\t41\t7
d\t1\t42\t8
d\t1\t43\t9
d\t1\t44\t0
''')
pool = [
(2, 6, None, '5M', 2, 0, 0, 0),
(12, 16, None, '5I', -1, 0, 5, 0),
(10, 19, None, '10I', -1, 0, 10 ,0),
(13, 22, None, '7I,10D,3M', 25, 0, 7, 10),
(10, 29, None, '10I,10D,10M', 25, 0, 10, 10),
(23, 37, None, '7M,5I,10D,3M', 28, 0, 5, 10),
(20, 44, None, '10M,5I,10D,10M', 25, 0, 5, 10),
(0, 44, None, '10M,5D,10I,10D,10M,5I,10D,10M', 0, 0, 15, 25),
(13, 37, [(0,4), (2,5), (0, 6), (2, 7), (0,3)], '4I,12D,6M,12D,3M', 27, 0 , 4 ,0),
(13, 37, [(0,4), (3,5), (0, 6), (3, 7), (0,3)], '4I,12N,6M,12N,3M', 27, 0 , 4 ,0),
#(13, 37, [(0,4), (3,5), (0, 6), (3, 7), (0,3)], '4I,12N,6M,2N,10D,3M', 27, 0 , 4 ,0),
(2, 28, [(0,4),(2,4),(0,5),(2,5),(0,9)], '4M,9D,5I,10D,9M', 2, 0, 5, 0)
]
self.batchTestHelper(modFile, pool, refLens)
def test2(self):
'''Test case for M, MDM, MDI
10M | 5D | 10M | 10D | 10I
Ref : 0-9 | 10-14 | 15-24 | 25-34 | -34
Tgt : 0-9 | -9 | 10-19 | -19 | 20-29
Read0 : ==
Read1 : ================
Read2 : =====================
Read3 : ================
Read4 : =======================
'''
refLens = {'1':35}
modFile = StringIO.StringIO('''d\t1\t10\t1
d\t1\t11\t2
d\t1\t12\t3
d\t1\t13\t4
d\t1\t14\t5
d\t1\t25\t1
d\t1\t26\t2
d\t1\t27\t3
d\t1\t28\t4
d\t1\t29\t5
d\t1\t30\t6
d\t1\t31\t7
d\t1\t32\t8
d\t1\t33\t9
d\t1\t34\t0
i\t1\t34\tabcdefghij
''')
pool = [ (2, 6, None, '5M', 2, 0, 0, 0),
(3, 12, None, '7M,5D,3M', 3, 0, 0, 5),
(0, 19, None, '10M,5D,10M', 0, 0, 0, 5),
(13, 22, None, '7M,10D,3I', 18, 0, 3, 10),
(10, 29, None, '10M,10D,10I', 15, 0, 10, 10),
]
self.batchTestHelper(modFile, pool, refLens)
def test3(self):
'''Test case for (M)I, (M)IDI, (D)IDI
10M | 10I | 10D | 10I | 5D | 10I
Ref : 0-9 | -9 | 10-19 | -19 | 20-24 | -24
Tgt : 0-9 | 10-19 | -19 | 20-29 | -29 | 30-39
Read1 : ===
Read2 : =======
Read3 : ===============
Read4 : =======================
Read5 : ================
Read6 : =======================
Read7 : =======================================
'''
refLens = {'1':40}
modFile = StringIO.StringIO('''i\t1\t9\tabcdefghij
d\t1\t10\t1
d\t1\t11\t2
d\t1\t12\t3
d\t1\t13\t4
d\t1\t14\t5
d\t1\t15\t6
d\t1\t16\t7
d\t1\t17\t8
d\t1\t18\t9
d\t1\t19\t0
i\t1\t19\tabcdefghij
d\t1\t20\t1
d\t1\t21\t2
d\t1\t22\t3
d\t1\t23\t4
d\t1\t24\t5
i\t1\t24\tabcdefghij
''')
pool = [(12, 16, None, '5I', -1, 0, 5, 0),
(10, 19, None, '10I', -1, 0, 10, 0),
(15, 24, None, '5I,10D,5I', -1, 0, 10, 10),
(10, 29, None, '10I,10D,10I', -1, 0, 20, 10),
(25, 34, None, '5I,5D,5I', -1, 0, 10, 5),
(20, 39, None, '10I,5D,10I', -1, 0, 20, 5),
(5, 34, None, '5M,10I,10D,10I,5D,5I', 5, 0, 25, 15)
]
self.batchTestHelper(modFile, pool, refLens)
class TestAnnotator2(unittest.TestCase):
'''
    Test cases for insertions, deletions, and splicing junctions in reads
10M | 10I | 10M | 5D | 10M | 5I | 5D | 5I | 10M
Ref : 0-9 | -9 | 10-19 | 20-24 | 25-34 | -34 | 35-39 | -39 | 40-49
Tgt : 0-9 | 10-19 | 20-29 | -29 | 30-39 | 40-44| -44 | 45-49| 50-59
Read1 : =^=
Read2 : =^=
Read3 : =^=
Read4 : =^=
Read5 : =^=
Read6 : =^=
Read7 : =^=
Read8 : =^=
Read9: =^=
Read10: =^=
'''
def setUp(self):
annot.TESTING = 1
annot.VERBOSITY = 1
annot.LOG = 1
self.refLens = {'1':50}
self.modFile = StringIO.StringIO('''i\t1\t9\tabcdefghij
d\t1\t20\t1
d\t1\t21\t2
d\t1\t22\t3
d\t1\t23\t4
d\t1\t24\t5
i\t1\t34\tklmno
d\t1\t35\t6
d\t1\t36\t7
d\t1\t37\t8
d\t1\t38\t9
d\t1\t39\t0
i\t1\t39\tpqrst
''')
def batchTestHelper(self, modFile, pool, refLens):
tmpName = tempfile.mkstemp('.tsv')[1]
tmpfp = open(tmpName, 'wb')
for line in modFile:
tmpfp.write(line)
tmpfp.close()
pysam.tabix_index(tmpName, force=True, seq_col=1, start_col=2, end_col=2,
meta_char='#', zerobased=True)
tmpName += '.gz'
modFile.close()
self.chromoID = '1'
self.modobj = mod.Mod(tmpName)
self.modobj.load(self.chromoID)
for tup in pool:
bamIter=[Read(tup[0], tup[1]+1, tup[2]) for tup in pool]
a = annot.Annotator(self.chromoID, refLens[self.chromoID],
self.modobj, bamIter)
results = a.execute()
for i,res in enumerate(results):
self.assertEqual(polish(res[0]),pool[i][3])
self.assertEqual(res[1], pool[i][4])
self.assertEqual(res[2], pool[i][5])
self.assertEqual(res[3], pool[i][6])
self.assertEqual(res[4], pool[i][7])
os.remove(tmpName)
os.remove(tmpName+'.tbi')
def test4(self):
cigar = [(0,2),(1,1),(0,2)] #MIM
pool = [(2,5,cigar,'2M,1I,2M', 2, 0, 0, 0),
(8,11,cigar,'2M,3I', 8, 0, 2, 0),
(12,15,cigar,'5I', -1, 0, 4, 0),
(18,21,cigar,'3I,2M', 10, 0, 2, 0),
(28,31,cigar,'2M,1I,5D,2M', 18, 0, 0, 0), #########
(38,41,cigar,'2M,3I', 33, 0, 2, 0),
(41,44,cigar,'5I', -1, 0, 4, 0),
(43,46,cigar,'3I,5D,2I', -1, 0, 4, 0), ########
(45,48,cigar,'5I', -1, 0, 4, 0),
(48,51,cigar,'3I,2M', 40, 0, 2, 0),
]
self.batchTestHelper(self.modFile, pool, self.refLens)
def test5(self):
cigar = [(0,1),(2,1),(1,1),(2,1),(0,1)] #MDIDM
pool = [
(2,5,cigar,'1M,1D,1I,1D,1M', 2, 0, 0, 0),
(8,11,cigar,'1M,1D,2I', 8, 0, 1, 0),
(12,15,cigar,'3I', -1, 0, 2, 0),
(18,21,cigar,'2I,1D,1M', 11, 0, 1, 0),
(28,31,cigar,'1M,1D,1I,6D,1M', 18, 0, 0, 0), #########
(38,41,cigar,'1M,1D,2I', 33, 0, 1, 0),
(41,44,cigar,'3I', -1, 0, 2, 0),
(43,46,cigar,'2I,5D,1I', -1, 0, 2, 0), ########
(45,48,cigar,'3I', -1, 0, 2, 0),
(48,51,cigar,'2I,1D,1M', 41, 0, 1, 0),
]
self.batchTestHelper(self.modFile, pool, self.refLens)
# def test5alt(self):
# cigar = [(0,1),(2,1),(1,1),(2,1),(0,1)] #MDIDM
# pool = [
# (2,5,cigar,'1M,2D,1I,1M', 2, 0, 0, 0),
# (8,11,cigar,'1M,1D,2I', 8, 0, 1, 0),
# (12,15,cigar,'3I', -1, 0, 2, 0),
# (18,21,cigar,'1I,1D,1I,1M', 11, 0, 1, 0),
# (28,31,cigar,'1M,7D,1I,1M', 18, 0, 0, 0), #########
# (38,41,cigar,'1M,1D,2I', 33, 0, 1, 0),
# (41,44,cigar,'3I', -1, 0, 2, 0),
# (43,46,cigar,'1I,5D,2I', -1, 0, 2, 0), ########
# (45,48,cigar,'3I', -1, 0, 2, 0),
# (48,51,cigar,'1I,1D,1I,1M', 41, 0, 1, 0),
# ]
# self.batchTestHelper(self.modFile, pool, self.refLens)
def test6(self):
cigar = [(2,2),(1,1),(2,1),(0,1)]
pool = [(2,5,cigar,'2D,1I,1D,1M', 5, 0, 0, 0),
(8,11,cigar,'2D,2I', -1, 0, 1, 0),
(12,15,cigar,'2I', -1, 0, 1, 0),
(18,21,cigar,'1I,1D,1M', 11, 0, 0, 0),
(28,31,cigar,'2D,1I,6D,1M', 26, 0, 0, 0), #########
(38,41,cigar,'2D,2I', -1, 0, 1, 0),
(41,44,cigar,'2I', -1, 0, 1, 0),
(43,46,cigar,'1I,5D,1I', -1, 0, 1, 0), ########
(45,48,cigar,'2I', -1, 0, 1, 0),
(48,51,cigar,'1I,1D,1M', 41, 0, 0, 0),
]
self.batchTestHelper(self.modFile, pool, self.refLens)
# cigar = [(0,1),(2,1),(1,1),(2,2)]
# pool = [(2,5,cigar,'2M,1I,2M', 2, 0, 0, 0),
# (8,11,cigar,'2M,3I', 8, 0, 2, 0),
# (12,15,cigar,'5I', -1, 0, 4, 0),
# (18,21,cigar,'3I,2M', 10, 0, 2, 0),
# (28,31,cigar,'2M,1I,5D,2M', 18, 0, 0, 0), #########
# (38,41,cigar,'2M,3I', 33, 0, 2, 0),
# (41,44,cigar,'5I', -1, 0, 4, 0),
# (43,46,cigar,'3I,5D,2I', -1, 0, 4, 0), ########
# (45,48,cigar,'5I', -1, 0, 4, 0),
# (48,51,cigar,'3I,2M', 40, 0, 2, 0),
# ]
#
# #cigar = [(1,1),(0,4)]
# pool = [(2,5,cigar,'2M,1I,2M', 2, 0, 0, 0),
# (8,11,cigar,'2M,3I', 8, 0, 2, 0),
# (12,15,cigar,'5I', -1, 0, 4, 0),
# (18,21,cigar,'3I,2M', 10, 0, 2, 0),
# (28,31,cigar,'2M,1I,5D,2M', 18, 0, 0, 0), #########
# (38,41,cigar,'2M,3I', 33, 0, 2, 0),
# (41,44,cigar,'5I', -1, 0, 4, 0),
# (43,46,cigar,'3I,5D,2I', -1, 0, 4, 0), ########
# (45,48,cigar,'5I', -1, 0, 4, 0),
# (48,51,cigar,'3I,2M', 40, 0, 2, 0),
# ]
#
# #cigar = [(0,4),(1,1)]
# pool = [(2,5,cigar,'2M,1I,2M', 2, 0, 0, 0),
# (8,11,cigar,'2M,3I', 8, 0, 2, 0),
# (12,15,cigar,'5I', -1, 0, 4, 0),
# (18,21,cigar,'3I,2M', 10, 0, 2, 0),
# (28,31,cigar,'2M,1I,5D,2M', 18, 0, 0, 0), #########
# (38,41,cigar,'2M,3I', 33, 0, 2, 0),
# (41,44,cigar,'5I', -1, 0, 4, 0),
# (43,46,cigar,'3I,5D,2I', -1, 0, 4, 0), ########
# (45,48,cigar,'5I', -1, 0, 4, 0),
# (48,51,cigar,'3I,2M', 40, 0, 2, 0),
# ]
#
# self.batchTestHelper(modFile, pool, refLens)
if __name__ == '__main__':
unittest.main()
|
|
# Copyright 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
LVM class for performing LVM operations.
"""
import itertools
import math
import os
import re
from os_brick import executor
from oslo_concurrency import processutils as putils
from oslo_log import log as logging
from oslo_utils import excutils
from cinder import exception
from cinder.i18n import _LE, _LI
from cinder import utils
LOG = logging.getLogger(__name__)
class LVM(executor.Executor):
"""LVM object to enable various LVM related operations."""
LVM_CMD_PREFIX = ['env', 'LC_ALL=C']
def __init__(self, vg_name, root_helper, create_vg=False,
physical_volumes=None, lvm_type='default',
executor=putils.execute, lvm_conf=None):
"""Initialize the LVM object.
The LVM object is based on an LVM VolumeGroup, one instantiation
for each VolumeGroup you have/use.
:param vg_name: Name of existing VG or VG to create
:param root_helper: Execution root_helper method to use
:param create_vg: Indicates the VG doesn't exist
and we want to create it
:param physical_volumes: List of PVs to build VG on
:param lvm_type: VG and Volume type (default, or thin)
:param executor: Execute method to use, None uses common/processutils
"""
super(LVM, self).__init__(execute=executor, root_helper=root_helper)
self.vg_name = vg_name
self.pv_list = []
self.vg_size = 0.0
self.vg_free_space = 0.0
self.vg_lv_count = 0
self.vg_uuid = None
self.vg_thin_pool = None
self.vg_thin_pool_size = 0.0
self.vg_thin_pool_free_space = 0.0
self._supports_snapshot_lv_activation = None
self._supports_lvchange_ignoreskipactivation = None
self.vg_provisioned_capacity = 0.0
if create_vg and physical_volumes is not None:
self.pv_list = physical_volumes
try:
self._create_vg(physical_volumes)
except putils.ProcessExecutionError as err:
LOG.exception(_LE('Error creating Volume Group'))
LOG.error(_LE('Cmd :%s'), err.cmd)
LOG.error(_LE('StdOut :%s'), err.stdout)
LOG.error(_LE('StdErr :%s'), err.stderr)
raise exception.VolumeGroupCreationFailed(vg_name=self.vg_name)
if self._vg_exists() is False:
LOG.error(_LE('Unable to locate Volume Group %s'), vg_name)
raise exception.VolumeGroupNotFound(vg_name=vg_name)
# NOTE: we assume that the VG has been activated outside of Cinder
if lvm_type == 'thin':
pool_name = "%s-pool" % self.vg_name
if self.get_volume(pool_name) is None:
try:
self.create_thin_pool(pool_name)
except putils.ProcessExecutionError:
# Maybe we just lost the race against another copy of
# this driver being in init in parallel - e.g.
# cinder-volume and cinder-backup starting in parallel
if self.get_volume(pool_name) is None:
raise
self.vg_thin_pool = pool_name
self.activate_lv(self.vg_thin_pool)
self.pv_list = self.get_all_physical_volumes(root_helper, vg_name)
if lvm_conf and os.path.isfile(lvm_conf):
LVM.LVM_CMD_PREFIX = ['env',
'LC_ALL=C',
'LVM_SYSTEM_DIR=/etc/cinder']
def _vg_exists(self):
"""Simple check to see if VG exists.
:returns: True if vg specified in object exists, else False
"""
exists = False
cmd = LVM.LVM_CMD_PREFIX + ['vgs', '--noheadings',
'-o', 'name', self.vg_name]
(out, _err) = self._execute(*cmd,
root_helper=self._root_helper,
run_as_root=True)
if out is not None:
volume_groups = out.split()
if self.vg_name in volume_groups:
exists = True
return exists
def _create_vg(self, pv_list):
cmd = ['vgcreate', self.vg_name, ','.join(pv_list)]
self._execute(*cmd, root_helper=self._root_helper, run_as_root=True)
def _get_vg_uuid(self):
cmd = LVM.LVM_CMD_PREFIX + ['vgs', '--noheadings',
'-o', 'uuid', self.vg_name]
(out, _err) = self._execute(*cmd,
root_helper=self._root_helper,
run_as_root=True)
if out is not None:
return out.split()
else:
return []
def _get_thin_pool_free_space(self, vg_name, thin_pool_name):
"""Returns available thin pool free space.
:param vg_name: the vg where the pool is placed
:param thin_pool_name: the thin pool to gather info for
:returns: Free space in GB (float), calculated using data_percent
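        Worked example (illustrative numbers only): if "lvs" reports
        "9.51:25.00", then pool_size = 9.51 and data_percent = 25.0, so
        consumed_space = 9.51 / 100 * 25.0 = 2.3775 and the method
        returns round(9.51 - 2.3775, 2) == 7.13 (GB free).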
"""
cmd = LVM.LVM_CMD_PREFIX +\
['lvs', '--noheadings', '--unit=g',
'-o', 'size,data_percent', '--separator',
':', '--nosuffix']
# NOTE(gfidente): data_percent only applies to some types of LV so we
# make sure to append the actual thin pool name
cmd.append("/dev/%s/%s" % (vg_name, thin_pool_name))
free_space = 0.0
try:
(out, err) = self._execute(*cmd,
root_helper=self._root_helper,
run_as_root=True)
if out is not None:
out = out.strip()
data = out.split(':')
pool_size = float(data[0])
data_percent = float(data[1])
consumed_space = pool_size / 100 * data_percent
free_space = pool_size - consumed_space
free_space = round(free_space, 2)
except putils.ProcessExecutionError as err:
LOG.exception(_LE('Error querying thin pool about data_percent'))
LOG.error(_LE('Cmd :%s'), err.cmd)
LOG.error(_LE('StdOut :%s'), err.stdout)
LOG.error(_LE('StdErr :%s'), err.stderr)
return free_space
@staticmethod
def get_lvm_version(root_helper):
"""Static method to get LVM version from system.
:param root_helper: root_helper to use for execute
:returns: version 3-tuple
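        For example, an output line such as
        "LVM version:     2.02.95(2) (2012-03-06)" (exact formatting may
        vary by distribution) parses to the tuple (2, 2, 95).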
"""
cmd = LVM.LVM_CMD_PREFIX + ['vgs', '--version']
(out, _err) = putils.execute(*cmd,
root_helper=root_helper,
run_as_root=True)
lines = out.split('\n')
for line in lines:
if 'LVM version' in line:
version_list = line.split()
# NOTE(gfidente): version is formatted as follows:
# major.minor.patchlevel(library API version)[-customisation]
version = version_list[2]
version_filter = r"(\d+)\.(\d+)\.(\d+).*"
r = re.search(version_filter, version)
version_tuple = tuple(map(int, r.group(1, 2, 3)))
return version_tuple
@staticmethod
def supports_thin_provisioning(root_helper):
"""Static method to check for thin LVM support on a system.
:param root_helper: root_helper to use for execute
:returns: True if supported, False otherwise
"""
return LVM.get_lvm_version(root_helper) >= (2, 2, 95)
@property
def supports_snapshot_lv_activation(self):
"""Property indicating whether snap activation changes are supported.
Check for LVM version >= 2.02.91.
(LVM2 git: e8a40f6 Allow to activate snapshot)
:returns: True/False indicating support
"""
if self._supports_snapshot_lv_activation is not None:
return self._supports_snapshot_lv_activation
self._supports_snapshot_lv_activation = (
self.get_lvm_version(self._root_helper) >= (2, 2, 91))
return self._supports_snapshot_lv_activation
@property
def supports_lvchange_ignoreskipactivation(self):
"""Property indicating whether lvchange can ignore skip activation.
Check for LVM version >= 2.02.99.
(LVM2 git: ab789c1bc add --ignoreactivationskip to lvchange)
"""
if self._supports_lvchange_ignoreskipactivation is not None:
return self._supports_lvchange_ignoreskipactivation
self._supports_lvchange_ignoreskipactivation = (
self.get_lvm_version(self._root_helper) >= (2, 2, 99))
return self._supports_lvchange_ignoreskipactivation
@staticmethod
def get_lv_info(root_helper, vg_name=None, lv_name=None):
"""Retrieve info about LVs (all, in a VG, or a single LV).
:param root_helper: root_helper to use for execute
:param vg_name: optional, gathers info for only the specified VG
:param lv_name: optional, gathers info for only the specified LV
:returns: List of Dictionaries with LV info
"""
cmd = LVM.LVM_CMD_PREFIX + ['lvs', '--noheadings', '--unit=g',
'-o', 'vg_name,name,size', '--nosuffix']
if lv_name is not None and vg_name is not None:
cmd.append("%s/%s" % (vg_name, lv_name))
elif vg_name is not None:
cmd.append(vg_name)
try:
(out, _err) = putils.execute(*cmd,
root_helper=root_helper,
run_as_root=True)
except putils.ProcessExecutionError as err:
with excutils.save_and_reraise_exception(reraise=True) as ctx:
if "not found" in err.stderr or "Failed to find" in err.stderr:
ctx.reraise = False
LOG.info(_LI("Logical Volume not found when querying "
"LVM info. (vg_name=%(vg)s, lv_name=%(lv)s"),
{'vg': vg_name, 'lv': lv_name})
out = None
lv_list = []
if out is not None:
volumes = out.split()
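            # izip(*[iter(volumes)] * 3) walks the flat whitespace-split
            # output in chunks of three: (vg_name, lv_name, size) per LV.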
for vg, name, size in itertools.izip(*[iter(volumes)] * 3):
lv_list.append({"vg": vg, "name": name, "size": size})
return lv_list
def get_volumes(self, lv_name=None):
"""Get all LV's associated with this instantiation (VG).
:returns: List of Dictionaries with LV info
"""
return self.get_lv_info(self._root_helper,
self.vg_name,
lv_name)
def get_volume(self, name):
"""Get reference object of volume specified by name.
:returns: dict representation of Logical Volume if exists
"""
ref_list = self.get_volumes(name)
for r in ref_list:
if r['name'] == name:
return r
return None
@staticmethod
def get_all_physical_volumes(root_helper, vg_name=None):
"""Static method to get all PVs on a system.
:param root_helper: root_helper to use for execute
:param vg_name: optional, gathers info for only the specified VG
:returns: List of Dictionaries with PV info
"""
field_sep = '|'
cmd = LVM.LVM_CMD_PREFIX + ['pvs', '--noheadings',
'--unit=g',
'-o', 'vg_name,name,size,free',
'--separator', field_sep,
'--nosuffix']
(out, _err) = putils.execute(*cmd,
root_helper=root_helper,
run_as_root=True)
pvs = out.split()
if vg_name is not None:
pvs = [pv for pv in pvs if vg_name == pv.split(field_sep)[0]]
pv_list = []
for pv in pvs:
fields = pv.split(field_sep)
pv_list.append({'vg': fields[0],
'name': fields[1],
'size': float(fields[2]),
'available': float(fields[3])})
return pv_list
def get_physical_volumes(self):
"""Get all PVs associated with this instantiation (VG).
:returns: List of Dictionaries with PV info
"""
self.pv_list = self.get_all_physical_volumes(self._root_helper,
self.vg_name)
return self.pv_list
@staticmethod
def get_all_volume_groups(root_helper, vg_name=None):
"""Static method to get all VGs on a system.
:param root_helper: root_helper to use for execute
:param vg_name: optional, gathers info for only the specified VG
:returns: List of Dictionaries with VG info
"""
cmd = LVM.LVM_CMD_PREFIX + ['vgs', '--noheadings',
'--unit=g', '-o',
'name,size,free,lv_count,uuid',
'--separator', ':',
'--nosuffix']
if vg_name is not None:
cmd.append(vg_name)
(out, _err) = putils.execute(*cmd,
root_helper=root_helper,
run_as_root=True)
vg_list = []
if out is not None:
vgs = out.split()
for vg in vgs:
fields = vg.split(':')
vg_list.append({'name': fields[0],
'size': float(fields[1]),
'available': float(fields[2]),
'lv_count': int(fields[3]),
'uuid': fields[4]})
return vg_list
def update_volume_group_info(self):
"""Update VG info for this instantiation.
Used to update member fields of object and
provide a dict of info for caller.
:returns: Dictionaries of VG info
"""
vg_list = self.get_all_volume_groups(self._root_helper, self.vg_name)
if len(vg_list) != 1:
LOG.error(_LE('Unable to find VG: %s'), self.vg_name)
raise exception.VolumeGroupNotFound(vg_name=self.vg_name)
self.vg_size = float(vg_list[0]['size'])
self.vg_free_space = float(vg_list[0]['available'])
self.vg_lv_count = int(vg_list[0]['lv_count'])
self.vg_uuid = vg_list[0]['uuid']
total_vols_size = 0.0
if self.vg_thin_pool is not None:
# NOTE(xyang): If providing only self.vg_name,
# get_lv_info will output info on the thin pool and all
# individual volumes.
# get_lv_info(self._root_helper, 'stack-vg')
# sudo lvs --noheadings --unit=g -o vg_name,name,size
# --nosuffix stack-vg
# stack-vg stack-pool 9.51
# stack-vg volume-13380d16-54c3-4979-9d22-172082dbc1a1 1.00
# stack-vg volume-629e13ab-7759-46a5-b155-ee1eb20ca892 1.00
# stack-vg volume-e3e6281c-51ee-464c-b1a7-db6c0854622c 1.00
#
# If providing both self.vg_name and self.vg_thin_pool,
# get_lv_info will output only info on the thin pool, but not
# individual volumes.
# get_lv_info(self._root_helper, 'stack-vg', 'stack-pool')
# sudo lvs --noheadings --unit=g -o vg_name,name,size
# --nosuffix stack-vg/stack-pool
# stack-vg stack-pool 9.51
#
# We need info on both the thin pool and the volumes,
# therefore we should provide only self.vg_name, but not
# self.vg_thin_pool here.
for lv in self.get_lv_info(self._root_helper,
self.vg_name):
lvsize = lv['size']
# get_lv_info runs "lvs" command with "--nosuffix".
# This removes "g" from "1.00g" and only outputs "1.00".
# Running "lvs" command without "--nosuffix" will output
# "1.00g" if "g" is the unit.
# Remove the unit if it is in lv['size'].
if not lv['size'][-1].isdigit():
lvsize = lvsize[:-1]
if lv['name'] == self.vg_thin_pool:
self.vg_thin_pool_size = lvsize
tpfs = self._get_thin_pool_free_space(self.vg_name,
self.vg_thin_pool)
self.vg_thin_pool_free_space = tpfs
else:
total_vols_size = total_vols_size + float(lvsize)
total_vols_size = round(total_vols_size, 2)
self.vg_provisioned_capacity = total_vols_size
def _calculate_thin_pool_size(self):
"""Calculates the correct size for a thin pool.
Ideally we would use 100% of the containing volume group and be done.
But the 100%VG notation to lvcreate is not implemented and thus cannot
be used. See https://bugzilla.redhat.com/show_bug.cgi?id=998347
Further, some amount of free space must remain in the volume group for
metadata for the contained logical volumes. The exact amount depends
on how much volume sharing you expect.
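        For example (illustrative numbers), with 20.0 GB free in the VG
        this returns the string "19.0g".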
        :returns: An lvcreate-ready size string (in GB) for the calculated
            pool size.
"""
# make sure volume group information is current
self.update_volume_group_info()
# leave 5% free for metadata
return "%sg" % (self.vg_free_space * 0.95)
def create_thin_pool(self, name=None, size_str=None):
"""Creates a thin provisioning pool for this VG.
The syntax here is slightly different than the default
lvcreate -T, so we'll just write a custom cmd here
and do it.
:param name: Name to use for pool, default is "<vg-name>-pool"
:param size_str: Size to allocate for pool, default is entire VG
:returns: The size string passed to the lvcreate command
"""
if not self.supports_thin_provisioning(self._root_helper):
LOG.error(_LE('Requested to setup thin provisioning, '
'however current LVM version does not '
'support it.'))
return None
if name is None:
name = '%s-pool' % self.vg_name
vg_pool_name = '%s/%s' % (self.vg_name, name)
if not size_str:
size_str = self._calculate_thin_pool_size()
cmd = ['lvcreate', '-T', '-L', size_str, vg_pool_name]
LOG.debug("Creating thin pool '%(pool)s' with size %(size)s of "
"total %(free)sg", {'pool': vg_pool_name,
'size': size_str,
'free': self.vg_free_space})
self._execute(*cmd,
root_helper=self._root_helper,
run_as_root=True)
self.vg_thin_pool = name
return size_str
def create_volume(self, name, size_str, lv_type='default', mirror_count=0):
"""Creates a logical volume on the object's VG.
:param name: Name to use when creating Logical Volume
:param size_str: Size to use when creating Logical Volume
:param lv_type: Type of Volume (default or thin)
:param mirror_count: Use LVM mirroring with specified count
"""
if lv_type == 'thin':
pool_path = '%s/%s' % (self.vg_name, self.vg_thin_pool)
cmd = ['lvcreate', '-T', '-V', size_str, '-n', name, pool_path]
else:
cmd = ['lvcreate', '-n', name, self.vg_name, '-L', size_str]
if mirror_count > 0:
cmd.extend(['-m', mirror_count, '--nosync',
'--mirrorlog', 'mirrored'])
terras = int(size_str[:-1]) / 1024.0
if terras >= 1.5:
rsize = int(2 ** math.ceil(math.log(terras) / math.log(2)))
# NOTE(vish): Next power of two for region size. See:
# http://red.ht/U2BPOD
cmd.extend(['-R', str(rsize)])
try:
self._execute(*cmd,
root_helper=self._root_helper,
run_as_root=True)
except putils.ProcessExecutionError as err:
LOG.exception(_LE('Error creating Volume'))
LOG.error(_LE('Cmd :%s'), err.cmd)
LOG.error(_LE('StdOut :%s'), err.stdout)
LOG.error(_LE('StdErr :%s'), err.stderr)
raise
@utils.retry(putils.ProcessExecutionError)
def create_lv_snapshot(self, name, source_lv_name, lv_type='default'):
"""Creates a snapshot of a logical volume.
:param name: Name to assign to new snapshot
:param source_lv_name: Name of Logical Volume to snapshot
:param lv_type: Type of LV (default or thin)
"""
source_lvref = self.get_volume(source_lv_name)
if source_lvref is None:
LOG.error(_LE("Trying to create snapshot by non-existent LV: %s"),
source_lv_name)
raise exception.VolumeDeviceNotFound(device=source_lv_name)
cmd = ['lvcreate', '--name', name,
'--snapshot', '%s/%s' % (self.vg_name, source_lv_name)]
if lv_type != 'thin':
size = source_lvref['size']
cmd.extend(['-L', '%sg' % (size)])
try:
self._execute(*cmd,
root_helper=self._root_helper,
run_as_root=True)
except putils.ProcessExecutionError as err:
LOG.exception(_LE('Error creating snapshot'))
LOG.error(_LE('Cmd :%s'), err.cmd)
LOG.error(_LE('StdOut :%s'), err.stdout)
LOG.error(_LE('StdErr :%s'), err.stderr)
raise
def _mangle_lv_name(self, name):
        # Linux LVM reserves names that start with 'snapshot', so a volume
        # with such a name can't be created. Mangle it.
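        # e.g. 'snapshot-1234' becomes '_snapshot-1234', while
        # 'volume-1234' is returned unchanged.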
if not name.startswith('snapshot'):
return name
return '_' + name
def deactivate_lv(self, name):
lv_path = self.vg_name + '/' + self._mangle_lv_name(name)
cmd = ['lvchange', '-a', 'n']
cmd.append(lv_path)
try:
self._execute(*cmd,
root_helper=self._root_helper,
run_as_root=True)
except putils.ProcessExecutionError as err:
LOG.exception(_LE('Error deactivating LV'))
LOG.error(_LE('Cmd :%s'), err.cmd)
LOG.error(_LE('StdOut :%s'), err.stdout)
LOG.error(_LE('StdErr :%s'), err.stderr)
raise
def activate_lv(self, name, is_snapshot=False):
"""Ensure that logical volume/snapshot logical volume is activated.
:param name: Name of LV to activate
:raises: putils.ProcessExecutionError
"""
# This is a no-op if requested for a snapshot on a version
# of LVM that doesn't support snapshot activation.
# (Assume snapshot LV is always active.)
if is_snapshot and not self.supports_snapshot_lv_activation:
return
lv_path = self.vg_name + '/' + self._mangle_lv_name(name)
# Must pass --yes to activate both the snap LV and its origin LV.
# Otherwise lvchange asks if you would like to do this interactively,
# and fails.
cmd = ['lvchange', '-a', 'y', '--yes']
if self.supports_lvchange_ignoreskipactivation:
cmd.append('-K')
cmd.append(lv_path)
try:
self._execute(*cmd,
root_helper=self._root_helper,
run_as_root=True)
except putils.ProcessExecutionError as err:
LOG.exception(_LE('Error activating LV'))
LOG.error(_LE('Cmd :%s'), err.cmd)
LOG.error(_LE('StdOut :%s'), err.stdout)
LOG.error(_LE('StdErr :%s'), err.stderr)
raise
@utils.retry(putils.ProcessExecutionError)
def delete(self, name):
"""Delete logical volume or snapshot.
:param name: Name of LV to delete
"""
def run_udevadm_settle():
self._execute('udevadm', 'settle',
root_helper=self._root_helper, run_as_root=True,
check_exit_code=False)
# LV removal seems to be a race with other writers or udev in
# some cases (see LP #1270192), so we enable retry deactivation
LVM_CONFIG = 'activation { retry_deactivation = 1} '
try:
self._execute(
'lvremove',
'--config', LVM_CONFIG,
'-f',
'%s/%s' % (self.vg_name, name),
root_helper=self._root_helper, run_as_root=True)
except putils.ProcessExecutionError as err:
LOG.debug('Error reported running lvremove: CMD: %(command)s, '
'RESPONSE: %(response)s',
{'command': err.cmd, 'response': err.stderr})
LOG.debug('Attempting udev settle and retry of lvremove...')
run_udevadm_settle()
# The previous failing lvremove -f might leave behind
# suspended devices; when lvmetad is not available, any
# further lvm command will block forever.
# Therefore we need to skip suspended devices on retry.
LVM_CONFIG += 'devices { ignore_suspended_devices = 1}'
self._execute(
'lvremove',
'--config', LVM_CONFIG,
'-f',
'%s/%s' % (self.vg_name, name),
root_helper=self._root_helper, run_as_root=True)
LOG.debug('Successfully deleted volume: %s after '
'udev settle.', name)
def revert(self, snapshot_name):
"""Revert an LV from snapshot.
:param snapshot_name: Name of snapshot to revert
"""
self._execute('lvconvert', '--merge',
snapshot_name, root_helper=self._root_helper,
run_as_root=True)
def lv_has_snapshot(self, name):
cmd = LVM.LVM_CMD_PREFIX + ['lvdisplay', '--noheading', '-C', '-o',
'Attr', '%s/%s' % (self.vg_name, name)]
out, _err = self._execute(*cmd,
root_helper=self._root_helper,
run_as_root=True)
if out:
out = out.strip()
if (out[0] == 'o') or (out[0] == 'O'):
return True
return False
def extend_volume(self, lv_name, new_size):
"""Extend the size of an existing volume."""
        # Volumes with snapshots have attribute 'o' or 'O' and will be
        # deactivated, but thin volumes with snapshots have attribute 'V'
        # and won't be deactivated, because lv_has_snapshot only looks
        # for 'o' or 'O'.
if self.lv_has_snapshot(lv_name):
self.deactivate_lv(lv_name)
try:
self._execute('lvextend', '-L', new_size,
'%s/%s' % (self.vg_name, lv_name),
root_helper=self._root_helper,
run_as_root=True)
except putils.ProcessExecutionError as err:
LOG.exception(_LE('Error extending Volume'))
LOG.error(_LE('Cmd :%s'), err.cmd)
LOG.error(_LE('StdOut :%s'), err.stdout)
LOG.error(_LE('StdErr :%s'), err.stderr)
raise
def vg_mirror_free_space(self, mirror_count):
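        # Greedy estimate of mirrored free capacity: repeatedly pair the
        # smallest remaining PV free space against the mirror_count largest
        # PVs. Illustrative example (hypothetical numbers): with free space
        # [10, 8, 3] and mirror_count=1, 3 GB mirrors against the 10 GB PV
        # (leaving [7, 8]), then 7 GB against the 8 GB PV (leaving [1]),
        # giving a reported capacity of 3 + 7 = 10 GB.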
free_capacity = 0.0
disks = []
for pv in self.pv_list:
disks.append(float(pv['available']))
while True:
disks = sorted([a for a in disks if a > 0.0], reverse=True)
if len(disks) <= mirror_count:
break
# consume the smallest disk
disk = disks[-1]
disks = disks[:-1]
# match extents for each mirror on the largest disks
for index in list(range(mirror_count)):
disks[index] -= disk
free_capacity += disk
return free_capacity
def vg_mirror_size(self, mirror_count):
return (self.vg_free_space / (mirror_count + 1))
def rename_volume(self, lv_name, new_name):
"""Change the name of an existing volume."""
try:
self._execute('lvrename', self.vg_name, lv_name, new_name,
root_helper=self._root_helper,
run_as_root=True)
except putils.ProcessExecutionError as err:
LOG.exception(_LE('Error renaming logical volume'))
LOG.error(_LE('Cmd :%s'), err.cmd)
LOG.error(_LE('StdOut :%s'), err.stdout)
LOG.error(_LE('StdErr :%s'), err.stderr)
raise
|
|
# -*- coding: utf-8 -*-
"""
======================
Laplacian segmentation
======================
This notebook implements the laplacian segmentation method of
`McFee and Ellis, 2014 <http://bmcfee.github.io/papers/ismir2014_spectral.pdf>`_,
with a couple of minor stability improvements.
Throughout the example, we will refer to equations in the paper by number, so it will be
helpful to read along.
"""
# Code source: Brian McFee
# License: ISC
###################################
# Imports
# - numpy for basic functionality
# - scipy for graph Laplacian
# - matplotlib for visualization
# - sklearn.cluster for K-Means
#
from __future__ import print_function
import numpy as np
import scipy
import matplotlib.pyplot as plt
import sklearn.cluster
import librosa
import librosa.display
#############################
# First, we'll load in a song
y, sr = librosa.load('audio/Karissa_Hobbs_-_09_-_Lets_Go_Fishin.mp3')
##############################################
# Next, we'll compute and plot a log-power CQT
BINS_PER_OCTAVE = 12 * 3
N_OCTAVES = 7
C = librosa.amplitude_to_db(librosa.cqt(y=y, sr=sr,
bins_per_octave=BINS_PER_OCTAVE,
n_bins=N_OCTAVES * BINS_PER_OCTAVE),
ref=np.max)
plt.figure(figsize=(12, 4))
librosa.display.specshow(C, y_axis='cqt_hz', sr=sr,
bins_per_octave=BINS_PER_OCTAVE,
x_axis='time')
plt.tight_layout()
##########################################################
# To reduce dimensionality, we'll beat-synchronize the CQT
tempo, beats = librosa.beat.beat_track(y=y, sr=sr, trim=False)
Csync = librosa.util.sync(C, beats, aggregate=np.median)
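# sync aggregates the CQT frames between consecutive beats (here by median),
# so Csync is at beat resolution rather than frame resolution, which also
# keeps the recurrence matrix below much smaller.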
# For plotting purposes, we'll need the timing of the beats
# we fix_frames to include non-beat frames 0 and C.shape[1] (final frame)
beat_times = librosa.frames_to_time(librosa.util.fix_frames(beats,
x_min=0,
x_max=C.shape[1]),
sr=sr)
plt.figure(figsize=(12, 4))
librosa.display.specshow(Csync, bins_per_octave=12*3,
y_axis='cqt_hz', x_axis='time',
x_coords=beat_times)
plt.tight_layout()
#####################################################################
# Let's build a weighted recurrence matrix using beat-synchronous CQT
# (Equation 1)
# width=3 prevents links within the same bar
# mode='affinity' here implements S_rep (after Eq. 8)
R = librosa.segment.recurrence_matrix(Csync, width=3, mode='affinity',
sym=True)
# Enhance diagonals with a median filter (Equation 2)
df = librosa.segment.timelag_filter(scipy.ndimage.median_filter)
Rf = df(R, size=(1, 7))
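# The filter runs in time-lag space, where repeated sections appear as
# horizontal lines; median-filtering there suppresses spurious isolated
# links while keeping (and smoothing) the repetition structure.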
###################################################################
# Now let's build the sequence matrix (S_loc) using mfcc-similarity
#
# :math:`R_\text{path}[i, i\pm 1] = \exp(-\|C_i - C_{i\pm 1}\|^2 / \sigma^2)`
#
# Here, we take :math:`\sigma` to be the median distance between successive beats.
#
mfcc = librosa.feature.mfcc(y=y, sr=sr)
Msync = librosa.util.sync(mfcc, beats)
path_distance = np.sum(np.diff(Msync, axis=1)**2, axis=0)
sigma = np.median(path_distance)
path_sim = np.exp(-path_distance / sigma)
R_path = np.diag(path_sim, k=1) + np.diag(path_sim, k=-1)
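# R_path is zero everywhere except its first super- and sub-diagonals,
# so it only links each beat to its immediate temporal neighbors.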
##########################################################
# And compute the balanced combination (Equations 6, 7, 9)
deg_path = np.sum(R_path, axis=1)
deg_rec = np.sum(Rf, axis=1)
mu = deg_path.dot(deg_path + deg_rec) / np.sum((deg_path + deg_rec)**2)
A = mu * Rf + (1 - mu) * R_path
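# mu is a scalar weight chosen so that the repetition graph Rf and the
# local path graph R_path contribute comparable total edge weight (degree)
# to the combined graph A.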
###########################################################
# Plot the resulting graphs (Figure 1, left and center)
plt.figure(figsize=(8, 4))
plt.subplot(1, 3, 1)
librosa.display.specshow(Rf, cmap='inferno_r', y_axis='time',
y_coords=beat_times)
plt.title('Recurrence similarity')
plt.subplot(1, 3, 2)
librosa.display.specshow(R_path, cmap='inferno_r')
plt.title('Path similarity')
plt.subplot(1, 3, 3)
librosa.display.specshow(A, cmap='inferno_r')
plt.title('Combined graph')
plt.tight_layout()
#####################################################
# Now let's compute the normalized Laplacian (Eq. 10)
L = scipy.sparse.csgraph.laplacian(A, normed=True)
# and its spectral decomposition
evals, evecs = scipy.linalg.eigh(L)
# We can clean this up further with a median filter.
# This can help smooth over small discontinuities
evecs = scipy.ndimage.median_filter(evecs, size=(9, 1))
# cumulative normalization is needed for symmetric normalized Laplacian eigenvectors
Cnorm = np.cumsum(evecs**2, axis=1)**0.5
# If we want k clusters, use the first k normalized eigenvectors.
# Fun exercise: see how the segmentation changes as you vary k
k = 5
X = evecs[:, :k] / Cnorm[:, k-1:k]
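# Each row of X holds the first k (length-normalized) eigenvector
# coordinates for one beat; beats with similar structural roles land close
# together in this k-dimensional space, which K-Means exploits below.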
# Plot the resulting representation (Figure 1, center and right)
plt.figure(figsize=(8, 4))
plt.subplot(1, 2, 2)
librosa.display.specshow(Rf, cmap='inferno_r')
plt.title('Recurrence matrix')
plt.subplot(1, 2, 1)
librosa.display.specshow(X,
y_axis='time',
y_coords=beat_times)
plt.title('Structure components')
plt.tight_layout()
#############################################################
# Let's use these k components to cluster beats into segments
# (Algorithm 1)
KM = sklearn.cluster.KMeans(n_clusters=k)
seg_ids = KM.fit_predict(X)
# and plot the results
plt.figure(figsize=(12, 4))
colors = plt.get_cmap('Paired', k)
plt.subplot(1, 3, 2)
librosa.display.specshow(Rf, cmap='inferno_r')
plt.title('Recurrence matrix')
plt.subplot(1, 3, 1)
librosa.display.specshow(X,
y_axis='time',
y_coords=beat_times)
plt.title('Structure components')
plt.subplot(1, 3, 3)
librosa.display.specshow(np.atleast_2d(seg_ids).T, cmap=colors)
plt.title('Estimated segments')
plt.colorbar(ticks=range(k))
plt.tight_layout()
###############################################################
# Locate segment boundaries from the label sequence
bound_beats = 1 + np.flatnonzero(seg_ids[:-1] != seg_ids[1:])
# Count beat 0 as a boundary
bound_beats = librosa.util.fix_frames(bound_beats, x_min=0)
# Compute the segment label for each boundary
bound_segs = list(seg_ids[bound_beats])
# Convert beat indices to frames
bound_frames = beats[bound_beats]
# Make sure we cover up to the end of the track
bound_frames = librosa.util.fix_frames(bound_frames,
x_min=None,
x_max=C.shape[1]-1)
###################################################
# And plot the final segmentation over original CQT
# sphinx_gallery_thumbnail_number = 5
import matplotlib.patches as patches
plt.figure(figsize=(12, 4))
bound_times = librosa.frames_to_time(bound_frames)
freqs = librosa.cqt_frequencies(n_bins=C.shape[0],
fmin=librosa.note_to_hz('C1'),
bins_per_octave=BINS_PER_OCTAVE)
librosa.display.specshow(C, y_axis='cqt_hz', sr=sr,
bins_per_octave=BINS_PER_OCTAVE,
x_axis='time')
ax = plt.gca()
for interval, label in zip(zip(bound_times, bound_times[1:]), bound_segs):
ax.add_patch(patches.Rectangle((interval[0], freqs[0]),
interval[1] - interval[0],
freqs[-1],
facecolor=colors(label),
alpha=0.50))
plt.tight_layout()
plt.show()
|
|
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for the API /nodes/ methods.
"""
import datetime
import json
import mock
from oslo_config import cfg
from oslo_utils import timeutils
from oslo_utils import uuidutils
import six
from six.moves.urllib import parse as urlparse
from testtools.matchers import HasLength
from wsme import types as wtypes
from ironic.api.controllers import base as api_base
from ironic.api.controllers import v1 as api_v1
from ironic.api.controllers.v1 import node as api_node
from ironic.api.controllers.v1 import utils as api_utils
from ironic.common import boot_devices
from ironic.common import exception
from ironic.common import states
from ironic.conductor import rpcapi
from ironic import objects
from ironic.tests.api import base as test_api_base
from ironic.tests.api import utils as test_api_utils
from ironic.tests import base
from ironic.tests.objects import utils as obj_utils
class TestNodeObject(base.TestCase):
def test_node_init(self):
node_dict = test_api_utils.node_post_data()
del node_dict['instance_uuid']
node = api_node.Node(**node_dict)
self.assertEqual(wtypes.Unset, node.instance_uuid)
class TestListNodes(test_api_base.FunctionalTest):
def setUp(self):
super(TestListNodes, self).setUp()
self.chassis = obj_utils.create_test_chassis(self.context)
p = mock.patch.object(rpcapi.ConductorAPI, 'get_topic_for')
self.mock_gtf = p.start()
self.mock_gtf.return_value = 'test-topic'
self.addCleanup(p.stop)
def _create_association_test_nodes(self):
# create some unassociated nodes
unassociated_nodes = []
for id in range(3):
node = obj_utils.create_test_node(self.context,
uuid=uuidutils.generate_uuid())
unassociated_nodes.append(node.uuid)
# created some associated nodes
associated_nodes = []
for id in range(4):
node = obj_utils.create_test_node(
self.context, uuid=uuidutils.generate_uuid(),
instance_uuid=uuidutils.generate_uuid())
associated_nodes.append(node.uuid)
return {'associated': associated_nodes,
'unassociated': unassociated_nodes}
def test_empty(self):
data = self.get_json('/nodes')
self.assertEqual([], data['nodes'])
def test_one(self):
node = obj_utils.create_test_node(self.context,
chassis_id=self.chassis.id)
data = self.get_json('/nodes',
headers={api_base.Version.string: str(api_v1.MAX_VER)})
self.assertIn('instance_uuid', data['nodes'][0])
self.assertIn('maintenance', data['nodes'][0])
self.assertIn('power_state', data['nodes'][0])
self.assertIn('provision_state', data['nodes'][0])
self.assertIn('uuid', data['nodes'][0])
self.assertEqual(node.uuid, data['nodes'][0]["uuid"])
self.assertNotIn('driver', data['nodes'][0])
self.assertNotIn('driver_info', data['nodes'][0])
self.assertNotIn('driver_internal_info', data['nodes'][0])
self.assertNotIn('extra', data['nodes'][0])
self.assertNotIn('properties', data['nodes'][0])
self.assertNotIn('chassis_uuid', data['nodes'][0])
self.assertNotIn('reservation', data['nodes'][0])
self.assertNotIn('console_enabled', data['nodes'][0])
self.assertNotIn('target_power_state', data['nodes'][0])
self.assertNotIn('target_provision_state', data['nodes'][0])
self.assertNotIn('provision_updated_at', data['nodes'][0])
self.assertNotIn('maintenance_reason', data['nodes'][0])
# never expose the chassis_id
self.assertNotIn('chassis_id', data['nodes'][0])
def test_get_one(self):
node = obj_utils.create_test_node(self.context,
chassis_id=self.chassis.id)
data = self.get_json('/nodes/%s' % node.uuid,
headers={api_base.Version.string: str(api_v1.MAX_VER)})
self.assertEqual(node.uuid, data['uuid'])
self.assertIn('driver', data)
self.assertIn('driver_info', data)
self.assertEqual('******', data['driver_info']['fake_password'])
self.assertEqual('bar', data['driver_info']['foo'])
self.assertIn('driver_internal_info', data)
self.assertIn('extra', data)
self.assertIn('properties', data)
self.assertIn('chassis_uuid', data)
self.assertIn('reservation', data)
self.assertIn('maintenance_reason', data)
self.assertIn('name', data)
self.assertIn('inspection_finished_at', data)
self.assertIn('inspection_started_at', data)
# never expose the chassis_id
self.assertNotIn('chassis_id', data)
def test_detail(self):
node = obj_utils.create_test_node(self.context,
chassis_id=self.chassis.id)
data = self.get_json('/nodes/detail',
headers={api_base.Version.string: str(api_v1.MAX_VER)})
self.assertEqual(node.uuid, data['nodes'][0]["uuid"])
self.assertIn('name', data['nodes'][0])
self.assertIn('driver', data['nodes'][0])
self.assertIn('driver_info', data['nodes'][0])
self.assertIn('extra', data['nodes'][0])
self.assertIn('properties', data['nodes'][0])
self.assertIn('chassis_uuid', data['nodes'][0])
self.assertIn('reservation', data['nodes'][0])
self.assertIn('maintenance', data['nodes'][0])
self.assertIn('console_enabled', data['nodes'][0])
self.assertIn('target_power_state', data['nodes'][0])
self.assertIn('target_provision_state', data['nodes'][0])
self.assertIn('provision_updated_at', data['nodes'][0])
self.assertIn('inspection_finished_at', data['nodes'][0])
self.assertIn('inspection_started_at', data['nodes'][0])
# never expose the chassis_id
self.assertNotIn('chassis_id', data['nodes'][0])
def test_detail_against_single(self):
node = obj_utils.create_test_node(self.context)
response = self.get_json('/nodes/%s/detail' % node.uuid,
expect_errors=True)
self.assertEqual(404, response.status_int)
def test_mask_available_state(self):
node = obj_utils.create_test_node(self.context,
provision_state=states.AVAILABLE)
data = self.get_json('/nodes/%s' % node.uuid,
headers={api_base.Version.string: str(api_v1.MIN_VER)})
self.assertEqual(states.NOSTATE, data['provision_state'])
data = self.get_json('/nodes/%s' % node.uuid,
headers={api_base.Version.string: "1.2"})
self.assertEqual(states.AVAILABLE, data['provision_state'])
def test_hide_fields_in_newer_versions_driver_internal(self):
node = obj_utils.create_test_node(self.context,
driver_internal_info={"foo": "bar"})
data = self.get_json('/nodes/%s' % node.uuid,
headers={api_base.Version.string: str(api_v1.MIN_VER)})
self.assertNotIn('driver_internal_info', data)
data = self.get_json('/nodes/%s' % node.uuid,
headers={api_base.Version.string: "1.3"})
self.assertEqual({"foo": "bar"}, data['driver_internal_info'])
def test_hide_fields_in_newer_versions_name(self):
node = obj_utils.create_test_node(self.context,
name="fish")
data = self.get_json('/nodes/%s' % node.uuid,
headers={api_base.Version.string: "1.4"})
self.assertNotIn('name', data)
data = self.get_json('/nodes/%s' % node.uuid,
headers={api_base.Version.string: "1.5"})
self.assertEqual('fish', data['name'])
def test_hide_fields_in_newer_versions_inspection(self):
some_time = datetime.datetime(2015, 3, 18, 19, 20)
node = obj_utils.create_test_node(self.context,
inspection_started_at=some_time)
data = self.get_json('/nodes/%s' % node.uuid,
headers={api_base.Version.string: str(api_v1.MIN_VER)})
self.assertNotIn('inspection_finished_at', data)
self.assertNotIn('inspection_started_at', data)
data = self.get_json('/nodes/%s' % node.uuid,
headers={api_base.Version.string: "1.6"})
started = timeutils.parse_isotime(
data['inspection_started_at']).replace(tzinfo=None)
self.assertEqual(some_time, started)
self.assertEqual(None, data['inspection_finished_at'])
def test_many(self):
nodes = []
for id in range(5):
node = obj_utils.create_test_node(self.context,
uuid=uuidutils.generate_uuid())
nodes.append(node.uuid)
data = self.get_json('/nodes')
self.assertEqual(len(nodes), len(data['nodes']))
uuids = [n['uuid'] for n in data['nodes']]
self.assertEqual(sorted(nodes), sorted(uuids))
def test_many_have_names(self):
nodes = []
node_names = []
for id in range(5):
name = 'node-%s' % id
node = obj_utils.create_test_node(self.context,
uuid=uuidutils.generate_uuid(),
name=name)
nodes.append(node.uuid)
node_names.append(name)
data = self.get_json('/nodes',
headers={api_base.Version.string: "1.5"})
names = [n['name'] for n in data['nodes']]
self.assertEqual(len(nodes), len(data['nodes']))
self.assertEqual(sorted(node_names), sorted(names))
def test_links(self):
uuid = uuidutils.generate_uuid()
obj_utils.create_test_node(self.context, uuid=uuid)
data = self.get_json('/nodes/%s' % uuid)
self.assertIn('links', data.keys())
self.assertEqual(2, len(data['links']))
self.assertIn(uuid, data['links'][0]['href'])
for l in data['links']:
bookmark = l['rel'] == 'bookmark'
self.assertTrue(self.validate_link(l['href'], bookmark=bookmark))
def test_collection_links(self):
nodes = []
for id in range(5):
node = obj_utils.create_test_node(self.context,
uuid=uuidutils.generate_uuid())
nodes.append(node.uuid)
data = self.get_json('/nodes/?limit=3')
self.assertEqual(3, len(data['nodes']))
next_marker = data['nodes'][-1]['uuid']
self.assertIn(next_marker, data['next'])
def test_collection_links_default_limit(self):
cfg.CONF.set_override('max_limit', 3, 'api')
nodes = []
for id in range(5):
node = obj_utils.create_test_node(self.context,
uuid=uuidutils.generate_uuid())
nodes.append(node.uuid)
data = self.get_json('/nodes')
self.assertEqual(3, len(data['nodes']))
next_marker = data['nodes'][-1]['uuid']
self.assertIn(next_marker, data['next'])
def test_sort_key(self):
nodes = []
for id in range(3):
node = obj_utils.create_test_node(self.context,
uuid=uuidutils.generate_uuid())
nodes.append(node.uuid)
data = self.get_json('/nodes?sort_key=uuid')
uuids = [n['uuid'] for n in data['nodes']]
self.assertEqual(sorted(nodes), uuids)
def test_sort_key_invalid(self):
invalid_key = 'foo'
response = self.get_json('/nodes?sort_key=%s' % invalid_key,
expect_errors=True)
self.assertEqual(400, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertIn(invalid_key, response.json['error_message'])
def test_ports_subresource_link(self):
node = obj_utils.create_test_node(self.context)
data = self.get_json('/nodes/%s' % node.uuid)
self.assertIn('ports', data.keys())
def test_ports_subresource(self):
node = obj_utils.create_test_node(self.context)
for id_ in range(2):
obj_utils.create_test_port(self.context, node_id=node.id,
uuid=uuidutils.generate_uuid(),
address='52:54:00:cf:2d:3%s' % id_)
data = self.get_json('/nodes/%s/ports' % node.uuid)
self.assertEqual(2, len(data['ports']))
self.assertNotIn('next', data.keys())
# Test collection pagination
data = self.get_json('/nodes/%s/ports?limit=1' % node.uuid)
self.assertEqual(1, len(data['ports']))
self.assertIn('next', data.keys())
def test_ports_subresource_noid(self):
node = obj_utils.create_test_node(self.context)
obj_utils.create_test_port(self.context, node_id=node.id)
# No node id specified
response = self.get_json('/nodes/ports', expect_errors=True)
self.assertEqual(400, response.status_int)
def test_ports_subresource_node_not_found(self):
non_existent_uuid = 'eeeeeeee-cccc-aaaa-bbbb-cccccccccccc'
response = self.get_json('/nodes/%s/ports' % non_existent_uuid,
expect_errors=True)
self.assertEqual(404, response.status_int)
@mock.patch.object(timeutils, 'utcnow')
def test_node_states(self, mock_utcnow):
fake_state = 'fake-state'
fake_error = 'fake-error'
test_time = datetime.datetime(2000, 1, 1, 0, 0)
mock_utcnow.return_value = test_time
node = obj_utils.create_test_node(self.context,
power_state=fake_state,
target_power_state=fake_state,
provision_state=fake_state,
target_provision_state=fake_state,
provision_updated_at=test_time,
last_error=fake_error)
data = self.get_json('/nodes/%s/states' % node.uuid)
self.assertEqual(fake_state, data['power_state'])
self.assertEqual(fake_state, data['target_power_state'])
self.assertEqual(fake_state, data['provision_state'])
self.assertEqual(fake_state, data['target_provision_state'])
prov_up_at = timeutils.parse_isotime(
data['provision_updated_at']).replace(tzinfo=None)
self.assertEqual(test_time, prov_up_at)
self.assertEqual(fake_error, data['last_error'])
self.assertFalse(data['console_enabled'])
@mock.patch.object(timeutils, 'utcnow')
def test_node_states_by_name(self, mock_utcnow):
fake_state = 'fake-state'
fake_error = 'fake-error'
test_time = datetime.datetime(1971, 3, 9, 0, 0)
mock_utcnow.return_value = test_time
node = obj_utils.create_test_node(self.context,
name='eggs',
power_state=fake_state,
target_power_state=fake_state,
provision_state=fake_state,
target_provision_state=fake_state,
provision_updated_at=test_time,
last_error=fake_error)
data = self.get_json('/nodes/%s/states' % node.name,
headers={api_base.Version.string: "1.5"})
self.assertEqual(fake_state, data['power_state'])
self.assertEqual(fake_state, data['target_power_state'])
self.assertEqual(fake_state, data['provision_state'])
self.assertEqual(fake_state, data['target_provision_state'])
prov_up_at = timeutils.parse_isotime(
data['provision_updated_at']).replace(tzinfo=None)
self.assertEqual(test_time, prov_up_at)
self.assertEqual(fake_error, data['last_error'])
self.assertFalse(data['console_enabled'])
def test_node_by_instance_uuid(self):
node = obj_utils.create_test_node(
self.context,
uuid=uuidutils.generate_uuid(),
instance_uuid=uuidutils.generate_uuid())
instance_uuid = node.instance_uuid
data = self.get_json('/nodes?instance_uuid=%s' % instance_uuid,
headers={api_base.Version.string: "1.5"})
self.assertThat(data['nodes'], HasLength(1))
self.assertEqual(node['instance_uuid'],
data['nodes'][0]["instance_uuid"])
def test_node_by_instance_uuid_wrong_uuid(self):
obj_utils.create_test_node(
self.context, uuid=uuidutils.generate_uuid(),
instance_uuid=uuidutils.generate_uuid())
wrong_uuid = uuidutils.generate_uuid()
data = self.get_json('/nodes?instance_uuid=%s' % wrong_uuid)
self.assertThat(data['nodes'], HasLength(0))
def test_node_by_instance_uuid_invalid_uuid(self):
response = self.get_json('/nodes?instance_uuid=fake',
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
def test_associated_nodes_insensitive(self):
associated_nodes = (self
._create_association_test_nodes()
.get('associated'))
data = self.get_json('/nodes?associated=true')
data1 = self.get_json('/nodes?associated=True')
uuids = [n['uuid'] for n in data['nodes']]
uuids1 = [n['uuid'] for n in data1['nodes']]
self.assertEqual(sorted(associated_nodes), sorted(uuids1))
self.assertEqual(sorted(associated_nodes), sorted(uuids))
def test_associated_nodes_error(self):
self._create_association_test_nodes()
response = self.get_json('/nodes?associated=blah', expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_unassociated_nodes_insensitive(self):
unassociated_nodes = (self
._create_association_test_nodes()
.get('unassociated'))
data = self.get_json('/nodes?associated=false')
data1 = self.get_json('/nodes?associated=FALSE')
uuids = [n['uuid'] for n in data['nodes']]
uuids1 = [n['uuid'] for n in data1['nodes']]
self.assertEqual(sorted(unassociated_nodes), sorted(uuids1))
self.assertEqual(sorted(unassociated_nodes), sorted(uuids))
def test_unassociated_nodes_with_limit(self):
unassociated_nodes = (self
._create_association_test_nodes()
.get('unassociated'))
data = self.get_json('/nodes?associated=False&limit=2')
self.assertThat(data['nodes'], HasLength(2))
self.assertTrue(data['nodes'][0]['uuid'] in unassociated_nodes)
def test_next_link_with_association(self):
self._create_association_test_nodes()
data = self.get_json('/nodes/?limit=3&associated=True')
self.assertThat(data['nodes'], HasLength(3))
self.assertIn('associated=True', data['next'])
def test_detail_with_association_filter(self):
associated_nodes = (self
._create_association_test_nodes()
.get('associated'))
data = self.get_json('/nodes/detail?associated=true')
self.assertIn('driver', data['nodes'][0])
self.assertEqual(len(associated_nodes), len(data['nodes']))
def test_next_link_with_association_with_detail(self):
self._create_association_test_nodes()
data = self.get_json('/nodes/detail?limit=3&associated=true')
self.assertThat(data['nodes'], HasLength(3))
self.assertIn('driver', data['nodes'][0])
self.assertIn('associated=True', data['next'])
def test_detail_with_instance_uuid(self):
node = obj_utils.create_test_node(
self.context,
uuid=uuidutils.generate_uuid(),
instance_uuid=uuidutils.generate_uuid(),
chassis_id=self.chassis.id)
instance_uuid = node.instance_uuid
data = self.get_json('/nodes/detail?instance_uuid=%s' % instance_uuid)
self.assertEqual(node['instance_uuid'],
data['nodes'][0]["instance_uuid"])
self.assertIn('driver', data['nodes'][0])
self.assertIn('driver_info', data['nodes'][0])
self.assertIn('extra', data['nodes'][0])
self.assertIn('properties', data['nodes'][0])
self.assertIn('chassis_uuid', data['nodes'][0])
# never expose the chassis_id
self.assertNotIn('chassis_id', data['nodes'][0])
def test_maintenance_nodes(self):
nodes = []
for id in range(5):
node = obj_utils.create_test_node(self.context,
uuid=uuidutils.generate_uuid(),
maintenance=id % 2)
nodes.append(node)
data = self.get_json('/nodes?maintenance=true')
uuids = [n['uuid'] for n in data['nodes']]
test_uuids_1 = [n.uuid for n in nodes if n.maintenance]
self.assertEqual(sorted(test_uuids_1), sorted(uuids))
data = self.get_json('/nodes?maintenance=false')
uuids = [n['uuid'] for n in data['nodes']]
test_uuids_0 = [n.uuid for n in nodes if not n.maintenance]
self.assertEqual(sorted(test_uuids_0), sorted(uuids))
def test_maintenance_nodes_error(self):
response = self.get_json('/nodes?associated=true&maintenance=blah',
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_maintenance_nodes_associated(self):
self._create_association_test_nodes()
node = obj_utils.create_test_node(
self.context,
instance_uuid=uuidutils.generate_uuid(),
maintenance=True)
data = self.get_json('/nodes?associated=true&maintenance=false')
uuids = [n['uuid'] for n in data['nodes']]
self.assertNotIn(node.uuid, uuids)
data = self.get_json('/nodes?associated=true&maintenance=true')
uuids = [n['uuid'] for n in data['nodes']]
self.assertIn(node.uuid, uuids)
data = self.get_json('/nodes?associated=true&maintenance=TruE')
uuids = [n['uuid'] for n in data['nodes']]
self.assertIn(node.uuid, uuids)
def test_get_console_information(self):
node = obj_utils.create_test_node(self.context)
expected_console_info = {'test': 'test-data'}
expected_data = {'console_enabled': True,
'console_info': expected_console_info}
with mock.patch.object(rpcapi.ConductorAPI,
'get_console_information') as mock_gci:
mock_gci.return_value = expected_console_info
data = self.get_json('/nodes/%s/states/console' % node.uuid)
self.assertEqual(expected_data, data)
mock_gci.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
@mock.patch.object(rpcapi.ConductorAPI, 'get_console_information')
def test_get_console_information_by_name(self, mock_gci):
node = obj_utils.create_test_node(self.context, name='spam')
expected_console_info = {'test': 'test-data'}
expected_data = {'console_enabled': True,
'console_info': expected_console_info}
mock_gci.return_value = expected_console_info
data = self.get_json('/nodes/%s/states/console' % node.name,
headers={api_base.Version.string: "1.5"})
self.assertEqual(expected_data, data)
mock_gci.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
def test_get_console_information_console_disabled(self):
node = obj_utils.create_test_node(self.context)
expected_data = {'console_enabled': False,
'console_info': None}
with mock.patch.object(rpcapi.ConductorAPI,
'get_console_information') as mock_gci:
mock_gci.side_effect = exception.NodeConsoleNotEnabled(
node=node.uuid)
data = self.get_json('/nodes/%s/states/console' % node.uuid)
self.assertEqual(expected_data, data)
mock_gci.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
def test_get_console_information_not_supported(self):
node = obj_utils.create_test_node(self.context)
with mock.patch.object(rpcapi.ConductorAPI,
'get_console_information') as mock_gci:
mock_gci.side_effect = exception.UnsupportedDriverExtension(
extension='console', driver='test-driver')
ret = self.get_json('/nodes/%s/states/console' % node.uuid,
expect_errors=True)
self.assertEqual(400, ret.status_code)
mock_gci.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
@mock.patch.object(rpcapi.ConductorAPI, 'get_boot_device')
def test_get_boot_device(self, mock_gbd):
node = obj_utils.create_test_node(self.context)
expected_data = {'boot_device': boot_devices.PXE, 'persistent': True}
mock_gbd.return_value = expected_data
data = self.get_json('/nodes/%s/management/boot_device' % node.uuid)
self.assertEqual(expected_data, data)
mock_gbd.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
@mock.patch.object(rpcapi.ConductorAPI, 'get_boot_device')
def test_get_boot_device_by_name(self, mock_gbd):
node = obj_utils.create_test_node(self.context, name='spam')
expected_data = {'boot_device': boot_devices.PXE, 'persistent': True}
mock_gbd.return_value = expected_data
data = self.get_json('/nodes/%s/management/boot_device' % node.name,
headers={api_base.Version.string: "1.5"})
self.assertEqual(expected_data, data)
mock_gbd.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
@mock.patch.object(rpcapi.ConductorAPI, 'get_boot_device')
def test_get_boot_device_iface_not_supported(self, mock_gbd):
node = obj_utils.create_test_node(self.context)
mock_gbd.side_effect = exception.UnsupportedDriverExtension(
extension='management', driver='test-driver')
ret = self.get_json('/nodes/%s/management/boot_device' % node.uuid,
expect_errors=True)
self.assertEqual(400, ret.status_code)
self.assertTrue(ret.json['error_message'])
mock_gbd.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
@mock.patch.object(rpcapi.ConductorAPI, 'get_supported_boot_devices')
def test_get_supported_boot_devices(self, mock_gsbd):
mock_gsbd.return_value = [boot_devices.PXE]
node = obj_utils.create_test_node(self.context)
data = self.get_json('/nodes/%s/management/boot_device/supported'
% node.uuid)
expected_data = {'supported_boot_devices': [boot_devices.PXE]}
self.assertEqual(expected_data, data)
mock_gsbd.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
@mock.patch.object(rpcapi.ConductorAPI, 'get_supported_boot_devices')
def test_get_supported_boot_devices_by_name(self, mock_gsbd):
mock_gsbd.return_value = [boot_devices.PXE]
node = obj_utils.create_test_node(self.context, name='spam')
data = self.get_json(
'/nodes/%s/management/boot_device/supported' % node.name,
headers={api_base.Version.string: "1.5"})
expected_data = {'supported_boot_devices': [boot_devices.PXE]}
self.assertEqual(expected_data, data)
mock_gsbd.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
@mock.patch.object(rpcapi.ConductorAPI, 'get_supported_boot_devices')
def test_get_supported_boot_devices_iface_not_supported(self, mock_gsbd):
node = obj_utils.create_test_node(self.context)
mock_gsbd.side_effect = exception.UnsupportedDriverExtension(
extension='management', driver='test-driver')
ret = self.get_json('/nodes/%s/management/boot_device/supported' %
node.uuid, expect_errors=True)
self.assertEqual(400, ret.status_code)
self.assertTrue(ret.json['error_message'])
mock_gsbd.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
@mock.patch.object(rpcapi.ConductorAPI, 'validate_driver_interfaces')
def test_validate_by_uuid_using_deprecated_interface(self, mock_vdi):
# Note(mrda): The 'node_uuid' interface is deprecated in favour
# of the 'node' interface
node = obj_utils.create_test_node(self.context)
self.get_json('/nodes/validate?node_uuid=%s' % node.uuid)
mock_vdi.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
@mock.patch.object(rpcapi.ConductorAPI, 'validate_driver_interfaces')
def test_validate_by_uuid(self, mock_vdi):
node = obj_utils.create_test_node(self.context)
self.get_json('/nodes/validate?node=%s' % node.uuid,
headers={api_base.Version.string: "1.5"})
mock_vdi.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
@mock.patch.object(rpcapi.ConductorAPI, 'validate_driver_interfaces')
def test_validate_by_name_unsupported(self, mock_vdi):
node = obj_utils.create_test_node(self.context, name='spam')
ret = self.get_json('/nodes/validate?node=%s' % node.name,
expect_errors=True)
self.assertEqual(406, ret.status_code)
self.assertFalse(mock_vdi.called)
@mock.patch.object(rpcapi.ConductorAPI, 'validate_driver_interfaces')
def test_validate_by_name(self, mock_vdi):
node = obj_utils.create_test_node(self.context, name='spam')
self.get_json('/nodes/validate?node=%s' % node.name,
headers={api_base.Version.string: "1.5"})
# note that this should be node.uuid here as we get that from the
# rpc_node lookup and pass that downwards
mock_vdi.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
class TestPatch(test_api_base.FunctionalTest):
def setUp(self):
super(TestPatch, self).setUp()
self.chassis = obj_utils.create_test_chassis(self.context)
self.node = obj_utils.create_test_node(self.context, name='node-57',
chassis_id=self.chassis.id)
self.node_no_name = obj_utils.create_test_node(self.context,
uuid='deadbeef-0000-1111-2222-333333333333',
chassis_id=self.chassis.id)
p = mock.patch.object(rpcapi.ConductorAPI, 'get_topic_for')
self.mock_gtf = p.start()
self.mock_gtf.return_value = 'test-topic'
self.addCleanup(p.stop)
p = mock.patch.object(rpcapi.ConductorAPI, 'update_node')
self.mock_update_node = p.start()
self.addCleanup(p.stop)
p = mock.patch.object(rpcapi.ConductorAPI, 'change_node_power_state')
self.mock_cnps = p.start()
self.addCleanup(p.stop)
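    # The bodies passed to patch_json below follow the JSON Patch format
    # (RFC 6902): each entry is a dict with 'op' ('add', 'replace' or
    # 'remove'), 'path', and, except for 'remove', 'value'.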
def test_update_ok(self):
self.mock_update_node.return_value = self.node
(self
.mock_update_node
.return_value
.updated_at) = "2013-12-03T06:20:41.184720+00:00"
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/instance_uuid',
'value': 'aaaaaaaa-1111-bbbb-2222-cccccccccccc',
'op': 'replace'}])
self.assertEqual('application/json', response.content_type)
self.assertEqual(200, response.status_code)
self.assertEqual(self.mock_update_node.return_value.updated_at,
timeutils.parse_isotime(response.json['updated_at']))
self.mock_update_node.assert_called_once_with(
mock.ANY, mock.ANY, 'test-topic')
def test_update_by_name_unsupported(self):
self.mock_update_node.return_value = self.node
(self
.mock_update_node
.return_value
.updated_at) = "2013-12-03T06:20:41.184720+00:00"
response = self.patch_json(
'/nodes/%s' % self.node.name,
[{'path': '/instance_uuid',
'value': 'aaaaaaaa-1111-bbbb-2222-cccccccccccc',
'op': 'replace'}],
expect_errors=True)
self.assertEqual(404, response.status_code)
self.assertFalse(self.mock_update_node.called)
def test_update_ok_by_name(self):
self.mock_update_node.return_value = self.node
(self
.mock_update_node
.return_value
.updated_at) = "2013-12-03T06:20:41.184720+00:00"
response = self.patch_json(
'/nodes/%s' % self.node.name,
[{'path': '/instance_uuid',
'value': 'aaaaaaaa-1111-bbbb-2222-cccccccccccc',
'op': 'replace'}],
headers={api_base.Version.string: "1.5"})
self.assertEqual('application/json', response.content_type)
self.assertEqual(200, response.status_code)
self.assertEqual(self.mock_update_node.return_value.updated_at,
timeutils.parse_isotime(response.json['updated_at']))
self.mock_update_node.assert_called_once_with(
mock.ANY, mock.ANY, 'test-topic')
def test_update_state(self):
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'power_state': 'new state'}],
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_update_fails_bad_driver_info(self):
fake_err = 'Fake Error Message'
self.mock_update_node.side_effect = exception.InvalidParameterValue(
fake_err)
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/driver_info/this',
'value': 'foo',
'op': 'add'},
{'path': '/driver_info/that',
'value': 'bar',
'op': 'add'}],
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.mock_update_node.assert_called_once_with(
mock.ANY, mock.ANY, 'test-topic')
def test_update_fails_bad_driver(self):
self.mock_gtf.side_effect = exception.NoValidHost('Fake Error')
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/driver',
'value': 'bad-driver',
'op': 'replace'}],
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
def test_update_fails_bad_state(self):
fake_err = 'Fake Power State'
self.mock_update_node.side_effect = exception.NodeInWrongPowerState(
node=self.node.uuid, pstate=fake_err)
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/instance_uuid',
'value': 'aaaaaaaa-1111-bbbb-2222-cccccccccccc',
'op': 'replace'}],
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(409, response.status_code)
self.mock_update_node.assert_called_once_with(
mock.ANY, mock.ANY, 'test-topic')
def test_add_ok(self):
self.mock_update_node.return_value = self.node
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/extra/foo',
'value': 'bar',
'op': 'add'}])
self.assertEqual('application/json', response.content_type)
self.assertEqual(200, response.status_code)
self.mock_update_node.assert_called_once_with(
mock.ANY, mock.ANY, 'test-topic')
def test_add_root(self):
self.mock_update_node.return_value = self.node
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/instance_uuid',
'value': 'aaaaaaaa-1111-bbbb-2222-cccccccccccc',
'op': 'add'}])
self.assertEqual('application/json', response.content_type)
self.assertEqual(200, response.status_code)
self.mock_update_node.assert_called_once_with(
mock.ANY, mock.ANY, 'test-topic')
def test_add_root_non_existent(self):
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/foo', 'value': 'bar', 'op': 'add'}],
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_remove_ok(self):
self.mock_update_node.return_value = self.node
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/extra',
'op': 'remove'}])
self.assertEqual('application/json', response.content_type)
self.assertEqual(200, response.status_code)
self.mock_update_node.assert_called_once_with(
mock.ANY, mock.ANY, 'test-topic')
def test_remove_non_existent_property_fail(self):
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/extra/non-existent', 'op': 'remove'}],
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_update_state_in_progress(self):
node = obj_utils.create_test_node(self.context,
uuid=uuidutils.generate_uuid(),
target_power_state=states.POWER_OFF)
response = self.patch_json('/nodes/%s' % node.uuid,
[{'path': '/extra/foo', 'value': 'bar',
'op': 'add'}], expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(409, response.status_code)
self.assertTrue(response.json['error_message'])
def test_add_state_in_deployfail(self):
node = obj_utils.create_test_node(self.context,
uuid=uuidutils.generate_uuid(),
provision_state=states.DEPLOYFAIL,
target_provision_state=states.ACTIVE)
self.mock_update_node.return_value = node
response = self.patch_json('/nodes/%s' % node.uuid,
[{'path': '/extra/foo', 'value': 'bar',
'op': 'add'}])
self.assertEqual('application/json', response.content_type)
self.assertEqual(200, response.status_code)
self.mock_update_node.assert_called_once_with(
mock.ANY, mock.ANY, 'test-topic')
def test_patch_ports_subresource(self):
response = self.patch_json('/nodes/%s/ports' % self.node.uuid,
[{'path': '/extra/foo', 'value': 'bar',
'op': 'add'}], expect_errors=True)
self.assertEqual(403, response.status_int)
def test_remove_uuid(self):
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/uuid', 'op': 'remove'}],
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_remove_instance_uuid_cleaning(self):
node = obj_utils.create_test_node(
self.context,
uuid=uuidutils.generate_uuid(),
provision_state=states.CLEANING,
target_provision_state=states.AVAILABLE)
self.mock_update_node.return_value = node
response = self.patch_json('/nodes/%s' % node.uuid,
[{'op': 'remove',
'path': '/instance_uuid'}])
self.assertEqual('application/json', response.content_type)
self.assertEqual(200, response.status_code)
self.mock_update_node.assert_called_once_with(
mock.ANY, mock.ANY, 'test-topic')
def test_add_state_in_cleaning(self):
node = obj_utils.create_test_node(
self.context,
uuid=uuidutils.generate_uuid(),
provision_state=states.CLEANING,
target_provision_state=states.AVAILABLE)
self.mock_update_node.return_value = node
response = self.patch_json('/nodes/%s' % node.uuid,
[{'path': '/extra/foo', 'value': 'bar',
'op': 'add'}], expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(409, response.status_code)
self.assertTrue(response.json['error_message'])
def test_remove_mandatory_field(self):
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/driver', 'op': 'remove'}],
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_replace_chassis_uuid(self):
self.mock_update_node.return_value = self.node
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/chassis_uuid',
'value': self.chassis.uuid,
'op': 'replace'}])
self.assertEqual('application/json', response.content_type)
self.assertEqual(200, response.status_code)
def test_add_chassis_uuid(self):
self.mock_update_node.return_value = self.node
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/chassis_uuid',
'value': self.chassis.uuid,
'op': 'add'}])
self.assertEqual('application/json', response.content_type)
self.assertEqual(200, response.status_code)
def test_add_chassis_id(self):
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/chassis_id',
'value': '1',
'op': 'add'}],
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_replace_chassis_id(self):
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/chassis_id',
'value': '1',
'op': 'replace'}],
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_remove_chassis_id(self):
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/chassis_id',
'op': 'remove'}],
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_replace_non_existent_chassis_uuid(self):
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/chassis_uuid',
'value': 'eeeeeeee-dddd-cccc-bbbb-aaaaaaaaaaaa',
'op': 'replace'}], expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_remove_internal_field(self):
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/last_error', 'op': 'remove'}],
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_replace_internal_field(self):
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/power_state', 'op': 'replace',
'value': 'fake-state'}],
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_replace_maintenance(self):
self.mock_update_node.return_value = self.node
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/maintenance', 'op': 'replace',
'value': 'true'}])
self.assertEqual('application/json', response.content_type)
self.assertEqual(200, response.status_code)
self.mock_update_node.assert_called_once_with(
mock.ANY, mock.ANY, 'test-topic')
def test_replace_maintenance_by_name(self):
self.mock_update_node.return_value = self.node
response = self.patch_json(
'/nodes/%s' % self.node.name,
[{'path': '/maintenance', 'op': 'replace',
'value': 'true'}],
headers={api_base.Version.string: "1.5"})
self.assertEqual('application/json', response.content_type)
self.assertEqual(200, response.status_code)
self.mock_update_node.assert_called_once_with(
mock.ANY, mock.ANY, 'test-topic')
def test_replace_consoled_enabled(self):
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/console_enabled',
'op': 'replace', 'value': True}],
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_replace_provision_updated_at(self):
test_time = '2000-01-01 00:00:00'
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/provision_updated_at',
'op': 'replace', 'value': test_time}],
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_patch_add_name_ok(self):
self.mock_update_node.return_value = self.node_no_name
test_name = 'guido-van-rossum'
response = self.patch_json('/nodes/%s' % self.node_no_name.uuid,
[{'path': '/name',
'op': 'add',
'value': test_name}],
headers={api_base.Version.string: "1.5"})
self.assertEqual('application/json', response.content_type)
self.assertEqual(200, response.status_code)
def test_patch_add_name_invalid(self):
self.mock_update_node.return_value = self.node_no_name
test_name = 'I-AM-INVALID'
response = self.patch_json('/nodes/%s' % self.node_no_name.uuid,
[{'path': '/name',
'op': 'add',
'value': test_name}],
headers={api_base.Version.string: "1.5"},
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_patch_name_replace_ok(self):
self.mock_update_node.return_value = self.node
test_name = 'guido-van-rossum'
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/name',
'op': 'replace',
'value': test_name}],
headers={api_base.Version.string: "1.5"})
self.assertEqual('application/json', response.content_type)
self.assertEqual(200, response.status_code)
def test_patch_add_replace_invalid(self):
self.mock_update_node.return_value = self.node_no_name
test_name = 'Guido Van Error'
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/name',
'op': 'replace',
'value': test_name}],
headers={api_base.Version.string: "1.5"},
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_patch_duplicate_name(self):
node = obj_utils.create_test_node(self.context,
uuid=uuidutils.generate_uuid())
test_name = "this-is-my-node"
self.mock_update_node.side_effect = exception.DuplicateName(test_name)
response = self.patch_json('/nodes/%s' % node.uuid,
[{'path': '/name',
'op': 'replace',
'value': test_name}],
headers={api_base.Version.string: "1.5"},
expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(409, response.status_code)
self.assertTrue(response.json['error_message'])
@mock.patch.object(api_utils, 'get_rpc_node')
def test_patch_update_drive_console_enabled(self, mock_rpc_node):
self.node.console_enabled = True
mock_rpc_node.return_value = self.node
response = self.patch_json('/nodes/%s' % self.node.uuid,
[{'path': '/driver',
'value': 'foo',
'op': 'add'}],
expect_errors=True)
mock_rpc_node.assert_called_once_with(self.node.uuid)
self.assertEqual('application/json', response.content_type)
self.assertEqual(409, response.status_code)
self.assertTrue(response.json['error_message'])
class TestPost(test_api_base.FunctionalTest):
def setUp(self):
super(TestPost, self).setUp()
self.chassis = obj_utils.create_test_chassis(self.context)
p = mock.patch.object(rpcapi.ConductorAPI, 'get_topic_for')
self.mock_gtf = p.start()
self.mock_gtf.return_value = 'test-topic'
self.addCleanup(p.stop)
@mock.patch.object(timeutils, 'utcnow')
def test_create_node(self, mock_utcnow):
ndict = test_api_utils.post_get_test_node()
test_time = datetime.datetime(2000, 1, 1, 0, 0)
mock_utcnow.return_value = test_time
response = self.post_json('/nodes', ndict)
self.assertEqual(201, response.status_int)
result = self.get_json('/nodes/%s' % ndict['uuid'])
self.assertEqual(ndict['uuid'], result['uuid'])
self.assertFalse(result['updated_at'])
return_created_at = timeutils.parse_isotime(
result['created_at']).replace(tzinfo=None)
self.assertEqual(test_time, return_created_at)
# Check location header
self.assertIsNotNone(response.location)
expected_location = '/v1/nodes/%s' % ndict['uuid']
self.assertEqual(urlparse.urlparse(response.location).path,
expected_location)
def test_create_node_doesnt_contain_id(self):
# FIXME(comstud): I'd like to make this test not use the
# dbapi, however, no matter what I do when trying to mock
# Node.create(), the API fails to convert the objects.Node
# into the API Node object correctly (it leaves all fields
# as Unset).
with mock.patch.object(self.dbapi, 'create_node',
wraps=self.dbapi.create_node) as cn_mock:
ndict = test_api_utils.post_get_test_node(extra={'foo': 123})
self.post_json('/nodes', ndict)
result = self.get_json('/nodes/%s' % ndict['uuid'])
self.assertEqual(ndict['extra'], result['extra'])
cn_mock.assert_called_once_with(mock.ANY)
# Check that 'id' is not in first arg of positional args
self.assertNotIn('id', cn_mock.call_args[0][0])
def _test_jsontype_attributes(self, attr_name):
kwargs = {attr_name: {'str': 'foo', 'int': 123, 'float': 0.1,
'bool': True, 'list': [1, 2], 'none': None,
'dict': {'cat': 'meow'}}}
ndict = test_api_utils.post_get_test_node(**kwargs)
self.post_json('/nodes', ndict)
result = self.get_json('/nodes/%s' % ndict['uuid'])
self.assertEqual(ndict[attr_name], result[attr_name])
def test_create_node_valid_extra(self):
self._test_jsontype_attributes('extra')
def test_create_node_valid_properties(self):
self._test_jsontype_attributes('properties')
def test_create_node_valid_driver_info(self):
self._test_jsontype_attributes('driver_info')
def test_create_node_valid_instance_info(self):
self._test_jsontype_attributes('instance_info')
def _test_vendor_passthru_ok(self, mock_vendor, return_value=None,
is_async=True):
expected_status = 202 if is_async else 200
expected_return_value = json.dumps(return_value)
if six.PY3:
expected_return_value = expected_return_value.encode('utf-8')
node = obj_utils.create_test_node(self.context)
info = {'foo': 'bar'}
mock_vendor.return_value = (return_value, is_async)
response = self.post_json('/nodes/%s/vendor_passthru/test' % node.uuid,
info)
mock_vendor.assert_called_once_with(
mock.ANY, node.uuid, 'test', 'POST', info, 'test-topic')
self.assertEqual(expected_return_value, response.body)
self.assertEqual(expected_status, response.status_code)
def _test_vendor_passthru_ok_by_name(self, mock_vendor, return_value=None,
is_async=True):
expected_status = 202 if is_async else 200
expected_return_value = json.dumps(return_value)
if six.PY3:
expected_return_value = expected_return_value.encode('utf-8')
node = obj_utils.create_test_node(self.context, name='node-109')
info = {'foo': 'bar'}
mock_vendor.return_value = (return_value, is_async)
response = self.post_json('/nodes/%s/vendor_passthru/test' % node.name,
info,
headers={api_base.Version.string: "1.5"})
mock_vendor.assert_called_once_with(
mock.ANY, node.uuid, 'test', 'POST', info, 'test-topic')
self.assertEqual(expected_return_value, response.body)
self.assertEqual(expected_status, response.status_code)
@mock.patch.object(rpcapi.ConductorAPI, 'vendor_passthru')
def test_vendor_passthru_async(self, mock_vendor):
self._test_vendor_passthru_ok(mock_vendor)
@mock.patch.object(rpcapi.ConductorAPI, 'vendor_passthru')
def test_vendor_passthru_sync(self, mock_vendor):
return_value = {'cat': 'meow'}
self._test_vendor_passthru_ok(mock_vendor, return_value=return_value,
is_async=False)
@mock.patch.object(rpcapi.ConductorAPI, 'vendor_passthru')
def test_vendor_passthru_put(self, mocked_vendor_passthru):
node = obj_utils.create_test_node(self.context)
return_value = (None, 'async')
mocked_vendor_passthru.return_value = return_value
response = self.put_json(
'/nodes/%s/vendor_passthru/do_test' % node.uuid,
{'test_key': 'test_value'})
self.assertEqual(202, response.status_int)
self.assertEqual(return_value[0], response.json)
@mock.patch.object(rpcapi.ConductorAPI, 'vendor_passthru')
def test_vendor_passthru_by_name(self, mock_vendor):
self._test_vendor_passthru_ok_by_name(mock_vendor)
@mock.patch.object(rpcapi.ConductorAPI, 'vendor_passthru')
def test_vendor_passthru_get(self, mocked_vendor_passthru):
node = obj_utils.create_test_node(self.context)
return_value = ('foo', 'sync')
mocked_vendor_passthru.return_value = return_value
response = self.get_json(
'/nodes/%s/vendor_passthru/do_test' % node.uuid)
self.assertEqual(return_value[0], response)
@mock.patch.object(rpcapi.ConductorAPI, 'vendor_passthru')
def test_vendor_passthru_delete(self, mock_vendor_passthru):
node = obj_utils.create_test_node(self.context)
return_value = (None, 'async')
mock_vendor_passthru.return_value = return_value
response = self.delete(
'/nodes/%s/vendor_passthru/do_test' % node.uuid)
self.assertEqual(202, response.status_int)
self.assertEqual(return_value[0], response.json)
def test_vendor_passthru_no_such_method(self):
node = obj_utils.create_test_node(self.context)
uuid = node.uuid
info = {'foo': 'bar'}
with mock.patch.object(
rpcapi.ConductorAPI, 'vendor_passthru') as mock_vendor:
mock_vendor.side_effect = exception.UnsupportedDriverExtension(
**{'driver': node.driver, 'node': uuid, 'extension': 'test'})
response = self.post_json('/nodes/%s/vendor_passthru/test' % uuid,
info, expect_errors=True)
mock_vendor.assert_called_once_with(
mock.ANY, uuid, 'test', 'POST', info, 'test-topic')
self.assertEqual(400, response.status_code)
def test_vendor_passthru_without_method(self):
node = obj_utils.create_test_node(self.context)
response = self.post_json('/nodes/%s/vendor_passthru' % node.uuid,
{'foo': 'bar'}, expect_errors=True)
        self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_code)
self.assertTrue(response.json['error_message'])
def test_post_ports_subresource(self):
node = obj_utils.create_test_node(self.context)
pdict = test_api_utils.port_post_data(node_id=None)
pdict['node_uuid'] = node.uuid
response = self.post_json('/nodes/ports', pdict,
expect_errors=True)
self.assertEqual(403, response.status_int)
def test_create_node_no_mandatory_field_driver(self):
ndict = test_api_utils.post_get_test_node()
del ndict['driver']
response = self.post_json('/nodes', ndict, expect_errors=True)
self.assertEqual(400, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
def test_create_node_invalid_driver(self):
ndict = test_api_utils.post_get_test_node()
self.mock_gtf.side_effect = exception.NoValidHost('Fake Error')
response = self.post_json('/nodes', ndict, expect_errors=True)
self.assertEqual(400, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
def test_create_node_no_chassis_uuid(self):
ndict = test_api_utils.post_get_test_node()
del ndict['chassis_uuid']
response = self.post_json('/nodes', ndict)
self.assertEqual('application/json', response.content_type)
self.assertEqual(201, response.status_int)
# Check location header
self.assertIsNotNone(response.location)
expected_location = '/v1/nodes/%s' % ndict['uuid']
self.assertEqual(urlparse.urlparse(response.location).path,
expected_location)
def test_create_node_with_chassis_uuid(self):
ndict = test_api_utils.post_get_test_node(
chassis_uuid=self.chassis.uuid)
response = self.post_json('/nodes', ndict)
self.assertEqual('application/json', response.content_type)
self.assertEqual(201, response.status_int)
result = self.get_json('/nodes/%s' % ndict['uuid'])
self.assertEqual(ndict['chassis_uuid'], result['chassis_uuid'])
# Check location header
self.assertIsNotNone(response.location)
expected_location = '/v1/nodes/%s' % ndict['uuid']
self.assertEqual(urlparse.urlparse(response.location).path,
expected_location)
def test_create_node_chassis_uuid_not_found(self):
ndict = test_api_utils.post_get_test_node(
chassis_uuid='1a1a1a1a-2b2b-3c3c-4d4d-5e5e5e5e5e5e')
response = self.post_json('/nodes', ndict, expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_int)
self.assertTrue(response.json['error_message'])
def test_create_node_with_internal_field(self):
ndict = test_api_utils.post_get_test_node()
ndict['reservation'] = 'fake'
response = self.post_json('/nodes', ndict, expect_errors=True)
self.assertEqual('application/json', response.content_type)
self.assertEqual(400, response.status_int)
self.assertTrue(response.json['error_message'])
@mock.patch.object(rpcapi.ConductorAPI, 'get_node_vendor_passthru_methods')
def test_vendor_passthru_methods(self, get_methods_mock):
return_value = {'foo': 'bar'}
get_methods_mock.return_value = return_value
node = obj_utils.create_test_node(self.context)
path = '/nodes/%s/vendor_passthru/methods' % node.uuid
data = self.get_json(path)
self.assertEqual(return_value, data)
get_methods_mock.assert_called_once_with(mock.ANY, node.uuid,
topic=mock.ANY)
# Now let's test the cache: Reset the mock
get_methods_mock.reset_mock()
# Call it again
data = self.get_json(path)
self.assertEqual(return_value, data)
# Assert RPC method wasn't called this time
self.assertFalse(get_methods_mock.called)
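# The caching asserted above can be sketched roughly as follows (purely
# illustrative; the real cache lives in the API's node controller, and the
# key and helper name used here are assumptions, not the actual
# implementation):
#
#     _VENDOR_METHODS = {}
#
#     def _get_vendor_methods(rpcapi, context, node, topic):
#         if node.driver not in _VENDOR_METHODS:
#             _VENDOR_METHODS[node.driver] = (
#                 rpcapi.get_node_vendor_passthru_methods(
#                     context, node.uuid, topic=topic))
#         return _VENDOR_METHODS[node.driver]
#
# which is why the second GET in the test does not trigger another RPC call.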
class TestDelete(test_api_base.FunctionalTest):
def setUp(self):
super(TestDelete, self).setUp()
p = mock.patch.object(rpcapi.ConductorAPI, 'get_topic_for')
self.mock_gtf = p.start()
self.mock_gtf.return_value = 'test-topic'
self.addCleanup(p.stop)
@mock.patch.object(rpcapi.ConductorAPI, 'destroy_node')
def test_delete_node(self, mock_dn):
node = obj_utils.create_test_node(self.context)
self.delete('/nodes/%s' % node.uuid)
mock_dn.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
@mock.patch.object(rpcapi.ConductorAPI, 'destroy_node')
def test_delete_node_by_name_unsupported(self, mock_dn):
node = obj_utils.create_test_node(self.context, name='foo')
self.delete('/nodes/%s' % node.name,
expect_errors=True)
self.assertFalse(mock_dn.called)
@mock.patch.object(rpcapi.ConductorAPI, 'destroy_node')
def test_delete_node_by_name(self, mock_dn):
node = obj_utils.create_test_node(self.context, name='foo')
self.delete('/nodes/%s' % node.name,
headers={api_base.Version.string: "1.5"})
mock_dn.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
@mock.patch.object(objects.Node, 'get_by_uuid')
def test_delete_node_not_found(self, mock_gbu):
node = obj_utils.get_test_node(self.context)
mock_gbu.side_effect = exception.NodeNotFound(node=node.uuid)
response = self.delete('/nodes/%s' % node.uuid, expect_errors=True)
self.assertEqual(404, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
mock_gbu.assert_called_once_with(mock.ANY, node.uuid)
@mock.patch.object(objects.Node, 'get_by_name')
def test_delete_node_not_found_by_name_unsupported(self, mock_gbn):
node = obj_utils.get_test_node(self.context, name='foo')
mock_gbn.side_effect = exception.NodeNotFound(node=node.name)
response = self.delete('/nodes/%s' % node.name,
expect_errors=True)
self.assertEqual(404, response.status_int)
self.assertFalse(mock_gbn.called)
@mock.patch.object(objects.Node, 'get_by_name')
def test_delete_node_not_found_by_name(self, mock_gbn):
node = obj_utils.get_test_node(self.context, name='foo')
mock_gbn.side_effect = exception.NodeNotFound(node=node.name)
response = self.delete('/nodes/%s' % node.name,
headers={api_base.Version.string: "1.5"},
expect_errors=True)
self.assertEqual(404, response.status_int)
self.assertEqual('application/json', response.content_type)
self.assertTrue(response.json['error_message'])
mock_gbn.assert_called_once_with(mock.ANY, node.name)
def test_delete_ports_subresource(self):
node = obj_utils.create_test_node(self.context)
response = self.delete('/nodes/%s/ports' % node.uuid,
expect_errors=True)
self.assertEqual(403, response.status_int)
@mock.patch.object(rpcapi.ConductorAPI, 'destroy_node')
def test_delete_associated(self, mock_dn):
node = obj_utils.create_test_node(
self.context,
instance_uuid='aaaaaaaa-1111-bbbb-2222-cccccccccccc')
mock_dn.side_effect = exception.NodeAssociated(node=node.uuid,
instance=node.instance_uuid)
response = self.delete('/nodes/%s' % node.uuid, expect_errors=True)
self.assertEqual(409, response.status_int)
mock_dn.assert_called_once_with(mock.ANY, node.uuid, 'test-topic')
@mock.patch.object(objects.Node, 'get_by_uuid')
@mock.patch.object(rpcapi.ConductorAPI, 'update_node')
def test_delete_node_maintenance_mode(self, mock_update, mock_get):
node = obj_utils.create_test_node(self.context, maintenance=True,
maintenance_reason='blah')
mock_get.return_value = node
response = self.delete('/nodes/%s/maintenance' % node.uuid)
self.assertEqual(202, response.status_int)
self.assertEqual(b'', response.body)
self.assertEqual(False, node.maintenance)
self.assertEqual(None, node.maintenance_reason)
mock_get.assert_called_once_with(mock.ANY, node.uuid)
mock_update.assert_called_once_with(mock.ANY, mock.ANY,
topic='test-topic')
@mock.patch.object(objects.Node, 'get_by_name')
@mock.patch.object(rpcapi.ConductorAPI, 'update_node')
def test_delete_node_maintenance_mode_by_name(self, mock_update,
mock_get):
node = obj_utils.create_test_node(self.context, maintenance=True,
maintenance_reason='blah',
name='foo')
mock_get.return_value = node
response = self.delete('/nodes/%s/maintenance' % node.name,
headers={api_base.Version.string: "1.5"})
self.assertEqual(202, response.status_int)
self.assertEqual(b'', response.body)
self.assertEqual(False, node.maintenance)
self.assertEqual(None, node.maintenance_reason)
mock_get.assert_called_once_with(mock.ANY, node.name)
mock_update.assert_called_once_with(mock.ANY, mock.ANY,
topic='test-topic')
class TestPut(test_api_base.FunctionalTest):
def setUp(self):
super(TestPut, self).setUp()
self.node = obj_utils.create_test_node(self.context,
provision_state=states.AVAILABLE, name='node-39')
p = mock.patch.object(rpcapi.ConductorAPI, 'get_topic_for')
self.mock_gtf = p.start()
self.mock_gtf.return_value = 'test-topic'
self.addCleanup(p.stop)
p = mock.patch.object(rpcapi.ConductorAPI, 'change_node_power_state')
self.mock_cnps = p.start()
self.addCleanup(p.stop)
p = mock.patch.object(rpcapi.ConductorAPI, 'do_node_deploy')
self.mock_dnd = p.start()
self.addCleanup(p.stop)
p = mock.patch.object(rpcapi.ConductorAPI, 'do_node_tear_down')
self.mock_dntd = p.start()
self.addCleanup(p.stop)
p = mock.patch.object(rpcapi.ConductorAPI, 'inspect_hardware')
self.mock_dnih = p.start()
self.addCleanup(p.stop)
def test_power_state(self):
response = self.put_json('/nodes/%s/states/power' % self.node.uuid,
{'target': states.POWER_ON})
self.assertEqual(202, response.status_code)
self.assertEqual(b'', response.body)
self.mock_cnps.assert_called_once_with(mock.ANY,
self.node.uuid,
states.POWER_ON,
'test-topic')
# Check location header
self.assertIsNotNone(response.location)
expected_location = '/v1/nodes/%s/states' % self.node.uuid
self.assertEqual(urlparse.urlparse(response.location).path,
expected_location)
def test_power_state_by_name_unsupported(self):
response = self.put_json('/nodes/%s/states/power' % self.node.name,
{'target': states.POWER_ON},
expect_errors=True)
self.assertEqual(404, response.status_code)
def test_power_state_by_name(self):
response = self.put_json('/nodes/%s/states/power' % self.node.name,
{'target': states.POWER_ON},
headers={api_base.Version.string: "1.5"})
self.assertEqual(202, response.status_code)
self.assertEqual(b'', response.body)
self.mock_cnps.assert_called_once_with(mock.ANY,
self.node.uuid,
states.POWER_ON,
'test-topic')
# Check location header
self.assertIsNotNone(response.location)
expected_location = '/v1/nodes/%s/states' % self.node.name
self.assertEqual(urlparse.urlparse(response.location).path,
expected_location)
def test_power_invalid_state_request(self):
ret = self.put_json('/nodes/%s/states/power' % self.node.uuid,
{'target': 'not-supported'}, expect_errors=True)
self.assertEqual(400, ret.status_code)
def test_power_change_during_cleaning(self):
self.node.provision_state = states.CLEANING
self.node.save()
ret = self.put_json('/nodes/%s/states/power' % self.node.uuid,
{'target': states.POWER_OFF}, expect_errors=True)
self.assertEqual(400, ret.status_code)
def test_provision_invalid_state_request(self):
ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
{'target': 'not-supported'}, expect_errors=True)
self.assertEqual(400, ret.status_code)
def test_provision_with_deploy(self):
ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
{'target': states.ACTIVE})
self.assertEqual(202, ret.status_code)
self.assertEqual(b'', ret.body)
self.mock_dnd.assert_called_once_with(
mock.ANY, self.node.uuid, False, None, 'test-topic')
# Check location header
self.assertIsNotNone(ret.location)
expected_location = '/v1/nodes/%s/states' % self.node.uuid
self.assertEqual(urlparse.urlparse(ret.location).path,
expected_location)
def test_provision_by_name_unsupported(self):
ret = self.put_json('/nodes/%s/states/provision' % self.node.name,
{'target': states.ACTIVE},
expect_errors=True)
self.assertEqual(404, ret.status_code)
def test_provision_by_name(self):
ret = self.put_json('/nodes/%s/states/provision' % self.node.name,
{'target': states.ACTIVE},
headers={api_base.Version.string: "1.5"})
self.assertEqual(202, ret.status_code)
self.assertEqual(b'', ret.body)
self.mock_dnd.assert_called_once_with(
mock.ANY, self.node.uuid, False, None, 'test-topic')
def test_provision_with_deploy_configdrive(self):
ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
{'target': states.ACTIVE, 'configdrive': 'foo'})
self.assertEqual(202, ret.status_code)
self.assertEqual(b'', ret.body)
self.mock_dnd.assert_called_once_with(
mock.ANY, self.node.uuid, False, 'foo', 'test-topic')
# Check location header
self.assertIsNotNone(ret.location)
expected_location = '/v1/nodes/%s/states' % self.node.uuid
self.assertEqual(urlparse.urlparse(ret.location).path,
expected_location)
def test_provision_with_configdrive_not_active(self):
ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
{'target': states.DELETED, 'configdrive': 'foo'},
expect_errors=True)
self.assertEqual(400, ret.status_code)
def test_provision_with_tear_down(self):
node = self.node
node.provision_state = states.ACTIVE
node.target_provision_state = states.NOSTATE
node.save()
ret = self.put_json('/nodes/%s/states/provision' % node.uuid,
{'target': states.DELETED})
self.assertEqual(202, ret.status_code)
self.assertEqual(b'', ret.body)
self.mock_dntd.assert_called_once_with(
mock.ANY, node.uuid, 'test-topic')
# Check location header
self.assertIsNotNone(ret.location)
expected_location = '/v1/nodes/%s/states' % node.uuid
self.assertEqual(urlparse.urlparse(ret.location).path,
expected_location)
def test_provision_already_in_progress(self):
node = self.node
node.provision_state = states.DEPLOYING
node.target_provision_state = states.ACTIVE
node.reservation = 'fake-host'
node.save()
ret = self.put_json('/nodes/%s/states/provision' % node.uuid,
{'target': states.ACTIVE},
expect_errors=True)
self.assertEqual(409, ret.status_code) # Conflict
def test_provision_with_tear_down_in_progress_deploywait(self):
node = self.node
node.provision_state = states.DEPLOYWAIT
node.target_provision_state = states.ACTIVE
node.save()
ret = self.put_json('/nodes/%s/states/provision' % node.uuid,
{'target': states.DELETED})
self.assertEqual(202, ret.status_code)
self.assertEqual(b'', ret.body)
self.mock_dntd.assert_called_once_with(
mock.ANY, node.uuid, 'test-topic')
# Check location header
self.assertIsNotNone(ret.location)
expected_location = '/v1/nodes/%s/states' % node.uuid
self.assertEqual(urlparse.urlparse(ret.location).path,
expected_location)
    # NOTE(deva): this test asserts API functionality which is not part of
# the new-ironic-state-machine in Kilo. It is retained for backwards
# compatibility with Juno.
# TODO(deva): add a deprecation-warning to the REST result
# and check for it here.
def test_provision_with_deploy_after_deployfail(self):
node = self.node
node.provision_state = states.DEPLOYFAIL
node.target_provision_state = states.ACTIVE
node.save()
ret = self.put_json('/nodes/%s/states/provision' % node.uuid,
{'target': states.ACTIVE})
self.assertEqual(202, ret.status_code)
self.assertEqual(b'', ret.body)
self.mock_dnd.assert_called_once_with(
mock.ANY, node.uuid, False, None, 'test-topic')
# Check location header
self.assertIsNotNone(ret.location)
expected_location = '/v1/nodes/%s/states' % node.uuid
self.assertEqual(expected_location,
urlparse.urlparse(ret.location).path)
def test_provision_already_in_state(self):
self.node.provision_state = states.ACTIVE
self.node.save()
ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
{'target': states.ACTIVE},
expect_errors=True)
self.assertEqual(400, ret.status_code)
def test_manage_raises_error_before_1_2(self):
ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
{'target': states.VERBS['manage']},
headers={},
expect_errors=True)
self.assertEqual(406, ret.status_code)
@mock.patch.object(rpcapi.ConductorAPI, 'do_provisioning_action')
def test_provide_from_manage(self, mock_dpa):
self.node.provision_state = states.MANAGEABLE
self.node.save()
ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
{'target': states.VERBS['provide']},
headers={api_base.Version.string: "1.4"})
self.assertEqual(202, ret.status_code)
self.assertEqual(b'', ret.body)
mock_dpa.assert_called_once_with(mock.ANY, self.node.uuid,
states.VERBS['provide'],
'test-topic')
def test_inspect_already_in_progress(self):
node = self.node
node.provision_state = states.INSPECTING
node.target_provision_state = states.MANAGEABLE
node.reservation = 'fake-host'
node.save()
ret = self.put_json('/nodes/%s/states/provision' % node.uuid,
{'target': states.MANAGEABLE},
expect_errors=True)
self.assertEqual(409, ret.status_code) # Conflict
@mock.patch.object(rpcapi.ConductorAPI, 'do_provisioning_action')
def test_manage_from_available(self, mock_dpa):
self.node.provision_state = states.AVAILABLE
self.node.save()
ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
{'target': states.VERBS['manage']},
headers={api_base.Version.string: "1.4"})
self.assertEqual(202, ret.status_code)
self.assertEqual(b'', ret.body)
mock_dpa.assert_called_once_with(mock.ANY, self.node.uuid,
states.VERBS['manage'],
'test-topic')
@mock.patch.object(rpcapi.ConductorAPI, 'do_provisioning_action')
def test_bad_requests_in_managed_state(self, mock_dpa):
self.node.provision_state = states.MANAGEABLE
self.node.save()
for state in [states.ACTIVE, states.REBUILD, states.DELETED]:
ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
                                {'target': state},
expect_errors=True)
self.assertEqual(400, ret.status_code)
self.assertEqual(0, mock_dpa.call_count)
def test_set_console_mode_enabled(self):
with mock.patch.object(rpcapi.ConductorAPI,
'set_console_mode') as mock_scm:
ret = self.put_json('/nodes/%s/states/console' % self.node.uuid,
{'enabled': "true"})
self.assertEqual(202, ret.status_code)
self.assertEqual(b'', ret.body)
mock_scm.assert_called_once_with(mock.ANY, self.node.uuid,
True, 'test-topic')
# Check location header
self.assertIsNotNone(ret.location)
expected_location = '/v1/nodes/%s/states/console' % self.node.uuid
self.assertEqual(urlparse.urlparse(ret.location).path,
expected_location)
@mock.patch.object(rpcapi.ConductorAPI, 'set_console_mode')
def test_set_console_by_name_unsupported(self, mock_scm):
ret = self.put_json('/nodes/%s/states/console' % self.node.name,
{'enabled': "true"},
expect_errors=True)
self.assertEqual(404, ret.status_code)
@mock.patch.object(rpcapi.ConductorAPI, 'set_console_mode')
def test_set_console_by_name(self, mock_scm):
ret = self.put_json('/nodes/%s/states/console' % self.node.name,
{'enabled': "true"},
headers={api_base.Version.string: "1.5"})
self.assertEqual(202, ret.status_code)
self.assertEqual(b'', ret.body)
mock_scm.assert_called_once_with(mock.ANY, self.node.uuid,
True, 'test-topic')
def test_set_console_mode_disabled(self):
with mock.patch.object(rpcapi.ConductorAPI,
'set_console_mode') as mock_scm:
ret = self.put_json('/nodes/%s/states/console' % self.node.uuid,
{'enabled': "false"})
self.assertEqual(202, ret.status_code)
self.assertEqual(b'', ret.body)
mock_scm.assert_called_once_with(mock.ANY, self.node.uuid,
False, 'test-topic')
# Check location header
self.assertIsNotNone(ret.location)
expected_location = '/v1/nodes/%s/states/console' % self.node.uuid
self.assertEqual(urlparse.urlparse(ret.location).path,
expected_location)
def test_set_console_mode_bad_request(self):
with mock.patch.object(rpcapi.ConductorAPI,
'set_console_mode') as mock_scm:
ret = self.put_json('/nodes/%s/states/console' % self.node.uuid,
{'enabled': "invalid-value"},
expect_errors=True)
self.assertEqual(400, ret.status_code)
# assert set_console_mode wasn't called
assert not mock_scm.called
def test_set_console_mode_bad_request_missing_parameter(self):
with mock.patch.object(rpcapi.ConductorAPI,
'set_console_mode') as mock_scm:
ret = self.put_json('/nodes/%s/states/console' % self.node.uuid,
{}, expect_errors=True)
self.assertEqual(400, ret.status_code)
# assert set_console_mode wasn't called
assert not mock_scm.called
def test_set_console_mode_console_not_supported(self):
with mock.patch.object(rpcapi.ConductorAPI,
'set_console_mode') as mock_scm:
mock_scm.side_effect = exception.UnsupportedDriverExtension(
extension='console', driver='test-driver')
ret = self.put_json('/nodes/%s/states/console' % self.node.uuid,
{'enabled': "true"}, expect_errors=True)
self.assertEqual(400, ret.status_code)
mock_scm.assert_called_once_with(mock.ANY, self.node.uuid,
True, 'test-topic')
def test_provision_node_in_maintenance_fail(self):
self.node.maintenance = True
self.node.save()
ret = self.put_json('/nodes/%s/states/provision' % self.node.uuid,
{'target': states.ACTIVE},
expect_errors=True)
self.assertEqual(400, ret.status_code)
self.assertTrue(ret.json['error_message'])
@mock.patch.object(rpcapi.ConductorAPI, 'set_boot_device')
def test_set_boot_device(self, mock_sbd):
device = boot_devices.PXE
ret = self.put_json('/nodes/%s/management/boot_device'
% self.node.uuid, {'boot_device': device})
self.assertEqual(204, ret.status_code)
self.assertEqual(b'', ret.body)
mock_sbd.assert_called_once_with(mock.ANY, self.node.uuid,
device, persistent=False,
topic='test-topic')
@mock.patch.object(rpcapi.ConductorAPI, 'set_boot_device')
def test_set_boot_device_by_name(self, mock_sbd):
device = boot_devices.PXE
ret = self.put_json('/nodes/%s/management/boot_device'
% self.node.name, {'boot_device': device},
headers={api_base.Version.string: "1.5"})
self.assertEqual(204, ret.status_code)
self.assertEqual(b'', ret.body)
mock_sbd.assert_called_once_with(mock.ANY, self.node.uuid,
device, persistent=False,
topic='test-topic')
@mock.patch.object(rpcapi.ConductorAPI, 'set_boot_device')
def test_set_boot_device_not_supported(self, mock_sbd):
mock_sbd.side_effect = exception.UnsupportedDriverExtension(
extension='management', driver='test-driver')
device = boot_devices.PXE
ret = self.put_json('/nodes/%s/management/boot_device'
% self.node.uuid, {'boot_device': device},
expect_errors=True)
self.assertEqual(400, ret.status_code)
self.assertTrue(ret.json['error_message'])
mock_sbd.assert_called_once_with(mock.ANY, self.node.uuid,
device, persistent=False,
topic='test-topic')
@mock.patch.object(rpcapi.ConductorAPI, 'set_boot_device')
def test_set_boot_device_persistent(self, mock_sbd):
device = boot_devices.PXE
ret = self.put_json('/nodes/%s/management/boot_device?persistent=True'
% self.node.uuid, {'boot_device': device})
self.assertEqual(204, ret.status_code)
self.assertEqual(b'', ret.body)
mock_sbd.assert_called_once_with(mock.ANY, self.node.uuid,
device, persistent=True,
topic='test-topic')
@mock.patch.object(rpcapi.ConductorAPI, 'set_boot_device')
def test_set_boot_device_persistent_invalid_value(self, mock_sbd):
device = boot_devices.PXE
ret = self.put_json('/nodes/%s/management/boot_device?persistent=blah'
% self.node.uuid, {'boot_device': device},
expect_errors=True)
self.assertEqual('application/json', ret.content_type)
self.assertEqual(400, ret.status_code)
def _test_set_node_maintenance_mode(self, mock_update, mock_get, reason,
node_ident, is_by_name=False):
request_body = {}
if reason:
request_body['reason'] = reason
self.node.maintenance = False
mock_get.return_value = self.node
if is_by_name:
headers = {api_base.Version.string: "1.5"}
else:
headers = {}
ret = self.put_json('/nodes/%s/maintenance' % node_ident,
request_body, headers=headers)
self.assertEqual(202, ret.status_code)
self.assertEqual(b'', ret.body)
self.assertEqual(True, self.node.maintenance)
self.assertEqual(reason, self.node.maintenance_reason)
mock_get.assert_called_once_with(mock.ANY, node_ident)
mock_update.assert_called_once_with(mock.ANY, mock.ANY,
topic='test-topic')
@mock.patch.object(objects.Node, 'get_by_uuid')
@mock.patch.object(rpcapi.ConductorAPI, 'update_node')
def test_set_node_maintenance_mode(self, mock_update, mock_get):
self._test_set_node_maintenance_mode(mock_update, mock_get,
'fake_reason', self.node.uuid)
@mock.patch.object(objects.Node, 'get_by_uuid')
@mock.patch.object(rpcapi.ConductorAPI, 'update_node')
def test_set_node_maintenance_mode_no_reason(self, mock_update, mock_get):
self._test_set_node_maintenance_mode(mock_update, mock_get, None,
self.node.uuid)
@mock.patch.object(objects.Node, 'get_by_name')
@mock.patch.object(rpcapi.ConductorAPI, 'update_node')
def test_set_node_maintenance_mode_by_name(self, mock_update, mock_get):
self._test_set_node_maintenance_mode(mock_update, mock_get,
'fake_reason', self.node.name,
is_by_name=True)
@mock.patch.object(objects.Node, 'get_by_name')
@mock.patch.object(rpcapi.ConductorAPI, 'update_node')
def test_set_node_maintenance_mode_no_reason_by_name(self, mock_update,
mock_get):
self._test_set_node_maintenance_mode(mock_update, mock_get, None,
self.node.name, is_by_name=True)
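# A minimal sketch of the maintenance API exercised by the helpers above,
# assuming a plain HTTP client (the request shape is inferred from these
# tests, not quoted from the API reference):
#
#     PUT /v1/nodes/<uuid-or-name>/maintenance
#     {"reason": "fake_reason"}      # the body may also be empty
#
# The API sets maintenance=True (and the optional reason) on the node and
# forwards it to the conductor with an update_node RPC; the corresponding
# DELETE request, tested in TestDelete above, clears both fields again.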
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
if sys.platform == 'win32':
import wmi
from nova import exception
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
from nova.virt.hyperv import vmutils
from nova.virt.hyperv import volumeutilsv2
LOG = logging.getLogger(__name__)
class LiveMigrationUtils(object):
def __init__(self):
self._vmutils = vmutils.VMUtils()
self._volutils = volumeutilsv2.VolumeUtilsV2()
def _get_conn_v2(self, host='localhost'):
try:
return wmi.WMI(moniker='//%s/root/virtualization/v2' % host)
except wmi.x_wmi as ex:
LOG.exception(ex)
if ex.com_error.hresult == -2147217394:
msg = (_('Live migration is not supported on target host "%s"')
% host)
elif ex.com_error.hresult == -2147023174:
msg = (_('Target live migration host "%s" is unreachable')
% host)
else:
msg = _('Live migration failed: %s') % ex.message
raise vmutils.HyperVException(msg)
def check_live_migration_config(self):
conn_v2 = self._get_conn_v2()
migration_svc = conn_v2.Msvm_VirtualSystemMigrationService()[0]
vsmssds = migration_svc.associators(
wmi_association_class='Msvm_ElementSettingData',
wmi_result_class='Msvm_VirtualSystemMigrationServiceSettingData')
vsmssd = vsmssds[0]
if not vsmssd.EnableVirtualSystemMigration:
raise vmutils.HyperVException(
_('Live migration is not enabled on this host'))
if not migration_svc.MigrationServiceListenerIPAddressList:
raise vmutils.HyperVException(
_('Live migration networks are not configured on this host'))
def _get_vm(self, conn_v2, vm_name):
vms = conn_v2.Msvm_ComputerSystem(ElementName=vm_name)
n = len(vms)
if not n:
raise exception.NotFound(_('VM not found: %s') % vm_name)
elif n > 1:
raise vmutils.HyperVException(_('Duplicate VM name found: %s')
% vm_name)
return vms[0]
def _destroy_planned_vm(self, conn_v2_remote, planned_vm):
LOG.debug(_("Destroying existing remote planned VM: %s"),
planned_vm.ElementName)
vs_man_svc = conn_v2_remote.Msvm_VirtualSystemManagementService()[0]
(job_path, ret_val) = vs_man_svc.DestroySystem(planned_vm.path_())
self._vmutils.check_ret_val(ret_val, job_path)
def _check_existing_planned_vm(self, conn_v2_remote, vm):
# Make sure that there's not yet a remote planned VM on the target
# host for this VM
planned_vms = conn_v2_remote.Msvm_PlannedComputerSystem(Name=vm.Name)
if planned_vms:
self._destroy_planned_vm(conn_v2_remote, planned_vms[0])
def _create_remote_planned_vm(self, conn_v2_local, conn_v2_remote,
vm, rmt_ip_addr_list, dest_host):
# Staged
vsmsd = conn_v2_local.query("select * from "
"Msvm_VirtualSystemMigrationSettingData "
"where MigrationType = 32770")[0]
vsmsd.DestinationIPAddressList = rmt_ip_addr_list
migration_setting_data = vsmsd.GetText_(1)
LOG.debug(_("Creating remote planned VM for VM: %s"),
vm.ElementName)
migr_svc = conn_v2_local.Msvm_VirtualSystemMigrationService()[0]
(job_path, ret_val) = migr_svc.MigrateVirtualSystemToHost(
ComputerSystem=vm.path_(),
DestinationHost=dest_host,
MigrationSettingData=migration_setting_data)
self._vmutils.check_ret_val(ret_val, job_path)
return conn_v2_remote.Msvm_PlannedComputerSystem(Name=vm.Name)[0]
def _get_physical_disk_paths(self, vm_name):
ide_ctrl_path = self._vmutils.get_vm_ide_controller(vm_name, 0)
ide_paths = self._vmutils.get_controller_volume_paths(ide_ctrl_path)
scsi_ctrl_path = self._vmutils.get_vm_scsi_controller(vm_name)
scsi_paths = self._vmutils.get_controller_volume_paths(scsi_ctrl_path)
        disk_paths = dict(ide_paths)
        disk_paths.update(scsi_paths)
        return disk_paths
def _get_remote_disk_data(self, vmutils_remote, disk_paths, dest_host):
volutils_remote = volumeutilsv2.VolumeUtilsV2(dest_host)
disk_paths_remote = {}
iscsi_targets = []
for (rasd_rel_path, disk_path) in disk_paths.items():
(target_iqn,
target_lun) = self._volutils.get_target_from_disk_path(disk_path)
iscsi_targets.append((target_iqn, target_lun))
dev_num = volutils_remote.get_device_number_for_target(target_iqn,
target_lun)
disk_path_remote = vmutils_remote.get_mounted_disk_by_drive_number(
dev_num)
disk_paths_remote[rasd_rel_path] = disk_path_remote
return (disk_paths_remote, iscsi_targets)
def _update_planned_vm_disk_resources(self, vmutils_remote, conn_v2_remote,
planned_vm, vm_name,
disk_paths_remote):
vm_settings = planned_vm.associators(
wmi_association_class='Msvm_SettingsDefineState',
wmi_result_class='Msvm_VirtualSystemSettingData')[0]
updated_resource_setting_data = []
sasds = vm_settings.associators(
wmi_association_class='Msvm_VirtualSystemSettingDataComponent')
for sasd in sasds:
if (sasd.ResourceType == 17 and sasd.ResourceSubType ==
"Microsoft:Hyper-V:Physical Disk Drive" and
sasd.HostResource):
# Replace the local disk target with the correct remote one
old_disk_path = sasd.HostResource[0]
new_disk_path = disk_paths_remote.pop(sasd.path().RelPath)
LOG.debug(_("Replacing host resource "
"%(old_disk_path)s with "
"%(new_disk_path)s on planned VM %(vm_name)s"),
{'old_disk_path': old_disk_path,
'new_disk_path': new_disk_path,
'vm_name': vm_name})
sasd.HostResource = [new_disk_path]
updated_resource_setting_data.append(sasd.GetText_(1))
LOG.debug(_("Updating remote planned VM disk paths for VM: %s"),
vm_name)
vsmsvc = conn_v2_remote.Msvm_VirtualSystemManagementService()[0]
(res_settings, job_path, ret_val) = vsmsvc.ModifyResourceSettings(
ResourceSettings=updated_resource_setting_data)
vmutils_remote.check_ret_val(ret_val, job_path)
def _get_vhd_setting_data(self, vm):
vm_settings = vm.associators(
wmi_association_class='Msvm_SettingsDefineState',
wmi_result_class='Msvm_VirtualSystemSettingData')[0]
new_resource_setting_data = []
sasds = vm_settings.associators(
wmi_association_class='Msvm_VirtualSystemSettingDataComponent',
wmi_result_class='Msvm_StorageAllocationSettingData')
for sasd in sasds:
if (sasd.ResourceType == 31 and sasd.ResourceSubType ==
"Microsoft:Hyper-V:Virtual Hard Disk"):
#sasd.PoolId = ""
new_resource_setting_data.append(sasd.GetText_(1))
return new_resource_setting_data
def _live_migrate_vm(self, conn_v2_local, vm, planned_vm, rmt_ip_addr_list,
new_resource_setting_data, dest_host):
# VirtualSystemAndStorage
vsmsd = conn_v2_local.query("select * from "
"Msvm_VirtualSystemMigrationSettingData "
"where MigrationType = 32771")[0]
vsmsd.DestinationIPAddressList = rmt_ip_addr_list
if planned_vm:
vsmsd.DestinationPlannedVirtualSystemId = planned_vm.Name
migration_setting_data = vsmsd.GetText_(1)
migr_svc = conn_v2_local.Msvm_VirtualSystemMigrationService()[0]
LOG.debug(_("Starting live migration for VM: %s"), vm.ElementName)
(job_path, ret_val) = migr_svc.MigrateVirtualSystemToHost(
ComputerSystem=vm.path_(),
DestinationHost=dest_host,
MigrationSettingData=migration_setting_data,
NewResourceSettingData=new_resource_setting_data)
self._vmutils.check_ret_val(ret_val, job_path)
def _get_remote_ip_address_list(self, conn_v2_remote, dest_host):
LOG.debug(_("Getting live migration networks for remote host: %s"),
dest_host)
migr_svc_rmt = conn_v2_remote.Msvm_VirtualSystemMigrationService()[0]
return migr_svc_rmt.MigrationServiceListenerIPAddressList
def live_migrate_vm(self, vm_name, dest_host):
self.check_live_migration_config()
conn_v2_local = self._get_conn_v2()
conn_v2_remote = self._get_conn_v2(dest_host)
vm = self._get_vm(conn_v2_local, vm_name)
self._check_existing_planned_vm(conn_v2_remote, vm)
rmt_ip_addr_list = self._get_remote_ip_address_list(conn_v2_remote,
dest_host)
iscsi_targets = []
planned_vm = None
disk_paths = self._get_physical_disk_paths(vm_name)
if disk_paths:
vmutils_remote = vmutils.VMUtils(dest_host)
(disk_paths_remote,
iscsi_targets) = self._get_remote_disk_data(vmutils_remote,
disk_paths,
dest_host)
planned_vm = self._create_remote_planned_vm(conn_v2_local,
conn_v2_remote,
vm, rmt_ip_addr_list,
dest_host)
self._update_planned_vm_disk_resources(vmutils_remote,
conn_v2_remote, planned_vm,
vm_name, disk_paths_remote)
new_resource_setting_data = self._get_vhd_setting_data(vm)
self._live_migrate_vm(conn_v2_local, vm, planned_vm, rmt_ip_addr_list,
new_resource_setting_data, dest_host)
# In case the caller wants to log off the targets after migration
return iscsi_targets
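# A minimal usage sketch, assuming caller code outside this module (the
# instance and host names are illustrative): the returned
# (target_iqn, target_lun) pairs let the source host clean up its iSCSI
# sessions once the guest is running on the destination.
#
#     utils = LiveMigrationUtils()
#     targets = utils.live_migrate_vm('instance-00000001', 'dest-host')
#     for target_iqn, target_lun in targets:
#         # log out of the iSCSI session for target_iqn here; the exact
#         # volume-utils call is intentionally not shown
#         pass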
|
|
######################################################################
# Software License Agreement (BSD License)
#
# Copyright (c) 2010, Rice University
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the Rice University nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
######################################################################
# Author: Mark Moll
import logging
from os.path import exists, join, isdir
from os import getenv
import subprocess
from sys import platform
from pygccxml import declarations
from pyplusplus import module_builder, messages
from pyplusplus.module_builder import call_policies
from pyplusplus.decl_wrappers import print_declarations
# disable some warnings that are mostly harmless
messages.disable( messages.W1014 )
messages.disable( messages.W1023 )
messages.disable( messages.W1040 )
# create a dictionary called "default_replacement" that can be used to replace a method
# declaration with another method. The dictionary is indexed by method name,
# either with or without full scope. For each method there is a 2-tuple. The
# first component is a string that tells Boost.Python how to export the
# replacement method. The second component is a string that contains the C++
# code for the replacement method.
default_replacement = {}
# A C++ call like "foo.print(std::cout)" will be replaced with something more
# pythonesque using the special __str__ method: "print(foo)"
default_replacement['print'] = ('def("__str__", &__str__)', """
std::string __str__(%s* obj)
{
std::ostringstream s;
obj->print(s);
return s.str();
}
""")
# A C++ call like "foo.printSettings(std::cout)" will be replaced with
# something more pythonesque: "print(foo.settings())"
default_replacement['printSettings'] = ('def("settings", &__printSettings)', """
std::string __printSettings(%s* obj)
{
std::ostringstream s;
obj->printSettings(s);
return s.str();
}
""")
# add array read access to objects that can return a const reference using the
# [] operator.
default_replacement['array_read_access'] = ('def("__getitem__", &__getitem)',"""
%s __getitem(%s* obj, unsigned int i)
{
return (*obj)[i];
}
""")
# add array write access to objects that can return a non-const reference
# using the [] operator.
default_replacement['array_write_access'] = ('def("__setitem__", &__setitem)',"""
void __setitem(%s* obj, unsigned int i, const %s& val)
{
(*obj)[i] = val;
}
""")
def returns_reference(decl):
"""Return True iff the declaration returns a pointer or reference."""
c = decl.return_type.decl_string[-1]
return c=='&' or c=='*'
class code_generator_t(object):
"""The base class code generator."""
def __init__(self, name, deps=None, replacement_dict=default_replacement):
"""Constructor.
@name name of the python module
@dep name of another module this module depends on"""
module_builder.set_logger_level( logging.INFO )
candidate_include_paths = [ "/wg/stor5/mpomarlan/ompl/src", "",
"/usr/include/python2.7", "/usr/include", "", "/wg/stor5/mpomarlan/ompl/src/external"]
# Adding standard windows headers
if platform == 'win32':
compiler = getenv('GCCXML_COMPILER')
# MinGW
            if compiler is not None and (compiler.lower().endswith('g++') or compiler.lower().endswith('c++')):
version = subprocess.Popen([compiler, '-dumpversion'],
stdout=subprocess.PIPE).communicate()[0].strip()
# Find whole path to MinGW
compiler_path = ""
for path in getenv('PATH').split(';'):
if exists(join(path, compiler + '.exe')):
compiler_path = path
break
                if compiler_path != "":
# Adding in necessary include paths
candidate_include_paths.append (join(compiler_path, "..", "include"))
candidate_include_paths.append (join(compiler_path, "..", "lib", "gcc", "mingw32", version, "include"))
candidate_include_paths.append (join(compiler_path, "..", "lib", "gcc", "mingw32", version, "include", "c++"))
candidate_include_paths.append (join(compiler_path, "..", "lib", "gcc", "mingw32", version, "include", "c++", "mingw32"))
include_paths = []
for path in candidate_include_paths:
if len(path)>0 and exists(path):
include_paths.append(path)
self.mb = module_builder.module_builder_t(
files = [ 'bindings/' + name + '.h' ],
cache = '/wg/stor5/mpomarlan/ompl/build/pyplusplus_'+name+'.cache',
gccxml_path = "/usr/local/bin/gccxml",
include_paths = include_paths,
cflags="",
indexing_suite_version = 2 )
        self.replacement = {} if replacement_dict is None else replacement_dict
self.mb.classes().always_expose_using_scope = True
self.mb.add_registration_code('PyEval_InitThreads();', tail=False)
self.std_ns = self.mb.namespace('std')
self.ompl_ns = self.mb.namespace('ompl')
self.call_policies()
self.filter_declarations()
        if deps is not None:
for dep in deps:
self.mb.register_module_dependency(dep)
self.mb.build_code_creator( module_name='_'+name )
self.mb.split_module('bindings/' + name, use_files_sum_repository=True)
def call_policies(self):
"""Define default call policies: for anything that returns a reference,
return a reference to the existing object. This is potentially dangerous."""
decllist = [ self.mb.member_functions(returns_reference),
self.mb.free_functions(returns_reference),
self.mb.operators(returns_reference) ]
for decls in decllist:
for decl in decls:
if decl.return_type.decl_string[-7:]=='const &':
decl.call_policies = \
call_policies.return_value_policy(
call_policies.copy_const_reference)
else:
decl.call_policies = \
call_policies.return_value_policy(
call_policies.reference_existing_object)
def filter_declarations(self):
"""Remove some classes and functions from the std namespace"""
self.std_ns.class_('ios_base').exclude()
self.std_ns.free_functions().exclude()
self.std_ns.operators().exclude()
def replace_member_function(self, decl):
"""Utility function to replace a declaration."""
decl.exclude()
cls = decl.parent
self.mb.logger.info('Replacing member function ' + cls.decl_string+'::'+decl.name)
try:
(reg, wrapper) = self.replacement[cls.decl_string+'::'+decl.name]
except KeyError:
(reg, wrapper) = self.replacement[decl.name]
cls.add_registration_code(reg)
cls.add_declaration_code(wrapper % cls.decl_string)
def replace_member_functions(self, decls):
"""Utility function to replace multiple declarations."""
for decl in decls:
self.replace_member_function(decl)
def add_array_access(self, cls, rettype=None):
"""Add array indexing to a class."""
op = cls.operators("[]")
op.exclude()
self.mb.logger.info('Adding array access to class ' + cls.decl_string)
(reg, wrapper) = self.replacement['array_read_access']
cls.add_registration_code(reg)
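        # If no return type was given, fall back to the declared return type of
        # the first [] operator found on the class.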
if not rettype: rettype=op[0].return_type.decl_string
cls.add_declaration_code(wrapper % (rettype, cls.decl_string))
(reg, wrapper) = self.replacement['array_write_access']
cls.add_registration_code(reg)
cls.add_declaration_code(wrapper % (cls.decl_string, rettype))
    def add_boost_function(self, FT, func_name, func_doc):
        """Add declaration and registration code that exposes the boost::function
        type FT to Python as func_name, documented with func_doc."""
        self.mb.add_declaration_code('PYDECLARE_FUNCTION(%s,%s)' % (FT, func_name))
        self.mb.add_registration_code('PYREGISTER_FUNCTION(%s,%s,"%s")' % (FT, func_name, func_doc))
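# A hedged usage sketch, not part of the original generator: concrete binding
# generators would subclass code_generator_t, refine filter_declarations()
# (and, if needed, call_policies()), and let the base constructor drive
# pyplusplus. The module name 'util' and the namespace pruning below are
# illustrative assumptions only.
class example_generator_t(code_generator_t):
    def __init__(self):
        # parses bindings/util.h and emits sources for the python module '_util'
        code_generator_t.__init__(self, 'util')
    def filter_declarations(self):
        # keep the std:: pruning from the base class, then expose one namespace
        code_generator_t.filter_declarations(self)
        self.ompl_ns.namespace('util').include()
# Instantiating example_generator_t() would run gccxml and write the binding
# sources, so it is deliberately not constructed here.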
"""The tests for the logbook component."""
# pylint: disable=protected-access,invalid-name
import collections
from datetime import datetime, timedelta
import json
from unittest.mock import Mock, patch
import pytest
import voluptuous as vol
from homeassistant.components import logbook, recorder
from homeassistant.components.alexa.smart_home import EVENT_ALEXA_SMART_HOME
from homeassistant.components.automation import EVENT_AUTOMATION_TRIGGERED
from homeassistant.components.recorder.models import process_timestamp_to_utc_isoformat
from homeassistant.components.script import EVENT_SCRIPT_STARTED
from homeassistant.const import (
ATTR_DOMAIN,
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
ATTR_NAME,
ATTR_SERVICE,
CONF_DOMAINS,
CONF_ENTITIES,
CONF_EXCLUDE,
CONF_INCLUDE,
EVENT_CALL_SERVICE,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STARTED,
EVENT_HOMEASSISTANT_STOP,
EVENT_STATE_CHANGED,
STATE_OFF,
STATE_ON,
)
import homeassistant.core as ha
from homeassistant.helpers.entityfilter import CONF_ENTITY_GLOBS
from homeassistant.helpers.json import JSONEncoder
from homeassistant.setup import async_setup_component, setup_component
import homeassistant.util.dt as dt_util
from tests.common import get_test_home_assistant, init_recorder_component, mock_platform
from tests.components.recorder.common import trigger_db_commit
EMPTY_CONFIG = logbook.CONFIG_SCHEMA({logbook.DOMAIN: {}})
@pytest.fixture
def hass_():
"""Set up things to be run when tests are started."""
hass = get_test_home_assistant()
init_recorder_component(hass) # Force an in memory DB
with patch("homeassistant.components.http.start_http_server_and_save_config"):
assert setup_component(hass, logbook.DOMAIN, EMPTY_CONFIG)
yield hass
hass.stop()
def test_service_call_create_logbook_entry(hass_):
"""Test if service call create log book entry."""
calls = []
@ha.callback
def event_listener(event):
"""Append on event."""
calls.append(event)
hass_.bus.listen(logbook.EVENT_LOGBOOK_ENTRY, event_listener)
hass_.services.call(
logbook.DOMAIN,
"log",
{
logbook.ATTR_NAME: "Alarm",
logbook.ATTR_MESSAGE: "is triggered",
logbook.ATTR_DOMAIN: "switch",
logbook.ATTR_ENTITY_ID: "switch.test_switch",
},
True,
)
hass_.services.call(
logbook.DOMAIN,
"log",
{
logbook.ATTR_NAME: "This entry",
logbook.ATTR_MESSAGE: "has no domain or entity_id",
},
True,
)
# Logbook entry service call results in firing an event.
# Our service call will unblock when the event listeners have been
# scheduled. This means that they may not have been processed yet.
trigger_db_commit(hass_)
hass_.block_till_done()
hass_.data[recorder.DATA_INSTANCE].block_till_done()
events = list(
logbook._get_events(
hass_,
dt_util.utcnow() - timedelta(hours=1),
dt_util.utcnow() + timedelta(hours=1),
)
)
assert len(events) == 2
assert len(calls) == 2
first_call = calls[-2]
assert first_call.data.get(logbook.ATTR_NAME) == "Alarm"
assert first_call.data.get(logbook.ATTR_MESSAGE) == "is triggered"
assert first_call.data.get(logbook.ATTR_DOMAIN) == "switch"
assert first_call.data.get(logbook.ATTR_ENTITY_ID) == "switch.test_switch"
last_call = calls[-1]
assert last_call.data.get(logbook.ATTR_NAME) == "This entry"
assert last_call.data.get(logbook.ATTR_MESSAGE) == "has no domain or entity_id"
assert last_call.data.get(logbook.ATTR_DOMAIN) == "logbook"
def test_service_call_create_log_book_entry_no_message(hass_):
"""Test if service call create log book entry without message."""
calls = []
@ha.callback
def event_listener(event):
"""Append on event."""
calls.append(event)
hass_.bus.listen(logbook.EVENT_LOGBOOK_ENTRY, event_listener)
with pytest.raises(vol.Invalid):
hass_.services.call(logbook.DOMAIN, "log", {}, True)
# Logbook entry service call results in firing an event.
# Our service call will unblock when the event listeners have been
# scheduled. This means that they may not have been processed yet.
hass_.block_till_done()
assert len(calls) == 0
def test_humanify_filter_sensor(hass_):
"""Test humanify filter too frequent sensor values."""
entity_id = "sensor.bla"
pointA = dt_util.utcnow().replace(minute=2)
pointB = pointA.replace(minute=5)
pointC = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES)
entity_attr_cache = logbook.EntityAttributeCache(hass_)
eventA = create_state_changed_event(pointA, entity_id, 10)
eventB = create_state_changed_event(pointB, entity_id, 20)
eventC = create_state_changed_event(pointC, entity_id, 30)
entries = list(
logbook.humanify(hass_, (eventA, eventB, eventC), entity_attr_cache, {})
)
assert len(entries) == 2
assert_entry(entries[0], pointB, "bla", entity_id=entity_id)
assert_entry(entries[1], pointC, "bla", entity_id=entity_id)
def test_home_assistant_start_stop_grouped(hass_):
"""Test if HA start and stop events are grouped.
Events that are occurring in the same minute.
"""
entity_attr_cache = logbook.EntityAttributeCache(hass_)
entries = list(
logbook.humanify(
hass_,
(
MockLazyEventPartialState(EVENT_HOMEASSISTANT_STOP),
MockLazyEventPartialState(EVENT_HOMEASSISTANT_START),
),
entity_attr_cache,
{},
),
)
assert len(entries) == 1
assert_entry(
entries[0], name="Home Assistant", message="restarted", domain=ha.DOMAIN
)
def test_home_assistant_start(hass_):
"""Test if HA start is not filtered or converted into a restart."""
entity_id = "switch.bla"
pointA = dt_util.utcnow()
entity_attr_cache = logbook.EntityAttributeCache(hass_)
entries = list(
logbook.humanify(
hass_,
(
MockLazyEventPartialState(EVENT_HOMEASSISTANT_START),
create_state_changed_event(pointA, entity_id, 10),
),
entity_attr_cache,
{},
)
)
assert len(entries) == 2
assert_entry(entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN)
assert_entry(entries[1], pointA, "bla", entity_id=entity_id)
def test_process_custom_logbook_entries(hass_):
"""Test if custom log book entries get added as an entry."""
name = "Nice name"
message = "has a custom entry"
entity_id = "sun.sun"
entity_attr_cache = logbook.EntityAttributeCache(hass_)
entries = list(
logbook.humanify(
hass_,
(
MockLazyEventPartialState(
logbook.EVENT_LOGBOOK_ENTRY,
{
logbook.ATTR_NAME: name,
logbook.ATTR_MESSAGE: message,
logbook.ATTR_ENTITY_ID: entity_id,
},
),
),
entity_attr_cache,
{},
)
)
assert len(entries) == 1
assert_entry(entries[0], name=name, message=message, entity_id=entity_id)
# pylint: disable=no-self-use
def assert_entry(
entry, when=None, name=None, message=None, domain=None, entity_id=None
):
"""Assert an entry is what is expected."""
return _assert_entry(entry, when, name, message, domain, entity_id)
def create_state_changed_event(
event_time_fired,
entity_id,
state,
attributes=None,
last_changed=None,
last_updated=None,
):
"""Create state changed event."""
old_state = ha.State(
entity_id, "old", attributes, last_changed, last_updated
).as_dict()
new_state = ha.State(
entity_id, state, attributes, last_changed, last_updated
).as_dict()
return create_state_changed_event_from_old_new(
entity_id, event_time_fired, old_state, new_state
)
# pylint: disable=no-self-use
def create_state_changed_event_from_old_new(
entity_id, event_time_fired, old_state, new_state
):
"""Create a state changed event from a old and new state."""
attributes = {}
if new_state is not None:
attributes = new_state.get("attributes")
attributes_json = json.dumps(attributes, cls=JSONEncoder)
    row = collections.namedtuple(
        "Row",
        [
            "event_type",
            "event_data",
            "time_fired",
            "context_id",
            "context_user_id",
            "context_parent_id",
            "state",
            "entity_id",
            "domain",
            "attributes",
            "state_id",
            "old_state_id",
        ],
    )
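    # The fields are filled in below as class attributes on the namedtuple type
    # rather than by instantiating it; LazyEventPartialState only needs plain
    # attribute access on the row object passed to it.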
row.event_type = EVENT_STATE_CHANGED
row.event_data = "{}"
row.attributes = attributes_json
row.time_fired = event_time_fired
row.state = new_state and new_state.get("state")
row.entity_id = entity_id
row.domain = entity_id and ha.split_entity_id(entity_id)[0]
row.context_id = None
row.context_user_id = None
row.context_parent_id = None
row.old_state_id = old_state and 1
row.state_id = new_state and 1
return logbook.LazyEventPartialState(row)
async def test_logbook_view(hass, hass_client):
"""Test the logbook view."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
response = await client.get(f"/api/logbook/{dt_util.utcnow().isoformat()}")
assert response.status == 200
async def test_logbook_view_period_entity(hass, hass_client):
"""Test the logbook view with period and entity."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
entity_id_test = "switch.test"
hass.states.async_set(entity_id_test, STATE_OFF)
hass.states.async_set(entity_id_test, STATE_ON)
entity_id_second = "switch.second"
hass.states.async_set(entity_id_second, STATE_OFF)
hass.states.async_set(entity_id_second, STATE_ON)
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0]["entity_id"] == entity_id_test
assert response_json[1]["entity_id"] == entity_id_second
# Test today entries with filter by period
response = await client.get(f"/api/logbook/{start_date.isoformat()}?period=1")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0]["entity_id"] == entity_id_test
assert response_json[1]["entity_id"] == entity_id_second
# Test today entries with filter by entity_id
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?entity=switch.test"
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 1
assert response_json[0]["entity_id"] == entity_id_test
# Test entries for 3 days with filter by entity_id
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?period=3&entity=switch.test"
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 1
assert response_json[0]["entity_id"] == entity_id_test
# Tomorrow time 00:00:00
start = (dt_util.utcnow() + timedelta(days=1)).date()
start_date = datetime(start.year, start.month, start.day)
# Test tomorrow entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 0
# Test tomorrow entries with filter by entity_id
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?entity=switch.test"
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 0
# Test entries from tomorrow to 3 days ago with filter by entity_id
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?period=3&entity=switch.test"
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 1
assert response_json[0]["entity_id"] == entity_id_test
async def test_logbook_describe_event(hass, hass_client):
"""Test teaching logbook about a new event."""
await hass.async_add_executor_job(init_recorder_component, hass)
def _describe(event):
"""Describe an event."""
return {"name": "Test Name", "message": "tested a message"}
hass.config.components.add("fake_integration")
mock_platform(
hass,
"fake_integration.logbook",
Mock(
async_describe_events=lambda hass, async_describe_event: async_describe_event(
"test_domain", "some_event", _describe
)
),
)
assert await async_setup_component(hass, "logbook", {})
with patch(
"homeassistant.util.dt.utcnow",
return_value=dt_util.utcnow() - timedelta(seconds=5),
):
hass.bus.async_fire("some_event")
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(
hass.data[recorder.DATA_INSTANCE].block_till_done
)
client = await hass_client()
response = await client.get("/api/logbook")
results = await response.json()
assert len(results) == 1
event = results[0]
assert event["name"] == "Test Name"
assert event["message"] == "tested a message"
assert event["domain"] == "test_domain"
async def test_exclude_described_event(hass, hass_client):
"""Test exclusions of events that are described by another integration."""
name = "My Automation Rule"
entity_id = "automation.excluded_rule"
entity_id2 = "automation.included_rule"
entity_id3 = "sensor.excluded_domain"
def _describe(event):
"""Describe an event."""
return {
"name": "Test Name",
"message": "tested a message",
"entity_id": event.data.get(ATTR_ENTITY_ID),
}
def async_describe_events(hass, async_describe_event):
"""Mock to describe events."""
async_describe_event("automation", "some_automation_event", _describe)
async_describe_event("sensor", "some_event", _describe)
hass.config.components.add("fake_integration")
mock_platform(
hass,
"fake_integration.logbook",
Mock(async_describe_events=async_describe_events),
)
await hass.async_add_executor_job(init_recorder_component, hass)
assert await async_setup_component(
hass,
logbook.DOMAIN,
{
logbook.DOMAIN: {
CONF_EXCLUDE: {CONF_DOMAINS: ["sensor"], CONF_ENTITIES: [entity_id]}
}
},
)
with patch(
"homeassistant.util.dt.utcnow",
return_value=dt_util.utcnow() - timedelta(seconds=5),
):
hass.bus.async_fire(
"some_automation_event",
{logbook.ATTR_NAME: name, logbook.ATTR_ENTITY_ID: entity_id},
)
hass.bus.async_fire(
"some_automation_event",
{logbook.ATTR_NAME: name, logbook.ATTR_ENTITY_ID: entity_id2},
)
hass.bus.async_fire(
"some_event", {logbook.ATTR_NAME: name, logbook.ATTR_ENTITY_ID: entity_id3}
)
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(
hass.data[recorder.DATA_INSTANCE].block_till_done
)
client = await hass_client()
response = await client.get("/api/logbook")
results = await response.json()
assert len(results) == 1
event = results[0]
assert event["name"] == "Test Name"
assert event["entity_id"] == "automation.included_rule"
async def test_logbook_view_end_time_entity(hass, hass_client):
"""Test the logbook view with end_time and entity."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
entity_id_test = "switch.test"
hass.states.async_set(entity_id_test, STATE_OFF)
hass.states.async_set(entity_id_test, STATE_ON)
entity_id_second = "switch.second"
hass.states.async_set(entity_id_second, STATE_OFF)
hass.states.async_set(entity_id_second, STATE_ON)
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries with filter by end_time
end_time = start + timedelta(hours=24)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}"
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0]["entity_id"] == entity_id_test
assert response_json[1]["entity_id"] == entity_id_second
# Test entries for 3 days with filter by entity_id
end_time = start + timedelta(hours=72)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=switch.test"
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 1
assert response_json[0]["entity_id"] == entity_id_test
    # Today time 00:00:00
start = dt_util.utcnow()
start_date = datetime(start.year, start.month, start.day)
# Test entries from today to 3 days with filter by entity_id
end_time = start_date + timedelta(hours=72)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=switch.test"
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 1
assert response_json[0]["entity_id"] == entity_id_test
async def test_logbook_entity_filter_with_automations(hass, hass_client):
"""Test the logbook view with end_time and entity with automations and scripts."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await async_setup_component(hass, "automation", {})
await async_setup_component(hass, "script", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
entity_id_test = "alarm_control_panel.area_001"
hass.states.async_set(entity_id_test, STATE_OFF)
hass.states.async_set(entity_id_test, STATE_ON)
entity_id_second = "alarm_control_panel.area_002"
hass.states.async_set(entity_id_second, STATE_OFF)
hass.states.async_set(entity_id_second, STATE_ON)
hass.bus.async_fire(
EVENT_AUTOMATION_TRIGGERED,
{ATTR_NAME: "Mock automation", ATTR_ENTITY_ID: "automation.mock_automation"},
)
hass.bus.async_fire(
EVENT_SCRIPT_STARTED,
{ATTR_NAME: "Mock script", ATTR_ENTITY_ID: "script.mock_script"},
)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries with filter by end_time
end_time = start + timedelta(hours=24)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}"
)
assert response.status == 200
json_dict = await response.json()
assert json_dict[0]["entity_id"] == entity_id_test
assert json_dict[1]["entity_id"] == entity_id_second
assert json_dict[2]["entity_id"] == "automation.mock_automation"
assert json_dict[3]["entity_id"] == "script.mock_script"
assert json_dict[4]["domain"] == "homeassistant"
# Test entries for 3 days with filter by entity_id
end_time = start + timedelta(hours=72)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=alarm_control_panel.area_001"
)
assert response.status == 200
json_dict = await response.json()
assert len(json_dict) == 1
assert json_dict[0]["entity_id"] == entity_id_test
    # Today time 00:00:00
start = dt_util.utcnow()
start_date = datetime(start.year, start.month, start.day)
# Test entries from today to 3 days with filter by entity_id
end_time = start_date + timedelta(hours=72)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=alarm_control_panel.area_002"
)
assert response.status == 200
json_dict = await response.json()
assert len(json_dict) == 1
assert json_dict[0]["entity_id"] == entity_id_second
async def test_filter_continuous_sensor_values(hass, hass_client):
"""Test remove continuous sensor events from logbook."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
entity_id_test = "switch.test"
hass.states.async_set(entity_id_test, STATE_OFF)
hass.states.async_set(entity_id_test, STATE_ON)
entity_id_second = "sensor.bla"
hass.states.async_set(entity_id_second, STATE_OFF, {"unit_of_measurement": "foo"})
hass.states.async_set(entity_id_second, STATE_ON, {"unit_of_measurement": "foo"})
entity_id_third = "light.bla"
hass.states.async_set(entity_id_third, STATE_OFF, {"unit_of_measurement": "foo"})
hass.states.async_set(entity_id_third, STATE_ON, {"unit_of_measurement": "foo"})
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0]["entity_id"] == entity_id_test
assert response_json[1]["entity_id"] == entity_id_third
async def test_exclude_new_entities(hass, hass_client):
"""Test if events are excluded on first update."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
entity_id = "climate.bla"
entity_id2 = "climate.blu"
hass.states.async_set(entity_id, STATE_OFF)
hass.states.async_set(entity_id2, STATE_ON)
hass.states.async_set(entity_id2, STATE_OFF)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0]["entity_id"] == entity_id2
assert response_json[1]["domain"] == "homeassistant"
assert response_json[1]["message"] == "started"
async def test_exclude_removed_entities(hass, hass_client):
"""Test if events are excluded on last update."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
entity_id = "climate.bla"
entity_id2 = "climate.blu"
hass.states.async_set(entity_id, STATE_ON)
hass.states.async_set(entity_id, STATE_OFF)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.states.async_set(entity_id2, STATE_ON)
hass.states.async_set(entity_id2, STATE_OFF)
hass.states.async_remove(entity_id)
hass.states.async_remove(entity_id2)
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 3
assert response_json[0]["entity_id"] == entity_id
assert response_json[1]["domain"] == "homeassistant"
assert response_json[1]["message"] == "started"
assert response_json[2]["entity_id"] == entity_id2
async def test_exclude_attribute_changes(hass, hass_client):
"""Test if events of attribute changes are filtered."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.states.async_set("light.kitchen", STATE_OFF)
hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 100})
hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 200})
hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 300})
hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 400})
hass.states.async_set("light.kitchen", STATE_OFF)
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 3
assert response_json[0]["domain"] == "homeassistant"
assert response_json[1]["entity_id"] == "light.kitchen"
assert response_json[2]["entity_id"] == "light.kitchen"
async def test_logbook_entity_context_id(hass, hass_client):
"""Test the logbook view with end_time and entity with automations and scripts."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await async_setup_component(hass, "automation", {})
await async_setup_component(hass, "script", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
context = ha.Context(
id="ac5bd62de45711eaaeb351041eec8dd9",
user_id="b400facee45711eaa9308bfd3d19e474",
)
# An Automation
automation_entity_id_test = "automation.alarm"
hass.bus.async_fire(
EVENT_AUTOMATION_TRIGGERED,
{ATTR_NAME: "Mock automation", ATTR_ENTITY_ID: automation_entity_id_test},
context=context,
)
hass.bus.async_fire(
EVENT_SCRIPT_STARTED,
{ATTR_NAME: "Mock script", ATTR_ENTITY_ID: "script.mock_script"},
context=context,
)
hass.states.async_set(
automation_entity_id_test,
STATE_ON,
{ATTR_FRIENDLY_NAME: "Alarm Automation"},
context=context,
)
entity_id_test = "alarm_control_panel.area_001"
hass.states.async_set(entity_id_test, STATE_OFF, context=context)
await hass.async_block_till_done()
hass.states.async_set(entity_id_test, STATE_ON, context=context)
await hass.async_block_till_done()
entity_id_second = "alarm_control_panel.area_002"
hass.states.async_set(entity_id_second, STATE_OFF, context=context)
await hass.async_block_till_done()
hass.states.async_set(entity_id_second, STATE_ON, context=context)
await hass.async_block_till_done()
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
await hass.async_add_executor_job(
logbook.log_entry,
hass,
"mock_name",
"mock_message",
"alarm_control_panel",
"alarm_control_panel.area_003",
context,
)
await hass.async_block_till_done()
await hass.async_add_executor_job(
logbook.log_entry,
hass,
"mock_name",
"mock_message",
"homeassistant",
None,
context,
)
await hass.async_block_till_done()
# A service call
light_turn_off_service_context = ha.Context(
id="9c5bd62de45711eaaeb351041eec8dd9",
user_id="9400facee45711eaa9308bfd3d19e474",
)
hass.states.async_set("light.switch", STATE_ON)
await hass.async_block_till_done()
hass.bus.async_fire(
EVENT_CALL_SERVICE,
{
ATTR_DOMAIN: "light",
ATTR_SERVICE: "turn_off",
ATTR_ENTITY_ID: "light.switch",
},
context=light_turn_off_service_context,
)
await hass.async_block_till_done()
hass.states.async_set(
"light.switch", STATE_OFF, context=light_turn_off_service_context
)
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries with filter by end_time
end_time = start + timedelta(hours=24)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}"
)
assert response.status == 200
json_dict = await response.json()
assert json_dict[0]["entity_id"] == "automation.alarm"
assert "context_entity_id" not in json_dict[0]
assert json_dict[0]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[1]["entity_id"] == "script.mock_script"
assert json_dict[1]["context_event_type"] == "automation_triggered"
assert json_dict[1]["context_entity_id"] == "automation.alarm"
assert json_dict[1]["context_entity_id_name"] == "Alarm Automation"
assert json_dict[1]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[2]["entity_id"] == entity_id_test
assert json_dict[2]["context_event_type"] == "automation_triggered"
assert json_dict[2]["context_entity_id"] == "automation.alarm"
assert json_dict[2]["context_entity_id_name"] == "Alarm Automation"
assert json_dict[2]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[3]["entity_id"] == entity_id_second
assert json_dict[3]["context_event_type"] == "automation_triggered"
assert json_dict[3]["context_entity_id"] == "automation.alarm"
assert json_dict[3]["context_entity_id_name"] == "Alarm Automation"
assert json_dict[3]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[4]["domain"] == "homeassistant"
assert json_dict[5]["entity_id"] == "alarm_control_panel.area_003"
assert json_dict[5]["context_event_type"] == "automation_triggered"
assert json_dict[5]["context_entity_id"] == "automation.alarm"
assert json_dict[5]["domain"] == "alarm_control_panel"
assert json_dict[5]["context_entity_id_name"] == "Alarm Automation"
assert json_dict[5]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[6]["domain"] == "homeassistant"
assert json_dict[6]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[7]["entity_id"] == "light.switch"
assert json_dict[7]["context_event_type"] == "call_service"
assert json_dict[7]["context_domain"] == "light"
assert json_dict[7]["context_service"] == "turn_off"
assert json_dict[7]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
async def test_logbook_entity_context_parent_id(hass, hass_client):
"""Test the logbook view links events via context parent_id."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await async_setup_component(hass, "automation", {})
await async_setup_component(hass, "script", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
context = ha.Context(
id="ac5bd62de45711eaaeb351041eec8dd9",
user_id="b400facee45711eaa9308bfd3d19e474",
)
# An Automation triggering scripts with a new context
automation_entity_id_test = "automation.alarm"
hass.bus.async_fire(
EVENT_AUTOMATION_TRIGGERED,
{ATTR_NAME: "Mock automation", ATTR_ENTITY_ID: automation_entity_id_test},
context=context,
)
child_context = ha.Context(
id="2798bfedf8234b5e9f4009c91f48f30c",
parent_id="ac5bd62de45711eaaeb351041eec8dd9",
user_id="b400facee45711eaa9308bfd3d19e474",
)
hass.bus.async_fire(
EVENT_SCRIPT_STARTED,
{ATTR_NAME: "Mock script", ATTR_ENTITY_ID: "script.mock_script"},
context=child_context,
)
hass.states.async_set(
automation_entity_id_test,
STATE_ON,
{ATTR_FRIENDLY_NAME: "Alarm Automation"},
context=child_context,
)
entity_id_test = "alarm_control_panel.area_001"
hass.states.async_set(entity_id_test, STATE_OFF, context=child_context)
await hass.async_block_till_done()
hass.states.async_set(entity_id_test, STATE_ON, context=child_context)
await hass.async_block_till_done()
entity_id_second = "alarm_control_panel.area_002"
hass.states.async_set(entity_id_second, STATE_OFF, context=child_context)
await hass.async_block_till_done()
hass.states.async_set(entity_id_second, STATE_ON, context=child_context)
await hass.async_block_till_done()
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
logbook.async_log_entry(
hass,
"mock_name",
"mock_message",
"alarm_control_panel",
"alarm_control_panel.area_003",
child_context,
)
await hass.async_block_till_done()
logbook.async_log_entry(
hass,
"mock_name",
"mock_message",
"homeassistant",
None,
child_context,
)
await hass.async_block_till_done()
# A state change via service call with the script as the parent
light_turn_off_service_context = ha.Context(
id="9c5bd62de45711eaaeb351041eec8dd9",
parent_id="2798bfedf8234b5e9f4009c91f48f30c",
user_id="9400facee45711eaa9308bfd3d19e474",
)
hass.states.async_set("light.switch", STATE_ON)
await hass.async_block_till_done()
hass.bus.async_fire(
EVENT_CALL_SERVICE,
{
ATTR_DOMAIN: "light",
ATTR_SERVICE: "turn_off",
ATTR_ENTITY_ID: "light.switch",
},
context=light_turn_off_service_context,
)
await hass.async_block_till_done()
hass.states.async_set(
"light.switch", STATE_OFF, context=light_turn_off_service_context
)
await hass.async_block_till_done()
# An event with a parent event, but the parent event isn't available
missing_parent_context = ha.Context(
id="fc40b9a0d1f246f98c34b33c76228ee6",
parent_id="c8ce515fe58e442f8664246c65ed964f",
user_id="485cacf93ef84d25a99ced3126b921d2",
)
logbook.async_log_entry(
hass,
"mock_name",
"mock_message",
"alarm_control_panel",
"alarm_control_panel.area_009",
missing_parent_context,
)
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries with filter by end_time
end_time = start + timedelta(hours=24)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}"
)
assert response.status == 200
json_dict = await response.json()
assert json_dict[0]["entity_id"] == "automation.alarm"
assert "context_entity_id" not in json_dict[0]
assert json_dict[0]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
# New context, so this looks to be triggered by the Alarm Automation
assert json_dict[1]["entity_id"] == "script.mock_script"
assert json_dict[1]["context_event_type"] == "automation_triggered"
assert json_dict[1]["context_entity_id"] == "automation.alarm"
assert json_dict[1]["context_entity_id_name"] == "Alarm Automation"
assert json_dict[1]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[2]["entity_id"] == entity_id_test
assert json_dict[2]["context_event_type"] == "script_started"
assert json_dict[2]["context_entity_id"] == "script.mock_script"
assert json_dict[2]["context_entity_id_name"] == "mock script"
assert json_dict[2]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[3]["entity_id"] == entity_id_second
assert json_dict[3]["context_event_type"] == "script_started"
assert json_dict[3]["context_entity_id"] == "script.mock_script"
assert json_dict[3]["context_entity_id_name"] == "mock script"
assert json_dict[3]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[4]["domain"] == "homeassistant"
assert json_dict[5]["entity_id"] == "alarm_control_panel.area_003"
assert json_dict[5]["context_event_type"] == "script_started"
assert json_dict[5]["context_entity_id"] == "script.mock_script"
assert json_dict[5]["domain"] == "alarm_control_panel"
assert json_dict[5]["context_entity_id_name"] == "mock script"
assert json_dict[5]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[6]["domain"] == "homeassistant"
assert json_dict[6]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"
assert json_dict[7]["entity_id"] == "light.switch"
assert json_dict[7]["context_event_type"] == "call_service"
assert json_dict[7]["context_domain"] == "light"
assert json_dict[7]["context_service"] == "turn_off"
assert json_dict[7]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
assert json_dict[8]["entity_id"] == "alarm_control_panel.area_009"
assert json_dict[8]["domain"] == "alarm_control_panel"
assert "context_event_type" not in json_dict[8]
assert "context_entity_id" not in json_dict[8]
assert "context_entity_id_name" not in json_dict[8]
assert json_dict[8]["context_user_id"] == "485cacf93ef84d25a99ced3126b921d2"
async def test_logbook_context_from_template(hass, hass_client):
"""Test the logbook view with end_time and entity with automations and scripts."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
assert await async_setup_component(
hass,
"switch",
{
"switch": {
"platform": "template",
"switches": {
"test_template_switch": {
"value_template": "{{ states.switch.test_state.state }}",
"turn_on": {
"service": "switch.turn_on",
"entity_id": "switch.test_state",
},
"turn_off": {
"service": "switch.turn_off",
"entity_id": "switch.test_state",
},
}
},
}
},
)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
# Entity added (should not be logged)
hass.states.async_set("switch.test_state", STATE_ON)
await hass.async_block_till_done()
# First state change (should be logged)
hass.states.async_set("switch.test_state", STATE_OFF)
await hass.async_block_till_done()
switch_turn_off_context = ha.Context(
id="9c5bd62de45711eaaeb351041eec8dd9",
user_id="9400facee45711eaa9308bfd3d19e474",
)
hass.states.async_set(
"switch.test_state", STATE_ON, context=switch_turn_off_context
)
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries with filter by end_time
end_time = start + timedelta(hours=24)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}"
)
assert response.status == 200
json_dict = await response.json()
assert json_dict[0]["domain"] == "homeassistant"
assert "context_entity_id" not in json_dict[0]
assert json_dict[1]["entity_id"] == "switch.test_template_switch"
assert json_dict[2]["entity_id"] == "switch.test_state"
assert json_dict[3]["entity_id"] == "switch.test_template_switch"
assert json_dict[3]["context_entity_id"] == "switch.test_state"
assert json_dict[3]["context_entity_id_name"] == "test state"
assert json_dict[4]["entity_id"] == "switch.test_state"
assert json_dict[4]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
assert json_dict[5]["entity_id"] == "switch.test_template_switch"
assert json_dict[5]["context_entity_id"] == "switch.test_state"
assert json_dict[5]["context_entity_id_name"] == "test state"
assert json_dict[5]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
async def test_logbook_entity_matches_only(hass, hass_client):
"""Test the logbook view with a single entity and entity_matches_only."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
assert await async_setup_component(
hass,
"switch",
{
"switch": {
"platform": "template",
"switches": {
"test_template_switch": {
"value_template": "{{ states.switch.test_state.state }}",
"turn_on": {
"service": "switch.turn_on",
"entity_id": "switch.test_state",
},
"turn_off": {
"service": "switch.turn_off",
"entity_id": "switch.test_state",
},
}
},
}
},
)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
# Entity added (should not be logged)
hass.states.async_set("switch.test_state", STATE_ON)
await hass.async_block_till_done()
# First state change (should be logged)
hass.states.async_set("switch.test_state", STATE_OFF)
await hass.async_block_till_done()
switch_turn_off_context = ha.Context(
id="9c5bd62de45711eaaeb351041eec8dd9",
user_id="9400facee45711eaa9308bfd3d19e474",
)
hass.states.async_set(
"switch.test_state", STATE_ON, context=switch_turn_off_context
)
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries with filter by end_time
end_time = start + timedelta(hours=24)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=switch.test_state&entity_matches_only"
)
assert response.status == 200
json_dict = await response.json()
assert len(json_dict) == 2
assert json_dict[0]["entity_id"] == "switch.test_state"
assert json_dict[1]["entity_id"] == "switch.test_state"
assert json_dict[1]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
async def test_logbook_entity_matches_only_multiple(hass, hass_client):
"""Test the logbook view with a multiple entities and entity_matches_only."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
assert await async_setup_component(
hass,
"switch",
{
"switch": {
"platform": "template",
"switches": {
"test_template_switch": {
"value_template": "{{ states.switch.test_state.state }}",
"turn_on": {
"service": "switch.turn_on",
"entity_id": "switch.test_state",
},
"turn_off": {
"service": "switch.turn_off",
"entity_id": "switch.test_state",
},
}
},
}
},
)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
await hass.async_block_till_done()
await hass.async_start()
await hass.async_block_till_done()
# Entity added (should not be logged)
hass.states.async_set("switch.test_state", STATE_ON)
hass.states.async_set("light.test_state", STATE_ON)
await hass.async_block_till_done()
# First state change (should be logged)
hass.states.async_set("switch.test_state", STATE_OFF)
hass.states.async_set("light.test_state", STATE_OFF)
await hass.async_block_till_done()
switch_turn_off_context = ha.Context(
id="9c5bd62de45711eaaeb351041eec8dd9",
user_id="9400facee45711eaa9308bfd3d19e474",
)
hass.states.async_set(
"switch.test_state", STATE_ON, context=switch_turn_off_context
)
hass.states.async_set("light.test_state", STATE_ON, context=switch_turn_off_context)
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries with filter by end_time
end_time = start + timedelta(hours=24)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=switch.test_state,light.test_state&entity_matches_only"
)
assert response.status == 200
json_dict = await response.json()
assert len(json_dict) == 4
assert json_dict[0]["entity_id"] == "switch.test_state"
assert json_dict[1]["entity_id"] == "light.test_state"
assert json_dict[2]["entity_id"] == "switch.test_state"
assert json_dict[2]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
assert json_dict[3]["entity_id"] == "light.test_state"
assert json_dict[3]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
async def test_logbook_invalid_entity(hass, hass_client):
"""Test the logbook view with requesting an invalid entity."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_block_till_done()
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries with filter by end_time
end_time = start + timedelta(hours=24)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=invalid&entity_matches_only"
)
assert response.status == 500
async def test_icon_and_state(hass, hass_client):
"""Test to ensure state and custom icons are returned."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.states.async_set("light.kitchen", STATE_OFF, {"icon": "mdi:chemical-weapon"})
hass.states.async_set(
"light.kitchen", STATE_ON, {"brightness": 100, "icon": "mdi:security"}
)
hass.states.async_set(
"light.kitchen", STATE_ON, {"brightness": 200, "icon": "mdi:security"}
)
hass.states.async_set(
"light.kitchen", STATE_ON, {"brightness": 300, "icon": "mdi:security"}
)
hass.states.async_set(
"light.kitchen", STATE_ON, {"brightness": 400, "icon": "mdi:security"}
)
hass.states.async_set("light.kitchen", STATE_OFF, {"icon": "mdi:chemical-weapon"})
await _async_commit_and_wait(hass)
client = await hass_client()
response_json = await _async_fetch_logbook(client)
assert len(response_json) == 3
assert response_json[0]["domain"] == "homeassistant"
assert response_json[1]["entity_id"] == "light.kitchen"
assert response_json[1]["icon"] == "mdi:security"
assert response_json[1]["state"] == STATE_ON
assert response_json[2]["entity_id"] == "light.kitchen"
assert response_json[2]["icon"] == "mdi:chemical-weapon"
assert response_json[2]["state"] == STATE_OFF
async def test_exclude_events_domain(hass, hass_client):
"""Test if events are filtered if domain is excluded in config."""
entity_id = "switch.bla"
entity_id2 = "sensor.blu"
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {CONF_EXCLUDE: {CONF_DOMAINS: ["switch", "alexa"]}},
}
)
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", config)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id, 10)
hass.states.async_set(entity_id2, None)
hass.states.async_set(entity_id2, 20)
await _async_commit_and_wait(hass)
client = await hass_client()
entries = await _async_fetch_logbook(client)
assert len(entries) == 2
_assert_entry(
entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
)
_assert_entry(entries[1], name="blu", entity_id=entity_id2)
async def test_exclude_events_domain_glob(hass, hass_client):
"""Test if events are filtered if domain or glob is excluded in config."""
entity_id = "switch.bla"
entity_id2 = "sensor.blu"
entity_id3 = "sensor.excluded"
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {
CONF_EXCLUDE: {
CONF_DOMAINS: ["switch", "alexa"],
CONF_ENTITY_GLOBS: "*.excluded",
}
},
}
)
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", config)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id, 10)
hass.states.async_set(entity_id2, None)
hass.states.async_set(entity_id2, 20)
hass.states.async_set(entity_id3, None)
hass.states.async_set(entity_id3, 30)
await _async_commit_and_wait(hass)
client = await hass_client()
entries = await _async_fetch_logbook(client)
assert len(entries) == 2
_assert_entry(
entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
)
_assert_entry(entries[1], name="blu", entity_id=entity_id2)
async def test_include_events_entity(hass, hass_client):
"""Test if events are filtered if entity is included in config."""
entity_id = "sensor.bla"
entity_id2 = "sensor.blu"
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {
CONF_INCLUDE: {
CONF_DOMAINS: ["homeassistant"],
CONF_ENTITIES: [entity_id2],
}
},
}
)
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", config)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id, 10)
hass.states.async_set(entity_id2, None)
hass.states.async_set(entity_id2, 20)
await _async_commit_and_wait(hass)
client = await hass_client()
entries = await _async_fetch_logbook(client)
assert len(entries) == 2
_assert_entry(
entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
)
_assert_entry(entries[1], name="blu", entity_id=entity_id2)
async def test_exclude_events_entity(hass, hass_client):
"""Test if events are filtered if entity is excluded in config."""
entity_id = "sensor.bla"
entity_id2 = "sensor.blu"
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {CONF_EXCLUDE: {CONF_ENTITIES: [entity_id]}},
}
)
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", config)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id, 10)
hass.states.async_set(entity_id2, None)
hass.states.async_set(entity_id2, 20)
await _async_commit_and_wait(hass)
client = await hass_client()
entries = await _async_fetch_logbook(client)
assert len(entries) == 2
_assert_entry(
entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
)
_assert_entry(entries[1], name="blu", entity_id=entity_id2)
async def test_include_events_domain(hass, hass_client):
"""Test if events are filtered if domain is included in config."""
assert await async_setup_component(hass, "alexa", {})
entity_id = "switch.bla"
entity_id2 = "sensor.blu"
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {
CONF_INCLUDE: {CONF_DOMAINS: ["homeassistant", "sensor", "alexa"]}
},
}
)
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", config)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
hass.bus.async_fire(
EVENT_ALEXA_SMART_HOME,
{"request": {"namespace": "Alexa.Discovery", "name": "Discover"}},
)
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id, 10)
hass.states.async_set(entity_id2, None)
hass.states.async_set(entity_id2, 20)
await _async_commit_and_wait(hass)
client = await hass_client()
entries = await _async_fetch_logbook(client)
assert len(entries) == 3
_assert_entry(
entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
)
_assert_entry(entries[1], name="Amazon Alexa", domain="alexa")
_assert_entry(entries[2], name="blu", entity_id=entity_id2)
async def test_include_events_domain_glob(hass, hass_client):
"""Test if events are filtered if domain or glob is included in config."""
assert await async_setup_component(hass, "alexa", {})
entity_id = "switch.bla"
entity_id2 = "sensor.blu"
entity_id3 = "switch.included"
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {
CONF_INCLUDE: {
CONF_DOMAINS: ["homeassistant", "sensor", "alexa"],
CONF_ENTITY_GLOBS: ["*.included"],
}
},
}
)
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", config)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
hass.bus.async_fire(
EVENT_ALEXA_SMART_HOME,
{"request": {"namespace": "Alexa.Discovery", "name": "Discover"}},
)
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id, 10)
hass.states.async_set(entity_id2, None)
hass.states.async_set(entity_id2, 20)
hass.states.async_set(entity_id3, None)
hass.states.async_set(entity_id3, 30)
await _async_commit_and_wait(hass)
client = await hass_client()
entries = await _async_fetch_logbook(client)
assert len(entries) == 4
_assert_entry(
entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
)
_assert_entry(entries[1], name="Amazon Alexa", domain="alexa")
_assert_entry(entries[2], name="blu", entity_id=entity_id2)
_assert_entry(entries[3], name="included", entity_id=entity_id3)
async def test_include_exclude_events(hass, hass_client):
"""Test if events are filtered if include and exclude is configured."""
entity_id = "switch.bla"
entity_id2 = "sensor.blu"
entity_id3 = "sensor.bli"
entity_id4 = "sensor.keep"
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {
CONF_INCLUDE: {
CONF_DOMAINS: ["sensor", "homeassistant"],
CONF_ENTITIES: ["switch.bla"],
},
CONF_EXCLUDE: {
CONF_DOMAINS: ["switch"],
CONF_ENTITIES: ["sensor.bli"],
},
},
}
)
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", config)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id, 10)
hass.states.async_set(entity_id2, None)
hass.states.async_set(entity_id2, 10)
hass.states.async_set(entity_id3, None)
hass.states.async_set(entity_id3, 10)
hass.states.async_set(entity_id, 20)
hass.states.async_set(entity_id2, 20)
hass.states.async_set(entity_id4, None)
hass.states.async_set(entity_id4, 10)
await _async_commit_and_wait(hass)
client = await hass_client()
entries = await _async_fetch_logbook(client)
assert len(entries) == 3
_assert_entry(
entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
)
_assert_entry(entries[1], name="blu", entity_id=entity_id2)
_assert_entry(entries[2], name="keep", entity_id=entity_id4)
async def test_include_exclude_events_with_glob_filters(hass, hass_client):
"""Test if events are filtered if include and exclude is configured."""
entity_id = "switch.bla"
entity_id2 = "sensor.blu"
entity_id3 = "sensor.bli"
entity_id4 = "light.included"
entity_id5 = "switch.included"
entity_id6 = "sensor.excluded"
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {
CONF_INCLUDE: {
CONF_DOMAINS: ["sensor", "homeassistant"],
CONF_ENTITIES: ["switch.bla"],
CONF_ENTITY_GLOBS: ["*.included"],
},
CONF_EXCLUDE: {
CONF_DOMAINS: ["switch"],
CONF_ENTITY_GLOBS: ["*.excluded"],
CONF_ENTITIES: ["sensor.bli"],
},
},
}
)
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", config)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id, 10)
hass.states.async_set(entity_id2, None)
hass.states.async_set(entity_id2, 10)
hass.states.async_set(entity_id3, None)
hass.states.async_set(entity_id3, 10)
hass.states.async_set(entity_id, 20)
hass.states.async_set(entity_id2, 20)
hass.states.async_set(entity_id4, None)
hass.states.async_set(entity_id4, 30)
hass.states.async_set(entity_id5, None)
hass.states.async_set(entity_id5, 30)
hass.states.async_set(entity_id6, None)
hass.states.async_set(entity_id6, 30)
await _async_commit_and_wait(hass)
client = await hass_client()
entries = await _async_fetch_logbook(client)
assert len(entries) == 3
_assert_entry(
entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
)
_assert_entry(entries[1], name="blu", entity_id=entity_id2)
_assert_entry(entries[2], name="included", entity_id=entity_id4)
async def test_empty_config(hass, hass_client):
"""Test we can handle an empty entity filter."""
entity_id = "sensor.blu"
config = logbook.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
logbook.DOMAIN: {},
}
)
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", config)
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id, 10)
await _async_commit_and_wait(hass)
client = await hass_client()
entries = await _async_fetch_logbook(client)
assert len(entries) == 2
_assert_entry(
entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
)
_assert_entry(entries[1], name="blu", entity_id=entity_id)
async def _async_fetch_logbook(client):
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day) - timedelta(hours=24)
# Test today entries without filters
end_time = start + timedelta(hours=48)
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?end_time={end_time}"
)
assert response.status == 200
return await response.json()
async def _async_commit_and_wait(hass):
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
await hass.async_block_till_done()
def _assert_entry(
entry, when=None, name=None, message=None, domain=None, entity_id=None
):
"""Assert an entry is what is expected."""
if when:
assert when.isoformat() == entry["when"]
if name:
assert name == entry["name"]
if message:
assert message == entry["message"]
if domain:
assert domain == entry["domain"]
if entity_id:
assert entity_id == entry["entity_id"]
class MockLazyEventPartialState(ha.Event):
"""Minimal mock of a Lazy event."""
@property
def data_entity_id(self):
"""Lookup entity id."""
return self.data.get(ATTR_ENTITY_ID)
@property
def data_domain(self):
"""Lookup domain."""
return self.data.get(ATTR_DOMAIN)
@property
def time_fired_minute(self):
"""Minute the event was fired."""
return self.time_fired.minute
@property
def context_user_id(self):
"""Context user id of event."""
return self.context.user_id
@property
def context_id(self):
"""Context id of event."""
return self.context.id
@property
def time_fired_isoformat(self):
"""Time event was fired in utc isoformat."""
return process_timestamp_to_utc_isoformat(self.time_fired)
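# Illustrative usage (not part of the test suite): the mock exposes the same lazy
# attributes the logbook code reads from recorder rows, e.g.
#
#     event = MockLazyEventPartialState(
#         "state_changed", {ATTR_ENTITY_ID: "sensor.blu", ATTR_DOMAIN: "sensor"}
#     )
#     assert event.data_entity_id == "sensor.blu"
#     assert event.data_domain == "sensor"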
|
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import (
Any,
AsyncIterator,
Awaitable,
Callable,
Sequence,
Tuple,
Optional,
Iterator,
)
from google.cloud.recaptchaenterprise_v1.types import recaptchaenterprise
class ListKeysPager:
"""A pager for iterating through ``list_keys`` requests.
This class thinly wraps an initial
:class:`google.cloud.recaptchaenterprise_v1.types.ListKeysResponse` object, and
provides an ``__iter__`` method to iterate through its
``keys`` field.
If there are more pages, the ``__iter__`` method will make additional
``ListKeys`` requests and continue to iterate
through the ``keys`` field on the
corresponding responses.
All the usual :class:`google.cloud.recaptchaenterprise_v1.types.ListKeysResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(
self,
method: Callable[..., recaptchaenterprise.ListKeysResponse],
request: recaptchaenterprise.ListKeysRequest,
response: recaptchaenterprise.ListKeysResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()
):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.recaptchaenterprise_v1.types.ListKeysRequest):
The initial request object.
response (google.cloud.recaptchaenterprise_v1.types.ListKeysResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = recaptchaenterprise.ListKeysRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterator[recaptchaenterprise.ListKeysResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterator[recaptchaenterprise.Key]:
for page in self.pages:
yield from page.keys
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
class ListKeysAsyncPager:
"""A pager for iterating through ``list_keys`` requests.
This class thinly wraps an initial
:class:`google.cloud.recaptchaenterprise_v1.types.ListKeysResponse` object, and
provides an ``__aiter__`` method to iterate through its
``keys`` field.
If there are more pages, the ``__aiter__`` method will make additional
``ListKeys`` requests and continue to iterate
through the ``keys`` field on the
corresponding responses.
All the usual :class:`google.cloud.recaptchaenterprise_v1.types.ListKeysResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(
self,
method: Callable[..., Awaitable[recaptchaenterprise.ListKeysResponse]],
request: recaptchaenterprise.ListKeysRequest,
response: recaptchaenterprise.ListKeysResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()
):
"""Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.recaptchaenterprise_v1.types.ListKeysRequest):
The initial request object.
response (google.cloud.recaptchaenterprise_v1.types.ListKeysResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = recaptchaenterprise.ListKeysRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(self) -> AsyncIterator[recaptchaenterprise.ListKeysResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(self) -> AsyncIterator[recaptchaenterprise.Key]:
async def async_generator():
async for page in self.pages:
for response in page.keys:
yield response
return async_generator()
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
class ListRelatedAccountGroupsPager:
"""A pager for iterating through ``list_related_account_groups`` requests.
This class thinly wraps an initial
:class:`google.cloud.recaptchaenterprise_v1.types.ListRelatedAccountGroupsResponse` object, and
provides an ``__iter__`` method to iterate through its
``related_account_groups`` field.
If there are more pages, the ``__iter__`` method will make additional
``ListRelatedAccountGroups`` requests and continue to iterate
through the ``related_account_groups`` field on the
corresponding responses.
All the usual :class:`google.cloud.recaptchaenterprise_v1.types.ListRelatedAccountGroupsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(
self,
method: Callable[..., recaptchaenterprise.ListRelatedAccountGroupsResponse],
request: recaptchaenterprise.ListRelatedAccountGroupsRequest,
response: recaptchaenterprise.ListRelatedAccountGroupsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()
):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.recaptchaenterprise_v1.types.ListRelatedAccountGroupsRequest):
The initial request object.
response (google.cloud.recaptchaenterprise_v1.types.ListRelatedAccountGroupsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = recaptchaenterprise.ListRelatedAccountGroupsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterator[recaptchaenterprise.ListRelatedAccountGroupsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterator[recaptchaenterprise.RelatedAccountGroup]:
for page in self.pages:
yield from page.related_account_groups
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
class ListRelatedAccountGroupsAsyncPager:
"""A pager for iterating through ``list_related_account_groups`` requests.
This class thinly wraps an initial
:class:`google.cloud.recaptchaenterprise_v1.types.ListRelatedAccountGroupsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``related_account_groups`` field.
If there are more pages, the ``__aiter__`` method will make additional
``ListRelatedAccountGroups`` requests and continue to iterate
through the ``related_account_groups`` field on the
corresponding responses.
All the usual :class:`google.cloud.recaptchaenterprise_v1.types.ListRelatedAccountGroupsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(
self,
method: Callable[
..., Awaitable[recaptchaenterprise.ListRelatedAccountGroupsResponse]
],
request: recaptchaenterprise.ListRelatedAccountGroupsRequest,
response: recaptchaenterprise.ListRelatedAccountGroupsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()
):
"""Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.recaptchaenterprise_v1.types.ListRelatedAccountGroupsRequest):
The initial request object.
response (google.cloud.recaptchaenterprise_v1.types.ListRelatedAccountGroupsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = recaptchaenterprise.ListRelatedAccountGroupsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(
self,
) -> AsyncIterator[recaptchaenterprise.ListRelatedAccountGroupsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(self) -> AsyncIterator[recaptchaenterprise.RelatedAccountGroup]:
async def async_generator():
async for page in self.pages:
for response in page.related_account_groups:
yield response
return async_generator()
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
class ListRelatedAccountGroupMembershipsPager:
"""A pager for iterating through ``list_related_account_group_memberships`` requests.
This class thinly wraps an initial
:class:`google.cloud.recaptchaenterprise_v1.types.ListRelatedAccountGroupMembershipsResponse` object, and
provides an ``__iter__`` method to iterate through its
``related_account_group_memberships`` field.
If there are more pages, the ``__iter__`` method will make additional
``ListRelatedAccountGroupMemberships`` requests and continue to iterate
through the ``related_account_group_memberships`` field on the
corresponding responses.
All the usual :class:`google.cloud.recaptchaenterprise_v1.types.ListRelatedAccountGroupMembershipsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(
self,
method: Callable[
..., recaptchaenterprise.ListRelatedAccountGroupMembershipsResponse
],
request: recaptchaenterprise.ListRelatedAccountGroupMembershipsRequest,
response: recaptchaenterprise.ListRelatedAccountGroupMembershipsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()
):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.recaptchaenterprise_v1.types.ListRelatedAccountGroupMembershipsRequest):
The initial request object.
response (google.cloud.recaptchaenterprise_v1.types.ListRelatedAccountGroupMembershipsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = recaptchaenterprise.ListRelatedAccountGroupMembershipsRequest(
request
)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(
self,
) -> Iterator[recaptchaenterprise.ListRelatedAccountGroupMembershipsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterator[recaptchaenterprise.RelatedAccountGroupMembership]:
for page in self.pages:
yield from page.related_account_group_memberships
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
class ListRelatedAccountGroupMembershipsAsyncPager:
"""A pager for iterating through ``list_related_account_group_memberships`` requests.
This class thinly wraps an initial
:class:`google.cloud.recaptchaenterprise_v1.types.ListRelatedAccountGroupMembershipsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``related_account_group_memberships`` field.
If there are more pages, the ``__aiter__`` method will make additional
``ListRelatedAccountGroupMemberships`` requests and continue to iterate
through the ``related_account_group_memberships`` field on the
corresponding responses.
All the usual :class:`google.cloud.recaptchaenterprise_v1.types.ListRelatedAccountGroupMembershipsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(
self,
method: Callable[
...,
Awaitable[recaptchaenterprise.ListRelatedAccountGroupMembershipsResponse],
],
request: recaptchaenterprise.ListRelatedAccountGroupMembershipsRequest,
response: recaptchaenterprise.ListRelatedAccountGroupMembershipsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()
):
"""Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.recaptchaenterprise_v1.types.ListRelatedAccountGroupMembershipsRequest):
The initial request object.
response (google.cloud.recaptchaenterprise_v1.types.ListRelatedAccountGroupMembershipsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = recaptchaenterprise.ListRelatedAccountGroupMembershipsRequest(
request
)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(
self,
) -> AsyncIterator[recaptchaenterprise.ListRelatedAccountGroupMembershipsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(
self,
) -> AsyncIterator[recaptchaenterprise.RelatedAccountGroupMembership]:
async def async_generator():
async for page in self.pages:
for response in page.related_account_group_memberships:
yield response
return async_generator()
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
class SearchRelatedAccountGroupMembershipsPager:
"""A pager for iterating through ``search_related_account_group_memberships`` requests.
This class thinly wraps an initial
:class:`google.cloud.recaptchaenterprise_v1.types.SearchRelatedAccountGroupMembershipsResponse` object, and
provides an ``__iter__`` method to iterate through its
``related_account_group_memberships`` field.
If there are more pages, the ``__iter__`` method will make additional
``SearchRelatedAccountGroupMemberships`` requests and continue to iterate
through the ``related_account_group_memberships`` field on the
corresponding responses.
All the usual :class:`google.cloud.recaptchaenterprise_v1.types.SearchRelatedAccountGroupMembershipsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(
self,
method: Callable[
..., recaptchaenterprise.SearchRelatedAccountGroupMembershipsResponse
],
request: recaptchaenterprise.SearchRelatedAccountGroupMembershipsRequest,
response: recaptchaenterprise.SearchRelatedAccountGroupMembershipsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()
):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.recaptchaenterprise_v1.types.SearchRelatedAccountGroupMembershipsRequest):
The initial request object.
response (google.cloud.recaptchaenterprise_v1.types.SearchRelatedAccountGroupMembershipsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = recaptchaenterprise.SearchRelatedAccountGroupMembershipsRequest(
request
)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(
self,
) -> Iterator[recaptchaenterprise.SearchRelatedAccountGroupMembershipsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterator[recaptchaenterprise.RelatedAccountGroupMembership]:
for page in self.pages:
yield from page.related_account_group_memberships
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
class SearchRelatedAccountGroupMembershipsAsyncPager:
"""A pager for iterating through ``search_related_account_group_memberships`` requests.
This class thinly wraps an initial
:class:`google.cloud.recaptchaenterprise_v1.types.SearchRelatedAccountGroupMembershipsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``related_account_group_memberships`` field.
If there are more pages, the ``__aiter__`` method will make additional
``SearchRelatedAccountGroupMemberships`` requests and continue to iterate
through the ``related_account_group_memberships`` field on the
corresponding responses.
All the usual :class:`google.cloud.recaptchaenterprise_v1.types.SearchRelatedAccountGroupMembershipsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(
self,
method: Callable[
...,
Awaitable[recaptchaenterprise.SearchRelatedAccountGroupMembershipsResponse],
],
request: recaptchaenterprise.SearchRelatedAccountGroupMembershipsRequest,
response: recaptchaenterprise.SearchRelatedAccountGroupMembershipsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()
):
"""Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.recaptchaenterprise_v1.types.SearchRelatedAccountGroupMembershipsRequest):
The initial request object.
response (google.cloud.recaptchaenterprise_v1.types.SearchRelatedAccountGroupMembershipsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = recaptchaenterprise.SearchRelatedAccountGroupMembershipsRequest(
request
)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(
self,
) -> AsyncIterator[
recaptchaenterprise.SearchRelatedAccountGroupMembershipsResponse
]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(
self,
) -> AsyncIterator[recaptchaenterprise.RelatedAccountGroupMembership]:
async def async_generator():
async for page in self.pages:
for response in page.related_account_group_memberships:
yield response
return async_generator()
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
|
|
import urllib
import json
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth.decorators import user_passes_test
from django.core.urlresolvers import reverse
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.forms.util import ErrorList
from crits.core import form_consts
from crits.core.data_tools import json_handler
from crits.core.handsontable_tools import form_to_dict
from crits.core.user_tools import user_can_view_data
from crits.domains.forms import TLDUpdateForm, AddDomainForm
from crits.domains.handlers import edit_domain_name
from crits.domains.handlers import add_new_domain, get_domain_details
from crits.domains.handlers import update_tlds, generate_domain_jtable
from crits.domains.handlers import generate_domain_csv, process_bulk_add_domain
from crits.objects.forms import AddObjectForm
@user_passes_test(user_can_view_data)
def domain_detail(request, domain):
"""
Generate the Domain details page.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param domain: The domain to get details for.
:type domain: str
:returns: :class:`django.http.HttpResponse`
"""
template = "domain_detail.html"
(new_template, args) = get_domain_details(domain,
request.user.username)
if new_template:
template = new_template
return render_to_response(template,
args,
RequestContext(request))
@user_passes_test(user_can_view_data)
def bulk_add_domain(request):
"""
Bulk add domains via a bulk upload form.
Args:
request: The Django context which contains information about the
session and key/value pairs for the bulk add domains request
Returns:
If the request is not a POST and not an Ajax call then:
Returns a rendered HTML form for a bulk add of domains.
If the request is a POST and an Ajax call then:
Returns a response that contains information about the
status of the bulk uploaded domains. This may include information
such as domains that failed or were successfully added, as well
as helpful status messages about each operation.
"""
formdict = form_to_dict(AddDomainForm(request.user))
if request.method == "POST" and request.is_ajax():
response = process_bulk_add_domain(request, formdict)
return HttpResponse(json.dumps(response,
default=json_handler),
mimetype='application/json')
else:
objectformdict = form_to_dict(AddObjectForm(request.user))
return render_to_response('bulk_add_default.html',
{'formdict': formdict,
'objectformdict': objectformdict,
'title': "Bulk Add Domains",
'table_name': 'domain',
'local_validate_columns': [form_consts.Domain.DOMAIN_NAME],
'custom_js': "domain_handsontable.js",
'is_bulk_add_objects': True},
RequestContext(request))
@user_passes_test(user_can_view_data)
def domains_listing(request,option=None):
"""
Generate the Domain listing page.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param option: Action to take.
:type option: str of either 'jtlist', 'jtdelete', 'csv', or 'inline'.
:returns: :class:`django.http.HttpResponse`
"""
if option == "csv":
return generate_domain_csv(request)
return generate_domain_jtable(request, option)
@user_passes_test(user_can_view_data)
def add_domain(request):
"""
Add a domain. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.is_ajax() and request.method == "POST":
add_form = AddDomainForm(request.user, request.POST)
result = False
retVal = {}
errors = []
if add_form.is_valid():
errors = []
data = add_form.cleaned_data
(result, errors, retVal) = add_new_domain(data,
request,
errors)
if errors:
if 'message' not in retVal:
retVal['message'] = ""
elif not isinstance(retVal['message'], str):
retVal['message'] = str(retVal['message'])
for e in errors:
if 'Domain' in e or 'TLD' in e:
dom_form_error = add_form._errors.setdefault("domain",
ErrorList())
dom_form_error.append('Invalid Domain')
elif 'IP' in e:
ip_form_error = add_form._errors.setdefault("ip",
ErrorList())
ip_form_error.append('Invalid IP')
retVal['message'] += '<div>' + str(e) + '</div>'
if not result:
retVal['form'] = add_form.as_table()
retVal['success'] = result
return HttpResponse(json.dumps(retVal,
default=json_handler),
mimetype="application/json")
else:
return render_to_response("error.html",
{"error" : 'Expected POST' },
RequestContext(request))
@user_passes_test(user_can_view_data)
def edit_domain(request, domain):
"""
Edit a domain. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param domain: The domain to edit.
:type domain: str
:returns: :class:`django.http.HttpResponse`
"""
if request.method == "POST" and request.is_ajax():
new_name = request.POST.get('value')
analyst = request.user.username
if edit_domain_name(domain, new_name, analyst):
return HttpResponse(new_name)
else:
return HttpResponse(domain)
else:
return render_to_response("error.html",
{"error" : 'Expected AJAX POST' },
RequestContext(request))
@user_passes_test(user_can_view_data)
def domain_search(request):
"""
Search for domains.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponseRedirect`
"""
query = {}
query[request.GET.get('search_type', '')] = request.GET.get('q', '').strip()
#return render_to_response('error.html', {'error': query})
return HttpResponseRedirect(reverse('crits.domains.views.domains_listing')
+ "?%s" % urllib.urlencode(query))
@user_passes_test(user_can_view_data)
def tld_update(request):
"""
Update TLDs. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponseRedirect`
"""
if request.method == 'POST':
form = TLDUpdateForm(request.POST, request.FILES)
if form.is_valid():
filedata = request.FILES['filedata']
result = update_tlds(filedata)
if result['success']:
response = {'success': True,
'message': 'Success! <a href="%s">Go to Domains.</a>'
% reverse('crits.domains.views.domains_listing')}
else:
response = {'success': False, 'form': form.as_table()}
else:
response = {'success': False, 'form': form.as_table()}
return render_to_response('file_upload_response.html',
{'response': json.dumps(response)},
RequestContext(request))
else:
return render_to_response('error.html',
{'error': 'Expected POST'},
RequestContext(request))
|
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014, 2015, 2016 Carlos Jenkins <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from sys import stderr, hexversion
logging.basicConfig(stream=stderr)
import hmac
from hashlib import sha1
from json import loads, dumps
from subprocess import Popen, PIPE
from tempfile import mkstemp
from os import access, X_OK, remove, fdopen
from os.path import isfile, abspath, normpath, dirname, join, basename
import requests
from ipaddress import ip_address, ip_network
from flask import Flask, request, abort
application = Flask(__name__)
@application.route('/', methods=['GET', 'POST'])
def index():
"""
Main WSGI application entry.
"""
path = normpath(abspath(dirname(__file__)))
# Only POST is implemented
if request.method != 'POST':
abort(501)
# Load config
with open(join(path, 'config.json'), 'r') as cfg:
config = loads(cfg.read())
hooks = config.get('hooks_path', join(path, 'hooks'))
# Allow Github IPs only
if config.get('github_ips_only', True):
src_ip = ip_address(
u'{}'.format(request.access_route[0]) # Fix stupid ipaddress issue
)
whitelist = requests.get('https://api.github.com/meta').json()['hooks']
for valid_ip in whitelist:
if src_ip in ip_network(valid_ip):
break
else:
logging.error('IP {} not allowed'.format(
src_ip
))
abort(403)
# Enforce secret
secret = config.get('enforce_secret', '')
if secret:
# Only SHA1 is supported
header_signature = request.headers.get('X-Hub-Signature')
if header_signature is None:
abort(403)
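# The signature header has the form 'sha1=<hex HMAC-SHA1 of the raw request body>'.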
sha_name, signature = header_signature.split('=')
if sha_name != 'sha1':
abort(501)
# HMAC requires the key to be bytes, but data is string
mac = hmac.new(str(secret), msg=request.data, digestmod='sha1')
# Python prior to 2.7.7 does not have hmac.compare_digest
if hexversion >= 0x020707F0:
if not hmac.compare_digest(str(mac.hexdigest()), str(signature)):
abort(403)
else:
# What compare_digest provides is protection against timing
# attacks; we can live without this protection for a web-based
# application
if not str(mac.hexdigest()) == str(signature):
abort(403)
# Implement ping
event = request.headers.get('X-GitHub-Event', 'ping')
if event == 'ping':
return dumps({'msg': 'pong'})
# Gather data
try:
payload = request.get_json()
except Exception:
logging.warning('Request parsing failed')
abort(400)
# Determining the branch is tricky, as it only appears for certain event
# types and at different levels
branch = None
try:
# Case 1: a ref_type indicates the type of ref.
# This is true for create and delete events.
if 'ref_type' in payload:
if payload['ref_type'] == 'branch':
branch = payload['ref']
# Case 2: a pull_request object is involved. This is pull_request and
# pull_request_review_comment events.
elif 'pull_request' in payload:
# This is the TARGET branch for the pull-request, not the source
# branch
branch = payload['pull_request']['base']['ref']
elif event in ['push']:
# Push events provide a full Git ref in 'ref' and not a 'ref_type'.
branch = payload['ref'].split('/', 2)[2]
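# e.g. 'refs/heads/feature/login' -> 'feature/login'; the split is limited to 2
# so branch names that themselves contain '/' stay intact.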
except KeyError:
# If the payload structure isn't what we expect, we'll live without
# the branch name
pass
# All current events have a repository, but some legacy events do not,
# so let's be safe
name = payload['repository']['name'] if 'repository' in payload else None
meta = {
'name': name,
'branch': branch,
'event': event
}
logging.info('Metadata:\n{}'.format(dumps(meta)))
# Skip push-delete
if event == 'push' and payload['deleted']:
logging.info('Skipping push-delete event for {}'.format(dumps(meta)))
return dumps({'status': 'skipped'})
# Possible hooks
scripts = []
if branch and name:
scripts.append(join(hooks, '{event}-{name}-{branch}'.format(**meta)))
if name:
scripts.append(join(hooks, '{event}-{name}'.format(**meta)))
scripts.append(join(hooks, '{event}'.format(**meta)))
scripts.append(join(hooks, 'all'))
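# For example, a push to branch 'main' of repository 'myrepo' is offered to
# 'push-myrepo-main', 'push-myrepo', 'push' and 'all' (in that order) inside the
# hooks directory.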
# Check permissions
scripts = [s for s in scripts if isfile(s) and access(s, X_OK)]
if not scripts:
return dumps({'status': 'nop'})
# Save payload to a temporary file
osfd, tmpfile = mkstemp()
with fdopen(osfd, 'w') as pf:
pf.write(dumps(payload))
# Run scripts
ran = {}
for s in scripts:
proc = Popen(
[s, tmpfile, event],
stdout=PIPE, stderr=PIPE
)
stdout, stderr = proc.communicate()
ran[basename(s)] = {
'returncode': proc.returncode,
'stdout': stdout.decode('utf-8'),
'stderr': stderr.decode('utf-8'),
}
# Log errors if a hook failed
if proc.returncode != 0:
logging.error('{} : {} \n{}'.format(
s, proc.returncode, stderr
))
# Remove temporary file
remove(tmpfile)
info = config.get('return_scripts_info', False)
if not info:
return dumps({'status': 'done'})
output = dumps(ran, sort_keys=True, indent=4)
logging.info(output)
return output
if __name__ == '__main__':
application.run(debug=True, host='0.0.0.0')
|
|
from merc.actor import Actor
from merc.collision import Collision
from merc.player import Player
from pprint import pprint
import sys
import json
import numpy
import itertools
from PIL import Image, ImageDraw, ImageColor
class Game:
NUM_TWEENS = 10
def __init__(self, data):
"""
`data` should contain the JSON output of Octane
"""
self.__dict__ = data
self.frame = None
self.tween = 0
self.seconds_remaining = 0
self.actors = {}
self.players = {}
self.ball_actor = None
self.grouped_actors = {}
def processFrames(self):
"""
Step through the frames one by one. Build the actors, update the game state,
link the actors, and generate stats.
"""
for frame in self.Frames:
self.frame = frame
for id, data in frame['Spawned'].items():
self.actors[id] = Actor(id, data)
for id, data in frame['Updated'].items():
self.actors[id].update(data, self.frame['Number'])
self.updateState()
self.linkActors()
#self.checkCollisions()
def updateState(self):
"""
Update the game state. Creates a sort of cache to help find commonly used stuff.
"""
self.ball_actor = None
self.grouped_actors = {}
for actor in self.actors.values():
actor_class = actor.getClass()
if actor_class == 'TAGame.GameEvent_Soccar_TA':
# shortcut for the time remaining
s = actor.getProp('TAGame.GameEvent_Soccar_TA:SecondsRemaining', -1)
if s >= 0:
self.seconds_remaining = s
elif actor_class == 'TAGame.Ball_TA':
# shortcut to find the ball actor
self.ball_actor = actor
else:
# group similar actors together
if actor_class not in self.grouped_actors:
self.grouped_actors[actor_class] = []
self.grouped_actors[actor_class].append(actor)
def linkActors(self):
"""
Some actors have relationships with each other, so we set those relationships here.
"""
# Relationship chain: components -> car -> pri -> team
# link pri -> team
if 'TAGame.PRI_TA' in self.grouped_actors:
for pri_actor in self.grouped_actors['TAGame.PRI_TA']:
if hasattr(pri_actor, 'team'):
continue
team_prop = pri_actor.getProp('Engine.PlayerReplicationInfo:Team')
if not team_prop:
continue
pri_actor.team = self.findActor(team_prop[1])
# link components to car
components = [
'TAGame.CarComponent_Boost_TA',
'TAGame.CarComponent_Jump_TA',
'TAGame.CarComponent_DoubleJump_TA',
'TAGame.CarComponent_Dodge_TA',
'TAGame.CarComponent_FlipCar_TA',
]
for component in components:
if component in self.grouped_actors:
for component_actor in self.grouped_actors[component]:
if hasattr(component_actor, 'car'):
continue
car_prop = component_actor.getProp('TAGame.CarComponent_TA:Vehicle')
if not car_prop:
continue
component_actor.car = self.findActor(car_prop[1])
if not component_actor.car:
continue
if not hasattr(component_actor.car, 'components'):
component_actor.car.components = []
if 'TAGame.Car_TA' in self.grouped_actors:
# link car -> pri
for car_actor in self.grouped_actors['TAGame.Car_TA']:
if hasattr(car_actor, 'pri'):
continue
pri_prop = car_actor.getProp('Engine.Pawn:PlayerReplicationInfo')
if not pri_prop:
continue
car_actor.pri = self.findActor(pri_prop[1])
# create / update players
for car_actor in self.grouped_actors['TAGame.Car_TA']:
player_id = car_actor.getPlayerId()
if not player_id:
continue
if player_id not in self.players:
self.players[player_id] = Player(player_id)
self.players[player_id].update(car_actor)
def findActor(self, find_actor_id):
"""
Attempts to find and return an actor with the given `find_actor_id`. Returns
None when the actor cannot be found.
"""
find_actor_id = int(find_actor_id)
for actor_id, actor in self.actors.items():
if int(actor_id) == find_actor_id:
return actor
return None
def checkCollisions(self):
"""
Determine when and where each collision happened during this game. Save
the collision data in `self.players`.
"""
if 'TAGame.Car_TA' not in self.grouped_actors:
# no need to check collisions when no cars exist
return
# Each frame we only want to check actor pairs that are within
# Collision.FRAME_CHECK_RADIUS units of each other, and we only tween when at
# least one such pair exists.
# create tuples of actors that we want to check this frame
pairs = []
ball = self.ball_actor
for car in self.grouped_actors['TAGame.Car_TA']:
# we don't want to check cars that aren't linked with players yet
player_id = car.getPlayerId()
if not player_id:
continue
player = self.players[player_id]
# check if the last collision with the ball was within a certain number of frames
# if it is, we should skip this pair
last_collision = player.getLastCollisionWithActor(ball.id)
if last_collision and last_collision.frame_number > self.frame['Number'] - Collision.MIN_FRAMES_BETWEEN:
continue
# skip if the distance is over the limit
dist = self.distance(ball, car)
if not dist:
continue
if dist > Collision.FRAME_CHECK_RADIUS:
continue
pairs.append((ball, car))
if len(pairs) <= 0:
# only tween if any pairs need to be checked
return
self.tween = 0
# save which actors have collided
collided = []
for i in range(self.NUM_TWEENS):
for actor1, actor2 in pairs:
# combine actor ids into a key for faster lookup
key = actor1.id + actor2.id * 1024
if key in collided:
# don't allow multiple collisions between the same actors per frame
continue
# determine the check radius
check_radius = Collision.CAR_AND_BALL_RADIUS
if actor1.isClass('TAGame.Car_TA'):
if actor2.isClass('TAGame.Car_TA'):
check_radius = Collision.CAR_AND_CAR_RADIUS
else:
check_radius = Collision.CAR_AND_BALL_RADIUS
collision = self.collides(actor1, actor2, check_radius)
if collision:
self.handleCollision(actor1, actor2, collision)
collided.append(key)
self.tween += 1
self.tween = 0
def handleCollision(self, actor1, actor2, collision):
"""
Handles a single collision between two actors.
"""
if (actor1.isClass('TAGame.Car_TA')):
player_id = actor1.getPlayerId()
if player_id:
self.players[player_id].addCollision(collision)
if (actor2.isClass('TAGame.Car_TA')):
player_id = actor2.getPlayerId()
if player_id:
self.players[player_id].addCollision(collision)
print("*** Collision! ***", self.seconds_remaining, self.frame['Number'], self.tween, "[{0}] x [{1}]".format(actor1.getName(), actor2.getName()), collision.point)
def distance(self, actor1, actor2, return_midpoint=False):
"""
Returns the distance between two actors. Optionally also returns the midpoint
between those two actors.
"""
rval = False
if return_midpoint:
rval = (False, False)
rb1 = actor1.getRB(self.frame['Number'], self.tween, self.NUM_TWEENS)
rb2 = actor2.getRB(self.frame['Number'], self.tween, self.NUM_TWEENS)
if not rb1 or not rb2:
return rval
p1 = numpy.array(rb1['Position'])
p2 = numpy.array(rb2['Position'])
dist = numpy.linalg.norm(p1 - p2)
if return_midpoint:
return (dist, numpy.median([p1, p2], axis=0))
return dist
def collides(self, actor1, actor2, check_radius):
"""
Returns a Collision if the two actors intersect. Otherwise returns False.
"""
(dist, midpoint) = self.distance(actor1, actor2, True)
if not dist:
return False
if dist > check_radius + Collision.TOLERANCE:
return False
shape1 = actor1.getShape(self.frame['Number'], self.tween, self.NUM_TWEENS)
shape2 = actor2.getShape(self.frame['Number'], self.tween, self.NUM_TWEENS)
if not shape1 or not shape2:
return False
if shape1.intersects(shape2, Collision.TOLERANCE):
return Collision(midpoint, self.frame['Number'], actor1.id, actor2.id)
return False
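# Illustrative driver sketch (the replay file name is hypothetical; `data` must be
# the JSON produced by Octane, as noted in __init__):
#
#     with open('replay.json') as fh:
#         game = Game(json.load(fh))
#     game.processFrames()
#     pprint(game.players)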
|
|
#!/usr/bin/env python3
"""
* This file is part of plainRFM69.
* Copyright (c) 2014, Ivor Wanders
* MIT License, see the LICENSE.md file in the root folder.
"""
import crcmod
from Crypto.Cipher import AES
"""
This script does some analysis on the packets produced by the RFM69 radio
modules. It investigates the cipher mode and the CRC polynomial.
"""
print("Decoding a few packets using ECB cipher mode.\n")
packets=["8E7760F05C573E15AA5BE39470BE70CC202C11692E65C99BCB7BA90B1B61524A02A0ECCC1F2BC60C836CB312E81B3A3F",
"E7D3AF2F7EBE569B7EE2F2F3EE825F2E202C11692E65C99BCB7BA90B1B61524A02A0ECCC1F2BC60C836CB312E81B3A3F",
"ED660A0682E94BBBBB98D9E84B7EBDE3202C11692E65C99BCB7BA90B1B61524A02A0ECCC1F2BC60C836CB312E81B3A3F",
"B720DD819F49264684815C4767BC5A8B202C11692E65C99BCB7BA90B1B61524A02A0ECCC1F2BC60C836CB312E81B3A3F",
"5ACE8F6CC4710212D2CB294792BF1D7E202C11692E65C99BCB7BA90B1B61524A02A0ECCC1F2BC60C836CB312E81B3A3F",
"B26162233AFC2D47ADDFB4B92D0697C2202C11692E65C99BCB7BA90B1B61524A02A0ECCC1F2BC60C836CB312E81B3A3F",
"DBC3A0068B8ACAB1867B6F8E897F8DA0202C11692E65C99BCB7BA90B1B61524A02A0ECCC1F2BC60C836CB312E81B3A3F",
"D8AD1CA54F5F086861C5C9808E020903202C11692E65C99BCB7BA90B1B61524A02A0ECCC1F2BC60C836CB312E81B3A3F",
"BA9EDEA3D76ED5A123A1314F86296BCB202C11692E65C99BCB7BA90B1B61524A02A0ECCC1F2BC60C836CB312E81B3A3F",
"A213715080D4BE92C80EAF1ADF1C8EF0202C11692E65C99BCB7BA90B1B61524A02A0ECCC1F2BC60C836CB312E81B3A3F",
"0216DC6EA3C50DF948AB69A0A578A9BC202C11692E65C99BCB7BA90B1B61524A02A0ECCC1F2BC60C836CB312E81B3A3F",
"23E0D6A53E72AFA05C7E0AA438A10080202C11692E65C99BCB7BA90B1B61524A02A0ECCC1F2BC60C836CB312E81B3A3F",
]
aes_ECB_zeros = AES.new(bytes([0 for i in range(0,16)]), AES.MODE_ECB)
packetbytes = [bytes([int(j[i:i+2],16) for i in range(0,len(j), 2)]) for j in packets]
# print(packetbytes)
for j in packetbytes:
print(aes_ECB_zeros.decrypt(j))
# Sent with:
# uint8_t key[16] = {146, 48, 0, 16, 31, 203, 208, 65, 31, 10, 94, 64, 8, 198, 226, 121};
# rfm->setPacketConfig1(RFM69_PACKET_CONFIG_CRC_ON);
# rfm->setPacketConfig2(11, true, true, true);
# rfm->setPayloadLength(20);
# Received as:
# 0xAA0000004445464748494A4B4C4D4E4F505152530000000000
# 0xAD0000004445464748494A4B4C4D4E4F505152530000000000
# Received with CRC off, AES off, payload length of 34.
# 0xA02AEBADF27DBBB36F3928BD53A3BC87AD7159E950A85F4ABF59F043828932B7226E
# 0x2B240611993CEB856A07FD353C940ACAAD7159E950A85F4ABF59F043828932B73A22
# 0x2475FEF5D93D2EE636B43584DEDCF622AD7159E950A85F4ABF59F043828932B7A639
# 0x27D3806ED8A8BB63BD700B9FAE8B64C9AD7159E950A85F4ABF59F043828932B782D4
# 0x652729B2F2A75C1279AF2833417DFCF5AD7159E950A85F4ABF59F043828932B76DA1
secondblob= """0x40C3D18D9DD0B5AA282163095BCAA2A3AD7159E950A85F4ABF59F043828932B71FCE
0x85096C2B74B868AB0028B8EB1C5F32DFAD7159E950A85F4ABF59F043828932B74531
0x775CA8AC1172E7BE0087620AB85A3FA5AD7159E950A85F4ABF59F043828932B79BC8
0x2171F599317F14647152AF7575878392AD7159E950A85F4ABF59F043828932B719EF
0x9EEA1DEDD22250EBE86A9E76C8FD09E9AD7159E950A85F4ABF59F043828932B7EBA2
0x69B8EE198B6D1D86F2C325C99433365BAD7159E950A85F4ABF59F043828932B70D11
0x0B4CF2CAC1C54C7FBC74166E56DDB8BEAD7159E950A85F4ABF59F043828932B79FD5
0xEAC5E015D784F5EBD52288E29DE829E1AD7159E950A85F4ABF59F043828932B74D6F
0x04C6F98A3AE2F734F04B5AD6BCD331B7AD7159E950A85F4ABF59F043828932B7F59C
0xEC8BC1DF88627E05CA4E7A8E61FF66A4AD7159E950A85F4ABF59F043828932B73DAC
0xCBE28965605C1DCCC49DE89E70740AC4AD7159E950A85F4ABF59F043828932B7E348
0xDA0F57C322198373F8B8D5F1C2092973AD7159E950A85F4ABF59F043828932B75675
0x712E58D47D65099DA1284FD2468A2D92AD7159E950A85F4ABF59F043828932B7636B
0xEB9749AF00AD035E9F5FD10192B0BE8DAD7159E950A85F4ABF59F043828932B77780
0x78A93AE0086ACAE9C40EB21BC1ED3780AD7159E950A85F4ABF59F043828932B76758
0x5C5F1708EAFFB0AE5C099D80B8E4EA1AAD7159E950A85F4ABF59F043828932B77233
0x3DCF439921381E5759032FDC4A1A5256AD7159E950A85F4ABF59F043828932B7F9C2
0x7AB451B10DFFF162876E8A5CC7A43624AD7159E950A85F4ABF59F043828932B729A3
0x200F94DFABB83D2AD9257B89861E0838AD7159E950A85F4ABF59F043828932B70ABB
0x2A1C2DE6E3E61B18F4BE6F2A8735B994AD7159E950A85F4ABF59F043828932B71458
0xED7A09F2F4855703ED61F2BF900FB05AAD7159E950A85F4ABF59F043828932B7965E
0xAD86E27BFCC2ADB7163D8EC3B1F2C7EFAD7159E950A85F4ABF59F043828932B7F02A
0xE2DF08D40982ED92F3B7240165ACFC7CAD7159E950A85F4ABF59F043828932B76F0D
0xE48A637BB4FC8163CDAB64B31CB91D94AD7159E950A85F4ABF59F043828932B7646B
0xBFB6B5347425C5E0878C70AB4034C51BAD7159E950A85F4ABF59F043828932B71CF6
0x3AF7D0B56189B60990732EE7CC0AA3B7AD7159E950A85F4ABF59F043828932B7FC0D
0x99CB3B7C809BF435337861DA21A409A9AD7159E950A85F4ABF59F043828932B76515
0xB852EC76F7E645387224FA49A55A681AAD7159E950A85F4ABF59F043828932B768BC
0x2BFEE1521309749AFA635366CA2A959CAD7159E950A85F4ABF59F043828932B7F8D5
0x9A5CBD91C84EBE906EF16A818FCCB7F4AD7159E950A85F4ABF59F043828932B73458
0x4BEEC7794EF3E72DD8432524E7621428AD7159E950A85F4ABF59F043828932B78ED5
0x57E40429BD34F151D9D7A8A5758F7903AD7159E950A85F4ABF59F043828932B70038
0xD45FDF85ACB352A02275448281DEF736AD7159E950A85F4ABF59F043828932B761CB
0x759BFBB7F8E04F335B285DA414F69D6DAD7159E950A85F4ABF59F043828932B7323C
0x0F9F13D6B72AF71E9B34A28671DB12B8AD7159E950A85F4ABF59F043828932B711D1
0x9382E8DFC3803F9F1D3CC169EFC2D6FAAD7159E950A85F4ABF59F043828932B7967E
0xA3D0603CFBE16DBC9E8EB9F625BF0014AD7159E950A85F4ABF59F043828932B72C7A"""
print("\n\nCalculating CRC XOR and INIT parameters with several packets.\n")
packets=[
"A02AEBADF27DBBB36F3928BD53A3BC87AD7159E950A85F4ABF59F043828932B7226E",
"2B240611993CEB856A07FD353C940ACAAD7159E950A85F4ABF59F043828932B73A22",
"2475FEF5D93D2EE636B43584DEDCF622AD7159E950A85F4ABF59F043828932B7A639",
"27D3806ED8A8BB63BD700B9FAE8B64C9AD7159E950A85F4ABF59F043828932B782D4",
"652729B2F2A75C1279AF2833417DFCF5AD7159E950A85F4ABF59F043828932B76DA1"]
packetbytes = [bytes([int(j[i:i+2],16) for i in range(0,len(j), 2)]) for j in packets]
# print(packetbytes)
crcbytes = [int(a[-4:],16) for a in packets]
# packetbytes = [bytes([int(j[i+2:i+2+2],16) for i in range(0,len(j)-2, 2)]) for j in secondblob.split("\n")]
# crcbytes = [int(a[-4:],16) for a in secondblob.split("\n")]
# print(crcbytes)
payload = [a[0:-2] for a in packetbytes]
# print(payload)
isDone = False
for init in range(0,0xFFFF):
for xor in range(0,0xFFFF):
A = crcmod.mkCrcFun(0x11021, initCrc=init, rev=False, xorOut=xor)
res = [a for a in list(map(A, payload))]
# print(crcbytes)
# print(res1)
z = [0 if res[i] == crcbytes[i] else 1 for i in range(0,len(res))]
if (sum(z) == 0):
print(res)
print(crcbytes)
print("Init: {}".format(init))
print("xor: {}".format(xor))
isDone = True
break
# sys.exit(0)
if (isDone):
break
#resulted in xor=1272, init=0, no reverse
print("Verifying the calculated parameters on another packetcapture.")
# verify that on the new blob.
packetbytes = [bytes([int(j[i+2:i+2+2],16) for i in range(0,len(j)-2, 2)]) for j in secondblob.split("\n")]
crcbytes = [int(a[-4:],16) for a in secondblob.split("\n")]
payload = [a[0:-2] for a in packetbytes]
# print(crcbytes)
A = crcmod.mkCrcFun(0x11021, initCrc=0, rev=False, xorOut=1272)
res = list(map(A, payload))
z = [0 if res[i] == crcbytes[i] else 1 for i in range(0,len(res))]
print(res[0:7])
print(crcbytes[0:7])
if (sum(z) == 0):
print("Yes, correct poly.")
print("crcmod.mkCrcFun(0x11021, initCrc={:d}, rev=False, xorOut={:d})".format(init, xor))
# thing = AES.new(bytes([146, 48, 0, 16, 31, 203, 208, 65, 31, 10, 94, 64, 8, 198, 226, 121]), AES.MODE_ECB)
# decrypted = [thing.decrypt(j[0:16]) + thing.decrypt(j[16:32]) for j in packetbytes]
print("\n\nDetermining how AES cipher is handled.\n")
m ="""
Sent with variable length, no addressing, variable length taken to be 5. AES (0)
Received with fixed length 17, no addressing. AES disabled.
CRC is removed by Rx phase, so that's missing.
"""
print(m)
# print("\nVariable length, no addressing\n");
testblob = """0x05E594B1DA6CD6DD582B92E6C3FB2D0BBF
0x051C485121872B1EA87514326714F73197
0x0546AFBACAC815CC0DF7DC59F0DF5E93AC
0x05B119A2572AE34141F798F8D567157379
0x0570F76CCCFE3B5D83666736587C00C7CF
0x05B62024068A1DBB90B2BC58C4DBCBAB4F
0x055563A1E1193DEE76BFAB68AE5E1E0235
0x05ED021D57E52FC8071B37BCFD3B17487C
0x057656A68E421018542A9E1F55096FE4A8
0x058849BF9CD25613EA53018F8E8C651317
0x05102567E9863433712831B10AA73A85B2
0x05FE36E1A813EE9AB12D6BFB3042AA5D59
0x05F909A334C9C092692C92FD55312389A9
0x05641EB053DCEE83B8C4809025790B5261
0x05FAE6B432A382CAC15D1295CA80EB70E7
0x0566AC800895885E97B03A8A59F25200C1
0x059CD45E03A5973D57D4FEBFBA030359D5"""
print("Packetbytes:")
packetbytes = [bytes([int(j[i+2:i+2+2],16) for i in range(0,len(j)-2, 2)]) for j in testblob.split("\n")]
crcbytes = [int(a[-4:],16) for a in testblob.split("\n")]
aes_ECB_zeros = AES.new(bytes([0 for i in range(0,16)]), AES.MODE_ECB)
for j in packetbytes[0:5]:
print("Cipher: " + str(j))
for j in packetbytes[0:5]:
plaintext = aes_ECB_zeros.decrypt(j[1:])
print("Plain: " + str(plaintext))
"""
Results in:
b'\xad\x01\x02\x03\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\xae\x01\x02\x03\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\xaf\x01\x02\x03\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
"""
m="""
So, as the datasheet states, the length field is not encrypted.
Not described is that the message is zero-padded to 16 bytes AES payload.
Which is necessary for the block cipher to work.
"""
print(m)
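# Minimal sketch of that padding behaviour (assuming the all-zero key used above):
# a 5 byte message is zero-padded to a single 16 byte block before encryption, so
# the over-the-air payload is always a whole number of AES blocks.
example_message = bytes([0xAD, 0x01, 0x02, 0x03, 0x04])
example_padded = example_message + bytes(16 - len(example_message))
example_cipher_block = aes_ECB_zeros.encrypt(example_padded)
print("Example padded block: " + str(example_padded))
print("Example cipher block: " + str(example_cipher_block))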
m="""
Sent with variable length, with addressing, variable length set to 6, AES(0)
Received with fixed length 18, addressing, AES disabled
"""
print("With variable length and adressing")
print(m)
testblob="""0x06ABDBA03075EFA0410C3B7C4206112BB309
0x06AB94E0A6F6B37C4489D484FA8343198406
0x06AB1F2FF5A442F4BB89C420F2D577E2DB44
0x06AB01B88ACE401B5DD17CD96D743632C667
0x06AB4AFD493D859581C4942387991B50768B
0x06AB82228277F697B4D512C17A78A8C42BDA
0x06AB76645F0D6329639DA8F675BB543A2591
0x06AB31794A0E916248A2699C8040D89E58FD
0x06AB10BDE17028E8AAAC9E39C0E8D163863F"""
packetbytes = [bytes([int(j[i+2+2:i+2+2+2],16) for i in range(0,len(j)-4, 2)]) for j in testblob.split("\n")]
"""
b'.\x01\x02\x03\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'/\x01\x02\x03\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'0\x01\x02\x03\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'1\x01\x02\x03\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
"""
# thing = AES.new(bytes([0 for i in range(0,16)]), AES.MODE_ECB)
for j in packetbytes[0:3]:
print("Cipher: " + str(j))
for j in packetbytes[0:3]:
plaintext = aes_ECB_zeros.decrypt(j[1:])
print("Plain: " + str(plaintext))
m="""
So, if AES is used, the data starting after the possible addressing byte or
length and address byte is zero padded to obtain blocks of 16 bytes long.
The length byte is not increased, but the actual data transmitted over the
air is.
Transmitting less than 16 bytes does not result in faster transmissions.
"""
print(m)
|
|
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
''' IMPORTS '''
import xml
import tempfile
import contextlib
import OpenSSL.crypto
from xml.sax.saxutils import escape
import re
''' GLOBALS/PARAMS '''
FETCH_MAX_INCIDENTS = 500
SECURITY_INCIDENT_NODE_XPATH = ".//SecurityIncident"
SECURITY_INCIDENT_SUMMARY_NODE_XPATH = ".//SecurityIncidentSummary"
''' PREREQUISITES '''
@contextlib.contextmanager
def pfx_to_pem(pfx, pfx_password):
""" Decrypts the .pfx file to be used with requests. """
with tempfile.NamedTemporaryFile(suffix=".pem") as t_pem:
f_pem = open(t_pem.name, "wb")
p12 = OpenSSL.crypto.load_pkcs12(pfx, pfx_password)
f_pem.write(OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, p12.get_privatekey()))
f_pem.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, p12.get_certificate()))
ca = p12.get_ca_certificates()
if ca is not None:
for cert in ca:
f_pem.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, cert))
f_pem.close()
yield t_pem.name
def load_server_url():
""" Cleans and loads the server url from the configuration """
url = demisto.params()["server"]
url = re.sub("/+$", "", url)  # strip any trailing slashes
return url
def load_certificate():
""" Loads the certificate and passphrase from the configuration """
cert = demisto.params()["certificate"]
cert = base64.b64decode(cert)
passphrase = demisto.params()["passphrase"] if "passphrase" in demisto.params() else ""
return cert, passphrase
def load_severities():
possible_severities = ["Emergency", "Critical", "Warning", "Informational"]
try:
severities_list = demisto.params()["severities"].replace(" ", "").split(",")
except Exception:
raise Exception("Error parsing severities parameter.")
for s in severities_list:
if s not in possible_severities:
raise Exception("Illegal argument in severities parameter.")
return ",".join(severities_list)
''' GLOBALS/PARAMS '''
SERVER_URL = load_server_url()
CERTIFICATE, CERTIFICATE_PASSPHRASE = load_certificate()
FETCH_SEVERITIES = load_severities()
DST = 1 if time.daylight else 0
''' HELPER FUNCTIONS '''
def strip_unwanted_chars(s):
return re.sub(r'&\S{1,6};', '', s)
def api_call(body, headers):
""" Makes an HTTP Post to the SWS incidents API using the configured certificate """
with pfx_to_pem(CERTIFICATE, CERTIFICATE_PASSPHRASE) as cert:
res = requests.post(url=SERVER_URL + "/SWS/incidents.asmx", cert=cert, data=body, headers=headers)
if res.status_code < 200 or res.status_code >= 300:
raise Exception(
"Got status code " + str(res.status_code) + " with body " + res.content + " with headers " + str(
res.headers))
try:
return xml.etree.ElementTree.fromstring(res.content)
except xml.etree.ElementTree.ParseError as exc:
# in case of a parsing error, try to remove problematic chars and try again.
demisto.debug('failed to parse request content, trying to parse without problematic chars:\n{}'.format(exc))
return xml.etree.ElementTree.fromstring(strip_unwanted_chars(res.content))
def event_to_incident(event):
""" Converts a Symantec event to a Demisto incident """
incident = dict() # type: Dict[str, Any]
incident["name"] = "Incident: %s (%s)" % (event["IncidentNumber"], event["Classification"])
incident["occurred"] = event["TimeCreated"] + "+0%s:00" % DST
incident["rawJSON"] = json.dumps(event)
labels = [] # type: List[str]
incident["labels"] = labels
return incident
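# event_to_incident() example (illustrative values, with DST == 0): an event
# with IncidentNumber "12345", Classification "Malicious Code" and
# TimeCreated "2018-05-01T10:30:00" yields
#   {"name": "Incident: 12345 (Malicious Code)",
#    "occurred": "2018-05-01T10:30:00+00:00", "rawJSON": "...", "labels": []}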
def isoformat(date):
""" Convert a datetime object to asmx ISO format """
return date.isoformat()[:-3] + "Z"
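# isoformat() example: datetime(2018, 5, 1, 10, 30, 0, 123456) ->
# "2018-05-01T10:30:00.123Z" (the [:-3] slice assumes the datetime carries
# a microseconds component).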
''' COMMANDS + REQUESTS FUNCTIONS '''
def test():
now = datetime.utcnow()
get_incidents_list_request(isoformat(now), None, None, 1)
demisto.results("ok")
def fetch_incidents():
t = datetime.utcnow()
now = isoformat(t)
last_run = demisto.getLastRun() and demisto.getLastRun()["time"]
if len(last_run) == 0:
t = t - timedelta(minutes=10)
last_run = isoformat(t)
incidents = []
events = get_incidents_list_request(time=last_run, src_ip=None, severities=FETCH_SEVERITIES,
max_incidents=FETCH_MAX_INCIDENTS)
for event in events:
inc = event_to_incident(event)
incidents.append(inc)
demisto.incidents(incidents)
demisto.setLastRun({"time": now})
def get_incidents_list(time):
src_ip = demisto.args()["sourceIp"] if "sourceIp" in demisto.args() else None
severities = demisto.args()["severities"] if "severities" in demisto.args() else None
max_incidents = demisto.args()["max"] if "max" in demisto.args() else None
# Request events
result = get_incidents_list_request(time, src_ip, severities, max_incidents)
# Set human readable
headers = [
"IncidentNumber",
"TimeCreated",
"Severity",
"Category",
"CountryOfOrigin",
"DaysSeenGlobally",
"SourceIPString",
"Correlation",
"HostNameList",
"IsInternalExternal",
"GlobalLookbackDays",
"LatestKeyEvent",
"CustomerSeverity",
"CountryCode",
"FirstSeenInLast30Days",
"DaysSeenInLast30Days",
"DestOrganizationName",
"SourceOrganizationName",
"FirstSeenGlobally",
"CountryName",
"UserList",
"Classification",
"UpdateTimestampGMT",
"PrevalenceGlobally"
]
hr = tableToMarkdown("Incidents", result, headers)
# Set context
context = {
"Symantec MSS.Incidents list(val.IncidentNumber && val.IncidentNumber === obj.IncidentNumber)": result
}
demisto.results({
"ContentsFormat": formats["json"],
"Type": entryTypes["note"],
"Contents": result,
"EntryContext": context,
"ReadableContentsFormat": formats["markdown"],
"HumanReadable": hr
})
def get_incidents_list_request(time, src_ip, severities, max_incidents):
src_ip = "<SourceIP>%s</SourceIP>" % src_ip if src_ip else ""
severities = "<Severity>%s</Severity>" % severities if severities else ""
max_incidents = "<MaxIncidents>%s</MaxIncidents>" % max_incidents if max_incidents else ""
body = """<?xml version="1.0" encoding="utf-8"?>
<soap12:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" \
xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:soap12="http://www.w3.org/2003/05/soap-envelope">
<soap12:Body>
<IncidentGetList xmlns="https://www.monitoredsecurity.com/">
<StartTimeStampGMT>%s</StartTimeStampGMT>
%s
%s
%s
</IncidentGetList>
</soap12:Body>
</soap12:Envelope>""" % (time, src_ip, severities, max_incidents)
headers = {
"content-Type": "application/soap+xml; charset=utf-8",
"content-Length": str(len(body))
}
root = api_call(body=body, headers=headers)
incident_nodes = root.findall(SECURITY_INCIDENT_SUMMARY_NODE_XPATH)
result = []
for incident in incident_nodes:
string_incident_xml = xml.etree.ElementTree.tostring(incident)
string_incident_json = xml2json(string_incident_xml)
dict_incident = json.loads(string_incident_json)["SecurityIncidentSummary"]
result.append(dict_incident)
return result
def update_incident():
# Fill in required fields from the existing incident (for the api call)
num = demisto.args()["number"]
dict_query = query_incident(num=num, workflow_query=True)
dict_workflow_query = dict_query["WorkFlowDetail"]
# Use the supplied params, filling the missing ones from the existing workflow if possible,
# if not possible - require from user
status = demisto.args()["status"] if "status" in demisto.args() else dict_workflow_query["Status"]
if not status:
raise Exception("No current status, please supply a status parameter")
resolution = demisto.args()["resolution"] if "resolution" in demisto.args() else dict_workflow_query["Resolution"]
if not resolution:
raise Exception("No current resolution, please supply a resolution parameter")
severity = demisto.args()["severity"] if "severity" in demisto.args() else dict_query["Severity"]
if not severity:
raise Exception("No current severity, please supply a severity parameter")
# Optional params
ref = demisto.args()["reference"] if "reference" in demisto.args() else None
comments = demisto.args()["comments"] if "comments" in demisto.args() else None
# Only one of them should exist
assign_to_org = demisto.args()["assignOrganization"] if "assignOrganization" in demisto.args() else None
assign_to_person = demisto.args()["assignPerson"] if "assignPerson" in demisto.args() else None
if assign_to_org and assign_to_person:
raise Exception("Unable to assign to both organization and a person, please choose only one")
if not assign_to_org and not assign_to_person:
if "AssignedOrganization" in dict_workflow_query and dict_workflow_query["AssignedOrganization"]:
assign_to_org = dict_workflow_query["AssignedOrganization"]
elif "AssignedPerson" in dict_workflow_query and dict_workflow_query["AssignedPerson"]:
assign_to_person = dict_workflow_query["AssignedPerson"]
# Make the request with the params
success = update_incident_request(num, status, resolution, ref, severity, assign_to_org, assign_to_person, comments)
# Create result
msg = "Updated successfully" if success else "Update failed"
result = [{"Update status": msg}]
hr = tableToMarkdown("", result)
demisto.results({
"ContentsFormat": formats["text"],
"Type": entryTypes["note"],
"Contents": msg,
"ReadableContentsFormat": formats["markdown"],
"HumanReadable": hr
})
def update_incident_request(num, status, resolution, ref, severity, assign_to_org, assign_to_person, comments):
# Create optional parameter tags if needed
ref = "<Reference>%s</Reference>" % (ref) if ref else ""
assign_to_org = "<AssignedToOrganiztion>%s</AssignedToOrganiztion>" % assign_to_org if assign_to_org else ""
assign_to_person = "<AssignedToPerson>%s</AssignedToPerson>" % assign_to_person if assign_to_person else ""
comments = "<Comments>%s</Comments>" % comments if comments else ""
body = """<?xml version="1.0" encoding="utf-8"?>
<soap12:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" \
xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:soap12="http://www.w3.org/2003/05/soap-envelope">
<soap12:Body>
<UpdateIncidentWorkflow xmlns="https://www.monitoredsecurity.com/">
<IncidentNumber>%s</IncidentNumber>
<Status>%s</Status>
<Resolution>%s</Resolution>
%s
<Severity>%s</Severity>
%s
%s
%s
</UpdateIncidentWorkflow>
</soap12:Body>
</soap12:Envelope>""" % (num, status, resolution, ref, severity, assign_to_org, assign_to_person,
comments)
headers = {
"content-Type": "application/soap+xml; charset=utf-8",
"content-Length": str(len(body))
}
res = api_call(body=body, headers=headers)
res_string_xml = xml.etree.ElementTree.tostring(res)
res_string_json = xml2json(res_string_xml)
dict_res = json.loads(res_string_json)
res = dict_res["Envelope"]["Body"]["UpdateIncidentWorkflowResponse"]["UpdateIncidentWorkflowResult"]
return res == "true"
def query_incident_cmd():
result = query_incident(demisto.args()["number"], workflow_query=True)
# Create minimal signature list
data = result.get("SignatureList", {}).get("Signature") or []
if not isinstance(data, list):
data = [data]
sigs = []
for sig in data:
sig_dict = dict() # type: Dict[str, Any]
sig_dict["SourceIPString"] = sig["SourceIPString"]
sig_dict["SignatureName"] = sig["SignatureName"]
sig_dict["VendorSignature"] = sig["VendorSignature"]
sig_dict["NumberBlocked"] = sig["NumberBlocked"]
sig_dict["NumberNotBlocked"] = sig["NumberNotBlocked"]
sigs.append(sig_dict)
# Set Human readable
flatten_relevant_fields = [{
"Incident Number": result.get("IncidentNumber", ""),
"Time Created": result.get("TimeCreated", ""),
"Status": result.get("WorkFlowDetail", {}).get("Status", ""),
"Classification": result.get("Classification", ""),
"Assigned Person": result.get("WorkFlowDetail", {}).get("AssignedPerson",
"") if result.get("WorkFlowDetail", {}) else "",
"Description": result.get("Description", ""),
"Analyst Assessment": result.get("AnalystAssessment", ""),
"Number of Analyzed Signatures": result.get("NumberOfAnalyzedSignatures", ""),
"Signaturtes": json.dumps(sigs) or "",
"Related Incidents": json.dumps(result.get("RelatedIncidents",
{}).get("IncidentNumber", "")) if result.get("RelatedIncidents",
{}) else "",
"Comment": result.get("IncidentComments", {}).get("IncidentComment",
{}).get("Comment", "") if result.get("IncidentComments",
{}) else ""
}]
headers = [
"Incident Number",
"Time Created",
"Status",
"Classification",
"Assigned Person",
"Description",
"Analyst Assessment",
"Number of Analyzed Signatures",
"Signaturtes",
"Related Incidents",
"Comment"
]
hr = tableToMarkdown("Incident query", flatten_relevant_fields, headers)
# Set context
result_ctx = {
"IncidentNumber": result.get("IncidentNumber", ""),
"NumberOfAnalyzedSignatures": result.get("NumberOfAnalyzedSignatures", ""),
"SignatureList": {
"Signature": sigs
},
"TimeCreated": result.get("TimeCreated", ""),
"Classification": result.get("Classification", ""),
"Description": result.get("Description", ""),
"AnalystAssessment": result.get("AnalystAssessment", ""),
"CountryCode": result.get("CountryCode", ""),
"CountryName": result.get("CountryName", ""),
"RelatedTickets": result.get("RelatedTickets", ""),
"WorkFlowDetail": {
"Status": result.get("WorkFlowDetail", {}).get("Status", ""),
"AssignedPerson": result.get("WorkFlowDetail", {}).get("AssignedPerson", "")
},
"RelatedIncidents": {
"IncidentNumber": result["RelatedIncidents"]["IncidentNumber"] if result.get("RelatedIncidents") else ""
}
}
if result.get('IncidentComments') and result.get('IncidentComments').get('IncidentComment'):
result_ctx["IncidentComments"] = {"IncidentComment": {
"CommentedTimeStampGMT": result["IncidentComments"]["IncidentComment"]["CommentedTimeStampGMT"],
"Comment": result["IncidentComments"]["IncidentComment"]["Comment"],
"CommentedBy": result["IncidentComments"]["IncidentComment"]["CommentedBy"]
}
}
else:
result_ctx["IncidentComments"] = {}
if result.get("IncidentAttachmentItems") and result.get('IncidentAttachmentItems').get('IncidentAttachmentItem'):
result_ctx['IncidentAttachmentItems'] = {"IncidentAttachmentItem": {
"AttachmentNumber": result["IncidentAttachmentItems"]["IncidentAttachmentItem"]["AttachmentNumber"],
"AttachmentName": result["IncidentAttachmentItems"]["IncidentAttachmentItem"]["AttachmentName"],
"UploadDateGMT": result["IncidentAttachmentItems"]["IncidentAttachmentItem"]["UploadDateGMT"],
"UploadBy": result["IncidentAttachmentItems"]["IncidentAttachmentItem"]["UploadBy"],
"Comment": result["IncidentAttachmentItems"]["IncidentAttachmentItem"]["Comment"]
}
}
else:
result_ctx['IncidentAttachmentItems'] = {}
context = {
"Symantec MSS.Incident query(val.IncidentNumber && val.IncidentNumber === obj.IncidentNumber)": result_ctx
}
demisto.results({
"ContentsFormat": formats["json"],
"Type": entryTypes["note"],
"Contents": result,
"EntryContext": context,
"ReadableContentsFormat": formats["markdown"],
"HumanReadable": hr
})
def query_incident(num, workflow_query=False):
query = query_incident_request(num) if not workflow_query else query_incident_workflow_request(num)
return query
def query_incident_request(num):
body = """<?xml version="1.0" encoding="utf-8"?>
<soap12:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" \
xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:soap12="http://www.w3.org/2003/05/soap-envelope">
<soap12:Body>
<IncidentQuery xmlns="https://www.monitoredsecurity.com/">
<IncidentNumber>%s</IncidentNumber>
</IncidentQuery>
</soap12:Body>
</soap12:Envelope>""" % num
headers = {
"content-Type": "application/soap+xml; charset=utf-8",
"content-Length": str(len(body))
}
query = api_call(body=body, headers=headers)
query_node = query.find(SECURITY_INCIDENT_NODE_XPATH)
string_query_xml = xml.etree.ElementTree.tostring(query_node)
string_query_json = xml2json(string_query_xml)
dict_query = json.loads(string_query_json)["SecurityIncident"]
return dict_query
def query_incident_workflow_request(num):
body = """<?xml version="1.0" encoding="utf-8"?>
<soap12:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" \
xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:soap12="http://www.w3.org/2003/05/soap-envelope">
<soap12:Body>
<IncidentWorkflowQuery xmlns="https://www.monitoredsecurity.com/">
<IncidentNumber>%s</IncidentNumber>
</IncidentWorkflowQuery>
</soap12:Body>
</soap12:Envelope>""" % num
headers = {
"content-Type": "application/soap+xml; charset=utf-8",
"content-Length": str(len(body))
}
query = api_call(body=body, headers=headers)
query_node = query.find(SECURITY_INCIDENT_NODE_XPATH)
string_query_xml = xml.etree.ElementTree.tostring(query_node)
string_query_json = xml2json(string_query_xml)
dict_query = json.loads(string_query_json)["SecurityIncident"]
return dict_query
''' COMMANDS MANAGER / SWITCH PANEL '''
LOG('Command being called is %s' % (demisto.command()))
try:
handle_proxy()
if demisto.command() == "fetch-incidents":
fetch_incidents()
if demisto.command() == "test-module":
test()
if demisto.command() == "symantec-mss-update-incident":
update_incident()
if demisto.command() == "symantec-mss-get-incident":
query_incident_cmd()
if demisto.command() == "symantec-mss-incidents-list":
time = demisto.args()["time"] if "time" in demisto.args() else isoformat(
datetime.utcnow() - timedelta(hours=24))
get_incidents_list(time)
# Log exceptions
except Exception as e:
return_error(str(e))
|
|
"""
Websocket based API for Home Assistant.
For more details about this component, please refer to the documentation at
https://home-assistant.io/developers/websocket_api/
"""
import asyncio
from contextlib import suppress
from functools import partial
import json
import logging
from aiohttp import web
import voluptuous as vol
from voluptuous.humanize import humanize_error
from homeassistant.const import (
MATCH_ALL, EVENT_TIME_CHANGED, EVENT_HOMEASSISTANT_STOP,
__version__)
from homeassistant.components import frontend
from homeassistant.core import callback
from homeassistant.remote import JSONEncoder
from homeassistant.helpers import config_validation as cv
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.http.auth import validate_password
from homeassistant.components.http.const import KEY_AUTHENTICATED
from homeassistant.components.http.ban import process_wrong_login
DOMAIN = 'websocket_api'
URL = '/api/websocket'
DEPENDENCIES = ('http',)
MAX_PENDING_MSG = 512
ERR_ID_REUSE = 1
ERR_INVALID_FORMAT = 2
ERR_NOT_FOUND = 3
TYPE_AUTH = 'auth'
TYPE_AUTH_INVALID = 'auth_invalid'
TYPE_AUTH_OK = 'auth_ok'
TYPE_AUTH_REQUIRED = 'auth_required'
TYPE_CALL_SERVICE = 'call_service'
TYPE_EVENT = 'event'
TYPE_GET_CONFIG = 'get_config'
TYPE_GET_PANELS = 'get_panels'
TYPE_GET_SERVICES = 'get_services'
TYPE_GET_STATES = 'get_states'
TYPE_PING = 'ping'
TYPE_PONG = 'pong'
TYPE_RESULT = 'result'
TYPE_SUBSCRIBE_EVENTS = 'subscribe_events'
TYPE_UNSUBSCRIBE_EVENTS = 'unsubscribe_events'
_LOGGER = logging.getLogger(__name__)
JSON_DUMP = partial(json.dumps, cls=JSONEncoder)
AUTH_MESSAGE_SCHEMA = vol.Schema({
vol.Required('type'): TYPE_AUTH,
vol.Required('api_password'): str,
})
SUBSCRIBE_EVENTS_MESSAGE_SCHEMA = vol.Schema({
vol.Required('id'): cv.positive_int,
vol.Required('type'): TYPE_SUBSCRIBE_EVENTS,
vol.Optional('event_type', default=MATCH_ALL): str,
})
UNSUBSCRIBE_EVENTS_MESSAGE_SCHEMA = vol.Schema({
vol.Required('id'): cv.positive_int,
vol.Required('type'): TYPE_UNSUBSCRIBE_EVENTS,
vol.Required('subscription'): cv.positive_int,
})
CALL_SERVICE_MESSAGE_SCHEMA = vol.Schema({
vol.Required('id'): cv.positive_int,
vol.Required('type'): TYPE_CALL_SERVICE,
vol.Required('domain'): str,
vol.Required('service'): str,
vol.Optional('service_data', default=None): dict
})
GET_STATES_MESSAGE_SCHEMA = vol.Schema({
vol.Required('id'): cv.positive_int,
vol.Required('type'): TYPE_GET_STATES,
})
GET_SERVICES_MESSAGE_SCHEMA = vol.Schema({
vol.Required('id'): cv.positive_int,
vol.Required('type'): TYPE_GET_SERVICES,
})
GET_CONFIG_MESSAGE_SCHEMA = vol.Schema({
vol.Required('id'): cv.positive_int,
vol.Required('type'): TYPE_GET_CONFIG,
})
GET_PANELS_MESSAGE_SCHEMA = vol.Schema({
vol.Required('id'): cv.positive_int,
vol.Required('type'): TYPE_GET_PANELS,
})
PING_MESSAGE_SCHEMA = vol.Schema({
vol.Required('id'): cv.positive_int,
vol.Required('type'): TYPE_PING,
})
BASE_COMMAND_MESSAGE_SCHEMA = vol.Schema({
vol.Required('id'): cv.positive_int,
vol.Required('type'): vol.Any(TYPE_CALL_SERVICE,
TYPE_SUBSCRIBE_EVENTS,
TYPE_UNSUBSCRIBE_EVENTS,
TYPE_GET_STATES,
TYPE_GET_SERVICES,
TYPE_GET_CONFIG,
TYPE_GET_PANELS,
TYPE_PING)
}, extra=vol.ALLOW_EXTRA)
def auth_ok_message():
"""Return an auth_ok message."""
return {
'type': TYPE_AUTH_OK,
'ha_version': __version__,
}
def auth_required_message():
"""Return an auth_required message."""
return {
'type': TYPE_AUTH_REQUIRED,
'ha_version': __version__,
}
def auth_invalid_message(message):
"""Return an auth_invalid message."""
return {
'type': TYPE_AUTH_INVALID,
'message': message,
}
def event_message(iden, event):
"""Return an event message."""
return {
'id': iden,
'type': TYPE_EVENT,
'event': event.as_dict(),
}
def error_message(iden, code, message):
"""Return an error result message."""
return {
'id': iden,
'type': TYPE_RESULT,
'success': False,
'error': {
'code': code,
'message': message,
},
}
def pong_message(iden):
"""Return a pong message."""
return {
'id': iden,
'type': TYPE_PONG,
}
def result_message(iden, result=None):
"""Return a success result message."""
return {
'id': iden,
'type': TYPE_RESULT,
'success': True,
'result': result,
}
@asyncio.coroutine
def async_setup(hass, config):
"""Initialize the websocket API."""
hass.http.register_view(WebsocketAPIView)
return True
class WebsocketAPIView(HomeAssistantView):
"""View to serve a websockets endpoint."""
name = "websocketapi"
url = URL
requires_auth = False
@asyncio.coroutine
def get(self, request):
"""Handle an incoming websocket connection."""
# pylint: disable=no-self-use
return ActiveConnection(request.app['hass']).handle(request)
class ActiveConnection:
"""Handle an active websocket client connection."""
def __init__(self, hass):
"""Initialize an active connection."""
self.hass = hass
self.wsock = None
self.event_listeners = {}
self.to_write = asyncio.Queue(maxsize=MAX_PENDING_MSG, loop=hass.loop)
self._handle_task = None
self._writer_task = None
def debug(self, message1, message2=''):
"""Print a debug message."""
_LOGGER.debug("WS %s: %s %s", id(self.wsock), message1, message2)
def log_error(self, message1, message2=''):
"""Print an error message."""
_LOGGER.error("WS %s: %s %s", id(self.wsock), message1, message2)
@asyncio.coroutine
def _writer(self):
"""Write outgoing messages."""
# Exceptions if Socket disconnected or cancelled by connection handler
with suppress(RuntimeError, asyncio.CancelledError):
while not self.wsock.closed:
message = yield from self.to_write.get()
if message is None:
break
self.debug("Sending", message)
yield from self.wsock.send_json(message, dumps=JSON_DUMP)
@callback
def send_message_outside(self, message):
"""Send a message to the client outside of the main task.
Closes connection if the client is not reading the messages.
Async friendly.
"""
try:
self.to_write.put_nowait(message)
except asyncio.QueueFull:
self.log_error("Client exceeded max pending messages [2]:",
MAX_PENDING_MSG)
self.cancel()
@callback
def cancel(self):
"""Cancel the connection."""
self._handle_task.cancel()
self._writer_task.cancel()
@asyncio.coroutine
def handle(self, request):
"""Handle the websocket connection."""
wsock = self.wsock = web.WebSocketResponse()
yield from wsock.prepare(request)
self.debug("Connected")
# Get a reference to current task so we can cancel our connection
self._handle_task = asyncio.Task.current_task(loop=self.hass.loop)
@callback
def handle_hass_stop(event):
"""Cancel this connection."""
self.cancel()
unsub_stop = self.hass.bus.async_listen(
EVENT_HOMEASSISTANT_STOP, handle_hass_stop)
self._writer_task = self.hass.async_add_job(self._writer())
final_message = None
msg = None
authenticated = False
try:
if request[KEY_AUTHENTICATED]:
authenticated = True
else:
yield from self.wsock.send_json(auth_required_message())
msg = yield from wsock.receive_json()
msg = AUTH_MESSAGE_SCHEMA(msg)
if validate_password(request, msg['api_password']):
authenticated = True
else:
self.debug("Invalid password")
yield from self.wsock.send_json(
auth_invalid_message('Invalid password'))
if not authenticated:
yield from process_wrong_login(request)
return wsock
yield from self.wsock.send_json(auth_ok_message())
# ---------- AUTH PHASE OVER ----------
msg = yield from wsock.receive_json()
last_id = 0
while msg:
self.debug("Received", msg)
msg = BASE_COMMAND_MESSAGE_SCHEMA(msg)
cur_id = msg['id']
if cur_id <= last_id:
self.to_write.put_nowait(error_message(
cur_id, ERR_ID_REUSE,
'Identifier values have to increase.'))
else:
handler_name = 'handle_{}'.format(msg['type'])
getattr(self, handler_name)(msg)
last_id = cur_id
msg = yield from wsock.receive_json()
except vol.Invalid as err:
error_msg = "Message incorrectly formatted: "
if msg:
error_msg += humanize_error(msg, err)
else:
error_msg += str(err)
self.log_error(error_msg)
if not authenticated:
final_message = auth_invalid_message(error_msg)
else:
if isinstance(msg, dict):
iden = msg.get('id')
else:
iden = None
final_message = error_message(
iden, ERR_INVALID_FORMAT, error_msg)
except TypeError as err:
if wsock.closed:
self.debug("Connection closed by client")
else:
self.log_error("Unexpected TypeError", msg)
except ValueError as err:
msg = "Received invalid JSON"
value = getattr(err, 'doc', None) # Py3.5+ only
if value:
msg += ': {}'.format(value)
self.log_error(msg)
self._writer_task.cancel()
except asyncio.CancelledError:
self.debug("Connection cancelled by server")
except asyncio.QueueFull:
self.log_error("Client exceeded max pending messages [1]:",
MAX_PENDING_MSG)
self._writer_task.cancel()
except Exception: # pylint: disable=broad-except
error = "Unexpected error inside websocket API. "
if msg is not None:
error += str(msg)
_LOGGER.exception(error)
finally:
unsub_stop()
for unsub in self.event_listeners.values():
unsub()
try:
if final_message is not None:
self.to_write.put_nowait(final_message)
self.to_write.put_nowait(None)
# Make sure all error messages are written before closing
yield from self._writer_task
except asyncio.QueueFull:
self._writer_task.cancel()
yield from wsock.close()
self.debug("Closed connection")
return wsock
def handle_subscribe_events(self, msg):
"""Handle subscribe events command.
Async friendly.
"""
msg = SUBSCRIBE_EVENTS_MESSAGE_SCHEMA(msg)
@asyncio.coroutine
def forward_events(event):
"""Forward events to websocket."""
if event.event_type == EVENT_TIME_CHANGED:
return
self.send_message_outside(event_message(msg['id'], event))
self.event_listeners[msg['id']] = self.hass.bus.async_listen(
msg['event_type'], forward_events)
self.to_write.put_nowait(result_message(msg['id']))
def handle_unsubscribe_events(self, msg):
"""Handle unsubscribe events command.
Async friendly.
"""
msg = UNSUBSCRIBE_EVENTS_MESSAGE_SCHEMA(msg)
subscription = msg['subscription']
if subscription in self.event_listeners:
self.event_listeners.pop(subscription)()
self.to_write.put_nowait(result_message(msg['id']))
else:
self.to_write.put_nowait(error_message(
msg['id'], ERR_NOT_FOUND,
'Subscription not found.'))
def handle_call_service(self, msg):
"""Handle call service command.
This is a coroutine.
"""
msg = CALL_SERVICE_MESSAGE_SCHEMA(msg)
@asyncio.coroutine
def call_service_helper(msg):
"""Call a service and fire complete message."""
yield from self.hass.services.async_call(
msg['domain'], msg['service'], msg['service_data'], True)
self.send_message_outside(result_message(msg['id']))
self.hass.async_add_job(call_service_helper(msg))
def handle_get_states(self, msg):
"""Handle get states command.
Async friendly.
"""
msg = GET_STATES_MESSAGE_SCHEMA(msg)
self.to_write.put_nowait(result_message(
msg['id'], self.hass.states.async_all()))
def handle_get_services(self, msg):
"""Handle get services command.
Async friendly.
"""
msg = GET_SERVICES_MESSAGE_SCHEMA(msg)
self.to_write.put_nowait(result_message(
msg['id'], self.hass.services.async_services()))
def handle_get_config(self, msg):
"""Handle get config command.
Async friendly.
"""
msg = GET_CONFIG_MESSAGE_SCHEMA(msg)
self.to_write.put_nowait(result_message(
msg['id'], self.hass.config.as_dict()))
def handle_get_panels(self, msg):
"""Handle get panels command.
Async friendly.
"""
msg = GET_PANELS_MESSAGE_SCHEMA(msg)
self.to_write.put_nowait(result_message(
msg['id'], self.hass.data[frontend.DATA_PANELS]))
def handle_ping(self, msg):
"""Handle ping command.
Async friendly.
"""
self.to_write.put_nowait(pong_message(msg['id']))
|
|
"""Durus backend test classes."""
# Copyright (c) 2001-2009 ElevenCraft Inc.
# See LICENSE for details.
from StringIO import StringIO
from schevo import database
from schevo.lib import module
from durus.file import File
_db_cache = {
# (backend_args, format, version, evolve_skipped, schema_source, suffix):
# (db, filename),
}
_cached_dbs = set(
# db,
)
class Durus_TestMethods_CreatesDatabase(object):
__test__ = False
@staticmethod
def backend_base_open(test_object, suffix, schema_source, schema_version):
"""Perform the actual opening of a database, then return it.
- `test_object`: The instance of the test class we're opening
a database for.
- `suffix`: The suffix to use on variable names when storing
open databases and auxiliary information.
- `schema_source`: Schema source code to use.
- `schema_version`: Version of the schema to use.
"""
db_name = 'db' + suffix
filename = getattr(test_object, 'filename' + suffix, None)
if filename is None:
filename = random_filename()
db = database.create(
'durus:///:temp:',
backend_args=test_object.backend_args,
schema_source=schema_source,
schema_version=schema_version,
format=test_object.format,
)
filename = db.backend.database
else:
db = database.open(
filename=filename,
backend_args=test_object.backend_args,
)
setattr(test_object, db_name, db)
setattr(test_object, 'filename' + suffix, filename)
return db
@staticmethod
def backend_close(test_object, suffix=''):
"""Perform the actual closing of a database.
- `test_object`: The instance of the test class we're closing
a database for.
- `suffix`: The suffix to use on variable names when finding
the database and auxiliary information for it.
"""
db_name = 'db' + suffix
db = getattr(test_object, db_name)
if db not in _cached_dbs:
db.close()
@staticmethod
def backend_convert_format(test_object, suffix, format):
"""Convert the internal structure format of an already-open database.
- `test_object`: The instance of the test class we're
converting a database for.
- `suffix`: The suffix to use on variable names when finding
the database and auxiliary information for it.
"""
filename = getattr(test_object, 'filename' + suffix)
# Convert it to the requested format.
database.convert_format(
filename = filename,
backend_args = test_object.backend_args,
format = format,
)
@staticmethod
def backend_reopen_finish(test_object, suffix):
"""Perform cleanup required at the end of a call to
`self.reopen()` within a test.
- `test_object`: The instance of the test class performing the
reopen.
- `suffix`: The suffix to use on variable names when finding
the database and auxiliary information for it.
"""
pass
@staticmethod
def backend_reopen_save_state(test_object, suffix):
"""Save the state of a database file before it gets closed
during a call to `self.reopen()` within a test.
- `test_object`: The instance of the test class performing the
reopen.
- `suffix`: The suffix to use on variable names when finding
the database and auxiliary information for it.
"""
db = getattr(test_object, 'db' + suffix)
db.backend.close()
class Durus_TestMethods_CreatesSchema(Durus_TestMethods_CreatesDatabase):
__test__ = False
@staticmethod
def backend_open(test_object, suffix, schema):
"""Perform the actual opening of a database for a test
instance.
- `test_object`: The instance of the test class we're opening
a database for.
- `suffix`: The suffix to use on variable names when storing
open databases and auxiliary information.
- `schema`: Schema source code to use.
"""
format = test_object.format
db_name = 'db' + suffix
filename_name = 'filename' + suffix
cache_key = (tuple(sorted(test_object.backend_args.items())),
format, 1, None, schema, suffix)
if (test_object._use_db_cache
and cache_key in _db_cache
and not hasattr(test_object, filename_name)
):
db, filename = _db_cache[cache_key]
setattr(test_object, filename_name, filename)
if not hasattr(test_object, db_name):
db._reset_all()
setattr(test_object, db_name, db)
else:
# Forget existing modules.
for m in module.MODULES:
module.forget(m)
db = test_object._base_open(suffix, schema)
if test_object._use_db_cache:
filename = getattr(test_object, filename_name)
db_info = (db, filename)
_db_cache[cache_key] = db_info
_cached_dbs.add(db)
return db
class Durus_TestMethods_EvolvesSchemata(Durus_TestMethods_CreatesDatabase):
__test__ = False
@staticmethod
def backend_open(test_object):
"""Perform the actual opening of a database for a test
instance.
- `test_object`: The instance of the test class we're opening
a database for.
"""
format = test_object.format
use_db_cache = test_object._use_db_cache
filename_name = 'filename'
schema = test_object.schemata[-1]
version = test_object.schema_version
skip_evolution = test_object.skip_evolution
suffix = ''
cache_key = (tuple(sorted(test_object.backend_args.items())),
format, version, skip_evolution, schema, suffix)
if (use_db_cache
and cache_key in _db_cache
and not hasattr(test_object, filename_name)
):
db, filename = _db_cache[cache_key]
test_object.filename = filename
if not hasattr(test_object, 'db'):
db._reset_all()
test_object.db = db
else:
# Forget existing modules.
for m in module.MODULES:
module.forget(m)
if not skip_evolution:
# Open database with version 1.
db = test_object._base_open(suffix, test_object.schemata[0])
# Evolve to latest.
for i in xrange(1, len(test_object.schemata)):
schema_source = test_object.schemata[i]
database.evolve(db, schema_source, version=i+1)
else:
# Open database with target version.
db = test_object._base_open(suffix, schema, schema_version=version)
if use_db_cache:
filename = test_object.filename
_db_cache[cache_key] = (db, filename)
_cached_dbs.add(db)
return db
# ------------------------------------------------------------------------
# class Xdserver_TestMethods_CreatesDatabase(object):
# __test__ = False
# @staticmethod
# def backend_base_open(test_object, suffix, schema_source, schema_version):
# """Perform the actual opening of a database, then return it.
# - `test_object`: The instance of the test class we're opening
# a database for.
# - `suffix`: The suffix to use on variable names when storing
# open databases and auxiliary information.
# - `schema_source`: Schema source code to use.
# - `schema_version`: Version of the schema to use.
# """
# db_name = 'db' + suffix
# filename = getattr(test_object, 'filename' + suffix, None)
# if filename is None:
# filename = random_filename()
# db = database.create(
# 'durus:///:temp:',
# backend_args=test_object.backend_args,
# schema_source=schema_source,
# schema_version=schema_version,
# format=test_object.format,
# )
# filename = db.backend.database
# else:
# db = database.open(
# filename=filename,
# backend_args=test_object.backend_args,
# )
# setattr(test_object, db_name, db)
# setattr(test_object, 'filename' + suffix, filename)
# return db
# @staticmethod
# def backend_close(test_object, suffix=''):
# """Perform the actual closing of a database.
# - `test_object`: The instance of the test class we're closing
# a database for.
# - `suffix`: The suffix to use on variable names when finding
# the database and auxiliary information for it.
# """
# db_name = 'db' + suffix
# db = getattr(test_object, db_name)
# if db not in _cached_dbs:
# db.close()
# @staticmethod
# def backend_convert_format(test_object, suffix, format):
# """Convert the internal structure format of an already-open database.
# - `test_object`: The instance of the test class we're
# converting a database for.
# - `suffix`: The suffix to use on variable names when finding
# the database and auxiliary information for it.
# """
# filename = getattr(test_object, 'filename' + suffix)
# # Convert it to the requested format.
# database.convert_format(
# filename = filename,
# backend_args = test_object.backend_args,
# format = format,
# )
# @staticmethod
# def backend_reopen_finish(test_object, suffix):
# """Perform cleanup required at the end of a call to
# `self.reopen()` within a test.
# - `test_object`: The instance of the test class performing the
# reopen.
# - `suffix`: The suffix to use on variable names when finding
# the database and auxiliary information for it.
# """
# pass
# @staticmethod
# def backend_reopen_save_state(test_object, suffix):
# """Save the state of a database file before it gets closed
# during a call to `self.reopen()` within a test.
# - `test_object`: The instance of the test class performing the
# reopen.
# - `suffix`: The suffix to use on variable names when finding
# the database and auxiliary information for it.
# """
# db = getattr(test_object, 'db' + suffix)
# db.backend.close()
# class Xdserver_TestMethods_CreatesSchema(Xdserver_TestMethods_CreatesDatabase):
# __test__ = False
# @staticmethod
# def backend_open(test_object, suffix, schema):
# """Perform the actual opening of a database for a test
# instance.
# - `test_object`: The instance of the test class we're opening
# a database for.
# - `suffix`: The suffix to use on variable names when storing
# open databases and auxiliary information.
# - `schema`: Schema source code to use.
# """
# format = test_object.format
# db_name = 'db' + suffix
# filename_name = 'filename' + suffix
# cache_key = (tuple(sorted(test_object.backend_args.items())),
# format, 1, None, schema, suffix)
# if (test_object._use_db_cache
# and cache_key in _db_cache
# and not hasattr(test_object, filename_name)
# ):
# db, filename = _db_cache[cache_key]
# setattr(test_object, filename_name, filename)
# if not hasattr(test_object, db_name):
# db._reset_all()
# setattr(test_object, db_name, db)
# else:
# # Forget existing modules.
# for m in module.MODULES:
# module.forget(m)
# db = test_object._base_open(suffix, schema)
# if test_object._use_db_cache:
# filename = getattr(test_object, filename_name)
# db_info = (db, filename)
# _db_cache[cache_key] = db_info
# _cached_dbs.add(db)
# return db
# class Xdserver_TestMethods_EvolvesSchemata(Xdserver_TestMethods_CreatesDatabase):
# __test__ = False
# @staticmethod
# def backend_open(test_object):
# """Perform the actual opening of a database for a test
# instance.
# - `test_object`: The instance of the test class we're opening
# a database for.
# """
# format = test_object.format
# use_db_cache = test_object._use_db_cache
# filename_name = 'filename'
# schema = test_object.schemata[-1]
# version = test_object.schema_version
# skip_evolution = test_object.skip_evolution
# suffix = ''
# cache_key = (tuple(sorted(test_object.backend_args.items())),
# format, version, skip_evolution, schema, suffix)
# if (use_db_cache
# and cache_key in _db_cache
# and not hasattr(test_object, filename_name)
# ):
# db, filename = _db_cache[cache_key]
# test_object.filename = filename
# if not hasattr(test_object, 'db'):
# db._reset_all()
# test_object.db = db
# else:
# # Forget existing modules.
# for m in module.MODULES:
# module.forget(m)
# if not skip_evolution:
# # Open database with version 1.
# db = test_object._base_open(suffix, test_object.schemata[0])
# # Evolve to latest.
# for i in xrange(1, len(test_object.schemata)):
# schema_source = test_object.schemata[i]
# database.evolve(db, schema_source, version=i+1)
# else:
# # Open database with target version.
# db = test_object._base_open(suffix, schema, schema_version=version)
# if use_db_cache:
# filename = test_object.filename
# _db_cache[cache_key] = (db, filename)
# _cached_dbs.add(db)
# return db
|
|
import unittest
from pokemongo_bot.inventory import *
class InventoryTest(unittest.TestCase):
def test_types(self):
td = Types
self.assertIs(types_data(), td)
self.assertEqual(len(td.STATIC_DATA), 18)
self.assertEqual(len(td.all()), 18)
for name, s in td.STATIC_DATA.iteritems():
assert len(name) > 0
self.assertIs(s.name, name)
for t in s.attack_effective_against:
self.assertIn(s, t.pokemon_vulnerable_to)
for t in s.attack_weak_against:
self.assertIn(s, t.pokemon_resistant_to)
for t in s.pokemon_vulnerable_to:
self.assertIn(s, t.attack_effective_against)
for t in s.pokemon_resistant_to:
self.assertIn(s, t.attack_weak_against)
def test_pokemons(self):
# Init data
self.assertEqual(len(Pokemons().all()), 0) # No inventory loaded here
obj = Pokemons
self.assertEqual(len(obj.STATIC_DATA), 151)
for idx in xrange(len(obj.STATIC_DATA)):
pokemon = obj.STATIC_DATA[idx] # type: PokemonInfo
name = pokemon.name
pokemon_id = pokemon.id
self.assertEqual(pokemon.id, idx+1)
assert (1 <= pokemon_id <= 151)
self.assertGreaterEqual(len(pokemon.movesets), 1)
self.assertIsInstance(pokemon.movesets[0], Moveset)
assert 262 <= pokemon.max_cp <= 4145
assert 1 <= len(pokemon.types) <= 2
assert 40 <= pokemon.base_attack <= 284
assert 54 <= pokemon.base_defense <= 242
assert 20 <= pokemon.base_stamina <= 500
assert .0 <= pokemon.capture_rate <= .56
assert .0 <= pokemon.flee_rate <= .99
assert 1 <= len(pokemon._data['Weaknesses']) <= 7
assert 3 <= len(name) <= 10
self.assertGreaterEqual(len(pokemon.classification), 11)
self.assertGreaterEqual(len(pokemon.fast_attacks), 1)
self.assertGreaterEqual(len(pokemon.charged_attack), 1)
self.assertIs(obj.data_for(pokemon_id), pokemon)
self.assertIs(obj.name_for(pokemon_id), name)
first_evolution_id = obj.first_evolution_id_for(pokemon_id)
self.assertIs(first_evolution_id, pokemon.first_evolution_id)
self.assertIs(pokemon.family_id, first_evolution_id)
self.assertGreaterEqual(first_evolution_id, 1)
next_evolution_ids = obj.next_evolution_ids_for(pokemon_id)
self.assertIs(next_evolution_ids, pokemon.next_evolution_ids)
last_evolution_ids = obj.last_evolution_ids_for(pokemon_id)
self.assertIs(last_evolution_ids, pokemon.last_evolution_ids)
candies_cost = obj.evolution_cost_for(pokemon_id)
self.assertIs(candies_cost, pokemon.evolution_cost)
self.assertIs(obj.prev_evolution_id_for(pokemon_id), pokemon.prev_evolution_id)
self.assertGreaterEqual(len(last_evolution_ids), 1)
if not obj.has_next_evolution(pokemon_id):
assert not pokemon.has_next_evolution
self.assertEqual(pokemon.evolution_cost, 0)
self.assertEqual(pokemon.next_evolution_ids, [])
self.assertEqual(pokemon.next_evolutions_all, [])
self.assertEqual(pokemon.last_evolution_ids, [pokemon_id])
else:
self.assertGreater(candies_cost, 0)
self.assertGreaterEqual(len(next_evolution_ids), 1)
self.assertLessEqual(len(next_evolution_ids), len(last_evolution_ids))
reqs = pokemon._data['Next Evolution Requirements']
self.assertEqual(reqs["Family"], first_evolution_id)
candies_name = obj.name_for(first_evolution_id) + ' candies'
self.assertEqual(reqs["Name"], candies_name)
assert 12 <= candies_cost <= 400
self.assertEqual(reqs["Amount"], candies_cost)
evolutions = pokemon._data["Next evolution(s)"]
self.assertGreaterEqual(len(evolutions), len(next_evolution_ids))
for p in evolutions:
p_id = int(p["Number"])
self.assertNotEqual(p_id, pokemon_id)
self.assertEqual(p["Name"], obj.name_for(p_id))
for p_id in next_evolution_ids:
self.assertEqual(obj.prev_evolution_id_for(p_id), pokemon_id)
prev_evs = obj.data_for(p_id)._data["Previous evolution(s)"]
self.assertGreaterEqual(len(prev_evs), 1)
self.assertEqual(int(prev_evs[-1]["Number"]), pokemon_id)
self.assertEqual(prev_evs[-1]["Name"], name)
# Only Eevee has 3 next evolutions
self.assertEqual(len(next_evolution_ids),
1 if pokemon_id != 133 else 3)
if "Previous evolution(s)" in pokemon._data:
for p in pokemon._data["Previous evolution(s)"]:
p_id = int(p["Number"])
self.assertNotEqual(p_id, pokemon_id)
self.assertEqual(p["Name"], obj.name_for(p_id))
#
# Specific pokemons testing
poke = Pokemon({
"num_upgrades": 2, "move_1": 210, "move_2": 69, "pokeball": 2,
"favorite": 1, "pokemon_id": 42, "battles_attacked": 4,
"stamina": 76, "stamina_max": 76, "individual_attack": 9,
"individual_defense": 4, "individual_stamina": 8,
"cp_multiplier": 0.4627983868122101,
"additional_cp_multiplier": 0.018886566162109375,
"cp": 653, "nickname": "Golb", "id": 13632861873471324})
self.assertEqual(poke.level, 12.5)
self.assertEqual(poke.iv, 0.47)
self.assertAlmostEqual(poke.ivcp, 0.488747515)
self.assertAlmostEqual(poke.static.max_cp, 1921.34561459)
self.assertAlmostEqual(poke.cp_percent, 0.340368964)
assert poke.is_favorite
self.assertEqual(poke.name, 'Golbat')
self.assertEqual(poke.nickname, "Golb")
self.assertEqual(poke.nickname_raw, poke.nickname)
self.assertAlmostEqual(poke.moveset.dps, 10.7540173053)
self.assertAlmostEqual(poke.moveset.dps_attack, 12.14462299)
self.assertAlmostEqual(poke.moveset.dps_defense, 4.876681614)
self.assertAlmostEqual(poke.moveset.attack_perfection, 0.4720730048)
self.assertAlmostEqual(poke.moveset.defense_perfection, 0.8158081497)
poke = Pokemon({
"move_1": 221, "move_2": 129, "pokemon_id": 19, "cp": 106,
"individual_attack": 6, "stamina_max": 22, "individual_defense": 14,
"cp_multiplier": 0.37523558735847473, "id": 7841053399})
self.assertEqual(poke.level, 7.5)
self.assertEqual(poke.iv, 0.44)
self.assertAlmostEqual(poke.ivcp, 0.3804059)
self.assertAlmostEqual(poke.static.max_cp, 581.64643575)
self.assertAlmostEqual(poke.cp_percent, 0.183759867)
self.assertFalse(poke.is_favorite)
self.assertEqual(poke.name, 'Rattata')
self.assertEqual(poke.nickname, poke.name)
self.assertEqual(poke.nickname_raw, '')
self.assertAlmostEqual(poke.moveset.dps, 12.5567813108)
self.assertAlmostEqual(poke.moveset.dps_attack, 15.6959766385)
self.assertAlmostEqual(poke.moveset.dps_defense, 5.54282440561)
self.assertAlmostEqual(poke.moveset.attack_perfection, 0.835172881385)
self.assertAlmostEqual(poke.moveset.defense_perfection, 0.603137650999)
def test_levels_to_cpm(self):
l2c = LevelToCPm
self.assertIs(levels_to_cpm(), l2c)
max_cpm = l2c.cp_multiplier_for(l2c.MAX_LEVEL)
self.assertEqual(l2c.MAX_LEVEL, 40)
self.assertEqual(l2c.MAX_CPM, max_cpm)
self.assertEqual(len(l2c.STATIC_DATA), 79)
self.assertEqual(l2c.cp_multiplier_for("1"), 0.094)
self.assertEqual(l2c.cp_multiplier_for(1), 0.094)
self.assertEqual(l2c.cp_multiplier_for(1.0), 0.094)
self.assertEqual(l2c.cp_multiplier_for("17.5"), 0.558830576)
self.assertEqual(l2c.cp_multiplier_for(17.5), 0.558830576)
self.assertEqual(l2c.cp_multiplier_for('40.0'), 0.79030001)
self.assertEqual(l2c.cp_multiplier_for(40.0), 0.79030001)
self.assertEqual(l2c.cp_multiplier_for(40), 0.79030001)
self.assertEqual(l2c.level_from_cpm(0.79030001), 40.0)
self.assertEqual(l2c.level_from_cpm(0.7903), 40.0)
def test_attacks(self):
self._test_attacks(fast_attacks, FastAttacks)
self._test_attacks(charged_attacks, ChargedAttacks)
def _test_attacks(self, callback, clazz):
charged = clazz is ChargedAttacks
self.assertIs(callback(), clazz)
# check consistency
attacks = clazz.all_by_dps()
number = len(attacks)
assert (number > 0)
self.assertGreaterEqual(len(clazz.BY_TYPE), 17)
self.assertEqual(number, len(clazz.all()))
self.assertEqual(number, len(clazz.STATIC_DATA))
self.assertEqual(number, len(clazz.BY_NAME))
self.assertEqual(number, sum([len(l) for l in clazz.BY_TYPE.values()]))
# check data
prev_dps = float("inf")
for attack in attacks: # type: Attack
self.assertGreater(attack.id, 0)
self.assertGreater(len(attack.name), 0)
self.assertIsInstance(attack.type, Type)
self.assertGreaterEqual(attack.damage, 0)
self.assertGreater(attack.duration, .0)
self.assertGreater(attack.energy, 0)
self.assertGreaterEqual(attack.dps, 0)
assert (.0 <= attack.rate_in_type <= 1.0)
self.assertLessEqual(attack.dps, prev_dps)
self.assertEqual(attack.is_charged, charged)
self.assertIs(attack, clazz.data_for(attack.id))
self.assertIs(attack, clazz.by_name(attack.name))
assert (attack in clazz.list_for_type(attack.type))
assert (attack in clazz.list_for_type(attack.type.name))
self.assertIsInstance(attack, ChargedAttack if charged else Attack)
prev_dps = attack.dps
|
|
# coding=utf-8
# Copyright 2018 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""transformer (attention seq-seq model) with mixtures of experts.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensor2tensor.layers import common_attention
from tensor2tensor.layers import common_hparams
from tensor2tensor.layers import common_layers
from tensor2tensor.models import transformer
from tensor2tensor.utils import expert_utils
from tensor2tensor.utils import registry
from tensor2tensor.utils import t2t_model
import tensorflow as tf
# The transformer architecture can be defined using the layer_types hparams.
# If not defined, the default types and num_hidden_layers are used as fallback
# values.
#
# Examples of usage:
# "a/a/a/a/a/a": Original base transformer (6 encoder and decoder layers of
# multihead full attention)
# "a/a/a-moe/a": 4 layers with 1 moe at layer 3
# "loc/red/loc/red": Alternate between local and memory compressed attention
# "a/a/a#": Encoder only model (3 layers)
# "#a/a/a": Decoder only model (3 layers)
# "a/a-moe#a/a/a": Encoder (2 layers with 1 moe), decoder (3 layers)
# Note that not all combinations are necessarily possible (some attention
# types are not compatible with the encoder, or can't accept certain types
# of masking)
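# For instance (a sketch, with the fallback values default_att="a" and
# default_ff="fc"), _extract_layer_types() below parses
#   "a/a-moe#a/a/a"
# into encoder layers [("a", "fc"), ("a", "moe")] and decoder layers
# [("a", "a", "fc")] * 3, where each decoder tuple is
# (self-attention, encoder-decoder attention, feed-forward).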
SEP_ENCODEC = "#"
SEP_LAYER = "/"
SEP_FF = "-"
@registry.register_model
class TransformerMoe(t2t_model.T2TModel):
"""Attention net. See file docstring."""
@staticmethod
def use_body_sharded():
return True
def body_sharded(self, sharded_features):
# ========= Prepare the input and target =========
hparams = self._hparams
dp = self._data_parallelism
# Process input
inputs = sharded_features["inputs"]
target_space = sharded_features["target_space_id"]
(
encoder_input,
encoder_self_attention_bias,
encoder_decoder_attention_bias,
) = dp(self._prepare_encoder, inputs, target_space)
# Process output
targets = sharded_features["targets"]
decoder_input, decoder_self_attention_bias = dp(
self._prepare_decoder, targets
)
def dp_preprocess(x):
return dp(common_layers.layer_preprocess, x, hparams)
def dp_postprocess(x, y):
return dp(common_layers.layer_postprocess, x, y, hparams)
cache = dict(extra_loss=0.0)
def prepostprocess(fct):
"""Apply processing and capture the extra loss."""
@expert_utils.add_var_scope()
def decorated(x, *args, **kwargs):
x_preprocessed = dp_preprocess(x)
y, loss = fct(x_preprocessed, *args, **kwargs)
cache["extra_loss"] += loss
return dp_postprocess(x, y)
return decorated
# ========= Compute the transformer architecture =========
encoder_layers, decoder_layers = self._extract_layer_types()
layers = common_attention.get_standardized_layers(
hparams=hparams,
dp=dp,
)
if hparams.mode == tf.estimator.ModeKeys.TRAIN:
# Display the encoder-decoder architecture
def print_layer(name, layers):
tf.logging.info("{} architecture:".format(name))
for i, l in enumerate(layers):
tf.logging.info(" * Layer {}: {}".format(i, " - ".join(l)))
print_layer("Encoder", encoder_layers)
print_layer("Decoder", decoder_layers)
# ========= Construct the transformer encoder and decoder =========
encoder_outputs = []
x = encoder_input
with tf.variable_scope("encoder"):
for layer_num, block_types in enumerate(encoder_layers):
# Each encoder layer is composed of two blocks:
# * self-attention block
# * feed-forward block
att_type, ff_type = block_types
with tf.variable_scope("layer_{}".format(layer_num)):
x = prepostprocess(layers[att_type])(
x,
bias=encoder_self_attention_bias,
name="att_{}".format(att_type),
)
x = prepostprocess(layers[ff_type])(
x,
name="ff_{}".format(ff_type)
)
encoder_outputs.append(x)
if encoder_outputs:
encoder_outputs[-1] = dp_preprocess(x)
x = decoder_input
with tf.variable_scope("decoder"):
for layer_num, block_types in enumerate(decoder_layers):
# Each decoder layer is composed of three blocks:
# * self-attention block
# * enco-deco attention block (optional)
# * feed-forward block
self_att_type, att_ende_type, ff_type = block_types
with tf.variable_scope("layer_{}".format(layer_num)):
x = prepostprocess(layers[self_att_type])(
x,
bias=decoder_self_attention_bias,
name="self_att_{}".format(self_att_type),
)
# Only add the enco-deco attention layer if there is an encoder
if encoder_outputs:
x = prepostprocess(layers[att_ende_type])(
x,
memory_antecedent=encoder_outputs[-1],
bias=encoder_decoder_attention_bias,
name="att_ende_{}".format(att_ende_type),
)
x = prepostprocess(layers[ff_type])(
x,
name="ff_{}".format(ff_type)
)
# If normalization is done in layer_preprocess, then it should also be
# done on the output, since the output can grow very large, being the sum
# of a whole stack of unnormalized layer outputs.
x = dp_preprocess(x)
decoder_output = dp(tf.expand_dims, x, 2)
return decoder_output, cache["extra_loss"]
@expert_utils.add_name_scope()
def _prepare_encoder(self, inputs, target_space):
"""Process the transformer encoder inputs."""
inputs = common_layers.flatten4d3d(inputs)
output = transformer.transformer_prepare_encoder(
inputs,
target_space,
self._hparams,
features=None,
)
enco_input, enco_self_att_bias, enco_deco_att_bias = output
enco_input = tf.nn.dropout(
enco_input, 1.0 - self._hparams.layer_prepostprocess_dropout)
return enco_input, enco_self_att_bias, enco_deco_att_bias
@expert_utils.add_name_scope()
def _prepare_decoder(self, targets):
"""Process the transformer decoder input."""
targets = common_layers.flatten4d3d(targets)
output = transformer.transformer_prepare_decoder(
targets, self._hparams, features=None,
)
deco_input, deco_self_attention_bias = output
deco_input = tf.nn.dropout(
deco_input, 1.0 - self._hparams.layer_prepostprocess_dropout
)
return deco_input, deco_self_attention_bias
def _extract_layer_types(self):
"""Parse the layer string.
Returns:
list[tuple[str, str]]: Encoder layers: list of (attention, feed-forward)
list[tuple[str, str, str]]: Decoder layers: list of (self-attention,
enc-dec attention, feed-forward)
"""
hparams = self._hparams
layer_types = hparams.layer_types
# If the architecture has not explicitly been set, we just construct a
# standard transformer with the fallback values
if not layer_types:
layer_types = SEP_LAYER.join(
[hparams.default_att] * hparams.num_hidden_layers)
# If encoder not explicitly defined, the encoder will have the same
# structure as the decoder
layer_types = layer_types.split(SEP_ENCODEC)
if len(layer_types) == 1:
layer_types *= 2
# Some models don't need the encoder (ex: language modeling)
# TODO(epot): What are the other conditions (has_input ?)
if hparams.prepend_mode != "none":
layer_types[0] = ""
# Extend the blocks and fill them with the default values if not specified
final_layers = ([], [])
for i, blocks_str_joined in enumerate(layer_types):
for blocks_str in blocks_str_joined.split(SEP_LAYER):
if not blocks_str:
continue
blocks_list = blocks_str.split(SEP_FF)
# Fall back to the default values for unspecified block types. If the
# encoder is empty, do not use the enco-deco attention.
self_att = blocks_list[0] or hparams.default_att
ende_att = hparams.default_att if layer_types[0] else "_"
ff = hparams.default_ff
if len(blocks_list) > 1:
ff = blocks_list[-1]
if len(blocks_list) == 3:
ende_att = blocks_list[1]
if i == 0: # Encoder
blocks_tuple = (self_att, ff)
elif i == 1: # Decoder
blocks_tuple = (self_att, ende_att, ff)
final_layers[i].append(blocks_tuple)
return final_layers
@registry.register_hparams
def transformer_moe_base():
"""Set of hyperparameters."""
hparams = common_hparams.basic_params1()
hparams.norm_type = "layer"
hparams.hidden_size = 512
hparams.batch_size = 4096
hparams.max_length = 2001
hparams.max_input_seq_length = 2000
hparams.max_target_seq_length = 2000
hparams.dropout = 0.0
hparams.clip_grad_norm = 0. # i.e. no gradient clipping
hparams.optimizer_adam_epsilon = 1e-9
hparams.learning_rate_decay_scheme = "noam"
hparams.learning_rate = 0.1
hparams.learning_rate_warmup_steps = 2000
hparams.initializer_gain = 1.0
hparams.num_hidden_layers = 5
hparams.initializer = "uniform_unit_scaling"
hparams.weight_decay = 0.0
hparams.optimizer_adam_beta1 = 0.9
hparams.optimizer_adam_beta2 = 0.98
hparams.num_sampled_classes = 0
hparams.label_smoothing = 0.0
hparams.shared_embedding_and_softmax_weights = True
# According to noam, ("n", "da") seems better for harder-to-learn models
hparams.layer_preprocess_sequence = "n"
hparams.layer_postprocess_sequence = "da"
# Hparams used by transformer_prepare_decoder() function
hparams.add_hparam("pos", "timing") # timing, none
hparams.add_hparam("proximity_bias", False)
hparams.add_hparam("causal_decoder_self_attention", True)
hparams = common_attention.add_standard_attention_hparams(hparams)
# Encoder/decoder layer types. If set, the num_hidden_layers fallback is
# ignored and the number of layers is deduced from the string.
# See the comment at the top of this file for usage examples.
hparams.add_hparam("layer_types", "")
# Default attention type (ex: a, loc, red,...) and feed-forward type (ex: fc,
# sep, moe,...)
hparams.add_hparam("default_att", "a")
hparams.add_hparam("default_ff", "fc")
return hparams
@registry.register_hparams
def transformer_moe_8k():
"""Hyper parameters specifics for long sequence generation."""
hparams = transformer_moe_base()
hparams.batch_size = 8192
hparams.max_length = 0 # max_length == batch_size
hparams.eval_drop_long_sequences = True
hparams.min_length_bucket = 256 # Avoid cyclic problems for big batches
hparams.default_ff = "sep"
hparams.hidden_size = 1024
return hparams
@registry.register_hparams
def transformer_moe_8k_lm():
"""Language modeling params.
Will have the following architecture by default:
* No encoder.
* Decoder architecture:
* Layer 0: a - sepm (masked self-attention/masked separable convolutions)
* Layer 1: a - sepm
* Layer 2: a - moe (mixture of expert layers in the middle)
* Layer 3: a - sepm
* Layer 4: a - sepm
Returns:
hparams
"""
hparams = transformer_moe_8k()
# Use masked versions of local attention and separable convolution
hparams.default_ff = "sepm"
# hparams.layer_types contains the network architecture:
# Start with '#' for decoder only architecture
hparams.layer_types = "#a/a/a-moe/a/a" # 5 full attention layers with 1 moe
# For long sequences, if running out of memory, it's possible to use one
# of those two optimized versions instead:
# * Memory efficient multihead attention (slow):
# hparams.layer_types = "#mem/mem/mem-moe/mem/mem"
# * Alternate between local/compressed attention layers (faster):
# hparams.layer_types = "#locm/redm/locm-moe/redm/locm"
return hparams
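# Illustrative sketch (not part of the original file): registering a custom
# variant through the layer_types syntax described above, mixing local masked
# attention, full attention, and separable convolutions around a single moe
# layer. The layer-type tokens reuse the ones mentioned in the comments above.
@registry.register_hparams
def transformer_moe_8k_lm_example_custom():
  """Example hparams illustrating the layer_types syntax (not tuned)."""
  hparams = transformer_moe_8k_lm()
  # '#' prefix => decoder-only architecture; '/' separates layers and '-'
  # separates the attention block from the feed-forward block within a layer.
  hparams.layer_types = "#locm-sepm/a-sepm/locm-moe/a-sepm/locm-sepm"
  return hparams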
@registry.register_hparams
def transformer_moe_2k():
"""Base transformers model with moe.
Will have the following architecture:
* No encoder.
* Layer 0: a - sep (self-attention - unmasked separable convolutions)
* Layer 1: a - sep
* Layer 2: a - sep
* Layer 3: a - sep
* Layer 4: a - sep
* Decoder architecture:
* Layer 0: a - a - sepm (self-attention - enco/deco-attention - masked sep)
* Layer 1: a - a - sepm
* Layer 2: a - a - moe (mixture of expert layers in the middle)
* Layer 3: a - a - sepm
* Layer 4: a - a - sepm
Returns:
hparams
"""
hparams = transformer_moe_8k()
hparams.batch_size = 2048
hparams.default_ff = "sep"
# hparams.layer_types contains the network architecture:
encoder_archi = "a/a/a/a/a"
decoder_archi = "a-sepm/a-sepm/a-moe/a-sepm/a-sepm"
hparams.layer_types = "{}#{}".format(encoder_archi, decoder_archi)
return hparams
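# Illustrative sketch (assumption, not part of the original file): assuming
# SEP_LAYER == "/" and SEP_FF == "-", the layer-type parser near the top of
# this file would expand the architecture string above into roughly the
# following (self_att, ff) encoder tuples and (self_att, ende_att, ff)
# decoder tuples:
_EXAMPLE_2K_PARSED_LAYERS = (
    [("a", "sep")] * 5,  # encoder: self-attention + separable convolution, x5
    [("a", "a", "sepm"), ("a", "a", "sepm"), ("a", "a", "moe"),
     ("a", "a", "sepm"), ("a", "a", "sepm")],  # decoder: moe in the middle
)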
@registry.register_hparams
def transformer_moe_12k():
"""Hyper parameters specifics for long sequence generation."""
hparams = transformer_moe_8k()
hparams.batch_size = 12000
# At 12k, the softmax becomes the memory bottleneck
hparams.factored_logit = True
return hparams
@registry.register_hparams
def transformer_moe_prepend_8k():
"""Model which formulate a seq2seq problem as language modeling."""
hparams = transformer_moe_8k()
hparams.prepend_mode = "prepend_inputs_masked_attention"
hparams.eval_drop_long_sequences = False
hparams.max_input_seq_length = 7500
hparams.default_ff = "sepm"
hparams.layer_types = "locm/redm/locm-moe/redm/locm"
hparams.moe_num_experts = 256
return hparams
|
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Image manipulation API.
Classes defined in this module:
Image: class used to encapsulate image information and transformations for
that image.
The current manipulations that are available are resize, rotate,
horizontal_flip, vertical_flip, crop and im_feeling_lucky.
It should be noted that each transform can only be called once per image
per execute_transforms() call.
"""
import struct
try:
import json
except ImportError:
import simplejson as json
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore_types
from google.appengine.api.images import images_service_pb
from google.appengine.runtime import apiproxy_errors
BlobKey = datastore_types.BlobKey
JPEG = images_service_pb.OutputSettings.JPEG
PNG = images_service_pb.OutputSettings.PNG
WEBP = images_service_pb.OutputSettings.WEBP
BMP = -1
GIF = -2
ICO = -3
TIFF = -4
OUTPUT_ENCODING_TYPES = frozenset([JPEG, PNG, WEBP])
UNCHANGED_ORIENTATION = images_service_pb.InputSettings.UNCHANGED_ORIENTATION
CORRECT_ORIENTATION = images_service_pb.InputSettings.CORRECT_ORIENTATION
ORIENTATION_CORRECTION_TYPE = frozenset([UNCHANGED_ORIENTATION,
CORRECT_ORIENTATION])
TOP_LEFT = images_service_pb.CompositeImageOptions.TOP_LEFT
TOP_CENTER = images_service_pb.CompositeImageOptions.TOP
TOP_RIGHT = images_service_pb.CompositeImageOptions.TOP_RIGHT
CENTER_LEFT = images_service_pb.CompositeImageOptions.LEFT
CENTER_CENTER = images_service_pb.CompositeImageOptions.CENTER
CENTER_RIGHT = images_service_pb.CompositeImageOptions.RIGHT
BOTTOM_LEFT = images_service_pb.CompositeImageOptions.BOTTOM_LEFT
BOTTOM_CENTER = images_service_pb.CompositeImageOptions.BOTTOM
BOTTOM_RIGHT = images_service_pb.CompositeImageOptions.BOTTOM_RIGHT
ANCHOR_TYPES = frozenset([TOP_LEFT, TOP_CENTER, TOP_RIGHT, CENTER_LEFT,
CENTER_CENTER, CENTER_RIGHT, BOTTOM_LEFT,
BOTTOM_CENTER, BOTTOM_RIGHT])
MAX_TRANSFORMS_PER_REQUEST = 10
MAX_COMPOSITES_PER_REQUEST = 16
class Error(Exception):
"""Base error class for this module."""
class TransformationError(Error):
"""Error while attempting to transform the image."""
class BadRequestError(Error):
"""The parameters given had something wrong with them."""
class NotImageError(Error):
"""The image data given is not recognizable as an image."""
class BadImageError(Error):
"""The image data given is corrupt."""
class LargeImageError(Error):
"""The image data given is too large to process."""
class InvalidBlobKeyError(Error):
"""The provided blob key was invalid."""
class BlobKeyRequiredError(Error):
"""A blobkey is required for this operation."""
class UnsupportedSizeError(Error):
"""Specified size is not supported by requested operation."""
class Image(object):
"""Image object to manipulate."""
def __init__(self, image_data=None, blob_key=None):
"""Constructor.
Args:
image_data: str, image data in string form.
blob_key: BlobKey, BlobInfo, str, or unicode representation of BlobKey of
blob containing the image data.
Raises:
NotImageError if the given data is empty.
"""
if not image_data and not blob_key:
raise NotImageError("Empty image data.")
if image_data and blob_key:
raise NotImageError("Can only take one image or blob key.")
self._image_data = image_data
self._blob_key = _extract_blob_key(blob_key)
self._transforms = []
self._width = None
self._height = None
self._format = None
self._correct_orientation = UNCHANGED_ORIENTATION
self._original_metadata = None
def _check_transform_limits(self):
"""Ensure some simple limits on the number of transforms allowed.
Raises:
BadRequestError if MAX_TRANSFORMS_PER_REQUEST transforms have already been
requested for this image
"""
if len(self._transforms) >= MAX_TRANSFORMS_PER_REQUEST:
raise BadRequestError("%d transforms have already been requested on this "
"image." % MAX_TRANSFORMS_PER_REQUEST)
def _update_dimensions(self):
"""Updates the width and height fields of the image.
Raises:
NotImageError if the image data is not an image.
BadImageError if the image data is corrupt.
"""
if not self._image_data:
raise NotImageError("Dimensions unavailable for blob key input")
size = len(self._image_data)
if size >= 6 and self._image_data.startswith("GIF"):
self._update_gif_dimensions()
self._format = GIF
elif size >= 8 and self._image_data.startswith("\x89PNG\x0D\x0A\x1A\x0A"):
self._update_png_dimensions()
self._format = PNG
elif size >= 2 and self._image_data.startswith("\xff\xD8"):
self._update_jpeg_dimensions()
self._format = JPEG
elif (size >= 8 and (self._image_data.startswith("II\x2a\x00") or
self._image_data.startswith("MM\x00\x2a"))):
self._update_tiff_dimensions()
self._format = TIFF
elif size >= 2 and self._image_data.startswith("BM"):
self._update_bmp_dimensions()
self._format = BMP
elif size >= 4 and self._image_data.startswith("\x00\x00\x01\x00"):
self._update_ico_dimensions()
self._format = ICO
elif (size >= 16 and (self._image_data.startswith("RIFF", 0, 4) and
self._image_data.startswith("WEBP", 8, 12) and
self._image_data.startswith("VP8 ", 12, 16))):
self._update_webp_dimensions()
self._format = WEBP
else:
raise NotImageError("Unrecognized image format")
def _update_gif_dimensions(self):
"""Updates the width and height fields of the gif image.
Raises:
BadImageError if the image string is not a valid gif image.
"""
size = len(self._image_data)
if size >= 10:
self._width, self._height = struct.unpack("<HH", self._image_data[6:10])
else:
raise BadImageError("Corrupt GIF format")
def _update_png_dimensions(self):
"""Updates the width and height fields of the png image.
Raises:
BadImageError if the image string is not a valid png image.
"""
size = len(self._image_data)
if size >= 24 and self._image_data[12:16] == "IHDR":
self._width, self._height = struct.unpack(">II", self._image_data[16:24])
else:
raise BadImageError("Corrupt PNG format")
def _update_jpeg_dimensions(self):
"""Updates the width and height fields of the jpeg image.
Raises:
BadImageError if the image string is not a valid jpeg image.
"""
size = len(self._image_data)
offset = 2
while offset < size:
while offset < size and ord(self._image_data[offset]) != 0xFF:
offset += 1
while offset < size and ord(self._image_data[offset]) == 0xFF:
offset += 1
if (offset < size and ord(self._image_data[offset]) & 0xF0 == 0xC0 and
ord(self._image_data[offset]) != 0xC4):
offset += 4
if offset + 4 <= size:
self._height, self._width = struct.unpack(
">HH",
self._image_data[offset:offset + 4])
break
else:
raise BadImageError("Corrupt JPEG format")
elif offset + 3 <= size:
offset += 1
offset += struct.unpack(">H", self._image_data[offset:offset + 2])[0]
else:
raise BadImageError("Corrupt JPEG format")
if self._height is None or self._width is None:
raise BadImageError("Corrupt JPEG format")
def _update_tiff_dimensions(self):
"""Updates the width and height fields of the tiff image.
Raises:
BadImageError if the image string is not a valid tiff image.
"""
size = len(self._image_data)
if self._image_data.startswith("II"):
endianness = "<"
else:
endianness = ">"
ifd_offset = struct.unpack(endianness + "I", self._image_data[4:8])[0]
if ifd_offset + 14 <= size:
ifd_size = struct.unpack(
endianness + "H",
self._image_data[ifd_offset:ifd_offset + 2])[0]
ifd_offset += 2
for unused_i in range(0, ifd_size):
if ifd_offset + 12 <= size:
tag = struct.unpack(
endianness + "H",
self._image_data[ifd_offset:ifd_offset + 2])[0]
if tag == 0x100 or tag == 0x101:
value_type = struct.unpack(
endianness + "H",
self._image_data[ifd_offset + 2:ifd_offset + 4])[0]
if value_type == 3:
format = endianness + "H"
end_offset = ifd_offset + 10
elif value_type == 4:
format = endianness + "I"
end_offset = ifd_offset + 12
else:
format = endianness + "B"
end_offset = ifd_offset + 9
if tag == 0x100:
self._width = struct.unpack(
format,
self._image_data[ifd_offset + 8:end_offset])[0]
if self._height is not None:
break
else:
self._height = struct.unpack(
format,
self._image_data[ifd_offset + 8:end_offset])[0]
if self._width is not None:
break
ifd_offset += 12
else:
raise BadImageError("Corrupt TIFF format")
if self._width is None or self._height is None:
raise BadImageError("Corrupt TIFF format")
def _update_bmp_dimensions(self):
"""Updates the width and height fields of the bmp image.
Raises:
BadImageError if the image string is not a valid bmp image.
"""
size = len(self._image_data)
if size >= 18:
header_length = struct.unpack("<I", self._image_data[14:18])[0]
if ((header_length == 40 or header_length == 108 or
header_length == 124 or header_length == 64) and size >= 26):
self._width, self._height = struct.unpack("<II",
self._image_data[18:26])
elif header_length == 12 and size >= 22:
self._width, self._height = struct.unpack("<HH",
self._image_data[18:22])
else:
raise BadImageError("Corrupt BMP format")
else:
raise BadImageError("Corrupt BMP format")
def _update_ico_dimensions(self):
"""Updates the width and height fields of the ico image.
Raises:
BadImageError if the image string is not a valid ico image.
"""
size = len(self._image_data)
if size >= 8:
self._width, self._height = struct.unpack("<BB", self._image_data[6:8])
if not self._width:
self._width = 256
if not self._height:
self._height = 256
else:
raise BadImageError("Corrupt ICO format")
def set_correct_orientation(self, correct_orientation):
"""Set flag to correct image orientation based on image metadata.
EXIF metadata within the image may contain a parameter indicating its proper
orientation. This value can equal 1 through 8, inclusive. "1" means that the
image is in its "normal" orientation, i.e., it should be viewed as it is
stored. Normally, this "orientation" value has no effect on the behavior of
the transformations. However, if this function is called with the value
CORRECT_ORIENTATION, any orientation specified in the EXIF metadata will be
corrected during the first transformation.
NOTE: If CORRECT_ORIENTATION is specified but the image is already in
portrait orientation, i.e., "taller" than it is "wide", no corrections will
be made, since it appears that the camera has already corrected it.
Regardless of whether the correction was requested or not, the orientation
value in the transformed image is always cleared to indicate that no
additional corrections of the returned image's orientation are necessary.
Args:
correct_orientation: a value from ORIENTATION_CORRECTION_TYPE.
Raises:
BadRequestError if correct_orientation value is invalid.
"""
if correct_orientation not in ORIENTATION_CORRECTION_TYPE:
raise BadRequestError("Orientation correction must be in %s" %
ORIENTATION_CORRECTION_TYPE)
self._correct_orientation = correct_orientation
def _update_webp_dimensions(self):
"""Updates the width and height fields of the webp image."""
size = len(self._image_data)
if size < 30:
raise BadImageError("Corrupt WEBP format")
bits = (ord(self._image_data[20]) | (ord(self._image_data[21])<<8) |
(ord(self._image_data[22]) << 16))
key_frame = ((bits & 1) == 0)
if not key_frame:
raise BadImageError("Corrupt WEBP format")
profile = (bits >> 1) & 7
show_frame = (bits >> 4) & 1
if profile > 3:
raise BadImageError("Corrupt WEBP format")
if show_frame == 0:
raise BadImageError("Corrupt WEBP format")
self._width, self._height = struct.unpack("<HH", self._image_data[26:30])
if self._height is None or self._width is None:
raise BadImageError("Corrupt WEBP format")
def resize(self, width=0, height=0, crop_to_fit=False,
crop_offset_x=0.5, crop_offset_y=0.5):
"""Resize the image maintaining the aspect ratio.
If both width and height are specified, the more restricting of the two
values will be used when resizing the image. The maximum dimension allowed
for both width and height is 4000 pixels.
If both width and height are specified and crop_to_fit is True, the less
restricting of the two values will be used when resizing and the image will
be cropped to fit the specified size. In this case the center of cropping
can be adjusted by crop_offset_x and crop_offset_y.
Args:
width: int, width (in pixels) to change the image width to.
height: int, height (in pixels) to change the image height to.
crop_to_fit: If True and both width and height are specified, the image is
cropped after resize to fit the specified dimensions.
crop_offset_x: float value between 0.0 and 1.0, 0 is left and 1 is right,
default is 0.5, the center of image.
crop_offset_y: float value between 0.0 and 1.0, 0 is top and 1 is bottom,
default is 0.5, the center of image.
Raises:
TypeError when width or height is not either 'int' or 'long' types.
BadRequestError when there is something wrong with the given height or
width or if MAX_TRANSFORMS_PER_REQUEST transforms have already been
requested on this image.
"""
if (not isinstance(width, (int, long)) or
not isinstance(height, (int, long))):
raise TypeError("Width and height must be integers.")
if width < 0 or height < 0:
raise BadRequestError("Width and height must be >= 0.")
if not width and not height:
raise BadRequestError("At least one of width or height must be > 0.")
if width > 4000 or height > 4000:
raise BadRequestError("Both width and height must be <= 4000.")
if not isinstance(crop_to_fit, bool):
raise TypeError("crop_to_fit must be boolean.")
if crop_to_fit and not (width and height):
raise BadRequestError("Both width and height must be > 0 when "
"crop_to_fit is specified")
self._validate_crop_arg(crop_offset_x, "crop_offset_x")
self._validate_crop_arg(crop_offset_y, "crop_offset_y")
self._check_transform_limits()
transform = images_service_pb.Transform()
transform.set_width(width)
transform.set_height(height)
transform.set_crop_to_fit(crop_to_fit)
transform.set_crop_offset_x(crop_offset_x)
transform.set_crop_offset_y(crop_offset_y)
self._transforms.append(transform)
def rotate(self, degrees):
"""Rotate an image a given number of degrees clockwise.
Args:
degrees: int, must be a multiple of 90.
Raises:
TypeError when degrees is not either 'int' or 'long' types.
BadRequestError when there is something wrong with the given degrees or
if MAX_TRANSFORMS_PER_REQUEST transforms have already been requested.
"""
if not isinstance(degrees, (int, long)):
raise TypeError("Degrees must be integers.")
if degrees % 90 != 0:
raise BadRequestError("degrees argument must be multiple of 90.")
degrees = degrees % 360
self._check_transform_limits()
transform = images_service_pb.Transform()
transform.set_rotate(degrees)
self._transforms.append(transform)
def horizontal_flip(self):
"""Flip the image horizontally.
Raises:
BadRequestError if MAX_TRANSFORMS_PER_REQUEST transforms have already been
requested on the image.
"""
self._check_transform_limits()
transform = images_service_pb.Transform()
transform.set_horizontal_flip(True)
self._transforms.append(transform)
def vertical_flip(self):
"""Flip the image vertically.
Raises:
BadRequestError if MAX_TRANSFORMS_PER_REQUEST transforms have already been
requested on the image.
"""
self._check_transform_limits()
transform = images_service_pb.Transform()
transform.set_vertical_flip(True)
self._transforms.append(transform)
def _validate_crop_arg(self, val, val_name):
"""Validate the given value of a Crop() method argument.
Args:
val: float, value of the argument.
val_name: str, name of the argument.
Raises:
TypeError if the args are not of type 'float'.
BadRequestError when there is something wrong with the given bounding box.
"""
if type(val) != float:
raise TypeError("arg '%s' must be of type 'float'." % val_name)
if not (0 <= val <= 1.0):
raise BadRequestError("arg '%s' must be between 0.0 and 1.0 "
"(inclusive)" % val_name)
def crop(self, left_x, top_y, right_x, bottom_y):
"""Crop the image.
The four arguments are the scaling numbers to describe the bounding box
which will crop the image. The upper left point of the bounding box will
be at (left_x*image_width, top_y*image_height) the lower right point will
be at (right_x*image_width, bottom_y*image_height).
Args:
left_x: float value between 0.0 and 1.0 (inclusive).
top_y: float value between 0.0 and 1.0 (inclusive).
right_x: float value between 0.0 and 1.0 (inclusive).
bottom_y: float value between 0.0 and 1.0 (inclusive).
Raises:
TypeError if the args are not of type 'float'.
BadRequestError when there is something wrong with the given bounding box
or if MAX_TRANSFORMS_PER_REQUEST transforms have already been requested
for this image.
"""
self._validate_crop_arg(left_x, "left_x")
self._validate_crop_arg(top_y, "top_y")
self._validate_crop_arg(right_x, "right_x")
self._validate_crop_arg(bottom_y, "bottom_y")
if left_x >= right_x:
raise BadRequestError("left_x must be less than right_x")
if top_y >= bottom_y:
raise BadRequestError("top_y must be less than bottom_y")
self._check_transform_limits()
transform = images_service_pb.Transform()
transform.set_crop_left_x(left_x)
transform.set_crop_top_y(top_y)
transform.set_crop_right_x(right_x)
transform.set_crop_bottom_y(bottom_y)
self._transforms.append(transform)
def im_feeling_lucky(self):
"""Automatically adjust image contrast and color levels.
This is similar to the "I'm Feeling Lucky" button in Picasa.
Raises:
BadRequestError if MAX_TRANSFORMS_PER_REQUEST transforms have already
been requested for this image.
"""
self._check_transform_limits()
transform = images_service_pb.Transform()
transform.set_autolevels(True)
self._transforms.append(transform)
def get_original_metadata(self):
"""Metadata of the original image.
Returns a dictionary of metadata extracted from the original image during
execute_transform.
Note that some of the EXIF fields are processed, e.g., fields with multiple
values returned as lists, rational types are returned as floats, GPS
coordinates already parsed to signed floats, etc.
ImageWidth and ImageLength fields are corrected if they did not correspond
to the actual dimensions of the original image.
Returns:
dict with string keys. If execute_transform was called with parse_metadata
being True, this dictionary contains information about various properties
of the original image, such as dimensions, color profile, and properties
from EXIF.
Even if parse_metadata was False or the images did not have any metadata,
the dictionary will contain a limited set of metadata, at least
'ImageWidth' and 'ImageLength', corresponding to the dimensions of the
original image.
It will return None if it is called before a successful
execute_transforms.
"""
return self._original_metadata
def _set_imagedata(self, imagedata):
"""Fills in an ImageData PB from this Image instance.
Args:
imagedata: An ImageData PB instance
"""
if self._blob_key:
imagedata.set_content("")
imagedata.set_blob_key(self._blob_key)
else:
imagedata.set_content(self._image_data)
def execute_transforms(self, output_encoding=PNG, quality=None,
parse_source_metadata=False):
"""Perform transformations on a given image.
Args:
output_encoding: A value from OUTPUT_ENCODING_TYPES.
quality: A value between 1 and 100 to specify the quality of the
encoding. This value is only used for JPEG & WEBP quality control.
parse_source_metadata: when True the metadata (EXIF) of the source image
is parsed before any transformations. The results can be retrieved
via Image.get_original_metadata.
Returns:
str, image data after the transformations have been performed on it.
Raises:
BadRequestError when there is something wrong with the request
specifications.
NotImageError when the image data given is not an image.
BadImageError when the image data given is corrupt.
LargeImageError when the image data given is too large to process.
InvalidBlobKeyError when the blob key provided is invalid.
TransformationError when something goes wrong during image manipulation.
Error when something unknown, but bad, happens.
"""
if output_encoding not in OUTPUT_ENCODING_TYPES:
raise BadRequestError("Output encoding type not in recognized set "
"%s" % OUTPUT_ENCODING_TYPES)
if not self._transforms:
raise BadRequestError("Must specify at least one transformation.")
self.CheckValidIntParameter(quality, 1, 100, "quality")
request = images_service_pb.ImagesTransformRequest()
response = images_service_pb.ImagesTransformResponse()
input_settings = request.mutable_input()
input_settings.set_correct_exif_orientation(
self._correct_orientation)
if parse_source_metadata:
input_settings.set_parse_metadata(True)
self._set_imagedata(request.mutable_image())
for transform in self._transforms:
request.add_transform().CopyFrom(transform)
request.mutable_output().set_mime_type(output_encoding)
if ((output_encoding == JPEG or output_encoding == WEBP) and
(quality is not None)):
request.mutable_output().set_quality(quality)
try:
apiproxy_stub_map.MakeSyncCall("images",
"Transform",
request,
response)
except apiproxy_errors.ApplicationError, e:
if (e.application_error ==
images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA):
raise BadRequestError()
elif (e.application_error ==
images_service_pb.ImagesServiceError.NOT_IMAGE):
raise NotImageError()
elif (e.application_error ==
images_service_pb.ImagesServiceError.BAD_IMAGE_DATA):
raise BadImageError()
elif (e.application_error ==
images_service_pb.ImagesServiceError.IMAGE_TOO_LARGE):
raise LargeImageError()
elif (e.application_error ==
images_service_pb.ImagesServiceError.INVALID_BLOB_KEY):
raise InvalidBlobKeyError()
elif (e.application_error ==
images_service_pb.ImagesServiceError.UNSPECIFIED_ERROR):
raise TransformationError()
else:
raise Error()
self._image_data = response.image().content()
self._blob_key = None
self._transforms = []
self._width = None
self._height = None
self._format = None
if response.source_metadata():
self._original_metadata = json.loads(response.source_metadata())
return self._image_data
@property
def width(self):
"""Gets the width of the image."""
if self._width is None:
self._update_dimensions()
return self._width
@property
def height(self):
"""Gets the height of the image."""
if self._height is None:
self._update_dimensions()
return self._height
@property
def format(self):
"""Gets the format of the image."""
if self._format is None:
self._update_dimensions()
return self._format
def histogram(self):
"""Calculates the histogram of the image.
Returns: 3 256-element lists containing the number of occurrences of each
value of each color in the order RGB. As described at
http://en.wikipedia.org/wiki/Color_histogram for N = 256. i.e. the first
value of the first list contains the number of pixels with a red value of
0, the second the number with a red value of 1.
Raises:
NotImageError when the image data given is not an image.
BadImageError when the image data given is corrupt.
LargeImageError when the image data given is too large to process.
Error when something unknown, but bad, happens.
"""
request = images_service_pb.ImagesHistogramRequest()
response = images_service_pb.ImagesHistogramResponse()
self._set_imagedata(request.mutable_image())
try:
apiproxy_stub_map.MakeSyncCall("images",
"Histogram",
request,
response)
except apiproxy_errors.ApplicationError, e:
if (e.application_error ==
images_service_pb.ImagesServiceError.NOT_IMAGE):
raise NotImageError()
elif (e.application_error ==
images_service_pb.ImagesServiceError.BAD_IMAGE_DATA):
raise BadImageError()
elif (e.application_error ==
images_service_pb.ImagesServiceError.IMAGE_TOO_LARGE):
raise LargeImageError()
elif (e.application_error ==
images_service_pb.ImagesServiceError.INVALID_BLOB_KEY):
raise InvalidBlobKeyError()
else:
raise Error()
histogram = response.histogram()
return [histogram.red_list(),
histogram.green_list(),
histogram.blue_list()]
@staticmethod
def CheckValidIntParameter(parameter, min_value, max_value, name):
"""Checks that a parameters is an integer within the specified range."""
if parameter is not None:
if not isinstance(parameter, (int, long)):
raise TypeError("%s must be an integer." % name)
if parameter > max_value or parameter < min_value:
raise BadRequestError("%s must be between %s and %s."
% name, str(min_value), str(max_value))
def resize(image_data, width=0, height=0, output_encoding=PNG, quality=None,
correct_orientation=UNCHANGED_ORIENTATION,
crop_to_fit=False, crop_offset_x=0.5, crop_offset_y=0.5):
"""Resize a given image file maintaining the aspect ratio.
If both width and height are specified, the more restricting of the two
values will be used when resizing the image. The maximum dimension allowed
for both width and height is 4000 pixels.
If both width and height are specified and crop_to_fit is True, the less
restricting of the two values will be used when resizing and the image will be
cropped to fit the specified size. In this case the center of cropping can be
adjusted by crop_offset_x and crop_offset_y.
Args:
image_data: str, source image data.
width: int, width (in pixels) to change the image width to.
height: int, height (in pixels) to change the image height to.
output_encoding: a value from OUTPUT_ENCODING_TYPES.
quality: A value between 1 and 100 to specify the quality of the
encoding. This value is only used for JPEG quality control.
correct_orientation: one of ORIENTATION_CORRECTION_TYPE, to indicate if
orientation correction should be performed during the transformation.
crop_to_fit: If True and both width and height are specified, the image is
cropped after resize to fit the specified dimensions.
crop_offset_x: float value between 0.0 and 1.0, 0 is left and 1 is right,
default is 0.5, the center of image.
crop_offset_y: float value between 0.0 and 1.0, 0 is top and 1 is bottom,
default is 0.5, the center of image.
Raises:
TypeError when width or height is not either 'int' or 'long' types.
BadRequestError when there is something wrong with the given height or
width.
Error when something went wrong with the call. See Image.ExecuteTransforms
for more details.
"""
image = Image(image_data)
image.resize(width, height, crop_to_fit=crop_to_fit,
crop_offset_x=crop_offset_x, crop_offset_y=crop_offset_y)
image.set_correct_orientation(correct_orientation)
return image.execute_transforms(output_encoding=output_encoding,
quality=quality)
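# Illustrative sketch (not part of the original module): a small helper showing
# how the module-level resize() wrapper above might be used, assuming
# image_bytes holds image data in a format the Images service understands.
def _example_make_thumbnail(image_bytes):
  """Returns a 200-pixel-wide JPEG thumbnail of image_bytes (example only)."""
  return resize(image_bytes, width=200, output_encoding=JPEG, quality=85,
                correct_orientation=CORRECT_ORIENTATION)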
def rotate(image_data, degrees, output_encoding=PNG, quality=None,
correct_orientation=UNCHANGED_ORIENTATION):
"""Rotate a given image a given number of degrees clockwise.
Args:
image_data: str, source image data.
degrees: value from ROTATE_DEGREE_VALUES.
output_encoding: a value from OUTPUT_ENCODING_TYPES.
quality: A value between 1 and 100 to specify the quality of the
encoding. This value is only used for JPEG quality control.
correct_orientation: one of ORIENTATION_CORRECTION_TYPE, to indicate if
orientation correction should be performed during the transformation.
Raises:
TypeError when degrees is not either 'int' or 'long' types.
BadRequestError when there is something wrong with the given degrees.
Error when something went wrong with the call. See Image.ExecuteTransforms
for more details.
"""
image = Image(image_data)
image.rotate(degrees)
image.set_correct_orientation(correct_orientation)
return image.execute_transforms(output_encoding=output_encoding,
quality=quality)
def horizontal_flip(image_data, output_encoding=PNG, quality=None,
correct_orientation=UNCHANGED_ORIENTATION):
"""Flip the image horizontally.
Args:
image_data: str, source image data.
output_encoding: a value from OUTPUT_ENCODING_TYPES.
quality: A value between 1 and 100 to specify the quality of the
encoding. This value is only used for JPEG quality control.
correct_orientation: one of ORIENTATION_CORRECTION_TYPE, to indicate if
orientation correction should be performed during the transformation.
Raises:
Error when something went wrong with the call. See Image.ExecuteTransforms
for more details.
"""
image = Image(image_data)
image.horizontal_flip()
image.set_correct_orientation(correct_orientation)
return image.execute_transforms(output_encoding=output_encoding,
quality=quality)
def vertical_flip(image_data, output_encoding=PNG, quality=None,
correct_orientation=UNCHANGED_ORIENTATION):
"""Flip the image vertically.
Args:
image_data: str, source image data.
output_encoding: a value from OUTPUT_ENCODING_TYPES.
quality: A value between 1 and 100 to specify the quality of the
encoding. This value is only used for JPEG quality control.
correct_orientation: one of ORIENTATION_CORRECTION_TYPE, to indicate if
orientation correction should be performed during the transformation.
Raises:
Error when something went wrong with the call. See Image.ExecuteTransforms
for more details.
"""
image = Image(image_data)
image.vertical_flip()
image.set_correct_orientation(correct_orientation)
return image.execute_transforms(output_encoding=output_encoding,
quality=quality)
def crop(image_data, left_x, top_y, right_x, bottom_y, output_encoding=PNG,
quality=None, correct_orientation=UNCHANGED_ORIENTATION):
"""Crop the given image.
The four arguments are the scaling numbers to describe the bounding box
which will crop the image. The upper left point of the bounding box will
be at (left_x*image_width, top_y*image_height) the lower right point will
be at (right_x*image_width, bottom_y*image_height).
Args:
image_data: str, source image data.
left_x: float value between 0.0 and 1.0 (inclusive).
top_y: float value between 0.0 and 1.0 (inclusive).
right_x: float value between 0.0 and 1.0 (inclusive).
bottom_y: float value between 0.0 and 1.0 (inclusive).
output_encoding: a value from OUTPUT_ENCODING_TYPES.
quality: A value between 1 and 100 to specify the quality of the
encoding. This value is only used for JPEG quality control.
correct_orientation: one of ORIENTATION_CORRECTION_TYPE, to indicate if
orientation correction should be performed during the transformation.
Raises:
TypeError if the args are not of type 'float'.
BadRequestError when there is something wrong with the given bounding box.
Error when something went wrong with the call. See Image.ExecuteTransforms
for more details.
"""
image = Image(image_data)
image.crop(left_x, top_y, right_x, bottom_y)
image.set_correct_orientation(correct_orientation)
return image.execute_transforms(output_encoding=output_encoding,
quality=quality)
def im_feeling_lucky(image_data, output_encoding=PNG, quality=None,
correct_orientation=UNCHANGED_ORIENTATION):
"""Automatically adjust image levels.
This is similar to the "I'm Feeling Lucky" button in Picasa.
Args:
image_data: str, source image data.
output_encoding: a value from OUTPUT_ENCODING_TYPES.
quality: A value between 1 and 100 to specify the quality of the
encoding. This value is only used for JPEG quality control.
correct_orientation: one of ORIENTATION_CORRECTION_TYPE, to indicate if
orientation correction should be performed during the transformation.
Raises:
Error when something went wrong with the call. See Image.ExecuteTransforms
for more details.
"""
image = Image(image_data)
image.im_feeling_lucky()
image.set_correct_orientation(correct_orientation)
return image.execute_transforms(output_encoding=output_encoding,
quality=quality)
def composite(inputs, width, height, color=0, output_encoding=PNG, quality=None):
"""Composite one or more images onto a canvas.
Args:
inputs: a list of tuples (image_data, x_offset, y_offset, opacity, anchor)
where
image_data: str, source image data.
x_offset: x offset in pixels from the anchor position
y_offset: y offset in pixels from the anchor position
opacity: opacity of the image specified as a float in range [0.0, 1.0]
anchor: anchoring point from ANCHOR_TYPES. The anchor point of the image
is aligned with the same anchor point of the canvas. e.g. TOP_RIGHT would
place the top right corner of the image at the top right corner of the
canvas then apply the x and y offsets.
width: canvas width in pixels.
height: canvas height in pixels.
color: canvas background color encoded as a 32 bit unsigned int where each
color channel is represented by one byte in order ARGB.
output_encoding: a value from OUTPUT_ENCODING_TYPES.
quality: A value between 1 and 100 to specify the quality of the
encoding. This value is only used for JPEG quality control.
Returns:
str, image data of the composited image.
Raises:
TypeError If width, height, color, x_offset or y_offset are not of type
int or long or if opacity is not a float
BadRequestError If more than MAX_TRANSFORMS_PER_REQUEST compositions have
been requested, if the canvas width or height is greater than 4000 or less
than or equal to 0, if the color is invalid or if for any composition
option, the opacity is outside the range [0,1] or the anchor is invalid.
"""
if (not isinstance(width, (int, long)) or
not isinstance(height, (int, long)) or
not isinstance(color, (int, long))):
raise TypeError("Width, height and color must be integers.")
if output_encoding not in OUTPUT_ENCODING_TYPES:
raise BadRequestError("Output encoding type '%s' not in recognized set "
"%s" % (output_encoding, OUTPUT_ENCODING_TYPES))
if quality is not None:
if not isinstance(quality, (int, long)):
raise TypeError("Quality must be an integer.")
if quality > 100 or quality < 1:
raise BadRequestError("Quality must be between 1 and 100.")
if not inputs:
raise BadRequestError("Must provide at least one input")
if len(inputs) > MAX_COMPOSITES_PER_REQUEST:
raise BadRequestError("A maximum of %d composition operations can be"
"performed in a single request" %
MAX_COMPOSITES_PER_REQUEST)
if width <= 0 or height <= 0:
raise BadRequestError("Width and height must be > 0.")
if width > 4000 or height > 4000:
raise BadRequestError("Width and height must be <= 4000.")
if color > 0xffffffff or color < 0:
raise BadRequestError("Invalid color")
if color >= 0x80000000:
color -= 0x100000000
image_map = {}
request = images_service_pb.ImagesCompositeRequest()
response = images_service_pb.ImagesTransformResponse()
for (image, x, y, opacity, anchor) in inputs:
if not image:
raise BadRequestError("Each input must include an image")
if (not isinstance(x, (int, long)) or
not isinstance(y, (int, long)) or
not isinstance(opacity, (float))):
raise TypeError("x_offset, y_offset must be integers and opacity must"
"be a float")
if x > 4000 or x < -4000:
raise BadRequestError("xOffsets must be in range [-4000, 4000]")
if y > 4000 or y < -4000:
raise BadRequestError("yOffsets must be in range [-4000, 4000]")
if opacity < 0 or opacity > 1:
raise BadRequestError("Opacity must be in the range 0.0 to 1.0")
if anchor not in ANCHOR_TYPES:
raise BadRequestError("Anchor type '%s' not in recognized set %s" %
(anchor, ANCHOR_TYPES))
if image not in image_map:
image_map[image] = request.image_size()
if isinstance(image, Image):
image._set_imagedata(request.add_image())
else:
request.add_image().set_content(image)
option = request.add_options()
option.set_x_offset(x)
option.set_y_offset(y)
option.set_opacity(opacity)
option.set_anchor(anchor)
option.set_source_index(image_map[image])
request.mutable_canvas().mutable_output().set_mime_type(output_encoding)
request.mutable_canvas().set_width(width)
request.mutable_canvas().set_height(height)
request.mutable_canvas().set_color(color)
if ((output_encoding == JPEG or output_encoding == WEBP) and
(quality is not None)):
request.mutable_canvas().mutable_output().set_quality(quality)
try:
apiproxy_stub_map.MakeSyncCall("images",
"Composite",
request,
response)
except apiproxy_errors.ApplicationError, e:
if (e.application_error ==
images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA):
raise BadRequestError()
elif (e.application_error ==
images_service_pb.ImagesServiceError.NOT_IMAGE):
raise NotImageError()
elif (e.application_error ==
images_service_pb.ImagesServiceError.BAD_IMAGE_DATA):
raise BadImageError()
elif (e.application_error ==
images_service_pb.ImagesServiceError.IMAGE_TOO_LARGE):
raise LargeImageError()
elif (e.application_error ==
images_service_pb.ImagesServiceError.INVALID_BLOB_KEY):
raise InvalidBlobKeyError()
elif (e.application_error ==
images_service_pb.ImagesServiceError.UNSPECIFIED_ERROR):
raise TransformationError()
else:
raise Error()
return response.image().content()
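# Illustrative sketch (not part of the original module): compositing a base
# image and a semi-transparent watermark onto an opaque black canvas with
# composite(), following the inputs format documented above. Both image
# arguments are assumed to be valid image byte strings.
def _example_watermark(base_image, watermark_image, width, height):
  """Anchors base_image top-left and a faded watermark bottom-right."""
  inputs = [
      (base_image, 0, 0, 1.0, TOP_LEFT),
      (watermark_image, -10, -10, 0.4, BOTTOM_RIGHT),
  ]
  return composite(inputs, width, height, color=0xFF000000,
                   output_encoding=PNG)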
def histogram(image_data):
"""Calculates the histogram of the given image.
Args:
image_data: str, source image data.
Returns: 3 256-element lists containing the number of occurrences of each
value of each color in the order RGB.
Raises:
NotImageError when the image data given is not an image.
BadImageError when the image data given is corrupt.
LargeImageError when the image data given is too large to process.
Error when something unknown, but bad, happens.
"""
image = Image(image_data)
return image.histogram()
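# Illustrative sketch (not part of the original module): using histogram() to
# estimate the mean red-channel intensity of an image from the 256-element
# lists documented above.
def _example_mean_red_value(image_data):
  """Returns the mean red value over all pixels of image_data (example only)."""
  red, _, _ = histogram(image_data)
  total_pixels = sum(red)
  weighted_sum = sum(value * count for value, count in enumerate(red))
  return weighted_sum / float(total_pixels)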
IMG_SERVING_SIZES_LIMIT = 1600
IMG_SERVING_SIZES = [
32, 48, 64, 72, 80, 90, 94, 104, 110, 120, 128, 144,
150, 160, 200, 220, 288, 320, 400, 512, 576, 640, 720,
800, 912, 1024, 1152, 1280, 1440, 1600]
IMG_SERVING_CROP_SIZES = [32, 48, 64, 72, 80, 104, 136, 144, 150, 160]
def get_serving_url(blob_key,
size=None,
crop=False):
"""Obtain a url that will serve the underlying image.
This URL is served by a high-performance dynamic image serving infrastructure.
This URL format also allows dynamic resizing and crop with certain
restrictions. To get dynamic resizing and cropping, specify size and crop
arguments, or simply append options to the end of the default url obtained via
this call. Here is an example:
get_serving_url -> "http://lh3.ggpht.com/SomeCharactersGoesHere"
To get a 32 pixel sized version (aspect-ratio preserved) simply append
"=s32" to the url:
"http://lh3.ggpht.com/SomeCharactersGoesHere=s32"
To get a 32 pixel cropped version simply append "=s32-c":
"http://lh3.ggpht.com/SomeCharactersGoesHere=s32-c"
Available sizes are any integer in the range [0, 1600]; the upper bound is
available as IMG_SERVING_SIZES_LIMIT.
Args:
blob_key: BlobKey, BlobInfo, str, or unicode representation of BlobKey of
blob to get URL of.
size: int, size of resulting images
crop: bool, True requests a cropped image, False a resized one.
Returns:
str, a url
Raises:
BlobKeyRequiredError: when no blob key was specified.
UnsupportedSizeError: when size parameters uses unsupported sizes.
BadRequestError: when crop/size are present in wrong combination.
"""
if not blob_key:
raise BlobKeyRequiredError("A Blobkey is required for this operation.")
if crop and not size:
raise BadRequestError("Size should be set for crop operation")
if size and (size > IMG_SERVING_SIZES_LIMIT or size < 0):
raise UnsupportedSizeError("Unsupported size")
request = images_service_pb.ImagesGetUrlBaseRequest()
response = images_service_pb.ImagesGetUrlBaseResponse()
request.set_blob_key(_extract_blob_key(blob_key))
try:
apiproxy_stub_map.MakeSyncCall("images",
"GetUrlBase",
request,
response)
except apiproxy_errors.ApplicationError, e:
if (e.application_error ==
images_service_pb.ImagesServiceError.NOT_IMAGE):
raise NotImageError()
elif (e.application_error ==
images_service_pb.ImagesServiceError.BAD_IMAGE_DATA):
raise BadImageError()
elif (e.application_error ==
images_service_pb.ImagesServiceError.IMAGE_TOO_LARGE):
raise LargeImageError()
elif (e.application_error ==
images_service_pb.ImagesServiceError.INVALID_BLOB_KEY):
raise InvalidBlobKeyError()
else:
raise Error()
url = response.url()
if size:
url += "=s%s" % size
if crop:
url += "-c"
return url
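# Illustrative sketch (not part of the original module): requesting a cropped
# 150-pixel serving URL for an uploaded image blob, assuming blob_info is a
# BlobInfo for image data stored in the Blobstore.
def _example_cropped_serving_url(blob_info):
  """Returns a URL serving a 150px cropped version of the blob (example only)."""
  return get_serving_url(blob_info.key(), size=150, crop=True)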
def _extract_blob_key(blob):
"""Extract a unicode blob key from a str, BlobKey, or BlobInfo.
Args:
blob: The str, unicode, BlobKey, or BlobInfo that contains the blob key.
"""
if isinstance(blob, str):
return blob.decode('utf-8')
elif isinstance(blob, BlobKey):
return str(blob).decode('utf-8')
elif blob.__class__.__name__ == 'BlobInfo':
return str(blob.key()).decode('utf-8')
return blob
|
|
import json
import os
import posixpath
from mlflow.entities import FileInfo
from mlflow.exceptions import MlflowException
from mlflow.protos.databricks_pb2 import INVALID_PARAMETER_VALUE
from mlflow.store.tracking.rest_store import RestStore
from mlflow.store.artifact.artifact_repo import ArtifactRepository
from mlflow.store.artifact.databricks_artifact_repo import DatabricksArtifactRepository
from mlflow.store.artifact.local_artifact_repo import LocalArtifactRepository
from mlflow.tracking._tracking_service import utils
from mlflow.utils.databricks_utils import get_databricks_host_creds
from mlflow.utils.file_utils import relative_path_to_artifact_path
from mlflow.utils.rest_utils import http_request, http_request_safe, RESOURCE_DOES_NOT_EXIST
from mlflow.utils.string_utils import strip_prefix
from mlflow.utils.uri import (
get_databricks_profile_uri_from_artifact_uri,
is_databricks_acled_artifacts_uri,
is_databricks_model_registry_artifacts_uri,
is_valid_dbfs_uri,
remove_databricks_profile_info_from_artifact_uri,
)
import mlflow.utils.databricks_utils
LIST_API_ENDPOINT = "/api/2.0/dbfs/list"
GET_STATUS_ENDPOINT = "/api/2.0/dbfs/get-status"
DOWNLOAD_CHUNK_SIZE = 1024
USE_FUSE_ENV_VAR = "MLFLOW_ENABLE_DBFS_FUSE_ARTIFACT_REPO"
class DbfsRestArtifactRepository(ArtifactRepository):
"""
Stores artifacts on DBFS using the DBFS REST API.
This repository is used with URIs of the form ``dbfs:/<path>``. The repository can only be used
together with the RestStore.
"""
def __init__(self, artifact_uri):
if not is_valid_dbfs_uri(artifact_uri):
raise MlflowException(
message="DBFS URI must be of the form dbfs:/<path> or "
+ "dbfs://profile@databricks/<path>",
error_code=INVALID_PARAMETER_VALUE,
)
# The dbfs:/ path ultimately used for artifact operations should not contain the
# Databricks profile info, so strip it before setting ``artifact_uri``.
super().__init__(remove_databricks_profile_info_from_artifact_uri(artifact_uri))
databricks_profile_uri = get_databricks_profile_uri_from_artifact_uri(artifact_uri)
if databricks_profile_uri:
hostcreds_from_uri = get_databricks_host_creds(databricks_profile_uri)
self.get_host_creds = lambda: hostcreds_from_uri
else:
self.get_host_creds = _get_host_creds_from_default_store()
def _databricks_api_request(self, endpoint, method, **kwargs):
host_creds = self.get_host_creds()
return http_request_safe(host_creds=host_creds, endpoint=endpoint, method=method, **kwargs)
def _dbfs_list_api(self, json):
host_creds = self.get_host_creds()
return http_request(
host_creds=host_creds, endpoint=LIST_API_ENDPOINT, method="GET", params=json
)
def _dbfs_download(self, output_path, endpoint):
with open(output_path, "wb") as f:
response = self._databricks_api_request(endpoint=endpoint, method="GET", stream=True)
try:
for content in response.iter_content(chunk_size=DOWNLOAD_CHUNK_SIZE):
f.write(content)
finally:
response.close()
def _is_directory(self, artifact_path):
if artifact_path:
dbfs_path = self._get_dbfs_path(artifact_path)
else:
dbfs_path = self._get_dbfs_path("")
return self._dbfs_is_dir(dbfs_path)
def _dbfs_is_dir(self, dbfs_path):
response = self._databricks_api_request(
endpoint=GET_STATUS_ENDPOINT, method="GET", params={"path": dbfs_path}
)
json_response = json.loads(response.text)
try:
return json_response["is_dir"]
except KeyError:
raise MlflowException("DBFS path %s does not exist" % dbfs_path)
def _get_dbfs_path(self, artifact_path):
return "/%s/%s" % (
strip_prefix(self.artifact_uri, "dbfs:/"),
strip_prefix(artifact_path, "/"),
)
def _get_dbfs_endpoint(self, artifact_path):
return "/dbfs%s" % self._get_dbfs_path(artifact_path)
def log_artifact(self, local_file, artifact_path=None):
basename = os.path.basename(local_file)
if artifact_path:
http_endpoint = self._get_dbfs_endpoint(posixpath.join(artifact_path, basename))
else:
http_endpoint = self._get_dbfs_endpoint(basename)
if os.stat(local_file).st_size == 0:
# The API frontend doesn't like it when we post empty files to it using
# `requests.request`, potentially due to the bug described in
# https://github.com/requests/requests/issues/4215
self._databricks_api_request(
endpoint=http_endpoint, method="POST", data="", allow_redirects=False
)
else:
with open(local_file, "rb") as f:
self._databricks_api_request(
endpoint=http_endpoint, method="POST", data=f, allow_redirects=False
)
def log_artifacts(self, local_dir, artifact_path=None):
artifact_path = artifact_path or ""
for (dirpath, _, filenames) in os.walk(local_dir):
artifact_subdir = artifact_path
if dirpath != local_dir:
rel_path = os.path.relpath(dirpath, local_dir)
rel_path = relative_path_to_artifact_path(rel_path)
artifact_subdir = posixpath.join(artifact_path, rel_path)
for name in filenames:
file_path = os.path.join(dirpath, name)
self.log_artifact(file_path, artifact_subdir)
def list_artifacts(self, path=None):
if path:
dbfs_path = self._get_dbfs_path(path)
else:
dbfs_path = self._get_dbfs_path("")
dbfs_list_json = {"path": dbfs_path}
response = self._dbfs_list_api(dbfs_list_json)
try:
json_response = json.loads(response.text)
except ValueError:
raise MlflowException(
"API request to list files under DBFS path %s failed with status code %s. "
"Response body: %s" % (dbfs_path, response.status_code, response.text)
)
# /api/2.0/dbfs/list will not have the 'files' key in the response for empty directories.
infos = []
artifact_prefix = strip_prefix(self.artifact_uri, "dbfs:")
if json_response.get("error_code", None) == RESOURCE_DOES_NOT_EXIST:
return []
dbfs_files = json_response.get("files", [])
for dbfs_file in dbfs_files:
stripped_path = strip_prefix(dbfs_file["path"], artifact_prefix + "/")
# If `path` is a file, the DBFS list API returns a single list element with the
# same name as `path`. The list_artifacts API expects us to return an empty list in this
# case, so we do so here.
if stripped_path == path:
return []
is_dir = dbfs_file["is_dir"]
artifact_size = None if is_dir else dbfs_file["file_size"]
infos.append(FileInfo(stripped_path, is_dir, artifact_size))
return sorted(infos, key=lambda f: f.path)
def _download_file(self, remote_file_path, local_path):
self._dbfs_download(
output_path=local_path, endpoint=self._get_dbfs_endpoint(remote_file_path)
)
def delete_artifacts(self, artifact_path=None):
raise MlflowException("Not implemented yet")
def _get_host_creds_from_default_store():
store = utils._get_store()
if not isinstance(store, RestStore):
raise MlflowException(
"Failed to get credentials for DBFS; they are read from the "
+ "Databricks CLI credentials or MLFLOW_TRACKING* environment "
+ "variables."
)
return store.get_host_creds
def dbfs_artifact_repo_factory(artifact_uri):
"""
Returns an ArtifactRepository subclass for storing artifacts on DBFS.
This factory method is used with URIs of the form ``dbfs:/<path>``. DBFS-backed artifact
storage can only be used together with the RestStore.
In the special case where the URI is of the form
``dbfs:/databricks/mlflow-tracking/<Exp-ID>/<Run-ID>/<path>``,
a DatabricksArtifactRepository is returned. This is capable of storing access controlled
artifacts.
:param artifact_uri: DBFS root artifact URI (string).
:return: Subclass of ArtifactRepository capable of storing artifacts on DBFS.
"""
if not is_valid_dbfs_uri(artifact_uri):
raise MlflowException(
"DBFS URI must be of the form dbfs:/<path> or "
+ "dbfs://profile@databricks/<path>, but received "
+ artifact_uri
)
cleaned_artifact_uri = artifact_uri.rstrip("/")
db_profile_uri = get_databricks_profile_uri_from_artifact_uri(cleaned_artifact_uri)
if is_databricks_acled_artifacts_uri(artifact_uri):
return DatabricksArtifactRepository(cleaned_artifact_uri)
elif (
mlflow.utils.databricks_utils.is_dbfs_fuse_available()
and os.environ.get(USE_FUSE_ENV_VAR, "").lower() != "false"
and not is_databricks_model_registry_artifacts_uri(artifact_uri)
and (db_profile_uri is None or db_profile_uri == "databricks")
):
# If the DBFS FUSE mount is available, write artifacts directly to
# /dbfs/... using local filesystem APIs.
# Note: it is possible for a named Databricks profile to point to the current workspace,
# but we're going to avoid doing a complex check and assume users will use `databricks`
# to mean the current workspace. Using `DbfsRestArtifactRepository` to access the current
# workspace's DBFS should still work; it just may be slower.
final_artifact_uri = remove_databricks_profile_info_from_artifact_uri(cleaned_artifact_uri)
file_uri = "file:///dbfs/{}".format(strip_prefix(final_artifact_uri, "dbfs:/"))
return LocalArtifactRepository(file_uri)
return DbfsRestArtifactRepository(cleaned_artifact_uri)
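# Illustrative sketch (not part of the original module): using the factory
# above to log a local file to a DBFS-backed artifact location and list the
# result, assuming Databricks credentials are already configured for the
# target workspace.
def _example_log_to_dbfs(local_file, artifact_uri="dbfs:/mlflow/example-artifacts"):
    repo = dbfs_artifact_repo_factory(artifact_uri)
    repo.log_artifact(local_file)
    return repo.list_artifacts()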
|
|
from unittest import TestCase, main
import datetime
import pandas as pd
import numpy as np
import numpy.testing as npt
import pandas.util.testing as pdt
from break4w.question import (Question,
_check_cmap
)
class QuestionTest(TestCase):
def setUp(self):
self.name = 'player_name'
self.description = 'Samwell Hockey Players'
self.dtype = str
self.map_ = pd.DataFrame([['Bitty', 'Ransom', 'Holster'],
['2', '4', '4'],
['False', 'True', 'True']],
index=['player_name', 'years_on_team',
'team_captain']).T
self.q = Question(name=self.name,
description=self.description,
dtype=self.dtype,
free_response=True,
)
def test_init_default(self):
self.assertEqual(self.name, self.q.name)
self.assertEqual(self.description, self.q.description)
self.assertEqual(self.dtype, self.q.dtype)
self.assertEqual('Player Name', self.q.clean_name)
self.assertEqual('Question', self.q.type)
self.assertTrue(self.q.free_response)
self.assertFalse(self.q.mimarks)
self.assertFalse(self.q.ontology)
self.assertEqual(self.q.missing,
{'not applicable', 'missing: not provided',
'missing: not collected', 'missing: restricted',
'not provided', 'not collected', 'restricted'})
self.assertEqual(self.q.colormap, None)
self.assertEqual(self.q.blanks, None)
self.assertEqual(self.q.log, [])
self.assertEqual(self.q.source_columns, [])
self.assertEqual(self.q.derivative_columns, [])
self.assertEqual(self.q.notes, None)
def test_init_source_derivative_list(self):
q = Question(name=self.name,
description=self.description,
dtype=self.dtype,
source_columns=['SMH'],
derivative_columns=['next_step'],
school='Samwell',
)
self.assertEqual(q.source_columns, ['SMH'])
self.assertEqual(q.derivative_columns, ['next_step'])
self.assertTrue(hasattr(q, 'school'))
self.assertEqual(q.school, 'Samwell')
def test_init_error_name(self):
with self.assertRaises(TypeError):
Question(name=1,
description=self.description,
dtype=self.dtype,
)
def test_init_error_description(self):
with self.assertRaises(TypeError):
Question(name=self.name,
description=self.dtype,
dtype=self.dtype,
)
def test_init_error_description_length(self):
d = ('Check, Please! is a 2013 webcomic written and '
'illustrated by Ngozi Ukazu. The webcomic follows '
'vlogger and figure-turned-ice hockey skater Eric '
'"Bitty" Bittle as he deals with hockey culture in '
'college, as well as his identity as a gay man.')
with self.assertRaises(ValueError):
Question(name=self.name,
description=d,
dtype=self.dtype,
)
def test_init_error_dtype(self):
with self.assertRaises(TypeError):
Question(name=self.name,
description=self.description,
dtype=self.description,
)
def test_init_error_clean_name(self):
with self.assertRaises(TypeError):
Question(name=self.name,
description=self.description,
dtype=self.dtype,
clean_name=self.dtype
)
def test_init_clean_name_missing_str(self):
q = Question(name=self.name,
description=self.description,
dtype=self.dtype,
clean_name='Player',
missing='Bitty')
self.assertEqual(q.clean_name, 'Player')
self.assertEqual(q.missing, set(['Bitty']))
def test_init_missing_list(self):
q = Question(name=self.name,
description=self.description,
dtype=self.dtype,
missing=['Bitty'])
self.assertEqual(q.missing, set(['Bitty']))
def test__str__(self):
known = """
------------------------------------------------------------------------------------
player_name (Question str)
Samwell Hockey Players
------------------------------------------------------------------------------------
"""
test = self.q.__str__()
self.assertEqual(known, test)
def test_update_log(self):
self.assertEqual(len(self.q.log), 0)
self.q._update_log(
command='dibs',
transform_type='replace',
transformation='metaphysical goalie johnson > Bitty'
)
self.assertEqual(len(self.q.log), 1)
log_ = self.q.log[0]
self.assertTrue(isinstance(log_, dict))
self.assertEqual({'timestamp', 'column', 'command', 'transform_type',
'transformation'}, set(log_.keys()))
self.assertTrue(isinstance(log_['timestamp'], datetime.datetime))
self.assertEqual(log_['column'], 'player_name')
self.assertEqual(log_['command'], 'dibs')
self.assertEqual(log_['transform_type'], 'replace')
self.assertEqual(log_['transformation'],
'metaphysical goalie johnson > Bitty')
def test_write_provenance(self):
known_log = pd.DataFrame(
np.array([[datetime.datetime.now(), 'Write Log', 'team_captain',
'recording', '']]),
columns=['timestamp', 'command', 'column', 'transform_type',
'transformation']
)
q = Question(name='team_captain',
description='who has the C or AC',
dtype=bool
)
log_ = q.write_provenance()
self.assertEqual(known_log.shape, log_.shape)
pdt.assert_index_equal(known_log.columns, log_.columns)
pdt.assert_series_equal(known_log['column'], log_['column'])
pdt.assert_series_equal(known_log['command'], log_['command'])
pdt.assert_series_equal(known_log['transform_type'],
log_['transform_type'])
pdt.assert_series_equal(known_log['transformation'],
log_['transformation'])
def test_read_provenance(self):
with self.assertRaises(NotImplementedError):
self.q._read_provenance('fp_')
def test_check_ontology(self):
with self.assertRaises(NotImplementedError):
self.q._check_ontology()
def test_identify_remap_function_bool_placeholder(self):
iseries = pd.Series(['True', 'true', 1, 'nope',
'False', 'false', 0, 0.0])
# Sets the known values
kseries = pd.Series([True, True, True, 'nope',
False, False, False, False])
f_ = self.q._identify_remap_function(bool, {'nope'})
tseries = iseries.apply(f_)
pdt.assert_series_equal(kseries, tseries)
def test_identify_remap_function_bool_no_placeholder(self):
iseries = pd.Series(['True', 'true', 1, 'nope',
'False', 'false', 0, 0.0])
# Sets the known values
kseries = pd.Series([True, True, True, 'error',
False, False, False, False])
# Gets the test values
f_ = self.q._identify_remap_function(bool, {'cool '})
tseries = iseries.apply(f_)
pdt.assert_series_equal(kseries, tseries)
def test_remap_type_str_pass_no_placeholder(self):
iseries = self.map_['player_name']
kseries = self.map_['player_name']
f_ = self.q._identify_remap_function(str)
tseries = iseries.apply(f_)
pdt.assert_series_equal(kseries, tseries)
def test_remap_type_int_placeholder(self):
iseries = pd.Series(data=['1', '2', '3', 'i dont skate'],
index=['Whiskey', 'Chowder', 'Bitty', 'Lardo'],
name='collegate_hockey_years')
# Sets the known values
kseries = pd.Series(data=[1, 2, 3, 'i dont skate'],
index=['Whiskey', 'Chowder', 'Bitty', 'Lardo'],
name='collegate_hockey_years')
f_ = self.q._identify_remap_function(int, {'i dont skate'})
tseries = iseries.apply(f_)
pdt.assert_series_equal(kseries, tseries)
def test_remap_type_float_log_error(self):
iseries = pd.Series(data=['1', '2', '3', 'i dont skate'],
index=['Whiskey', 'Chowder', 'Bitty', 'Lardo'],
name='collegate_hockey_years')
kseries = pd.Series(data=[1, 2, 3, 'error'],
index=['Whiskey', 'Chowder', 'Bitty', 'Lardo'],
name='collegate_hockey_years')
f_ = self.q._identify_remap_function(float)
tseries = iseries.apply(f_)
pdt.assert_series_equal(kseries, tseries)
def test_iterable_to_str_null(self):
test = self.q._iterable_to_str(None, null_value='---')
self.assertEqual(test, '---')
def test_iterable_to_str_empty(self):
test = self.q._iterable_to_str([])
        self.assertEqual(test, 'None')
def test_iterable_from_str_null(self):
test = self.q._iterable_from_str('---', null_value='---')
self.assertEqual(test, None)
def test_iterable_from_str_list(self):
known = ['Dorm', 'Haus']
test_ = self.q._iterable_from_str('Dorm | Haus', return_type=list)
self.assertEqual(known, test_)
def test_iterable_to_str_list(self):
known = 'Dorm | Haus'
test_ = self.q._iterable_to_str(['Dorm', 'Haus'])
npt.assert_array_equal(np.array(known), np.array(test_))
def test_iterable_from_str_partial_list(self):
known = ['Dorm', None]
test_ = self.q._iterable_from_str('Dorm | None', return_type=list)
self.assertEqual(known, test_)
def test_iterable_to_str_partial_list(self):
known = 'Dorm | ---'
test_ = self.q._iterable_to_str(['Dorm', None], null_value='---')
self.assertEqual(known, test_)
def test_iterable_from_str_code(self):
known = {0: 'Dorm', 1: 'Haus'}
test_ = self.q._iterable_from_str('0=Dorm | 1=Haus', var_type=int)
self.assertEqual(known, test_)
def test_iterable_to_str_code(self):
known = '0=Dorm | 1=Haus'
test_ = self.q._iterable_to_str({0: 'Dorm', 1: 'Haus'})
self.assertEqual(known, test_)
def test_iterable_from_str_var(self):
known = set(['Boston'])
test_ = self.q._iterable_from_str('Boston')
self.assertEqual(known, test_)
def test_to_series(self):
self.q.order = ['Bitty', 'Ransom', 'Holster']
self.q.missing = {'TBD'}
self.q.var_labels = {1: 'Bitty', 2: 'Ransom', 3: 'Holster'}
known = pd.Series({'name': self.name,
'description': self.description,
'dtype': 'str',
'type': 'Question',
'clean_name': 'Player Name',
'free_response': 'True',
'missing': "TBD",
'order': 'Bitty | Ransom | Holster',
'var_labels': '1=Bitty | 2=Ransom | 3=Holster'
})
# known = known[['name', 'description', 'dtype', 'type', 'clean_name',
# 'free_response', 'missing', 'order']]
test_ = self.q._to_series()
pdt.assert_series_equal(known, test_)
def test_read_series(self):
var_ = pd.Series({'name': self.name,
'description': self.description,
'dtype': 'str',
'clean_name': 'Player Name',
'free_response': 'True',
'missing': "TBD",
'order': 'Bitty=1 | Ransom=2 | Holster=3',
'colormap': 'Reds',
'ref_value': 'Ransom',
'sig_figs': '3',
'i_dont_know': np.pi,
'meh': np.nan,
'ambigious': 'Lardo',
})
q = Question._read_series(var_)
# Checks set values
self.assertTrue(isinstance(q, Question))
self.assertEqual(self.name, q.name)
self.assertEqual(self.description, q.description)
self.assertEqual(self.dtype, q.dtype)
self.assertEqual('Question', q.type)
self.assertTrue(q.free_response)
self.assertEqual('Player Name', q.clean_name)
self.assertEqual(q.missing, {'TBD'})
self.assertEqual(q.order, ['Bitty', 'Ransom', 'Holster'])
self.assertEqual(q.var_labels,
{'Bitty': '1', 'Ransom': '2', 'Holster': '3'})
self.assertEqual(q.colormap, 'Reds')
self.assertEqual(q.ref_value, 'Ransom')
self.assertEqual(q.sig_figs, 3)
self.assertEqual(q.ambigious, {'Lardo'})
npt.assert_almost_equal(q.i_dont_know, np.pi, 5)
# Checks defaults
self.assertFalse(q.mimarks)
self.assertEqual(q.ontology, None)
self.assertEqual(q.blanks, None)
self.assertEqual(q.log, [])
self.assertEqual(q.source_columns, [])
self.assertEqual(q.derivative_columns, [])
self.assertEqual(q.notes, None)
def test_read_series_bool(self):
var_ = pd.Series({'name': self.name,
'description': self.description,
'dtype': 'bool',
'order': 'False | True',
'ref_value': 'False'})
q = Question._read_series(var_)
self.assertEqual(q.dtype, bool)
self.assertFalse(q.ref_value)
self.assertEqual(q.order, [False, True])
def test_series_round_trip(self):
var_ = self.q._to_series()
new_ = Question._read_series(var_)
self.assertEqual(self.q.__dict__, new_.__dict__)
def test_check_cmap(self):
self.assertEqual(_check_cmap('Reds'), 'Reds')
if __name__ == '__main__':
main()
|
|
from ethereum import utils
from ethereum.abi import is_numeric
import copy
from ethereum import opcodes
import time
from ethereum.slogging import get_logger
from rlp.utils import encode_hex, ascii_chr
from ethereum.utils import to_string, safe_ord
log_log = get_logger('eth.vm.log')
log_vm_exit = get_logger('eth.vm.exit')
log_vm_op = get_logger('eth.vm.op')
log_vm_op_stack = get_logger('eth.vm.op.stack')
log_vm_op_memory = get_logger('eth.vm.op.memory')
log_vm_op_storage = get_logger('eth.vm.op.storage')
TT256 = 2 ** 256
TT256M1 = 2 ** 256 - 1
TT255 = 2 ** 255
class CallData(object):
def __init__(self, parent_memory, offset=0, size=None):
self.data = parent_memory
self.offset = offset
self.size = len(self.data) if size is None else size
self.rlimit = self.offset + self.size
def extract_all(self):
d = self.data[self.offset: self.offset + self.size]
d += [0] * (self.size - len(d))
return b''.join([ascii_chr(x) for x in d])
def extract32(self, i):
if i >= self.size:
return 0
o = self.data[self.offset + i: min(self.offset + i + 32, self.rlimit)]
return utils.bytearray_to_int(o + [0] * (32 - len(o)))
def extract_copy(self, mem, memstart, datastart, size):
for i in range(size):
if datastart + i < self.size:
mem[memstart + i] = self.data[self.offset + datastart + i]
else:
mem[memstart + i] = 0
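# Illustrative sketch (added comment, not in the original source): how CallData
# slices its parent memory and zero-pads reads past the window. The byte values
# are made up.
#
#   cd = CallData([0] * 28 + [0xde, 0xad, 0xbe, 0xef], offset=28, size=4)
#   cd.extract_all()  # -> b'\xde\xad\xbe\xef'
#   cd.extract32(0)   # -> 0xdeadbeef << (28 * 8): the four data bytes read
#                     #    big-endian and right-padded with zeros to 32 bytes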
class Message(object):
def __init__(self, sender, to, value, gas, data,
depth=0, code_address=None, is_create=False):
self.sender = sender
self.to = to
self.value = value
self.gas = gas
self.data = data
self.depth = depth
self.logs = []
self.code_address = code_address
self.is_create = is_create
def __repr__(self):
return '<Message(to:%s...)>' % self.to[:8]
class Compustate():
def __init__(self, **kwargs):
self.memory = []
self.stack = []
self.pc = 0
self.gas = 0
for kw in kwargs:
setattr(self, kw, kwargs[kw])
# Preprocesses code, and determines which locations are in the middle
# of pushdata and thus invalid
def preprocess_code(code):
i = 0
ops = []
while i < len(code):
o = copy.copy(opcodes.opcodes.get(safe_ord(code[i]), ['INVALID', 0, 0, 0]) +
[safe_ord(code[i]), 0])
ops.append(o)
if o[0][:4] == 'PUSH':
for j in range(int(o[0][4:])):
i += 1
byte = safe_ord(code[i]) if i < len(code) else 0
o[-1] = (o[-1] << 8) + byte
if i < len(code):
ops.append(['INVALID', 0, 0, 0, byte, 0])
i += 1
return ops
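# Illustrative sketch (added comment, not in the original source): for the
# two-byte program PUSH1 0x05, preprocess_code emits one entry per code byte,
# copying [name, in_args, out_args, fee] from opcodes.opcodes (PUSH1 costs 3
# gas in the table assumed here):
#
#   preprocess_code(b'\x60\x05')
#   # -> [['PUSH1', 0, 1, 3, 0x60, 5],   immediate value 5 kept in the last slot
#   #     ['INVALID', 0, 0, 0, 5, 0]]    pushdata byte marked INVALID so that
#   #                                    JUMP/JUMPI cannot land inside it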
def mem_extend(mem, compustate, op, start, sz):
if sz:
oldsize = len(mem) // 32
old_totalfee = oldsize * opcodes.GMEMORY + \
oldsize**2 // opcodes.GQUADRATICMEMDENOM
newsize = utils.ceil32(start + sz) // 32
# if newsize > 524288:
# raise Exception("Memory above 16 MB per call not supported by this VM")
new_totalfee = newsize * opcodes.GMEMORY + \
newsize**2 // opcodes.GQUADRATICMEMDENOM
if old_totalfee < new_totalfee:
memfee = new_totalfee - old_totalfee
if compustate.gas < memfee:
compustate.gas = 0
return False
compustate.gas -= memfee
m_extend = (newsize - oldsize) * 32
mem.extend([0] * m_extend)
return True
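# Worked example of the quadratic memory fee computed above (assuming the usual
# constants GMEMORY = 3 and GQUADRATICMEMDENOM = 512 from the opcodes module):
# growing an empty memory to 64 bytes means newsize = 2 words, so the fee is
# 2 * 3 + 2**2 // 512 = 6 gas and mem gains 64 zero bytes.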
def data_copy(compustate, size):
if size:
        copyfee = opcodes.GCOPY * utils.ceil32(size) // 32
if compustate.gas < copyfee:
compustate.gas = 0
return False
compustate.gas -= copyfee
return True
def vm_exception(error, **kargs):
log_vm_exit.trace('EXCEPTION', cause=error, **kargs)
return 0, 0, []
def peaceful_exit(cause, gas, data, **kargs):
log_vm_exit.trace('EXIT', cause=cause, **kargs)
return 1, gas, data
code_cache = {}
def vm_execute(ext, msg, code):
# precompute trace flag
# if we trace vm, we're in slow mode anyway
trace_vm = log_vm_op.is_active('trace')
compustate = Compustate(gas=msg.gas)
stk = compustate.stack
mem = compustate.memory
if code in code_cache:
processed_code = code_cache[code]
else:
processed_code = preprocess_code(code)
code_cache[code] = processed_code
codelen = len(processed_code)
s = time.time()
op = None
while 1:
# print 'op: ', op, time.time() - s
# s = time.time()
        # pc ran past the end of the code: treated as a peaceful exit
if compustate.pc >= codelen:
return peaceful_exit('CODE OUT OF RANGE', compustate.gas, [])
op, in_args, out_args, fee, opcode, pushval = \
processed_code[compustate.pc]
# out of gas error
if fee > compustate.gas:
return vm_exception('OUT OF GAS')
# empty stack error
if in_args > len(compustate.stack):
return vm_exception('INSUFFICIENT STACK',
op=op, needed=to_string(in_args),
available=to_string(len(compustate.stack)))
if len(compustate.stack) + out_args > 1024:
return vm_exception('STACK SIZE LIMIT EXCEEDED')
# Apply operation
compustate.gas -= fee
compustate.pc += 1
if trace_vm:
"""
This diverges from normal logging, as we use the logging namespace
only to decide which features get logged in 'eth.vm.op'
i.e. tracing can not be activated by activating a sub like 'eth.vm.op.stack'
"""
trace_data = {}
if log_vm_op_stack.is_active():
trace_data['stack'] = list(map(to_string, list(compustate.stack)))
if log_vm_op_memory.is_active():
trace_data['memory'] = \
b''.join([encode_hex(ascii_chr(x)) for x in compustate.memory])
if log_vm_op_storage.is_active():
trace_data['storage'] = ext.log_storage(msg.to)
trace_data['gas'] = to_string(compustate.gas + fee)
trace_data['pc'] = to_string(compustate.pc - 1)
trace_data['op'] = op
if op[:4] == 'PUSH':
trace_data['pushvalue'] = pushval
log_vm_op.trace('vm', **trace_data)
# Invalid operation
if op == 'INVALID':
return vm_exception('INVALID OP', opcode=opcode)
# Valid operations
if opcode < 0x10:
if op == 'STOP':
return peaceful_exit('STOP', compustate.gas, [])
elif op == 'ADD':
stk.append((stk.pop() + stk.pop()) & TT256M1)
elif op == 'SUB':
stk.append((stk.pop() - stk.pop()) & TT256M1)
elif op == 'MUL':
stk.append((stk.pop() * stk.pop()) & TT256M1)
elif op == 'DIV':
s0, s1 = stk.pop(), stk.pop()
stk.append(0 if s1 == 0 else s0 // s1)
elif op == 'MOD':
s0, s1 = stk.pop(), stk.pop()
stk.append(0 if s1 == 0 else s0 % s1)
elif op == 'SDIV':
s0, s1 = utils.to_signed(stk.pop()), utils.to_signed(stk.pop())
stk.append(0 if s1 == 0 else (abs(s0) // abs(s1) *
(-1 if s0 * s1 < 0 else 1)) & TT256M1)
elif op == 'SMOD':
s0, s1 = utils.to_signed(stk.pop()), utils.to_signed(stk.pop())
stk.append(0 if s1 == 0 else (abs(s0) % abs(s1) *
(-1 if s0 < 0 else 1)) & TT256M1)
elif op == 'ADDMOD':
s0, s1, s2 = stk.pop(), stk.pop(), stk.pop()
stk.append((s0 + s1) % s2 if s2 else 0)
elif op == 'MULMOD':
s0, s1, s2 = stk.pop(), stk.pop(), stk.pop()
stk.append((s0 * s1) % s2 if s2 else 0)
elif op == 'EXP':
base, exponent = stk.pop(), stk.pop()
# fee for exponent is dependent on its bytes
# calc n bytes to represent exponent
nbytes = len(utils.encode_int(exponent))
expfee = nbytes * opcodes.GEXPONENTBYTE
if compustate.gas < expfee:
compustate.gas = 0
return vm_exception('OOG EXPONENT')
compustate.gas -= expfee
stk.append(pow(base, exponent, TT256))
elif op == 'SIGNEXTEND':
s0, s1 = stk.pop(), stk.pop()
if s0 <= 31:
testbit = s0 * 8 + 7
if s1 & (1 << testbit):
stk.append(s1 | (TT256 - (1 << testbit)))
else:
stk.append(s1 & ((1 << testbit) - 1))
else:
stk.append(s1)
elif opcode < 0x20:
if op == 'LT':
stk.append(1 if stk.pop() < stk.pop() else 0)
elif op == 'GT':
stk.append(1 if stk.pop() > stk.pop() else 0)
elif op == 'SLT':
s0, s1 = utils.to_signed(stk.pop()), utils.to_signed(stk.pop())
stk.append(1 if s0 < s1 else 0)
elif op == 'SGT':
s0, s1 = utils.to_signed(stk.pop()), utils.to_signed(stk.pop())
stk.append(1 if s0 > s1 else 0)
elif op == 'EQ':
stk.append(1 if stk.pop() == stk.pop() else 0)
elif op == 'ISZERO':
stk.append(0 if stk.pop() else 1)
elif op == 'AND':
stk.append(stk.pop() & stk.pop())
elif op == 'OR':
stk.append(stk.pop() | stk.pop())
elif op == 'XOR':
stk.append(stk.pop() ^ stk.pop())
elif op == 'NOT':
stk.append(TT256M1 - stk.pop())
elif op == 'BYTE':
s0, s1 = stk.pop(), stk.pop()
if s0 >= 32:
stk.append(0)
else:
stk.append((s1 // 256 ** (31 - s0)) % 256)
elif opcode < 0x40:
if op == 'SHA3':
s0, s1 = stk.pop(), stk.pop()
compustate.gas -= opcodes.GSHA3WORD * (utils.ceil32(s1) // 32)
if compustate.gas < 0:
return vm_exception('OOG PAYING FOR SHA3')
if not mem_extend(mem, compustate, op, s0, s1):
return vm_exception('OOG EXTENDING MEMORY')
data = b''.join(map(ascii_chr, mem[s0: s0 + s1]))
stk.append(utils.big_endian_to_int(utils.sha3(data)))
elif op == 'ADDRESS':
stk.append(utils.coerce_to_int(msg.to))
elif op == 'BALANCE':
addr = utils.coerce_addr_to_hex(stk.pop() % 2**160)
stk.append(ext.get_balance(addr))
elif op == 'ORIGIN':
stk.append(utils.coerce_to_int(ext.tx_origin))
elif op == 'CALLER':
stk.append(utils.coerce_to_int(msg.sender))
elif op == 'CALLVALUE':
stk.append(msg.value)
elif op == 'CALLDATALOAD':
stk.append(msg.data.extract32(stk.pop()))
elif op == 'CALLDATASIZE':
stk.append(msg.data.size)
elif op == 'CALLDATACOPY':
mstart, dstart, size = stk.pop(), stk.pop(), stk.pop()
if not mem_extend(mem, compustate, op, mstart, size):
return vm_exception('OOG EXTENDING MEMORY')
if not data_copy(compustate, size):
return vm_exception('OOG COPY DATA')
msg.data.extract_copy(mem, mstart, dstart, size)
elif op == 'CODESIZE':
stk.append(len(processed_code))
elif op == 'CODECOPY':
start, s1, size = stk.pop(), stk.pop(), stk.pop()
if not mem_extend(mem, compustate, op, start, size):
return vm_exception('OOG EXTENDING MEMORY')
if not data_copy(compustate, size):
return vm_exception('OOG COPY DATA')
for i in range(size):
if s1 + i < len(processed_code):
mem[start + i] = processed_code[s1 + i][4]
else:
mem[start + i] = 0
elif op == 'GASPRICE':
stk.append(ext.tx_gasprice)
elif op == 'EXTCODESIZE':
addr = utils.coerce_addr_to_hex(stk.pop() % 2**160)
stk.append(len(ext.get_code(addr) or b''))
elif op == 'EXTCODECOPY':
addr = utils.coerce_addr_to_hex(stk.pop() % 2**160)
start, s2, size = stk.pop(), stk.pop(), stk.pop()
extcode = ext.get_code(addr) or b''
if not mem_extend(mem, compustate, op, start, size):
return vm_exception('OOG EXTENDING MEMORY')
if not data_copy(compustate, size):
return vm_exception('OOG COPY DATA')
for i in range(size):
if s2 + i < len(extcode):
mem[start + i] = safe_ord(extcode[s2 + i])
else:
mem[start + i] = 0
elif opcode < 0x50:
if op == 'BLOCKHASH':
stk.append(utils.big_endian_to_int(ext.block_hash(stk.pop())))
elif op == 'COINBASE':
stk.append(utils.big_endian_to_int(ext.block_coinbase))
elif op == 'TIMESTAMP':
stk.append(ext.block_timestamp)
elif op == 'NUMBER':
stk.append(ext.block_number)
elif op == 'DIFFICULTY':
stk.append(ext.block_difficulty)
elif op == 'GASLIMIT':
stk.append(ext.block_gas_limit)
elif opcode < 0x60:
if op == 'POP':
stk.pop()
elif op == 'MLOAD':
s0 = stk.pop()
if not mem_extend(mem, compustate, op, s0, 32):
return vm_exception('OOG EXTENDING MEMORY')
data = b''.join(map(ascii_chr, mem[s0: s0 + 32]))
stk.append(utils.big_endian_to_int(data))
elif op == 'MSTORE':
s0, s1 = stk.pop(), stk.pop()
if not mem_extend(mem, compustate, op, s0, 32):
return vm_exception('OOG EXTENDING MEMORY')
v = s1
for i in range(31, -1, -1):
mem[s0 + i] = v % 256
v //= 256
elif op == 'MSTORE8':
s0, s1 = stk.pop(), stk.pop()
if not mem_extend(mem, compustate, op, s0, 1):
return vm_exception('OOG EXTENDING MEMORY')
mem[s0] = s1 % 256
elif op == 'SLOAD':
stk.append(ext.get_storage_data(msg.to, stk.pop()))
elif op == 'SSTORE':
s0, s1 = stk.pop(), stk.pop()
if ext.get_storage_data(msg.to, s0):
gascost = opcodes.GSTORAGEMOD if s1 else opcodes.GSTORAGEKILL
refund = 0 if s1 else opcodes.GSTORAGEREFUND
else:
gascost = opcodes.GSTORAGEADD if s1 else opcodes.GSTORAGEMOD
refund = 0
if compustate.gas < gascost:
return vm_exception('OUT OF GAS')
compustate.gas -= gascost
                ext.add_refund(refund)  # credit the storage-clear refund (0 unless a slot was zeroed)
ext.set_storage_data(msg.to, s0, s1)
elif op == 'JUMP':
compustate.pc = stk.pop()
opnew = processed_code[compustate.pc][0] if \
compustate.pc < len(processed_code) else 'STOP'
if opnew != 'JUMPDEST':
return vm_exception('BAD JUMPDEST')
elif op == 'JUMPI':
s0, s1 = stk.pop(), stk.pop()
if s1:
compustate.pc = s0
opnew = processed_code[compustate.pc][0] if \
compustate.pc < len(processed_code) else 'STOP'
if opnew != 'JUMPDEST':
return vm_exception('BAD JUMPDEST')
elif op == 'PC':
stk.append(compustate.pc - 1)
elif op == 'MSIZE':
stk.append(len(mem))
elif op == 'GAS':
stk.append(compustate.gas) # AFTER subtracting cost 1
elif op[:4] == 'PUSH':
pushnum = int(op[4:])
compustate.pc += pushnum
stk.append(pushval)
elif op[:3] == 'DUP':
depth = int(op[3:])
stk.append(stk[-depth])
elif op[:4] == 'SWAP':
depth = int(op[4:])
temp = stk[-depth - 1]
stk[-depth - 1] = stk[-1]
stk[-1] = temp
elif op[:3] == 'LOG':
"""
0xa0 ... 0xa4, 32/64/96/128/160 + len(data) gas
a. Opcodes LOG0...LOG4 are added, takes 2-6 stack arguments
MEMSTART MEMSZ (TOPIC1) (TOPIC2) (TOPIC3) (TOPIC4)
b. Logs are kept track of during tx execution exactly the same way as suicides
(except as an ordered list, not a set).
Each log is in the form [address, [topic1, ... ], data] where:
* address is what the ADDRESS opcode would output
* data is mem[MEMSTART: MEMSTART + MEMSZ]
* topics are as provided by the opcode
c. The ordered list of logs in the transaction are expressed as [log0, log1, ..., logN].
"""
depth = int(op[3:])
mstart, msz = stk.pop(), stk.pop()
topics = [stk.pop() for x in range(depth)]
compustate.gas -= msz * opcodes.GLOGBYTE
if not mem_extend(mem, compustate, op, mstart, msz):
return vm_exception('OOG EXTENDING MEMORY')
data = b''.join(map(ascii_chr, mem[mstart: mstart + msz]))
ext.log(msg.to, topics, data)
log_log.trace('LOG', to=msg.to, topics=topics, data=list(map(safe_ord, data)))
# print('LOG', msg.to, topics, list(map(safe_ord, data)))
elif op == 'CREATE':
value, mstart, msz = stk.pop(), stk.pop(), stk.pop()
if not mem_extend(mem, compustate, op, mstart, msz):
return vm_exception('OOG EXTENDING MEMORY')
if ext.get_balance(msg.to) >= value and msg.depth < 1024:
cd = CallData(mem, mstart, msz)
create_msg = Message(msg.to, b'', value, compustate.gas, cd, msg.depth + 1)
o, gas, addr = ext.create(create_msg)
if o:
stk.append(utils.coerce_to_int(addr))
compustate.gas = gas
else:
stk.append(0)
compustate.gas = 0
else:
stk.append(0)
elif op == 'CALL':
gas, to, value, meminstart, meminsz, memoutstart, memoutsz = \
stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop()
if not mem_extend(mem, compustate, op, meminstart, meminsz) or \
not mem_extend(mem, compustate, op, memoutstart, memoutsz):
return vm_exception('OOG EXTENDING MEMORY')
to = utils.encode_int(to)
to = ((b'\x00' * (32 - len(to))) + to)[12:]
extra_gas = (not ext.account_exists(to)) * opcodes.GCALLNEWACCOUNT + \
(value > 0) * opcodes.GCALLVALUETRANSFER
submsg_gas = gas + opcodes.GSTIPEND * (value > 0)
if compustate.gas < gas + extra_gas:
return vm_exception('OUT OF GAS')
if ext.get_balance(msg.to) >= value and msg.depth < 1024:
compustate.gas -= (gas + extra_gas)
cd = CallData(mem, meminstart, meminsz)
call_msg = Message(msg.to, to, value, submsg_gas, cd,
msg.depth + 1, code_address=to)
result, gas, data = ext.msg(call_msg)
if result == 0:
stk.append(0)
else:
stk.append(1)
compustate.gas += gas
for i in range(min(len(data), memoutsz)):
mem[memoutstart + i] = data[i]
else:
compustate.gas -= (gas + extra_gas - submsg_gas)
stk.append(0)
elif op == 'CALLCODE':
gas, to, value, meminstart, meminsz, memoutstart, memoutsz = \
stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop(), stk.pop()
if not mem_extend(mem, compustate, op, meminstart, meminsz) or \
not mem_extend(mem, compustate, op, memoutstart, memoutsz):
return vm_exception('OOG EXTENDING MEMORY')
extra_gas = (value > 0) * opcodes.GCALLVALUETRANSFER
submsg_gas = gas + opcodes.GSTIPEND * (value > 0)
if compustate.gas < gas + extra_gas:
return vm_exception('OUT OF GAS')
if ext.get_balance(msg.to) >= value and msg.depth < 1024:
compustate.gas -= (gas + extra_gas)
to = utils.encode_int(to)
to = ((b'\x00' * (32 - len(to))) + to)[12:]
cd = CallData(mem, meminstart, meminsz)
call_msg = Message(msg.to, msg.to, value, submsg_gas, cd,
msg.depth + 1, code_address=to)
result, gas, data = ext.msg(call_msg)
if result == 0:
stk.append(0)
else:
stk.append(1)
compustate.gas += gas
for i in range(min(len(data), memoutsz)):
mem[memoutstart + i] = data[i]
else:
compustate.gas -= (gas + extra_gas - submsg_gas)
stk.append(0)
elif op == 'RETURN':
s0, s1 = stk.pop(), stk.pop()
if not mem_extend(mem, compustate, op, s0, s1):
return vm_exception('OOG EXTENDING MEMORY')
return peaceful_exit('RETURN', compustate.gas, mem[s0: s0 + s1])
elif op == 'SUICIDE':
to = utils.encode_int(stk.pop())
to = ((b'\x00' * (32 - len(to))) + to)[12:]
xfer = ext.get_balance(msg.to)
ext.set_balance(msg.to, 0)
ext.set_balance(to, ext.get_balance(to) + xfer)
ext.add_suicide(msg.to)
# print('suiciding %s %s %d' % (msg.to, to, xfer))
return 1, compustate.gas, []
for a in stk:
assert is_numeric(a)
assert a >= 0 and a < 2**256, (a, op, stk)
class VmExtBase():
def __init__(self):
self.get_code = lambda addr: b''
self.get_balance = lambda addr: 0
self.set_balance = lambda addr, balance: 0
self.set_storage_data = lambda addr, key, value: 0
self.get_storage_data = lambda addr, key: 0
self.log_storage = lambda addr: 0
self.add_suicide = lambda addr: 0
self.add_refund = lambda x: 0
self.block_prevhash = 0
self.block_coinbase = 0
self.block_timestamp = 0
self.block_number = 0
self.block_difficulty = 0
self.block_gas_limit = 0
self.log = lambda addr, topics, data: 0
self.tx_origin = b'0' * 40
self.tx_gasprice = 0
        self.create = lambda msg: (0, 0, 0)
        self.call = lambda msg: (0, 0, 0)
        self.sendmsg = lambda msg: (0, 0, 0)
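# Illustrative sketch (added comment, not in the original source): running a
# trivial program against the stub environment above. PUSH1 5; STOP touches no
# external state, so VmExtBase's zero-returning stubs are never consulted.
#
#   msg = Message(sender=b'\x00' * 20, to=b'\x00' * 20, value=0, gas=10000,
#                 data=CallData([]))
#   vm_execute(VmExtBase(), msg, b'\x60\x05\x00')
#   # -> (1, 9997, [])  success, 3 gas spent on PUSH1 (per the assumed opcode
#   #                   table), no return data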
|
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This module classifies NativeHeap objects filtering their allocations.
Two filters are currently available, 'stacktrace' and 'source_path'; they work
as follows:
{'name': 'rule-1', 'stacktrace': 'foo' }
{'name': 'rule-2', 'stacktrace': ['foo', r'bar\s+baz']}
{'name': 'rule-3', 'source_path': 'sk.*allocator'}
{'name': 'rule-4', 'source_path': 'sk', 'stacktrace': 'SkAllocator'}
rule-1 will match any allocation that has 'foo' in one of its stack frames.
rule-2 will match any allocation that has a stack frame matching 'foo' followed,
in a later frame, by a stack frame matching 'bar baz'. Note that order matters,
so rule-2 will not match a stacktrace like ['bar baz', 'foo'].
rule-3 will match any allocation in which at least one of the source paths in
its stack frames matches the regex sk.*allocator.
rule-4 will match any allocation which satisfies both conditions.
TODO(primiano): introduce more filters after the first prototype with UI, for
instance, filter by library file name or by allocation size.
"""
import collections
import posixpath
import re
from memory_inspector.classification import results
from memory_inspector.classification import rules
from memory_inspector.core import exceptions
from memory_inspector.core import native_heap
_RESULT_KEYS = ['bytes_allocated']
def LoadRules(content):
"""Loads and parses a native-heap rule tree from a content (string).
Returns:
    An instance of |rules.Rule| whose nodes are |_NHeapRule| instances.
"""
return rules.Load(content, _NHeapRule)
def Classify(nativeheap, rule_tree):
"""Creates aggregated results of native heaps using the provided rules.
Args:
nativeheap: the heap dump being processed (a |NativeHeap| instance).
rule_tree: the user-defined rules that define the filtering categories.
Returns:
An instance of |AggreatedResults|.
"""
assert(isinstance(nativeheap, native_heap.NativeHeap))
assert(isinstance(rule_tree, rules.Rule))
res = results.AggreatedResults(rule_tree, _RESULT_KEYS)
for allocation in nativeheap.allocations:
res.AddToMatchingNodes(allocation, [allocation.total_size])
return res
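# Illustrative sketch (added comment, not part of the original module; the rule
# names and regexes below are made up): a two-bucket classification of a
# |NativeHeap| instance built elsewhere.
#
#   rule_tree = LoadRules(str([
#       {'name': 'skia', 'source_path': r'third_party/skia'},
#       {'name': 'net', 'stacktrace': ['net::', r'Socket\w+']},
#   ]))
#   agg_results = Classify(native_heap, rule_tree)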
def InferHeuristicRulesFromHeap(nheap, max_depth=3, threshold=0.02):
"""Infers the rules tree from a symbolized heap snapshot.
  In the absence of a specific set of rules, this method can be invoked to infer
  a meaningful rule tree starting from a heap snapshot. It will build a compact
  radix tree from the source paths of the stack traces, whose height is at most
  |max_depth|, selecting only those nodes which contribute at least
|threshold| (1.0 = 100%) w.r.t. the total allocation of the heap snapshot.
"""
assert(isinstance(nheap, native_heap.NativeHeap))
def RadixTreeInsert(node, path):
"""Inserts a string (path) into a radix tree (a python recursive dict).
e.g.: [/a/b/c, /a/b/d, /z/h] -> {'/a/b/': {'c': {}, 'd': {}}, '/z/h': {}}
"""
def GetCommonPrefix(args):
"""Returns the common prefix between two paths (no partial paths).
e.g.: /tmp/bar, /tmp/baz will return /tmp/ (and not /tmp/ba as the dumb
posixpath.commonprefix implementation would do)
"""
parts = posixpath.commonprefix(args).rpartition(posixpath.sep)[0]
return parts + posixpath.sep if parts else ''
for node_path in node.iterkeys():
pfx = GetCommonPrefix([node_path, path])
if not pfx:
continue
if len(pfx) < len(node_path):
node[pfx] = {node_path[len(pfx):] : node[node_path]}
del node[node_path]
if len(path) > len(pfx):
RadixTreeInsert(node[pfx], path[len(pfx):])
return
node[path] = {} # No common prefix, create new child in current node.
# Given an allocation of size N and its stack trace, heuristically determines
# the source directory to be blamed for the N bytes.
  # The blamed_dir is the one appearing most often in the top 8 stack frames
# (excluding the first 2, which usually are just the (m|c)alloc call sites).
# At the end, this will generate a *leaderboard* (|blamed_dirs|) which
# associates, to each source path directory, the number of bytes allocated.
blamed_dirs = collections.Counter() # '/s/path' : bytes_from_this_path (int)
total_allocated = 0
for alloc in nheap.allocations:
dir_histogram = collections.Counter()
for frame in alloc.stack_trace.frames[2:10]:
# Compute a histogram (for each allocation) of the top source dirs.
if not frame.symbol or not frame.symbol.source_info:
continue
src_file = frame.symbol.source_info[0].source_file_path
src_dir = posixpath.dirname(src_file.replace('\\', '/')) + '/'
dir_histogram.update([src_dir])
if not dir_histogram:
continue
# Add the blamed dir to the leaderboard.
blamed_dir = dir_histogram.most_common()[0][0]
blamed_dirs.update({blamed_dir : alloc.total_size})
total_allocated += alloc.total_size
# Select only the top paths from the leaderboard which contribute for more
# than |threshold| and make a radix tree out of them.
radix_tree = {}
for blamed_dir, alloc_size in blamed_dirs.most_common():
if (1.0 * alloc_size / total_allocated) < threshold:
break
RadixTreeInsert(radix_tree, blamed_dir)
# The final step consists in generating a rule tree from the radix tree. This
# is a pretty straightforward tree-clone operation, they have the same shape.
def GenRulesFromRadixTree(radix_tree_node, max_depth, parent_path=''):
children = []
if max_depth > 0:
for node_path, node_children in radix_tree_node.iteritems():
child_rule = {
'name': node_path[-16:],
'source_path': '^' + re.escape(parent_path + node_path),
'children': GenRulesFromRadixTree(
node_children, max_depth - 1, parent_path + node_path)}
children += [child_rule]
return children
rules_tree = GenRulesFromRadixTree(radix_tree, max_depth)
return LoadRules(str(rules_tree))
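# Illustrative sketch (added comment) of the transformation performed above;
# the paths and sizes are made up:
#
#   blamed_dirs : {'/src/skia/core/': 4096, '/src/base/': 1024}
#   radix_tree  : {'/src/': {'skia/core/': {}, 'base/': {}}}
#   rules_tree  : [{'name': '/src/', 'source_path': '^' + re.escape('/src/'),
#                   'children': [ ...one rule per child, prefixed '/src/'... ]}]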
class _NHeapRule(rules.Rule):
def __init__(self, name, filters):
super(_NHeapRule, self).__init__(name)
# The 'stacktrace' filter can be either a string (simple case, one regex) or
# a list of strings (complex case, see doc in the header of this file).
stacktrace_regexs = filters.get('stacktrace', [])
if isinstance(stacktrace_regexs, basestring):
stacktrace_regexs = [stacktrace_regexs]
self._stacktrace_regexs = []
for regex in stacktrace_regexs:
try:
self._stacktrace_regexs.append(re.compile(regex))
except re.error, descr:
raise exceptions.MemoryInspectorException(
'Stacktrace regex error "%s" : %s' % (regex, descr))
# The 'source_path' regex, instead, simply matches the source file path.
self._path_regex = None
path_regex = filters.get('source_path')
if path_regex:
try:
self._path_regex = re.compile(path_regex)
except re.error, descr:
raise exceptions.MemoryInspectorException(
'Path regex error "%s" : %s' % (path_regex, descr))
def Match(self, allocation):
# Match the source file path, if the 'source_path' filter is specified.
if self._path_regex:
path_matches = False
for frame in allocation.stack_trace.frames:
if frame.symbol and frame.symbol.source_info:
if self._path_regex.search(
frame.symbol.source_info[0].source_file_path):
path_matches = True
break
if not path_matches:
return False
# Match the stack traces symbols, if the 'stacktrace' filter is specified.
if not self._stacktrace_regexs:
return True
cur_regex_idx = 0
cur_regex = self._stacktrace_regexs[0]
for frame in allocation.stack_trace.frames:
if frame.symbol and cur_regex.search(frame.symbol.name):
# The current regex has been matched.
if cur_regex_idx == len(self._stacktrace_regexs) - 1:
return True # All the provided regexs have been matched, we're happy.
cur_regex_idx += 1
cur_regex = self._stacktrace_regexs[cur_regex_idx]
return False # Not all the provided regexs have been matched.
|
|
# coding: utf-8
from __future__ import unicode_literals
import itertools
import json
from .common import InfoExtractor
from ..compat import (
compat_HTTPError,
compat_str,
compat_urlparse,
)
from ..utils import (
clean_html,
ExtractorError,
int_or_none,
parse_age_limit,
parse_duration,
unified_timestamp,
)
class DramaFeverBaseIE(InfoExtractor):
_NETRC_MACHINE = 'dramafever'
_CONSUMER_SECRET = 'DA59dtVXYLxajktV'
_consumer_secret = None
def _get_consumer_secret(self):
mainjs = self._download_webpage(
'http://www.dramafever.com/static/51afe95/df2014/scripts/main.js',
None, 'Downloading main.js', fatal=False)
if not mainjs:
return self._CONSUMER_SECRET
return self._search_regex(
r"var\s+cs\s*=\s*'([^']+)'", mainjs,
'consumer secret', default=self._CONSUMER_SECRET)
def _real_initialize(self):
self._consumer_secret = self._get_consumer_secret()
self._login()
def _login(self):
(username, password) = self._get_login_info()
if username is None:
return
login_form = {
'username': username,
'password': password,
}
try:
response = self._download_json(
'https://www.dramafever.com/api/users/login', None, 'Logging in',
data=json.dumps(login_form).encode('utf-8'), headers={
'x-consumer-key': self._consumer_secret,
})
except ExtractorError as e:
if isinstance(e.cause, compat_HTTPError) and e.cause.code in (403, 404):
response = self._parse_json(
e.cause.read().decode('utf-8'), None)
else:
raise
# Successful login
if response.get('result') or response.get('guid') or response.get('user_guid'):
return
errors = response.get('errors')
if errors and isinstance(errors, list):
error = errors[0]
message = error.get('message') or error['reason']
raise ExtractorError('Unable to login: %s' % message, expected=True)
raise ExtractorError('Unable to log in')
class DramaFeverIE(DramaFeverBaseIE):
IE_NAME = 'dramafever'
_VALID_URL = r'https?://(?:www\.)?dramafever\.com/(?:[^/]+/)?drama/(?P<id>[0-9]+/[0-9]+)(?:/|$)'
_TESTS = [{
'url': 'https://www.dramafever.com/drama/4274/1/Heirs/',
'info_dict': {
'id': '4274.1',
'ext': 'wvm',
'title': 'Heirs - Episode 1',
'description': 'md5:362a24ba18209f6276e032a651c50bc2',
'thumbnail': r're:^https?://.*\.jpg',
'duration': 3783,
'timestamp': 1381354993,
'upload_date': '20131009',
'series': 'Heirs',
'season_number': 1,
'episode': 'Episode 1',
'episode_number': 1,
},
'params': {
# m3u8 download
'skip_download': True,
},
}, {
'url': 'http://www.dramafever.com/drama/4826/4/Mnet_Asian_Music_Awards_2015/?ap=1',
'info_dict': {
'id': '4826.4',
'ext': 'flv',
'title': 'Mnet Asian Music Awards 2015',
'description': 'md5:3ff2ee8fedaef86e076791c909cf2e91',
'episode': 'Mnet Asian Music Awards 2015 - Part 3',
'episode_number': 4,
'thumbnail': r're:^https?://.*\.jpg',
'timestamp': 1450213200,
'upload_date': '20151215',
'duration': 5359,
},
'params': {
# m3u8 download
'skip_download': True,
},
}, {
'url': 'https://www.dramafever.com/zh-cn/drama/4972/15/Doctor_Romantic/',
'only_matching': True,
}]
def _call_api(self, path, video_id, note, fatal=False):
return self._download_json(
'https://www.dramafever.com/api/5/' + path,
video_id, note=note, headers={
'x-consumer-key': self._consumer_secret,
}, fatal=fatal)
def _get_subtitles(self, video_id):
subtitles = {}
subs = self._call_api(
'video/%s/subtitles/webvtt/' % video_id, video_id,
'Downloading subtitles JSON', fatal=False)
if not subs or not isinstance(subs, list):
return subtitles
for sub in subs:
if not isinstance(sub, dict):
continue
sub_url = sub.get('url')
if not sub_url or not isinstance(sub_url, compat_str):
continue
subtitles.setdefault(
sub.get('code') or sub.get('language') or 'en', []).append({
'url': sub_url
})
return subtitles
def _real_extract(self, url):
video_id = self._match_id(url).replace('/', '.')
series_id, episode_number = video_id.split('.')
video = self._call_api(
'series/%s/episodes/%s/' % (series_id, episode_number), video_id,
'Downloading video JSON')
formats = []
download_assets = video.get('download_assets')
if download_assets and isinstance(download_assets, dict):
for format_id, format_dict in download_assets.items():
if not isinstance(format_dict, dict):
continue
format_url = format_dict.get('url')
if not format_url or not isinstance(format_url, compat_str):
continue
formats.append({
'url': format_url,
'format_id': format_id,
'filesize': int_or_none(video.get('filesize')),
})
stream = self._call_api(
'video/%s/stream/' % video_id, video_id, 'Downloading stream JSON',
fatal=False)
if stream:
stream_url = stream.get('stream_url')
if stream_url:
formats.extend(self._extract_m3u8_formats(
stream_url, video_id, 'mp4', entry_protocol='m3u8_native',
m3u8_id='hls', fatal=False))
self._sort_formats(formats)
title = video.get('title') or 'Episode %s' % episode_number
description = video.get('description')
thumbnail = video.get('thumbnail')
timestamp = unified_timestamp(video.get('release_date'))
duration = parse_duration(video.get('duration'))
age_limit = parse_age_limit(video.get('tv_rating'))
series = video.get('series_title')
season_number = int_or_none(video.get('season'))
if series:
title = '%s - %s' % (series, title)
subtitles = self.extract_subtitles(video_id)
return {
'id': video_id,
'title': title,
'description': description,
'thumbnail': thumbnail,
'duration': duration,
'timestamp': timestamp,
'age_limit': age_limit,
'series': series,
'season_number': season_number,
'episode_number': int_or_none(episode_number),
'formats': formats,
'subtitles': subtitles,
}
class DramaFeverSeriesIE(DramaFeverBaseIE):
IE_NAME = 'dramafever:series'
_VALID_URL = r'https?://(?:www\.)?dramafever\.com/(?:[^/]+/)?drama/(?P<id>[0-9]+)(?:/(?:(?!\d+(?:/|$)).+)?)?$'
_TESTS = [{
'url': 'http://www.dramafever.com/drama/4512/Cooking_with_Shin/',
'info_dict': {
'id': '4512',
'title': 'Cooking with Shin',
'description': 'md5:84a3f26e3cdc3fb7f500211b3593b5c1',
},
'playlist_count': 4,
}, {
'url': 'http://www.dramafever.com/drama/124/IRIS/',
'info_dict': {
'id': '124',
'title': 'IRIS',
'description': 'md5:b3a30e587cf20c59bd1c01ec0ee1b862',
},
'playlist_count': 20,
}]
_PAGE_SIZE = 60 # max is 60 (see http://api.drama9.com/#get--api-4-episode-series-)
def _real_extract(self, url):
series_id = self._match_id(url)
series = self._download_json(
'http://www.dramafever.com/api/4/series/query/?cs=%s&series_id=%s'
% (self._consumer_secret, series_id),
series_id, 'Downloading series JSON')['series'][series_id]
title = clean_html(series['name'])
description = clean_html(series.get('description') or series.get('description_short'))
entries = []
for page_num in itertools.count(1):
episodes = self._download_json(
'http://www.dramafever.com/api/4/episode/series/?cs=%s&series_id=%s&page_size=%d&page_number=%d'
% (self._consumer_secret, series_id, self._PAGE_SIZE, page_num),
series_id, 'Downloading episodes JSON page #%d' % page_num)
for episode in episodes.get('value', []):
episode_url = episode.get('episode_url')
if not episode_url:
continue
entries.append(self.url_result(
compat_urlparse.urljoin(url, episode_url),
'DramaFever', episode.get('guid')))
if page_num == episodes['num_pages']:
break
return self.playlist_result(entries, series_id, title, description)
|
|
"""Test losses"""
import datetime
import numpy as np
import pandas as pd
from .conftest import assert_series_equal
from pvlib.soiling import hsu, kimber
from pvlib.iotools import read_tmy3
from .conftest import DATA_DIR
import pytest
@pytest.fixture
def expected_output():
# Sample output (calculated manually)
dt = pd.date_range(start=pd.Timestamp(2019, 1, 1, 0, 0, 0),
end=pd.Timestamp(2019, 1, 1, 23, 59, 0), freq='1h')
expected_no_cleaning = pd.Series(
data=[0.96998483, 0.94623958, 0.92468139, 0.90465654, 0.88589707,
0.86826366, 0.85167258, 0.83606715, 0.82140458, 0.80764919,
0.79476875, 0.78273241, 0.77150951, 0.76106905, 0.75137932,
0.74240789, 0.73412165, 0.72648695, 0.71946981, 0.7130361,
0.70715176, 0.70178307, 0.69689677, 0.69246034],
index=dt)
return expected_no_cleaning
@pytest.fixture
def expected_output_1():
dt = pd.date_range(start=pd.Timestamp(2019, 1, 1, 0, 0, 0),
end=pd.Timestamp(2019, 1, 1, 23, 59, 0), freq='1h')
expected_output_1 = pd.Series(
data=[0.98484972, 0.97277367, 0.96167471, 0.95119603, 1.,
0.98484972, 0.97277367, 0.96167471, 1., 1.,
0.98484972, 0.97277367, 0.96167471, 0.95119603, 0.94118234,
0.93154854, 0.922242, 0.91322759, 0.90448058, 0.89598283,
0.88772062, 0.87968325, 0.8718622, 0.86425049],
index=dt)
return expected_output_1
@pytest.fixture
def expected_output_2():
dt = pd.date_range(start=pd.Timestamp(2019, 1, 1, 0, 0, 0),
end=pd.Timestamp(2019, 1, 1, 23, 59, 0), freq='1h')
expected_output_2 = pd.Series(
data=[0.95036261, 0.91178179, 0.87774818, 0.84732079, 1.,
1., 1., 0.95036261, 1., 1.,
1., 1., 0.95036261, 0.91178179, 0.87774818,
0.84732079, 0.8201171, 1., 1., 1.,
1., 0.95036261, 0.91178179, 0.87774818],
index=dt)
return expected_output_2
@pytest.fixture
def expected_output_3():
dt = pd.date_range(start=pd.Timestamp(2019, 1, 1, 0, 0, 0),
end=pd.Timestamp(2019, 1, 1, 23, 59, 0), freq='1h')
timedelta = [0, 0, 0, 0, 0, 30, 0, 30, 0, 30, 0, -30,
-30, -30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
dt_new = dt + pd.to_timedelta(timedelta, 'm')
expected_output_3 = pd.Series(
data=[0.96576705, 0.9387675, 0.91437615, 0.89186852, 1.,
1., 0.98093819, 0.9387675, 1., 1.,
1., 1., 0.96576705, 0.9387675, 0.90291005,
0.88122293, 0.86104089, 1., 1., 1.,
0.96576705, 0.9387675, 0.91437615, 0.89186852],
index=dt_new)
return expected_output_3
@pytest.fixture
def rainfall_input():
dt = pd.date_range(start=pd.Timestamp(2019, 1, 1, 0, 0, 0),
end=pd.Timestamp(2019, 1, 1, 23, 59, 0), freq='1h')
rainfall = pd.Series(
data=[0., 0., 0., 0., 1., 0., 0., 0., 0.5, 0.5, 0., 0., 0., 0., 0.,
0., 0.3, 0.3, 0.3, 0.3, 0., 0., 0., 0.], index=dt)
return rainfall
def test_hsu_no_cleaning(rainfall_input, expected_output):
"""Test Soiling HSU function"""
rainfall = rainfall_input
pm2_5 = 1.0
pm10 = 2.0
depo_veloc = {'2_5': 1.0e-5, '10': 1.0e-4}
tilt = 0.
expected_no_cleaning = expected_output
result = hsu(rainfall=rainfall, cleaning_threshold=10., tilt=tilt,
pm2_5=pm2_5, pm10=pm10, depo_veloc=depo_veloc,
rain_accum_period=pd.Timedelta('1h'))
assert_series_equal(result, expected_no_cleaning)
def test_hsu(rainfall_input, expected_output_2):
"""Test Soiling HSU function with cleanings"""
rainfall = rainfall_input
pm2_5 = 1.0
pm10 = 2.0
depo_veloc = {'2_5': 1.0e-4, '10': 1.0e-4}
tilt = 0.
# three cleaning events at 4:00-6:00, 8:00-11:00, and 17:00-20:00
result = hsu(rainfall=rainfall, cleaning_threshold=0.5, tilt=tilt,
pm2_5=pm2_5, pm10=pm10, depo_veloc=depo_veloc,
rain_accum_period=pd.Timedelta('3h'))
assert_series_equal(result, expected_output_2)
def test_hsu_defaults(rainfall_input, expected_output_1):
"""
Test Soiling HSU function with default deposition velocity and default rain
accumulation period.
"""
result = hsu(rainfall=rainfall_input, cleaning_threshold=0.5, tilt=0.0,
pm2_5=1.0e-2, pm10=2.0e-2)
assert np.allclose(result.values, expected_output_1)
def test_hsu_variable_time_intervals(rainfall_input, expected_output_3):
"""
Test Soiling HSU function with variable time intervals.
"""
depo_veloc = {'2_5': 1.0e-4, '10': 1.0e-4}
rain = pd.DataFrame(data=rainfall_input)
# define time deltas in minutes
timedelta = [0, 0, 0, 0, 0, 30, 0, 30, 0, 30, 0, -30,
-30, -30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
rain['mins_added'] = pd.to_timedelta(timedelta, 'm')
rain['new_time'] = rain.index + rain['mins_added']
rain_var_times = rain.set_index('new_time').iloc[:, 0]
result = hsu(
rainfall=rain_var_times, cleaning_threshold=0.5, tilt=50.0,
pm2_5=1, pm10=2, depo_veloc=depo_veloc,
rain_accum_period=pd.Timedelta('2h'))
assert np.allclose(result, expected_output_3)
@pytest.fixture
def greensboro_rain():
# get TMY3 data with rain
greensboro, _ = read_tmy3(DATA_DIR / '723170TYA.CSV', coerce_year=1990)
return greensboro.Lprecipdepth
@pytest.fixture
def expected_kimber_nowash():
return pd.read_csv(
DATA_DIR / 'greensboro_kimber_soil_nowash.dat',
parse_dates=True, index_col='timestamp')
def test_kimber_nowash(greensboro_rain, expected_kimber_nowash):
"""Test Kimber soiling model with no manual washes"""
# Greensboro typical expected annual rainfall is 8345mm
assert greensboro_rain.sum() == 8345
# calculate soiling with no wash dates
nowash = kimber(greensboro_rain)
# test no washes
assert np.allclose(nowash.values, expected_kimber_nowash['soiling'].values)
@pytest.fixture
def expected_kimber_manwash():
return pd.read_csv(
DATA_DIR / 'greensboro_kimber_soil_manwash.dat',
parse_dates=True, index_col='timestamp')
def test_kimber_manwash(greensboro_rain, expected_kimber_manwash):
"""Test Kimber soiling model with a manual wash"""
# a manual wash date
manwash = [datetime.date(1990, 2, 15), ]
# calculate soiling with manual wash
manwash = kimber(greensboro_rain, manual_wash_dates=manwash)
# test manual wash
assert np.allclose(
manwash.values,
expected_kimber_manwash['soiling'].values)
@pytest.fixture
def expected_kimber_norain():
# expected soiling reaches maximum
soiling_loss_rate = 0.0015
max_loss_rate = 0.3
norain = np.ones(8760) * soiling_loss_rate/24
norain[0] = 0.0
norain = np.cumsum(norain)
return np.where(norain > max_loss_rate, max_loss_rate, norain)
def test_kimber_norain(greensboro_rain, expected_kimber_norain):
"""Test Kimber soiling model with no rain"""
# a year with no rain
norain = pd.Series(0, index=greensboro_rain.index)
# calculate soiling with no rain
norain = kimber(norain)
# test no rain, soiling reaches maximum
assert np.allclose(norain.values, expected_kimber_norain)
@pytest.fixture
def expected_kimber_initial_soil():
# expected soiling reaches maximum
soiling_loss_rate = 0.0015
max_loss_rate = 0.3
norain = np.ones(8760) * soiling_loss_rate/24
norain[0] = 0.1
norain = np.cumsum(norain)
return np.where(norain > max_loss_rate, max_loss_rate, norain)
def test_kimber_initial_soil(greensboro_rain, expected_kimber_initial_soil):
"""Test Kimber soiling model with initial soiling"""
# a year with no rain
norain = pd.Series(0, index=greensboro_rain.index)
# calculate soiling with no rain
norain = kimber(norain, initial_soiling=0.1)
# test no rain, soiling reaches maximum
assert np.allclose(norain.values, expected_kimber_initial_soil)
|
|
"""Support for interacting with Spotify Connect."""
from asyncio import run_coroutine_threadsafe
import datetime as dt
from datetime import timedelta
import logging
from typing import Any, Callable, Dict, List, Optional
from aiohttp import ClientError
from spotipy import Spotify, SpotifyException
from yarl import URL
from homeassistant.components.media_player import BrowseMedia, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_CLASS_ALBUM,
MEDIA_CLASS_ARTIST,
MEDIA_CLASS_DIRECTORY,
MEDIA_CLASS_EPISODE,
MEDIA_CLASS_GENRE,
MEDIA_CLASS_PLAYLIST,
MEDIA_CLASS_PODCAST,
MEDIA_CLASS_TRACK,
MEDIA_TYPE_ALBUM,
MEDIA_TYPE_ARTIST,
MEDIA_TYPE_EPISODE,
MEDIA_TYPE_MUSIC,
MEDIA_TYPE_PLAYLIST,
MEDIA_TYPE_TRACK,
SUPPORT_BROWSE_MEDIA,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SEEK,
SUPPORT_SELECT_SOURCE,
SUPPORT_SHUFFLE_SET,
SUPPORT_VOLUME_SET,
)
from homeassistant.components.media_player.errors import BrowseError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_ID,
CONF_NAME,
STATE_IDLE,
STATE_PAUSED,
STATE_PLAYING,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session
from homeassistant.helpers.entity import Entity
from homeassistant.util.dt import utc_from_timestamp
from .const import (
DATA_SPOTIFY_CLIENT,
DATA_SPOTIFY_ME,
DATA_SPOTIFY_SESSION,
DOMAIN,
SPOTIFY_SCOPES,
)
_LOGGER = logging.getLogger(__name__)
ICON = "mdi:spotify"
SCAN_INTERVAL = timedelta(seconds=30)
SUPPORT_SPOTIFY = (
SUPPORT_BROWSE_MEDIA
| SUPPORT_NEXT_TRACK
| SUPPORT_PAUSE
| SUPPORT_PLAY
| SUPPORT_PLAY_MEDIA
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_SEEK
| SUPPORT_SELECT_SOURCE
| SUPPORT_SHUFFLE_SET
| SUPPORT_VOLUME_SET
)
BROWSE_LIMIT = 48
MEDIA_TYPE_SHOW = "show"
PLAYABLE_MEDIA_TYPES = [
MEDIA_TYPE_PLAYLIST,
MEDIA_TYPE_ALBUM,
MEDIA_TYPE_ARTIST,
MEDIA_TYPE_EPISODE,
MEDIA_TYPE_SHOW,
MEDIA_TYPE_TRACK,
]
LIBRARY_MAP = {
"current_user_playlists": "Playlists",
"current_user_followed_artists": "Artists",
"current_user_saved_albums": "Albums",
"current_user_saved_tracks": "Tracks",
"current_user_saved_shows": "Podcasts",
"current_user_recently_played": "Recently played",
"current_user_top_artists": "Top Artists",
"current_user_top_tracks": "Top Tracks",
"categories": "Categories",
"featured_playlists": "Featured Playlists",
"new_releases": "New Releases",
}
CONTENT_TYPE_MEDIA_CLASS = {
"current_user_playlists": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_PLAYLIST,
},
"current_user_followed_artists": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_ARTIST,
},
"current_user_saved_albums": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_ALBUM,
},
"current_user_saved_tracks": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_TRACK,
},
"current_user_saved_shows": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_PODCAST,
},
"current_user_recently_played": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_TRACK,
},
"current_user_top_artists": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_ARTIST,
},
"current_user_top_tracks": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_TRACK,
},
"featured_playlists": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_PLAYLIST,
},
"categories": {"parent": MEDIA_CLASS_DIRECTORY, "children": MEDIA_CLASS_GENRE},
"category_playlists": {
"parent": MEDIA_CLASS_DIRECTORY,
"children": MEDIA_CLASS_PLAYLIST,
},
"new_releases": {"parent": MEDIA_CLASS_DIRECTORY, "children": MEDIA_CLASS_ALBUM},
MEDIA_TYPE_PLAYLIST: {
"parent": MEDIA_CLASS_PLAYLIST,
"children": MEDIA_CLASS_TRACK,
},
MEDIA_TYPE_ALBUM: {"parent": MEDIA_CLASS_ALBUM, "children": MEDIA_CLASS_TRACK},
MEDIA_TYPE_ARTIST: {"parent": MEDIA_CLASS_ARTIST, "children": MEDIA_CLASS_ALBUM},
MEDIA_TYPE_EPISODE: {"parent": MEDIA_CLASS_EPISODE, "children": None},
MEDIA_TYPE_SHOW: {"parent": MEDIA_CLASS_PODCAST, "children": MEDIA_CLASS_EPISODE},
MEDIA_TYPE_TRACK: {"parent": MEDIA_CLASS_TRACK, "children": None},
}
class MissingMediaInformation(BrowseError):
"""Missing media required information."""
class UnknownMediaType(BrowseError):
"""Unknown media type."""
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
"""Set up Spotify based on a config entry."""
spotify = SpotifyMediaPlayer(
hass.data[DOMAIN][entry.entry_id][DATA_SPOTIFY_SESSION],
hass.data[DOMAIN][entry.entry_id][DATA_SPOTIFY_CLIENT],
hass.data[DOMAIN][entry.entry_id][DATA_SPOTIFY_ME],
entry.data[CONF_ID],
entry.data[CONF_NAME],
)
async_add_entities([spotify], True)
def spotify_exception_handler(func):
"""Decorate Spotify calls to handle Spotify exception.
A decorator that wraps the passed in function, catches Spotify errors,
aiohttp exceptions and handles the availability of the media player.
"""
def wrapper(self, *args, **kwargs):
try:
result = func(self, *args, **kwargs)
self.player_available = True
return result
except (SpotifyException, ClientError):
self.player_available = False
return wrapper
class SpotifyMediaPlayer(MediaPlayerEntity):
"""Representation of a Spotify controller."""
def __init__(
self,
session: OAuth2Session,
spotify: Spotify,
me: dict,
user_id: str,
name: str,
):
"""Initialize."""
self._id = user_id
self._me = me
self._name = f"Spotify {name}"
self._session = session
self._spotify = spotify
self._scope_ok = set(session.token["scope"].split(" ")) == set(SPOTIFY_SCOPES)
self._currently_playing: Optional[dict] = {}
self._devices: Optional[List[dict]] = []
self._playlist: Optional[dict] = None
self.player_available = False
@property
def name(self) -> str:
"""Return the name."""
return self._name
@property
def icon(self) -> str:
"""Return the icon."""
return ICON
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self.player_available
@property
def unique_id(self) -> str:
"""Return the unique ID."""
return self._id
@property
def device_info(self) -> Dict[str, Any]:
"""Return device information about this entity."""
if self._me is not None:
model = self._me["product"]
return {
"identifiers": {(DOMAIN, self._id)},
"manufacturer": "Spotify AB",
"model": f"Spotify {model}".rstrip(),
"name": self._name,
}
@property
def state(self) -> Optional[str]:
"""Return the playback state."""
if not self._currently_playing:
return STATE_IDLE
if self._currently_playing["is_playing"]:
return STATE_PLAYING
return STATE_PAUSED
@property
def volume_level(self) -> Optional[float]:
"""Return the device volume."""
return self._currently_playing.get("device", {}).get("volume_percent", 0) / 100
@property
def media_content_id(self) -> Optional[str]:
"""Return the media URL."""
item = self._currently_playing.get("item") or {}
return item.get("uri")
@property
def media_content_type(self) -> Optional[str]:
"""Return the media type."""
return MEDIA_TYPE_MUSIC
@property
def media_duration(self) -> Optional[int]:
"""Duration of current playing media in seconds."""
if self._currently_playing.get("item") is None:
return None
return self._currently_playing["item"]["duration_ms"] / 1000
@property
    def media_position(self) -> Optional[float]:
"""Position of current playing media in seconds."""
if not self._currently_playing:
return None
return self._currently_playing["progress_ms"] / 1000
@property
def media_position_updated_at(self) -> Optional[dt.datetime]:
"""When was the position of the current playing media valid."""
if not self._currently_playing:
return None
return utc_from_timestamp(self._currently_playing["timestamp"] / 1000)
@property
def media_image_url(self) -> Optional[str]:
"""Return the media image URL."""
if (
self._currently_playing.get("item") is None
or not self._currently_playing["item"]["album"]["images"]
):
return None
return fetch_image_url(self._currently_playing["item"]["album"])
@property
def media_image_remotely_accessible(self) -> bool:
"""If the image url is remotely accessible."""
return False
@property
def media_title(self) -> Optional[str]:
"""Return the media title."""
item = self._currently_playing.get("item") or {}
return item.get("name")
@property
def media_artist(self) -> Optional[str]:
"""Return the media artist."""
if self._currently_playing.get("item") is None:
return None
return ", ".join(
[artist["name"] for artist in self._currently_playing["item"]["artists"]]
)
@property
def media_album_name(self) -> Optional[str]:
"""Return the media album."""
if self._currently_playing.get("item") is None:
return None
return self._currently_playing["item"]["album"]["name"]
@property
def media_track(self) -> Optional[int]:
"""Track number of current playing media, music track only."""
item = self._currently_playing.get("item") or {}
return item.get("track_number")
@property
def media_playlist(self):
"""Title of Playlist currently playing."""
if self._playlist is None:
return None
return self._playlist["name"]
@property
def source(self) -> Optional[str]:
"""Return the current playback device."""
return self._currently_playing.get("device", {}).get("name")
@property
def source_list(self) -> Optional[List[str]]:
"""Return a list of source devices."""
if not self._devices:
return None
return [device["name"] for device in self._devices]
@property
def shuffle(self) -> bool:
"""Shuffling state."""
return bool(self._currently_playing.get("shuffle_state"))
@property
def supported_features(self) -> int:
"""Return the media player features that are supported."""
if self._me["product"] != "premium":
return 0
return SUPPORT_SPOTIFY
@spotify_exception_handler
def set_volume_level(self, volume: int) -> None:
"""Set the volume level."""
self._spotify.volume(int(volume * 100))
@spotify_exception_handler
def media_play(self) -> None:
"""Start or resume playback."""
self._spotify.start_playback()
@spotify_exception_handler
def media_pause(self) -> None:
"""Pause playback."""
self._spotify.pause_playback()
@spotify_exception_handler
def media_previous_track(self) -> None:
"""Skip to previous track."""
self._spotify.previous_track()
@spotify_exception_handler
def media_next_track(self) -> None:
"""Skip to next track."""
self._spotify.next_track()
@spotify_exception_handler
def media_seek(self, position):
"""Send seek command."""
self._spotify.seek_track(int(position * 1000))
@spotify_exception_handler
def play_media(self, media_type: str, media_id: str, **kwargs) -> None:
"""Play media."""
kwargs = {}
# Spotify can't handle URI's with query strings or anchors
# Yet, they do generate those types of URI in their official clients.
media_id = str(URL(media_id).with_query(None).with_fragment(None))
if media_type in (MEDIA_TYPE_TRACK, MEDIA_TYPE_EPISODE, MEDIA_TYPE_MUSIC):
kwargs["uris"] = [media_id]
elif media_type in PLAYABLE_MEDIA_TYPES:
kwargs["context_uri"] = media_id
else:
_LOGGER.error("Media type %s is not supported", media_type)
return
if not self._currently_playing.get("device") and self._devices:
kwargs["device_id"] = self._devices[0].get("id")
self._spotify.start_playback(**kwargs)
@spotify_exception_handler
def select_source(self, source: str) -> None:
"""Select playback device."""
for device in self._devices:
if device["name"] == source:
self._spotify.transfer_playback(
device["id"], self.state == STATE_PLAYING
)
return
@spotify_exception_handler
def set_shuffle(self, shuffle: bool) -> None:
"""Enable/Disable shuffle mode."""
self._spotify.shuffle(shuffle)
@spotify_exception_handler
def update(self) -> None:
"""Update state and attributes."""
if not self.enabled:
return
if not self._session.valid_token or self._spotify is None:
run_coroutine_threadsafe(
self._session.async_ensure_token_valid(), self.hass.loop
).result()
self._spotify = Spotify(auth=self._session.token["access_token"])
current = self._spotify.current_playback()
self._currently_playing = current or {}
self._playlist = None
context = self._currently_playing.get("context")
if context is not None and context["type"] == MEDIA_TYPE_PLAYLIST:
self._playlist = self._spotify.playlist(current["context"]["uri"])
devices = self._spotify.devices() or {}
self._devices = devices.get("devices", [])
async def async_browse_media(self, media_content_type=None, media_content_id=None):
"""Implement the websocket media browsing helper."""
if not self._scope_ok:
raise NotImplementedError
if media_content_type in [None, "library"]:
return await self.hass.async_add_executor_job(library_payload)
payload = {
"media_content_type": media_content_type,
"media_content_id": media_content_id,
}
response = await self.hass.async_add_executor_job(
build_item_response, self._spotify, self._me, payload
)
if response is None:
raise BrowseError(
f"Media not found: {media_content_type} / {media_content_id}"
)
return response
def build_item_response(spotify, user, payload):
"""Create response payload for the provided media query."""
media_content_type = payload["media_content_type"]
media_content_id = payload["media_content_id"]
title = None
image = None
if media_content_type == "current_user_playlists":
media = spotify.current_user_playlists(limit=BROWSE_LIMIT)
items = media.get("items", [])
elif media_content_type == "current_user_followed_artists":
media = spotify.current_user_followed_artists(limit=BROWSE_LIMIT)
items = media.get("artists", {}).get("items", [])
elif media_content_type == "current_user_saved_albums":
media = spotify.current_user_saved_albums(limit=BROWSE_LIMIT)
items = [item["album"] for item in media.get("items", [])]
elif media_content_type == "current_user_saved_tracks":
media = spotify.current_user_saved_tracks(limit=BROWSE_LIMIT)
items = [item["track"] for item in media.get("items", [])]
elif media_content_type == "current_user_saved_shows":
media = spotify.current_user_saved_shows(limit=BROWSE_LIMIT)
items = [item["show"] for item in media.get("items", [])]
elif media_content_type == "current_user_recently_played":
media = spotify.current_user_recently_played(limit=BROWSE_LIMIT)
items = [item["track"] for item in media.get("items", [])]
elif media_content_type == "current_user_top_artists":
media = spotify.current_user_top_artists(limit=BROWSE_LIMIT)
items = media.get("items", [])
elif media_content_type == "current_user_top_tracks":
media = spotify.current_user_top_tracks(limit=BROWSE_LIMIT)
items = media.get("items", [])
elif media_content_type == "featured_playlists":
media = spotify.featured_playlists(country=user["country"], limit=BROWSE_LIMIT)
items = media.get("playlists", {}).get("items", [])
elif media_content_type == "categories":
media = spotify.categories(country=user["country"], limit=BROWSE_LIMIT)
items = media.get("categories", {}).get("items", [])
elif media_content_type == "category_playlists":
media = spotify.category_playlists(
category_id=media_content_id,
country=user["country"],
limit=BROWSE_LIMIT,
)
category = spotify.category(media_content_id, country=user["country"])
title = category.get("name")
image = fetch_image_url(category, key="icons")
items = media.get("playlists", {}).get("items", [])
elif media_content_type == "new_releases":
media = spotify.new_releases(country=user["country"], limit=BROWSE_LIMIT)
items = media.get("albums", {}).get("items", [])
elif media_content_type == MEDIA_TYPE_PLAYLIST:
media = spotify.playlist(media_content_id)
items = [item["track"] for item in media.get("tracks", {}).get("items", [])]
elif media_content_type == MEDIA_TYPE_ALBUM:
media = spotify.album(media_content_id)
items = media.get("tracks", {}).get("items", [])
elif media_content_type == MEDIA_TYPE_ARTIST:
media = spotify.artist_albums(media_content_id, limit=BROWSE_LIMIT)
artist = spotify.artist(media_content_id)
title = artist.get("name")
image = fetch_image_url(artist)
items = media.get("items", [])
elif media_content_type == MEDIA_TYPE_SHOW:
media = spotify.show_episodes(media_content_id, limit=BROWSE_LIMIT)
show = spotify.show(media_content_id)
title = show.get("name")
image = fetch_image_url(show)
items = media.get("items", [])
else:
media = None
items = []
if media is None:
return None
try:
media_class = CONTENT_TYPE_MEDIA_CLASS[media_content_type]
except KeyError:
_LOGGER.debug("Unknown media type received: %s", media_content_type)
return None
if media_content_type == "categories":
media_item = BrowseMedia(
title=LIBRARY_MAP.get(media_content_id),
media_class=media_class["parent"],
children_media_class=media_class["children"],
media_content_id=media_content_id,
media_content_type=media_content_type,
can_play=False,
can_expand=True,
children=[],
)
for item in items:
try:
item_id = item["id"]
except KeyError:
_LOGGER.debug("Missing id for media item: %s", item)
continue
media_item.children.append(
BrowseMedia(
title=item.get("name"),
media_class=MEDIA_CLASS_PLAYLIST,
children_media_class=MEDIA_CLASS_TRACK,
media_content_id=item_id,
media_content_type="category_playlists",
thumbnail=fetch_image_url(item, key="icons"),
can_play=False,
can_expand=True,
)
)
return media_item
if title is None:
if "name" in media:
title = media.get("name")
else:
title = LIBRARY_MAP.get(payload["media_content_id"])
params = {
"title": title,
"media_class": media_class["parent"],
"children_media_class": media_class["children"],
"media_content_id": media_content_id,
"media_content_type": media_content_type,
"can_play": media_content_type in PLAYABLE_MEDIA_TYPES,
"children": [],
"can_expand": True,
}
for item in items:
try:
params["children"].append(item_payload(item))
except (MissingMediaInformation, UnknownMediaType):
continue
if "images" in media:
params["thumbnail"] = fetch_image_url(media)
elif image:
params["thumbnail"] = image
return BrowseMedia(**params)
def item_payload(item):
"""
Create response payload for a single media item.
Used by async_browse_media.
"""
try:
media_type = item["type"]
media_id = item["uri"]
except KeyError as err:
_LOGGER.debug("Missing type or uri for media item: %s", item)
raise MissingMediaInformation from err
try:
media_class = CONTENT_TYPE_MEDIA_CLASS[media_type]
except KeyError as err:
_LOGGER.debug("Unknown media type received: %s", media_type)
raise UnknownMediaType from err
can_expand = media_type not in [
MEDIA_TYPE_TRACK,
MEDIA_TYPE_EPISODE,
]
payload = {
"title": item.get("name"),
"media_class": media_class["parent"],
"children_media_class": media_class["children"],
"media_content_id": media_id,
"media_content_type": media_type,
"can_play": media_type in PLAYABLE_MEDIA_TYPES,
"can_expand": can_expand,
}
if "images" in item:
payload["thumbnail"] = fetch_image_url(item)
elif MEDIA_TYPE_ALBUM in item:
payload["thumbnail"] = fetch_image_url(item[MEDIA_TYPE_ALBUM])
return BrowseMedia(**payload)
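# --- Illustrative sketch (editor's addition) ---
# item_payload() above expects a Spotify object carrying at least "type",
# "uri" and "name". A hypothetical playlist item would be turned into a
# playable, expandable BrowseMedia node roughly like this (the id and name
# below are examples only).
def _example_item_payload():
    item = {
        "type": MEDIA_TYPE_PLAYLIST,
        "uri": "spotify:playlist:example",
        "name": "Example playlist",
    }
    return item_payload(item)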
def library_payload():
"""
Create response payload to describe contents of a specific library.
Used by async_browse_media.
"""
library_info = {
"title": "Media Library",
"media_class": MEDIA_CLASS_DIRECTORY,
"media_content_id": "library",
"media_content_type": "library",
"can_play": False,
"can_expand": True,
"children": [],
}
for item in [{"name": n, "type": t} for t, n in LIBRARY_MAP.items()]:
library_info["children"].append(
item_payload(
{"name": item["name"], "type": item["type"], "uri": item["type"]}
)
)
response = BrowseMedia(**library_info)
response.children_media_class = MEDIA_CLASS_DIRECTORY
return response
def fetch_image_url(item, key="images"):
"""Fetch image url."""
try:
return item.get(key, [])[0].get("url")
except IndexError:
return None
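# --- Illustrative sketch (editor's addition) ---
# Spotify objects carry an "images" list (categories use "icons" instead);
# fetch_image_url() above returns the first entry's URL, or None when the
# list is missing or empty. The dict below is a hypothetical example.
def _example_fetch_image_url():
    item = {"images": [{"url": "https://i.scdn.co/image/example", "height": 640}]}
    return fetch_image_url(item)  # -> "https://i.scdn.co/image/example"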
|
|
import binascii
import _dynStruct
import capstone
unsigned_int_instr = [capstone.x86.X86_INS_ADCX, capstone.x86.X86_INS_ADOX,
capstone.x86.X86_INS_DIV, capstone.x86.X86_INS_MUL,
capstone.x86.X86_INS_MULX]
xmm_regs = [xmm for xmm in range(capstone.x86.X86_REG_XMM0, capstone.x86.X86_REG_XMM31 + 1)]
class Access:
def __init__(self, access, orig, addr_start, block, t):
self.block = block
self.offset = access
self.addr = addr_start + self.offset
self.size = orig["size_access"]
self.t = t
if len(orig["opcode"]) % 2:
orig["opcode"] = "0" + orig["opcode"]
self.instr_op = orig["opcode"]
if orig["ctx_opcode"] and len(orig["ctx_opcode"]) % 2:
orig["ctx_opcode"] = "0" + orig["ctx_opcode"]
self.ctx_opcode = orig["ctx_opcode"]
json_attrib = ["nb_access", "pc", "func_pc",
"func_sym", "func_module", "ctx_addr"]
for k in json_attrib:
setattr(self, k, (orig[k]))
self.disass()
self.instr_display = '<span class="text-success"><strong>%s</strong>\
</span><span class="text-info">%s</span>' % (self.instr.mnemonic,
self.instr.op_str)
self.instr_search = '%s %s' % (self.instr.mnemonic, self.instr.op_str)
if self.ctx_opcode:
if self.ctx_addr > self.pc:
self.ctx_instr_display = "Next : "
else:
self.ctx_instr_display = "Prev : "
self.ctx_instr_display += '<span class="text-success"><strong>%s</strong>\
</span><span class="text-info">%s</span>' % (self.ctx_instr.mnemonic,
self.ctx_instr.op_str)
self.ctx_instr_search = '%s %s' % (self.ctx_instr.mnemonic, self.ctx_instr.op_str)
else:
self.ctx_instr_search = 'No context'
self.ctx_instr_display = '<span class="text-danger">No context</span>'
def is_offset(self, offset):
return self.offset == offset
def is_in_range(self, start, end):
if self.offset >= start and self.offset < end:
return True
if self.offset < start and self.offset + self.size > start:
return True
return False
def in_member(self, member):
if self.is_offset(member.offset):
return True
if self.offset >= member.offset and\
self.offset < member.offset + member.size:
return True
return False
def disass(self):
if not _dynStruct.disasm:
_dynStruct.create_disasm()
if not hasattr(self, 'instr'):
self.instr = [instr for instr in
_dynStruct.disasm.disasm(binascii.unhexlify(self.instr_op),
self.pc)][0]
if self.ctx_opcode:
self.ctx_instr = [instr for instr in
_dynStruct.disasm.disasm(binascii.unhexlify(self.ctx_opcode),
self.ctx_addr)][0]
def analyse_ctx(self, size):
# TODO: extend the analysis to other instructions and to
# SSE, AVX and other Intel extensions
if not hasattr(self, 'instr'):
self.disass()
if self.t == 'write':
# Detect if the written value comes from a floating point register
if self.instr.mnemonic.startswith('mov'):
src_op = self.instr.operands[1]
if src_op.type == capstone.x86.X86_OP_FP or\
(src_op.type == capstone.x86.X86_OP_REG and src_op.reg in xmm_regs):
if size == 4:
return _dynStruct.float_str
elif size == 8:
return _dynStruct.double_str
else:
return None
elif self.ctx_opcode and self.ctx_instr.mnemonic.startswith('mov'):
dest_ctx_reg = self.ctx_instr.operands[0].reg
src_ctx_op = self.ctx_instr.operands[1]
if self.instr.operands[1].reg == dest_ctx_reg and\
src_ctx_op.type == capstone.x86.X86_OP_REG and src_ctx_op.reg in xmm_regs:
if size == 4:
return _dynStruct.float_str
elif size == 8:
return _dynStruct.double_str
else:
return None
# Next analysis need a ctx_instr
if not self.ctx_opcode:
return None
# Detect a ptr if ctx = lea and instr = mov using the reg value
# obtained from the lea. If so, it's a ptr.
if self.ctx_instr.id == capstone.x86.X86_INS_LEA:
dest_reg = self.ctx_instr.operands[0].reg
if self.instr.mnemonic.startswith('mov') and\
self.instr.op_find(capstone.x86.X86_OP_REG, 1) and\
self.instr.op_find(capstone.x86.X86_OP_REG, 1).reg == dest_reg:
# if the ptr is on the same memory page as rip/eip, it's a func ptr
op_src = self.ctx_instr.operands[1]
if op_src.type == capstone.x86.X86_OP_MEM:
if op_src.mem.base in [capstone.x86.X86_REG_RIP,
capstone.x86.X86_REG_EIP]:
if op_src.mem.index == 0 and\
int((op_src.mem.disp + self.instr.address) / 4096)\
== int(self.instr.address / 4096):
return _dynStruct.ptr_func_str
# if not, it's just a ptr; we cannot extract more information
return _dynStruct.ptr_str
# when the mov is an imm value on the same page as rip => func_ptr
if self.instr.mnemonic.startswith('mov') and\
self.instr.op_find(capstone.x86.X86_OP_IMM, 1) and\
size == _dynStruct.bits / 8:
if int(self.instr.address / 4096) ==\
int(self.instr.operands[1].imm / 4096):
return _dynStruct.ptr_func_str
# detect whether the value is signed or unsigned
if self.instr.mnemonic.startswith('mov') and len(self.ctx_instr.operands) == 2:
dest_ctx_op = self.ctx_instr.operands[0]
src_op = self.instr.operands[1]
if dest_ctx_op.type == capstone.x86.X86_OP_REG and\
src_op.type == capstone.x86.X86_OP_REG and\
src_op.reg == dest_ctx_op.reg:
if self.ctx_instr.id in unsigned_int_instr:
return _dynStruct.unsigned_str % (size)
# For read accesses we can only detect pointers, based on how the value
# read is used. Basically the member is a pointer if the value read is dereferenced.
else:
if self.instr.id == capstone.x86.X86_INS_CALL:
return _dynStruct.ptr_func_str
# For other instructions we need context to perform the analysis
if not self.ctx_opcode:
return None
if not self.instr.mnemonic.startswith('mov'):
return None
# usually if the value is used later (not just copied), the value
# is loaded into a register
dest_op = self.instr.operands[0]
if dest_op.type == capstone.x86.X86_OP_REG:
# if the register is an xmm register, the value is a floating
# point
if dest_op.reg in xmm_regs:
if size == 4:
return _dynStruct.float_str
elif size == 8:
return _dynStruct.double_str
else:
return None
# if the context instr is a call through the register written
# above, the value is a ptr to func
if self.ctx_instr.id == capstone.x86.X86_INS_CALL and\
self.ctx_instr.operands[0].type == capstone.x86.X86_OP_REG and\
self.ctx_instr.operands[0].reg == dest_op.reg:
return _dynStruct.ptr_func_str
for ctx_src_op in self.ctx_instr.operands:
# if it's a mov with just base + disp and base == written register,
# it's likely to be a ptr to a struct or an array
if ctx_src_op.type == capstone.x86.X86_OP_MEM and\
ctx_src_op.mem.base == dest_op.reg:
# if disp != 0 it's certainly a struct ptr
if ctx_src_op.mem.segment == 0 and ctx_src_op.mem.disp != 0:
return _dynStruct.ptr_struct_str
# if disp == 0 and index != 0 it's certainly an array
if ctx_src_op.mem.segment == 0 and ctx_src_op.mem.index != 0:
return _dynStruct.ptr_array_str
# else it's a pointer with no more information
return _dynStruct.ptr_str
# if the context instr has 2 operands and the second one uses
# the written ptr as base, it's a ptr
if len(self.ctx_instr.operands) == 2 and\
self.ctx_instr.operands[1].type == capstone.x86.X86_OP_MEM and\
self.ctx_instr.operands[1].mem.base == dest_op.reg:
return _dynStruct.ptr_str
return None
@staticmethod
def remove_instrs(access_list):
for access in access_list:
if hasattr(access, 'instr'):
del access.instr
if hasattr(access, 'ctx_instr'):
del access.ctx_instr
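# --- Illustrative sketch (editor's addition, not used by dynStruct) ---
# Access.disass() and analyse_ctx() above rely on capstone detail mode to
# inspect instruction operands. This standalone helper decodes a single
# hypothetical x86-64 opcode ("mov rax, qword ptr [rdi + 8]") the same way,
# assuming a 64-bit target and the capstone/binascii imports at the top of
# this module.
def _example_disassemble_one(opcode_hex="488b4708", addr=0x400000):
    md = capstone.Cs(capstone.CS_ARCH_X86, capstone.CS_MODE_64)
    md.detail = True  # required to access instr.operands as analyse_ctx does
    instr = next(md.disasm(binascii.unhexlify(opcode_hex), addr))
    return instr.mnemonic, instr.op_str, len(instr.operands)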
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
import sys
from .yugen_core import list_api_keys, get_lambdas, delete_api, \
export_to_swagger, create_api_key, list_apis, \
deploy_custom_domain, delete_api_key, deploy_api
from . import utils
from .gcdt_cmd_dispatcher import cmd
from . import gcdt_lifecycle
# creating docopt parameters and usage help
DOC = '''Usage:
yugen deploy [-v]
yugen delete -f [-v]
yugen export [-v]
yugen list [-v]
yugen apikey-create <keyname> [-v]
yugen apikey-list [-v]
yugen apikey-delete [-v]
yugen custom-domain-create [-v]
yugen version
-h --help show this help
-v --verbose show debug messages
'''
# TODO support changing API keys
# TODO investigate base path problem
@cmd(spec=['version'])
def version_cmd():
utils.version()
@cmd(spec=['list'])
def list_cmd(**tooldata):
context = tooldata.get('context')
awsclient = context.get('_awsclient')
return list_apis(awsclient)
@cmd(spec=['deploy'])
def deploy_cmd(**tooldata):
context = tooldata.get('context')
config = tooldata.get('config')
awsclient = context.get('_awsclient')
api_name = config['api'].get('name')
api_description = config['api'].get('description')
target_stage = config['api'].get('targetStage')
api_key = config['api'].get('apiKey')
lambdas = get_lambdas(awsclient, config, add_arn=True)
cache_cluster_enabled = config['api'].get('cacheClusterEnabled', False)
cache_cluster_size = config['api'].get('cacheClusterSize', False)
method_settings = config['api'].get('methodSettings', {})
exit_code = deploy_api(
awsclient=awsclient,
api_name=api_name,
api_description=api_description,
stage_name=target_stage,
api_key=api_key,
lambdas=lambdas,
cache_cluster_enabled=cache_cluster_enabled,
cache_cluster_size=cache_cluster_size,
method_settings=method_settings
)
if 'customDomain' in config:
domain_name = config['customDomain'].get('domainName')
route_53_record = config['customDomain'].get('route53Record')
#ssl_cert = {
# 'name': config['customDomain'].get('certificateName'),
# 'body': config['customDomain'].get('certificateBody'),
# 'private_key': config['customDomain'].get('certificatePrivateKey'),
# 'chain': config['customDomain'].get('certificateChain')
#}
cert_name = config['customDomain'].get('certificateName')
cert_arn = config['customDomain'].get('certificateArn')
hosted_zone_id = config['customDomain'].get('hostedDomainZoneId')
api_base_path = config['customDomain'].get('basePath')
ensure_cname = config['customDomain'].get('ensureCname', True)
deploy_custom_domain(
awsclient=awsclient,
api_name=api_name,
api_target_stage=target_stage,
api_base_path=api_base_path,
domain_name=domain_name,
route_53_record=route_53_record,
cert_name=cert_name,
cert_arn=cert_arn,
hosted_zone_id=hosted_zone_id,
ensure_cname=ensure_cname,
)
return exit_code
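# --- Illustrative sketch (editor's addition) ---
# deploy_cmd() above reads its settings from the tool config. A minimal
# config consistent with the keys accessed there could look like the
# hypothetical dict below (names and values are examples only; the optional
# 'customDomain' block follows the keys used in custom_domain_create_cmd).
def _example_minimal_config():
    return {
        'api': {
            'name': 'my-api',
            'description': 'example API',
            'targetStage': 'dev',
            'apiKey': 'my-api-key',
            'cacheClusterEnabled': False,
            'cacheClusterSize': False,
            'methodSettings': {},
        },
    }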
@cmd(spec=['delete', '-f'])
def delete_cmd(force, **tooldata):
context = tooldata.get('context')
config = tooldata.get('config')
awsclient = context.get('_awsclient')
exit_code = delete_api(
awsclient=awsclient,
api_name=config['api'].get('name')
)
return exit_code
@cmd(spec=['export'])
def export_cmd(**tooldata):
context = tooldata.get('context')
config = tooldata.get('config')
awsclient = context.get('_awsclient')
api_name = config['api'].get('name')
target_stage = config['api'].get('targetStage')
api_description = config['api'].get('description')
lambdas = get_lambdas(awsclient, config, add_arn=True)
return export_to_swagger(
awsclient=awsclient,
api_name=api_name,
stage_name=target_stage,
api_description=api_description,
lambdas=lambdas,
custom_hostname=(config['customDomain'].get('domainName')
if 'customDomain' in config else False),
custom_base_path=(config['customDomain'].get('basePath')
if 'customDomain' in config else False)
)
@cmd(spec=['apikey-create', '<keyname>'])
def apikey_create_cmd(keyname, **tooldata):
context = tooldata.get('context')
config = tooldata.get('config')
awsclient = context.get('_awsclient')
api_name = config['api'].get('name')
create_api_key(awsclient, api_name, keyname)
@cmd(spec=['apikey-delete'])
def apikey_delete_cmd(**tooldata):
context = tooldata.get('context')
config = tooldata.get('config')
awsclient = context.get('_awsclient')
api_key = config['api'].get('apiKey')
delete_api_key(awsclient, api_key)
@cmd(spec=['apikey-list'])
def apikey_list_cmd(**tooldata):
context = tooldata.get('context')
awsclient = context.get('_awsclient')
list_api_keys(awsclient)
@cmd(spec=['custom-domain-create'])
def custom_domain_create_cmd(**tooldata):
context = tooldata.get('context')
config = tooldata.get('config')
awsclient = context.get('_awsclient')
api_name = config['api'].get('name')
api_target_stage = config['api'].get('targetStage')
domain_name = config['customDomain'].get('domainName')
route_53_record = config['customDomain'].get('route53Record')
api_base_path = config['customDomain'].get('basePath')
#ssl_cert = {
# 'name': config['customDomain'].get('certificateName'),
# 'body': config['customDomain'].get('certificateBody'),
# 'private_key': config['customDomain'].get('certificatePrivateKey'),
# 'chain': config['customDomain'].get('certificateChain')
#}
cert_name = config['customDomain'].get('certificateName')
cert_arn = config['customDomain'].get('certificateArn')
hosted_zone_id = config['customDomain'].get('hostedDomainZoneId')
ensure_cname = config['customDomain'].get('ensureCname', True)
return deploy_custom_domain(
awsclient=awsclient,
api_name=api_name,
api_target_stage=api_target_stage,
api_base_path=api_base_path,
domain_name=domain_name,
route_53_record=route_53_record,
cert_name=cert_name,
cert_arn=cert_arn,
hosted_zone_id=hosted_zone_id,
ensure_cname=ensure_cname,
)
def main():
sys.exit(gcdt_lifecycle.main(
DOC, 'yugen', dispatch_only=['version', 'clean']))
if __name__ == '__main__':
main()
|
|
# Copyright (c) 2012 Citrix Systems, Inc.
# Copyright 2010 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Management class for host-related functions (start, reboot, etc).
"""
import re
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from nova.compute import arch
from nova.compute import hv_type
from nova.compute import task_states
from nova.compute import vm_mode
from nova.compute import vm_states
from nova import context
from nova import exception
from nova.i18n import _, _LE, _LI, _LW
from nova import objects
from nova.virt.xenapi import pool_states
from nova.virt.xenapi import vm_utils
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class Host(object):
"""Implements host related operations."""
def __init__(self, session, virtapi):
self._session = session
self._virtapi = virtapi
def host_power_action(self, action):
"""Reboots or shuts down the host."""
args = {"action": jsonutils.dumps(action)}
methods = {"reboot": "host_reboot", "shutdown": "host_shutdown"}
response = call_xenhost(self._session, methods[action], args)
return response.get("power_action", response)
def host_maintenance_mode(self, host, mode):
"""Start/Stop host maintenance window. On start, it triggers
guest VMs evacuation.
"""
if not mode:
return 'off_maintenance'
host_list = [host_ref for host_ref in
self._session.host.get_all()
if host_ref != self._session.host_ref]
migrations_counter = vm_counter = 0
ctxt = context.get_admin_context()
for vm_ref, vm_rec in vm_utils.list_vms(self._session):
for host_ref in host_list:
try:
# Ensure only guest instances are migrated
uuid = vm_rec['other_config'].get('nova_uuid')
if not uuid:
name = vm_rec['name_label']
uuid = _uuid_find(ctxt, host, name)
if not uuid:
LOG.info(_LI('Instance %(name)s running on '
'%(host)s could not be found in '
'the database: assuming it is a '
'worker VM and skipping migration '
'to a new host'),
{'name': name, 'host': host})
continue
instance = objects.Instance.get_by_uuid(ctxt, uuid)
vm_counter = vm_counter + 1
aggregate = objects.AggregateList.get_by_host(
ctxt, host, key=pool_states.POOL_FLAG)
if not aggregate:
msg = _('Aggregate for host %(host)s could not be'
' found.') % dict(host=host)
raise exception.NotFound(msg)
dest = _host_find(ctxt, self._session, aggregate[0],
host_ref)
instance.host = dest
instance.task_state = task_states.MIGRATING
instance.save()
self._session.VM.pool_migrate(vm_ref, host_ref,
{"live": "true"})
migrations_counter = migrations_counter + 1
instance.vm_state = vm_states.ACTIVE
instance.save()
break
except self._session.XenAPI.Failure:
LOG.exception(_LE('Unable to migrate VM %(vm_ref)s '
'from %(host)s'),
{'vm_ref': vm_ref, 'host': host})
instance.host = host
instance.vm_state = vm_states.ACTIVE
instance.save()
if vm_counter == migrations_counter:
return 'on_maintenance'
else:
raise exception.NoValidHost(reason='Unable to find suitable '
'host for VMs evacuation')
def set_host_enabled(self, enabled):
"""Sets the compute host's ability to accept new instances."""
# Since capabilities are gone, use service table to disable a node
# in scheduler
cntxt = context.get_admin_context()
service = objects.Service.get_by_args(cntxt, CONF.host,
'nova-compute')
service.disabled = not enabled
service.disabled_reason = 'set by xenapi host_state'
service.save()
args = {"enabled": jsonutils.dumps(enabled)}
response = call_xenhost(self._session, "set_host_enabled", args)
return response.get("status", response)
def get_host_uptime(self):
"""Returns the result of calling "uptime" on the target host."""
response = call_xenhost(self._session, "host_uptime", {})
return response.get("uptime", response)
class HostState(object):
"""Manages information about the XenServer host this compute
node is running on.
"""
def __init__(self, session):
super(HostState, self).__init__()
self._session = session
self._stats = {}
self.update_status()
def _get_passthrough_devices(self):
"""Get a list pci devices that are available for pci passthtough.
We use a plugin to get the output of the lspci command runs on dom0.
From this list we will extract pci devices that are using the pciback
kernel driver.
:returns: a list of pci devices on the node
"""
def _compile_hex(pattern):
"""Return a compiled regular expression pattern into which we have
replaced occurrences of hex by [\da-fA-F].
"""
return re.compile(pattern.replace("hex", r"[\da-fA-F]"))
def _parse_pci_device_string(dev_string):
"""Exctract information from the device string about the slot, the
vendor and the product ID. The string is as follow:
"Slot:\tBDF\nClass:\txxxx\nVendor:\txxxx\nDevice:\txxxx\n..."
Return a dictionary with informations about the device.
"""
slot_regex = _compile_hex(r"Slot:\t"
r"((?:hex{4}:)?" # Domain: (optional)
r"hex{2}:" # Bus:
r"hex{2}\." # Device.
r"hex{1})") # Function
vendor_regex = _compile_hex(r"\nVendor:\t(hex+)")
product_regex = _compile_hex(r"\nDevice:\t(hex+)")
slot_id = slot_regex.findall(dev_string)
vendor_id = vendor_regex.findall(dev_string)
product_id = product_regex.findall(dev_string)
if not slot_id or not vendor_id or not product_id:
raise exception.NovaException(
_("Failed to parse information about"
" a pci device for passthrough"))
type_pci = self._session.call_plugin_serialized(
'xenhost', 'get_pci_type', slot_id[0])
return {'label': '_'.join(['label',
vendor_id[0],
product_id[0]]),
'vendor_id': vendor_id[0],
'product_id': product_id[0],
'address': slot_id[0],
'dev_id': '_'.join(['pci', slot_id[0]]),
'dev_type': type_pci,
'status': 'available'}
# Devices are separated by a blank line. That is why we
# use "\n\n" as separator.
lspci_out = self._session.call_plugin_serialized(
'xenhost', 'get_pci_device_details')
pci_list = lspci_out.split("\n\n")
# For each device in the list, check if it uses the pciback
# kernel driver and, if it does, get its information and add it
# to the list of passthrough_devices. Ignore it if the driver
# is not pciback.
passthrough_devices = []
for dev_string_info in pci_list:
if "Driver:\tpciback" in dev_string_info:
new_dev = _parse_pci_device_string(dev_string_info)
passthrough_devices.append(new_dev)
return passthrough_devices
def get_host_stats(self, refresh=False):
"""Return the current state of the host. If 'refresh' is
True, run the update first.
"""
if refresh or not self._stats:
self.update_status()
return self._stats
def update_status(self):
"""Since under Xenserver, a compute node runs on a given host,
we can get host status information using xenapi.
"""
LOG.debug("Updating host stats")
data = call_xenhost(self._session, "host_data", {})
if data:
sr_ref = vm_utils.scan_default_sr(self._session)
sr_rec = self._session.SR.get_record(sr_ref)
total = int(sr_rec["physical_size"])
used = int(sr_rec["physical_utilisation"])
data["disk_total"] = total
data["disk_used"] = used
data["disk_allocated"] = int(sr_rec["virtual_allocation"])
data["disk_available"] = total - used
data["supported_instances"] = to_supported_instances(
data.get("host_capabilities")
)
data["cpu_model"] = to_cpu_model(
data.get("host_cpu_info")
)
host_memory = data.get('host_memory', None)
if host_memory:
data["host_memory_total"] = host_memory.get('total', 0)
data["host_memory_overhead"] = host_memory.get('overhead', 0)
data["host_memory_free"] = host_memory.get('free', 0)
data["host_memory_free_computed"] = host_memory.get(
'free-computed', 0)
del data['host_memory']
if (data['host_hostname'] !=
self._stats.get('host_hostname', data['host_hostname'])):
LOG.error(_LE('Hostname has changed from %(old)s to %(new)s. '
'A restart is required to take effect.') %
{'old': self._stats['host_hostname'],
'new': data['host_hostname']})
data['host_hostname'] = self._stats['host_hostname']
data['hypervisor_hostname'] = data['host_hostname']
vcpus_used = 0
for vm_ref, vm_rec in vm_utils.list_vms(self._session):
vcpus_used = vcpus_used + int(vm_rec['VCPUs_max'])
data['vcpus_used'] = vcpus_used
data['pci_passthrough_devices'] = self._get_passthrough_devices()
self._stats = data
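# --- Illustrative sketch (editor's addition) ---
# _parse_pci_device_string() above works on lspci records of the form
# "Slot:\tBDF\nClass:\txxxx\nVendor:\txxxx\nDevice:\txxxx\n...". The helper
# below runs the expanded Slot regex against a hypothetical record, reusing
# the `re` import at the top of this module.
def _example_parse_slot():
    dev_string = ("Slot:\t0000:04:00.0\nClass:\t0200\nVendor:\t8086\n"
                  "Device:\t10d3\nDriver:\tpciback")
    slot_regex = re.compile(r"Slot:\t((?:[\da-fA-F]{4}:)?"
                            r"[\da-fA-F]{2}:[\da-fA-F]{2}\.[\da-fA-F]{1})")
    return slot_regex.findall(dev_string)[0]  # -> '0000:04:00.0'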
def to_supported_instances(host_capabilities):
if not host_capabilities:
return []
result = []
for capability in host_capabilities:
try:
# 'capability' is unicode but we want arch/ostype
# to be strings to match the standard constants
capability = str(capability)
ostype, _version, guestarch = capability.split("-")
guestarch = arch.canonicalize(guestarch)
ostype = vm_mode.canonicalize(ostype)
result.append((guestarch, hv_type.XEN, ostype))
except ValueError:
LOG.warning(_LW("Failed to extract instance support from %s"),
capability)
return result
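# --- Illustrative sketch (editor's addition) ---
# Each host_capabilities entry splits as "<ostype>-<version>-<arch>", which
# to_supported_instances() above canonicalizes into (arch, hv_type, ostype)
# tuples. The capability string below is a hypothetical example.
def _example_supported_instances():
    return to_supported_instances([u"hvm-3.0-x86_64"])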
def to_cpu_model(host_cpu_info):
# The XenAPI driver returns data in the format
#
# {"physical_features": "0098e3fd-bfebfbff-00000001-28100800",
# "modelname": "Intel(R) Xeon(R) CPU X3430 @ 2.40GHz",
# "vendor": "GenuineIntel",
# "features": "0098e3fd-bfebfbff-00000001-28100800",
# "family": 6,
# "maskable": "full",
# "cpu_count": 4,
# "socket_count": "1",
# "flags": "fpu de tsc msr pae mce cx8 apic sep mtrr mca cmov
# pat clflush acpi mmx fxsr sse sse2 ss ht nx
# constant_tsc nonstop_tsc aperfmperf pni vmx est
# ssse3 sse4_1 sse4_2 popcnt hypervisor ida
# tpr_shadow vnmi flexpriority ept vpid",
# "stepping": 5,
# "model": 30,
# "features_after_reboot": "0098e3fd-bfebfbff-00000001-28100800",
# "speed": "2394.086"}
if host_cpu_info is None:
return None
cpu_info = dict()
# TODO(berrange) the data we're putting in model is not
# exactly comparable to what libvirt puts in model. The
# libvirt model names are a well defined short string
# which is really an alias for a particular set of
# feature flags. The Xen model names are raw printable
# strings from the kernel with no specific semantics
cpu_info["model"] = host_cpu_info["modelname"]
cpu_info["vendor"] = host_cpu_info["vendor"]
# TODO(berrange) perhaps we could fill in 'arch' field too
# by looking at 'host_capabilities' for the Xen host ?
topology = dict()
topology["sockets"] = int(host_cpu_info["socket_count"])
topology["cores"] = (int(host_cpu_info["cpu_count"]) /
int(host_cpu_info["socket_count"]))
# TODO(berrange): if 'ht' is present in the 'flags' list
# is it possible to infer that the 'cpu_count' is in fact
# sockets * cores * threads ? Unclear if 'ht' would remain
# visible when threads are disabled in BIOS ?
topology["threads"] = 1
cpu_info["topology"] = topology
cpu_info["features"] = host_cpu_info["flags"].split(" ")
return cpu_info
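# --- Illustrative sketch (editor's addition) ---
# Feeding to_cpu_model() a minimal subset of the host_cpu_info blob shown in
# the comment above yields a libvirt-style dict with model, vendor, topology
# and features keys. The values below are hypothetical.
def _example_cpu_model():
    sample = {
        "modelname": "Intel(R) Xeon(R) CPU X3430 @ 2.40GHz",
        "vendor": "GenuineIntel",
        "socket_count": "1",
        "cpu_count": 4,
        "flags": "fpu de tsc sse sse2",
    }
    return to_cpu_model(sample)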
def call_xenhost(session, method, arg_dict):
"""There will be several methods that will need this general
handling for interacting with the xenhost plugin, so this abstracts
out that behavior.
"""
# Create a task ID as something that won't match any instance ID
try:
result = session.call_plugin('xenhost', method, args=arg_dict)
if not result:
return ''
return jsonutils.loads(result)
except ValueError:
LOG.exception(_LE("Unable to get updated status"))
return None
except session.XenAPI.Failure as e:
LOG.error(_LE("The call to %(method)s returned "
"an error: %(e)s."), {'method': method, 'e': e})
return e.details[1]
def _uuid_find(context, host, name_label):
"""Return instance uuid by name_label."""
for i in objects.InstanceList.get_by_host(context, host):
if i.name == name_label:
return i.uuid
return None
def _host_find(context, session, src_aggregate, host_ref):
"""Return the host from the xenapi host reference.
:param src_aggregate: the aggregate that the compute host being put in
maintenance (source of VMs) belongs to
:param host_ref: the hypervisor host reference (destination of VMs)
:return: the compute host that manages host_ref
"""
# NOTE: this would be a lot simpler if nova-compute stored
# CONF.host in the XenServer host's other-config map.
# TODO(armando-migliaccio): improve according the note above
uuid = session.host.get_uuid(host_ref)
for compute_host, host_uuid in src_aggregate.metadetails.iteritems():
if host_uuid == uuid:
return compute_host
raise exception.NoValidHost(reason='Host %(host_uuid)s could not be found '
'from aggregate metadata: %(metadata)s.' %
{'host_uuid': uuid,
'metadata': src_aggregate.metadetails})
|
|
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import datetime
import ipaddress
from email.utils import parseaddr
import idna
import six
from six.moves import urllib_parse
from cryptography import utils, x509
from cryptography.exceptions import UnsupportedAlgorithm
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.x509.oid import (
CRLExtensionOID, CertificatePoliciesOID, ExtensionOID
)
def _obj2txt(backend, obj):
# Set to 80 on the recommendation of
# https://www.openssl.org/docs/crypto/OBJ_nid2ln.html#return_values
buf_len = 80
buf = backend._ffi.new("char[]", buf_len)
res = backend._lib.OBJ_obj2txt(buf, buf_len, obj, 1)
backend.openssl_assert(res > 0)
return backend._ffi.buffer(buf, res)[:].decode()
def _decode_x509_name_entry(backend, x509_name_entry):
obj = backend._lib.X509_NAME_ENTRY_get_object(x509_name_entry)
backend.openssl_assert(obj != backend._ffi.NULL)
data = backend._lib.X509_NAME_ENTRY_get_data(x509_name_entry)
backend.openssl_assert(data != backend._ffi.NULL)
value = backend._asn1_string_to_utf8(data)
oid = _obj2txt(backend, obj)
return x509.NameAttribute(x509.ObjectIdentifier(oid), value)
def _decode_x509_name(backend, x509_name):
count = backend._lib.X509_NAME_entry_count(x509_name)
attributes = []
for x in range(count):
entry = backend._lib.X509_NAME_get_entry(x509_name, x)
attributes.append(_decode_x509_name_entry(backend, entry))
return x509.Name(attributes)
def _decode_general_names(backend, gns):
num = backend._lib.sk_GENERAL_NAME_num(gns)
names = []
for i in range(num):
gn = backend._lib.sk_GENERAL_NAME_value(gns, i)
backend.openssl_assert(gn != backend._ffi.NULL)
names.append(_decode_general_name(backend, gn))
return names
def _decode_general_name(backend, gn):
if gn.type == backend._lib.GEN_DNS:
data = backend._asn1_string_to_bytes(gn.d.dNSName)
if not data:
decoded = u""
elif data.startswith(b"*."):
# This is a wildcard name. We need to remove the leading wildcard,
# IDNA decode, then re-add the wildcard. Wildcard characters should
# always be left-most (RFC 2595 section 2.4).
decoded = u"*." + idna.decode(data[2:])
else:
# Not a wildcard, decode away. If the string has a * anywhere else
# (which is invalid), this will raise an InvalidCodePoint error.
decoded = idna.decode(data)
if data.startswith(b"."):
# idna strips leading periods. Name constraints can have that
# so we need to re-add it. Sigh.
decoded = u"." + decoded
return x509.DNSName(decoded)
elif gn.type == backend._lib.GEN_URI:
data = backend._asn1_string_to_ascii(gn.d.uniformResourceIdentifier)
parsed = urllib_parse.urlparse(data)
if parsed.hostname:
hostname = idna.decode(parsed.hostname)
else:
hostname = ""
if parsed.port:
netloc = hostname + u":" + six.text_type(parsed.port)
else:
netloc = hostname
# Note that building a URL in this fashion means it should be
# semantically indistinguishable from the original but is not
# guaranteed to be exactly the same.
uri = urllib_parse.urlunparse((
parsed.scheme,
netloc,
parsed.path,
parsed.params,
parsed.query,
parsed.fragment
))
return x509.UniformResourceIdentifier(uri)
elif gn.type == backend._lib.GEN_RID:
oid = _obj2txt(backend, gn.d.registeredID)
return x509.RegisteredID(x509.ObjectIdentifier(oid))
elif gn.type == backend._lib.GEN_IPADD:
data = backend._asn1_string_to_bytes(gn.d.iPAddress)
data_len = len(data)
if data_len == 8 or data_len == 32:
# This is an IPv4 or IPv6 Network and not a single IP. This
# type of data appears in Name Constraints. Unfortunately,
# ipaddress doesn't support packed bytes + netmask. Additionally,
# IPv6Network can only handle CIDR rather than the full 16 byte
# netmask. To handle this we convert the netmask to integer, then
# find the first 0 bit, which will be the prefix. If another 1
# bit is present after that the netmask is invalid.
base = ipaddress.ip_address(data[:data_len // 2])
netmask = ipaddress.ip_address(data[data_len // 2:])
bits = bin(int(netmask))[2:]
prefix = bits.find('0')
# If no 0 bits are found it is a /32 or /128
if prefix == -1:
prefix = len(bits)
if "1" in bits[prefix:]:
raise ValueError("Invalid netmask")
ip = ipaddress.ip_network(base.exploded + u"/{0}".format(prefix))
else:
ip = ipaddress.ip_address(data)
return x509.IPAddress(ip)
elif gn.type == backend._lib.GEN_DIRNAME:
return x509.DirectoryName(
_decode_x509_name(backend, gn.d.directoryName)
)
elif gn.type == backend._lib.GEN_EMAIL:
data = backend._asn1_string_to_ascii(gn.d.rfc822Name)
name, address = parseaddr(data)
parts = address.split(u"@")
if name or not address:
# parseaddr has found a name (e.g. Name <email>) or the entire
# value is an empty string.
raise ValueError("Invalid rfc822name value")
elif len(parts) == 1:
# Single label email name. This is valid for local delivery. No
# IDNA decoding can be done since there is no domain component.
return x509.RFC822Name(address)
else:
# A normal email of the form local-part@domain. Let's attempt to
# decode the domain component and return the entire address.
return x509.RFC822Name(
parts[0] + u"@" + idna.decode(parts[1])
)
elif gn.type == backend._lib.GEN_OTHERNAME:
type_id = _obj2txt(backend, gn.d.otherName.type_id)
value = backend._asn1_to_der(gn.d.otherName.value)
return x509.OtherName(x509.ObjectIdentifier(type_id), value)
else:
# x400Address or ediPartyName
raise x509.UnsupportedGeneralNameType(
"{0} is not a supported type".format(
x509._GENERAL_NAMES.get(gn.type, gn.type)
),
gn.type
)
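# --- Illustrative sketch (editor's addition, not used by the backend) ---
# The GEN_IPADD branch above turns "base address bytes + netmask bytes"
# (8 bytes for IPv4 name constraints, 32 for IPv6) into an ip_network by
# locating the first 0 bit of the netmask. This standalone helper mirrors
# that conversion on raw packed bytes.
def _example_network_from_packed(packed):
    half = len(packed) // 2
    base = ipaddress.ip_address(packed[:half])
    netmask = ipaddress.ip_address(packed[half:])
    bits = bin(int(netmask))[2:]
    prefix = bits.find('0')
    if prefix == -1:
        # an all-ones netmask, i.e. a /32 or /128
        prefix = len(bits)
    if "1" in bits[prefix:]:
        raise ValueError("Invalid netmask")
    return ipaddress.ip_network(base.exploded + u"/{0}".format(prefix))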
def _decode_ocsp_no_check(backend, ext):
return x509.OCSPNoCheck()
class _X509ExtensionParser(object):
def __init__(self, ext_count, get_ext, handlers, unsupported_exts=None):
self.ext_count = ext_count
self.get_ext = get_ext
self.handlers = handlers
self.unsupported_exts = unsupported_exts
def parse(self, backend, x509_obj):
extensions = []
seen_oids = set()
for i in range(self.ext_count(backend, x509_obj)):
ext = self.get_ext(backend, x509_obj, i)
backend.openssl_assert(ext != backend._ffi.NULL)
crit = backend._lib.X509_EXTENSION_get_critical(ext)
critical = crit == 1
oid = x509.ObjectIdentifier(_obj2txt(backend, ext.object))
if oid in seen_oids:
raise x509.DuplicateExtension(
"Duplicate {0} extension found".format(oid), oid
)
try:
handler = self.handlers[oid]
except KeyError:
if critical:
raise x509.UnsupportedExtension(
"Critical extension {0} is not currently supported"
.format(oid), oid
)
else:
# For extensions which are not supported by OpenSSL we pass the
# extension object directly to the parsing routine so it can
# be decoded manually.
if self.unsupported_exts and oid in self.unsupported_exts:
ext_data = ext
else:
ext_data = backend._lib.X509V3_EXT_d2i(ext)
if ext_data == backend._ffi.NULL:
backend._consume_errors()
raise ValueError(
"The {0} extension is invalid and can't be "
"parsed".format(oid)
)
value = handler(backend, ext_data)
extensions.append(x509.Extension(oid, critical, value))
seen_oids.add(oid)
return x509.Extensions(extensions)
@utils.register_interface(x509.Certificate)
class _Certificate(object):
def __init__(self, backend, x509):
self._backend = backend
self._x509 = x509
def __repr__(self):
return "<Certificate(subject={0}, ...)>".format(self.subject)
def __eq__(self, other):
if not isinstance(other, x509.Certificate):
return NotImplemented
res = self._backend._lib.X509_cmp(self._x509, other._x509)
return res == 0
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.public_bytes(serialization.Encoding.DER))
def fingerprint(self, algorithm):
h = hashes.Hash(algorithm, self._backend)
h.update(self.public_bytes(serialization.Encoding.DER))
return h.finalize()
@property
def version(self):
version = self._backend._lib.X509_get_version(self._x509)
if version == 0:
return x509.Version.v1
elif version == 2:
return x509.Version.v3
else:
raise x509.InvalidVersion(
"{0} is not a valid X509 version".format(version), version
)
@property
def serial(self):
asn1_int = self._backend._lib.X509_get_serialNumber(self._x509)
self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL)
return self._backend._asn1_integer_to_int(asn1_int)
def public_key(self):
pkey = self._backend._lib.X509_get_pubkey(self._x509)
if pkey == self._backend._ffi.NULL:
# Remove errors from the stack.
self._backend._consume_errors()
raise ValueError("Certificate public key is of an unknown type")
pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
return self._backend._evp_pkey_to_public_key(pkey)
@property
def not_valid_before(self):
asn1_time = self._backend._lib.X509_get_notBefore(self._x509)
return self._backend._parse_asn1_time(asn1_time)
@property
def not_valid_after(self):
asn1_time = self._backend._lib.X509_get_notAfter(self._x509)
return self._backend._parse_asn1_time(asn1_time)
@property
def issuer(self):
issuer = self._backend._lib.X509_get_issuer_name(self._x509)
self._backend.openssl_assert(issuer != self._backend._ffi.NULL)
return _decode_x509_name(self._backend, issuer)
@property
def subject(self):
subject = self._backend._lib.X509_get_subject_name(self._x509)
self._backend.openssl_assert(subject != self._backend._ffi.NULL)
return _decode_x509_name(self._backend, subject)
@property
def signature_hash_algorithm(self):
oid = _obj2txt(self._backend, self._x509.sig_alg.algorithm)
try:
return x509._SIG_OIDS_TO_HASH[oid]
except KeyError:
raise UnsupportedAlgorithm(
"Signature algorithm OID:{0} not recognized".format(oid)
)
@property
def extensions(self):
return _CERTIFICATE_EXTENSION_PARSER.parse(self._backend, self._x509)
def public_bytes(self, encoding):
bio = self._backend._create_mem_bio()
if encoding is serialization.Encoding.PEM:
res = self._backend._lib.PEM_write_bio_X509(bio, self._x509)
elif encoding is serialization.Encoding.DER:
res = self._backend._lib.i2d_X509_bio(bio, self._x509)
else:
raise TypeError("encoding must be an item from the Encoding enum")
self._backend.openssl_assert(res == 1)
return self._backend._read_mem_bio(bio)
def _decode_certificate_policies(backend, cp):
cp = backend._ffi.cast("Cryptography_STACK_OF_POLICYINFO *", cp)
cp = backend._ffi.gc(cp, backend._lib.sk_POLICYINFO_free)
num = backend._lib.sk_POLICYINFO_num(cp)
certificate_policies = []
for i in range(num):
qualifiers = None
pi = backend._lib.sk_POLICYINFO_value(cp, i)
oid = x509.ObjectIdentifier(_obj2txt(backend, pi.policyid))
if pi.qualifiers != backend._ffi.NULL:
qnum = backend._lib.sk_POLICYQUALINFO_num(pi.qualifiers)
qualifiers = []
for j in range(qnum):
pqi = backend._lib.sk_POLICYQUALINFO_value(
pi.qualifiers, j
)
pqualid = x509.ObjectIdentifier(
_obj2txt(backend, pqi.pqualid)
)
if pqualid == CertificatePoliciesOID.CPS_QUALIFIER:
cpsuri = backend._ffi.buffer(
pqi.d.cpsuri.data, pqi.d.cpsuri.length
)[:].decode('ascii')
qualifiers.append(cpsuri)
else:
assert pqualid == CertificatePoliciesOID.CPS_USER_NOTICE
user_notice = _decode_user_notice(
backend, pqi.d.usernotice
)
qualifiers.append(user_notice)
certificate_policies.append(
x509.PolicyInformation(oid, qualifiers)
)
return x509.CertificatePolicies(certificate_policies)
def _decode_user_notice(backend, un):
explicit_text = None
notice_reference = None
if un.exptext != backend._ffi.NULL:
explicit_text = backend._asn1_string_to_utf8(un.exptext)
if un.noticeref != backend._ffi.NULL:
organization = backend._asn1_string_to_utf8(un.noticeref.organization)
num = backend._lib.sk_ASN1_INTEGER_num(
un.noticeref.noticenos
)
notice_numbers = []
for i in range(num):
asn1_int = backend._lib.sk_ASN1_INTEGER_value(
un.noticeref.noticenos, i
)
notice_num = backend._asn1_integer_to_int(asn1_int)
notice_numbers.append(notice_num)
notice_reference = x509.NoticeReference(
organization, notice_numbers
)
return x509.UserNotice(notice_reference, explicit_text)
def _decode_basic_constraints(backend, bc_st):
basic_constraints = backend._ffi.cast("BASIC_CONSTRAINTS *", bc_st)
basic_constraints = backend._ffi.gc(
basic_constraints, backend._lib.BASIC_CONSTRAINTS_free
)
# The byte representation of an ASN.1 boolean true is \xff. OpenSSL
# chooses to just map this to its ordinal value, so true is 255 and
# false is 0.
ca = basic_constraints.ca == 255
if basic_constraints.pathlen == backend._ffi.NULL:
path_length = None
else:
path_length = backend._asn1_integer_to_int(basic_constraints.pathlen)
return x509.BasicConstraints(ca, path_length)
def _decode_subject_key_identifier(backend, asn1_string):
asn1_string = backend._ffi.cast("ASN1_OCTET_STRING *", asn1_string)
asn1_string = backend._ffi.gc(
asn1_string, backend._lib.ASN1_OCTET_STRING_free
)
return x509.SubjectKeyIdentifier(
backend._ffi.buffer(asn1_string.data, asn1_string.length)[:]
)
def _decode_authority_key_identifier(backend, akid):
akid = backend._ffi.cast("AUTHORITY_KEYID *", akid)
akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free)
key_identifier = None
authority_cert_issuer = None
authority_cert_serial_number = None
if akid.keyid != backend._ffi.NULL:
key_identifier = backend._ffi.buffer(
akid.keyid.data, akid.keyid.length
)[:]
if akid.issuer != backend._ffi.NULL:
authority_cert_issuer = _decode_general_names(
backend, akid.issuer
)
if akid.serial != backend._ffi.NULL:
authority_cert_serial_number = backend._asn1_integer_to_int(
akid.serial
)
return x509.AuthorityKeyIdentifier(
key_identifier, authority_cert_issuer, authority_cert_serial_number
)
def _decode_authority_information_access(backend, aia):
aia = backend._ffi.cast("Cryptography_STACK_OF_ACCESS_DESCRIPTION *", aia)
aia = backend._ffi.gc(aia, backend._lib.sk_ACCESS_DESCRIPTION_free)
num = backend._lib.sk_ACCESS_DESCRIPTION_num(aia)
access_descriptions = []
for i in range(num):
ad = backend._lib.sk_ACCESS_DESCRIPTION_value(aia, i)
backend.openssl_assert(ad.method != backend._ffi.NULL)
oid = x509.ObjectIdentifier(_obj2txt(backend, ad.method))
backend.openssl_assert(ad.location != backend._ffi.NULL)
gn = _decode_general_name(backend, ad.location)
access_descriptions.append(x509.AccessDescription(oid, gn))
return x509.AuthorityInformationAccess(access_descriptions)
def _decode_key_usage(backend, bit_string):
bit_string = backend._ffi.cast("ASN1_BIT_STRING *", bit_string)
bit_string = backend._ffi.gc(bit_string, backend._lib.ASN1_BIT_STRING_free)
get_bit = backend._lib.ASN1_BIT_STRING_get_bit
digital_signature = get_bit(bit_string, 0) == 1
content_commitment = get_bit(bit_string, 1) == 1
key_encipherment = get_bit(bit_string, 2) == 1
data_encipherment = get_bit(bit_string, 3) == 1
key_agreement = get_bit(bit_string, 4) == 1
key_cert_sign = get_bit(bit_string, 5) == 1
crl_sign = get_bit(bit_string, 6) == 1
encipher_only = get_bit(bit_string, 7) == 1
decipher_only = get_bit(bit_string, 8) == 1
return x509.KeyUsage(
digital_signature,
content_commitment,
key_encipherment,
data_encipherment,
key_agreement,
key_cert_sign,
crl_sign,
encipher_only,
decipher_only
)
def _decode_general_names_extension(backend, gns):
gns = backend._ffi.cast("GENERAL_NAMES *", gns)
gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free)
general_names = _decode_general_names(backend, gns)
return general_names
def _decode_subject_alt_name(backend, ext):
return x509.SubjectAlternativeName(
_decode_general_names_extension(backend, ext)
)
def _decode_issuer_alt_name(backend, ext):
return x509.IssuerAlternativeName(
_decode_general_names_extension(backend, ext)
)
def _decode_name_constraints(backend, nc):
nc = backend._ffi.cast("NAME_CONSTRAINTS *", nc)
nc = backend._ffi.gc(nc, backend._lib.NAME_CONSTRAINTS_free)
permitted = _decode_general_subtrees(backend, nc.permittedSubtrees)
excluded = _decode_general_subtrees(backend, nc.excludedSubtrees)
return x509.NameConstraints(
permitted_subtrees=permitted, excluded_subtrees=excluded
)
def _decode_general_subtrees(backend, stack_subtrees):
if stack_subtrees == backend._ffi.NULL:
return None
num = backend._lib.sk_GENERAL_SUBTREE_num(stack_subtrees)
subtrees = []
for i in range(num):
obj = backend._lib.sk_GENERAL_SUBTREE_value(stack_subtrees, i)
backend.openssl_assert(obj != backend._ffi.NULL)
name = _decode_general_name(backend, obj.base)
subtrees.append(name)
return subtrees
def _decode_extended_key_usage(backend, sk):
sk = backend._ffi.cast("Cryptography_STACK_OF_ASN1_OBJECT *", sk)
sk = backend._ffi.gc(sk, backend._lib.sk_ASN1_OBJECT_free)
num = backend._lib.sk_ASN1_OBJECT_num(sk)
ekus = []
for i in range(num):
obj = backend._lib.sk_ASN1_OBJECT_value(sk, i)
backend.openssl_assert(obj != backend._ffi.NULL)
oid = x509.ObjectIdentifier(_obj2txt(backend, obj))
ekus.append(oid)
return x509.ExtendedKeyUsage(ekus)
_DISTPOINT_TYPE_FULLNAME = 0
_DISTPOINT_TYPE_RELATIVENAME = 1
def _decode_crl_distribution_points(backend, cdps):
cdps = backend._ffi.cast("Cryptography_STACK_OF_DIST_POINT *", cdps)
cdps = backend._ffi.gc(cdps, backend._lib.sk_DIST_POINT_free)
num = backend._lib.sk_DIST_POINT_num(cdps)
dist_points = []
for i in range(num):
full_name = None
relative_name = None
crl_issuer = None
reasons = None
cdp = backend._lib.sk_DIST_POINT_value(cdps, i)
if cdp.reasons != backend._ffi.NULL:
# We will check each bit from RFC 5280
# ReasonFlags ::= BIT STRING {
# unused (0),
# keyCompromise (1),
# cACompromise (2),
# affiliationChanged (3),
# superseded (4),
# cessationOfOperation (5),
# certificateHold (6),
# privilegeWithdrawn (7),
# aACompromise (8) }
reasons = []
get_bit = backend._lib.ASN1_BIT_STRING_get_bit
if get_bit(cdp.reasons, 1):
reasons.append(x509.ReasonFlags.key_compromise)
if get_bit(cdp.reasons, 2):
reasons.append(x509.ReasonFlags.ca_compromise)
if get_bit(cdp.reasons, 3):
reasons.append(x509.ReasonFlags.affiliation_changed)
if get_bit(cdp.reasons, 4):
reasons.append(x509.ReasonFlags.superseded)
if get_bit(cdp.reasons, 5):
reasons.append(x509.ReasonFlags.cessation_of_operation)
if get_bit(cdp.reasons, 6):
reasons.append(x509.ReasonFlags.certificate_hold)
if get_bit(cdp.reasons, 7):
reasons.append(x509.ReasonFlags.privilege_withdrawn)
if get_bit(cdp.reasons, 8):
reasons.append(x509.ReasonFlags.aa_compromise)
reasons = frozenset(reasons)
if cdp.CRLissuer != backend._ffi.NULL:
crl_issuer = _decode_general_names(backend, cdp.CRLissuer)
# Certificates may have a crl_issuer/reasons and no distribution
# point so make sure it's not null.
if cdp.distpoint != backend._ffi.NULL:
# Type 0 is fullName, there is no #define for it in the code.
if cdp.distpoint.type == _DISTPOINT_TYPE_FULLNAME:
full_name = _decode_general_names(
backend, cdp.distpoint.name.fullname
)
# OpenSSL code doesn't test for a specific type for
# relativename, everything that isn't fullname is considered
# relativename.
else:
rns = cdp.distpoint.name.relativename
rnum = backend._lib.sk_X509_NAME_ENTRY_num(rns)
attributes = []
for i in range(rnum):
rn = backend._lib.sk_X509_NAME_ENTRY_value(
rns, i
)
backend.openssl_assert(rn != backend._ffi.NULL)
attributes.append(
_decode_x509_name_entry(backend, rn)
)
relative_name = x509.Name(attributes)
dist_points.append(
x509.DistributionPoint(
full_name, relative_name, reasons, crl_issuer
)
)
return x509.CRLDistributionPoints(dist_points)
def _decode_inhibit_any_policy(backend, asn1_int):
asn1_int = backend._ffi.cast("ASN1_INTEGER *", asn1_int)
asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free)
skip_certs = backend._asn1_integer_to_int(asn1_int)
return x509.InhibitAnyPolicy(skip_certs)
_CRL_REASON_CODE_TO_ENUM = {
0: x509.ReasonFlags.unspecified,
1: x509.ReasonFlags.key_compromise,
2: x509.ReasonFlags.ca_compromise,
3: x509.ReasonFlags.affiliation_changed,
4: x509.ReasonFlags.superseded,
5: x509.ReasonFlags.cessation_of_operation,
6: x509.ReasonFlags.certificate_hold,
8: x509.ReasonFlags.remove_from_crl,
9: x509.ReasonFlags.privilege_withdrawn,
10: x509.ReasonFlags.aa_compromise,
}
def _decode_crl_reason(backend, enum):
enum = backend._ffi.cast("ASN1_ENUMERATED *", enum)
enum = backend._ffi.gc(enum, backend._lib.ASN1_ENUMERATED_free)
code = backend._lib.ASN1_ENUMERATED_get(enum)
try:
return _CRL_REASON_CODE_TO_ENUM[code]
except KeyError:
raise ValueError("Unsupported reason code: {0}".format(code))
def _decode_invalidity_date(backend, inv_date):
generalized_time = backend._ffi.cast(
"ASN1_GENERALIZEDTIME *", inv_date
)
generalized_time = backend._ffi.gc(
generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free
)
time = backend._ffi.string(
backend._lib.ASN1_STRING_data(
backend._ffi.cast("ASN1_STRING *", generalized_time)
)
).decode("ascii")
return datetime.datetime.strptime(time, "%Y%m%d%H%M%SZ")
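# --- Illustrative sketch (editor's addition) ---
# The invalidity date handled above is an ASN.1 GENERALIZEDTIME string of
# the form "YYYYMMDDHHMMSSZ"; this is the same strptime call applied to a
# hypothetical sample value.
def _example_parse_generalized_time(value="20160301120000Z"):
    return datetime.datetime.strptime(value, "%Y%m%d%H%M%SZ")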
def _decode_cert_issuer(backend, ext):
"""
This handler decodes the CertificateIssuer entry extension directly
from the X509_EXTENSION object. This is necessary because this entry
extension is not directly supported by OpenSSL 0.9.8.
"""
data_ptr_ptr = backend._ffi.new("const unsigned char **")
data_ptr_ptr[0] = ext.value.data
gns = backend._lib.d2i_GENERAL_NAMES(
backend._ffi.NULL, data_ptr_ptr, ext.value.length
)
# Check the result of d2i_GENERAL_NAMES() is valid. Usually this is covered
# in _X509ExtensionParser but since we are responsible for decoding this
# entry extension ourselves, we have to do this here.
if gns == backend._ffi.NULL:
backend._consume_errors()
raise ValueError(
"The {0} extension is corrupted and can't be parsed".format(
CRLExtensionOID.CERTIFICATE_ISSUER))
gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free)
return x509.GeneralNames(_decode_general_names(backend, gns))
@utils.register_interface(x509.RevokedCertificate)
class _RevokedCertificate(object):
def __init__(self, backend, x509_revoked):
self._backend = backend
self._x509_revoked = x509_revoked
@property
def serial_number(self):
asn1_int = self._x509_revoked.serialNumber
self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL)
return self._backend._asn1_integer_to_int(asn1_int)
@property
def revocation_date(self):
return self._backend._parse_asn1_time(
self._x509_revoked.revocationDate)
@property
def extensions(self):
return _REVOKED_CERTIFICATE_EXTENSION_PARSER.parse(
self._backend, self._x509_revoked
)
@utils.register_interface(x509.CertificateRevocationList)
class _CertificateRevocationList(object):
def __init__(self, backend, x509_crl):
self._backend = backend
self._x509_crl = x509_crl
def __eq__(self, other):
if not isinstance(other, x509.CertificateRevocationList):
return NotImplemented
res = self._backend._lib.X509_CRL_cmp(self._x509_crl, other._x509_crl)
return res == 0
def __ne__(self, other):
return not self == other
def fingerprint(self, algorithm):
h = hashes.Hash(algorithm, self._backend)
bio = self._backend._create_mem_bio()
res = self._backend._lib.i2d_X509_CRL_bio(
bio, self._x509_crl
)
self._backend.openssl_assert(res == 1)
der = self._backend._read_mem_bio(bio)
h.update(der)
return h.finalize()
@property
def signature_hash_algorithm(self):
oid = _obj2txt(self._backend, self._x509_crl.sig_alg.algorithm)
try:
return x509._SIG_OIDS_TO_HASH[oid]
except KeyError:
raise UnsupportedAlgorithm(
"Signature algorithm OID:{0} not recognized".format(oid)
)
@property
def issuer(self):
issuer = self._backend._lib.X509_CRL_get_issuer(self._x509_crl)
self._backend.openssl_assert(issuer != self._backend._ffi.NULL)
return _decode_x509_name(self._backend, issuer)
@property
def next_update(self):
nu = self._backend._lib.X509_CRL_get_nextUpdate(self._x509_crl)
self._backend.openssl_assert(nu != self._backend._ffi.NULL)
return self._backend._parse_asn1_time(nu)
@property
def last_update(self):
lu = self._backend._lib.X509_CRL_get_lastUpdate(self._x509_crl)
self._backend.openssl_assert(lu != self._backend._ffi.NULL)
return self._backend._parse_asn1_time(lu)
def _revoked_certificates(self):
revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl)
self._backend.openssl_assert(revoked != self._backend._ffi.NULL)
num = self._backend._lib.sk_X509_REVOKED_num(revoked)
revoked_list = []
for i in range(num):
r = self._backend._lib.sk_X509_REVOKED_value(revoked, i)
self._backend.openssl_assert(r != self._backend._ffi.NULL)
revoked_list.append(_RevokedCertificate(self._backend, r))
return revoked_list
def __iter__(self):
return iter(self._revoked_certificates())
def __getitem__(self, idx):
return self._revoked_certificates()[idx]
def __len__(self):
return len(self._revoked_certificates())
@property
def extensions(self):
raise NotImplementedError()
@utils.register_interface(x509.CertificateSigningRequest)
class _CertificateSigningRequest(object):
def __init__(self, backend, x509_req):
self._backend = backend
self._x509_req = x509_req
def __eq__(self, other):
if not isinstance(other, _CertificateSigningRequest):
return NotImplemented
self_bytes = self.public_bytes(serialization.Encoding.DER)
other_bytes = other.public_bytes(serialization.Encoding.DER)
return self_bytes == other_bytes
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.public_bytes(serialization.Encoding.DER))
def public_key(self):
pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req)
self._backend.openssl_assert(pkey != self._backend._ffi.NULL)
pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
return self._backend._evp_pkey_to_public_key(pkey)
@property
def subject(self):
subject = self._backend._lib.X509_REQ_get_subject_name(self._x509_req)
self._backend.openssl_assert(subject != self._backend._ffi.NULL)
return _decode_x509_name(self._backend, subject)
@property
def signature_hash_algorithm(self):
oid = _obj2txt(self._backend, self._x509_req.sig_alg.algorithm)
try:
return x509._SIG_OIDS_TO_HASH[oid]
except KeyError:
raise UnsupportedAlgorithm(
"Signature algorithm OID:{0} not recognized".format(oid)
)
@property
def extensions(self):
x509_exts = self._backend._lib.X509_REQ_get_extensions(self._x509_req)
return _CSR_EXTENSION_PARSER.parse(self._backend, x509_exts)
def public_bytes(self, encoding):
bio = self._backend._create_mem_bio()
if encoding is serialization.Encoding.PEM:
res = self._backend._lib.PEM_write_bio_X509_REQ(
bio, self._x509_req
)
elif encoding is serialization.Encoding.DER:
res = self._backend._lib.i2d_X509_REQ_bio(bio, self._x509_req)
else:
raise TypeError("encoding must be an item from the Encoding enum")
self._backend.openssl_assert(res == 1)
return self._backend._read_mem_bio(bio)
_EXTENSION_HANDLERS = {
ExtensionOID.BASIC_CONSTRAINTS: _decode_basic_constraints,
ExtensionOID.SUBJECT_KEY_IDENTIFIER: _decode_subject_key_identifier,
ExtensionOID.KEY_USAGE: _decode_key_usage,
ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _decode_subject_alt_name,
ExtensionOID.EXTENDED_KEY_USAGE: _decode_extended_key_usage,
ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier,
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
_decode_authority_information_access
),
ExtensionOID.CERTIFICATE_POLICIES: _decode_certificate_policies,
ExtensionOID.CRL_DISTRIBUTION_POINTS: _decode_crl_distribution_points,
ExtensionOID.OCSP_NO_CHECK: _decode_ocsp_no_check,
ExtensionOID.INHIBIT_ANY_POLICY: _decode_inhibit_any_policy,
ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name,
ExtensionOID.NAME_CONSTRAINTS: _decode_name_constraints,
}
_REVOKED_EXTENSION_HANDLERS = {
CRLExtensionOID.CRL_REASON: _decode_crl_reason,
CRLExtensionOID.INVALIDITY_DATE: _decode_invalidity_date,
CRLExtensionOID.CERTIFICATE_ISSUER: _decode_cert_issuer,
}
_REVOKED_UNSUPPORTED_EXTENSIONS = set([
CRLExtensionOID.CERTIFICATE_ISSUER,
])
_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser(
ext_count=lambda backend, x: backend._lib.X509_get_ext_count(x),
get_ext=lambda backend, x, i: backend._lib.X509_get_ext(x, i),
handlers=_EXTENSION_HANDLERS
)
_CSR_EXTENSION_PARSER = _X509ExtensionParser(
ext_count=lambda backend, x: backend._lib.sk_X509_EXTENSION_num(x),
get_ext=lambda backend, x, i: backend._lib.sk_X509_EXTENSION_value(x, i),
handlers=_EXTENSION_HANDLERS
)
_REVOKED_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser(
ext_count=lambda backend, x: backend._lib.X509_REVOKED_get_ext_count(x),
get_ext=lambda backend, x, i: backend._lib.X509_REVOKED_get_ext(x, i),
handlers=_REVOKED_EXTENSION_HANDLERS,
unsupported_exts=_REVOKED_UNSUPPORTED_EXTENSIONS
)
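# --- Illustrative usage sketch (not part of the backend code above) ---
# A minimal, hedged example of the public cryptography API that these
# backend classes sit behind: load a PEM-encoded CRL, read its issuer,
# hash its DER encoding, and iterate the revoked entries. The file name
# is hypothetical, and the explicit default_backend() argument matches
# the era of this backend module; newer releases may not require it.
if __name__ == "__main__":
    from cryptography import x509 as _x509
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes as _hashes

    with open("example_crl.pem", "rb") as f:  # hypothetical input file
        crl = _x509.load_pem_x509_crl(f.read(), default_backend())
    print(crl.issuer)                         # decoded via _decode_x509_name
    print(crl.fingerprint(_hashes.SHA256()))  # hash of the DER bytes, as in fingerprint()
    for revoked in crl:                       # backed by _revoked_certificates()
        print(revoked.serial_number)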
|
|
'''
This module provides the class PIDClientCredentials
which handles the credentials for Handle server
Interaction and for the Search Servlet.
Author: Merret Buurman (DKRZ), 2015-2016
'''
import json
import os
import logging
import pyhandle
from pyhandle.handleexceptions import CredentialsFormatError, HandleSyntaxError
import pyhandle.utilhandle as utilhandle
import pyhandle.util as util
LOGGER = logging.getLogger(__name__)
LOGGER.addHandler(util.NullHandler())
class PIDClientCredentials(object):
'''
Provides authentication information to access a Handle server, either by
specifying username and password or by providing a json file containing
the relevant information.
'''
@staticmethod
def load_from_JSON(json_filename):
'''
Create a new instance of a PIDClientCredentials with information read
from a local JSON file.
:param json_filename: The path to the json credentials file. The json
file should have the following format:
.. code:: json
{
"handle_server_url": "https://url.to.your.handle.server",
"username": "index:prefix/suffix",
"password": "ZZZZZZZ",
"prefix": "prefix_to_use_for_writing_handles",
"handleowner": "username_to_own_handles"
}
Any additional key-value-pairs are stored in the instance as
config.
:raises: :exc:`~pyhandle.handleexceptions.CredentialsFormatError`
:raises: :exc:`~pyhandle.handleexceptions.HandleSyntaxError`
:return: An instance.
'''
        try:
            with open(json_filename, 'r') as jsonfile:
                jsonfilecontent = json.loads(jsonfile.read())
        except ValueError as exc:
            raise CredentialsFormatError(msg="Invalid JSON syntax: "+str(exc))
        instance = PIDClientCredentials(credentials_filename=json_filename, **jsonfilecontent)
        return instance
def __init__(self, **args):
'''
Initialize client credentials instance.
The constructor checks if enough arguments are passed to
authenticate at a handle server or search servlet. For this,
the following parameters are checked. Depending on the
chosen authentication method, only a subset of them are
required.
All other parameters passed are stored and can be retrieved
        using 'get_config()'. If a credentials object is used to
initialize the client, these key-value pairs are passed on
to the client constructor.
:param client: Client object to the HS ('rest' or 'db')
:param handle_server_url: Optional. The URL of the Handle System
server to read from. Defaults to 'https://hdl.handle.net'
:param username: Optional. This must be a handle value reference in
the format "index:prefix/suffix". The method will throw an exception
upon bad syntax or non-existing Handle. The existence or validity
of the password in the handle is not checked at this moment.
:param password: Optional. This is the password stored as secret key
in the actual Handle value the username points to.
:param handleowner: Optional. The username that will be given admin
permissions over every newly created handle. By default, it is
            '200:0.NA/xyz' (where xyz is the prefix of the handle being created).
:param private_key: Optional. The path to a file containing the private
key that will be used for authentication in write mode. If this is
specified, a certificate needs to be specified too.
:param certificate_only: Optional. The path to a file containing the
client certificate that will be used for authentication in write
mode. If this is specified, a private key needs to be specified too.
:param certificate_and_key: Optional. The path to a file containing both
certificate and private key, used for authentication in write mode.
:param prefix: Prefix. This is not used by the library, but may be
retrieved by the user.
        :param credentials_filename: This is the file location of the credentials file,
if read from JSON. It is used to find the certificate/key files, if any.
:param \**args: Any other key-value pairs are stored and can be accessed
using 'get_config()'.
:raises: :exc:`~pyhandle.handleexceptions.HandleSyntaxError`
'''
util.log_instantiation(LOGGER, 'PIDClientCredentials', args, ['password','reverselookup_password'])
# Possible arguments:
useful_args = [
'client',
'handle_server_url',
'username',
'password',
'private_key',
'certificate_only',
'certificate_and_key',
'prefix',
'handleowner',
'reverselookup_password',
'reverselookup_username',
'reverselookup_baseuri',
'credentials_filename',
'db_host',
'db_user',
'db_password',
'db_name',
'passphrase',
]
util.add_missing_optional_args_with_value_none(args, useful_args)
# Store args
self.__all_args = args
# Args that the constructor understands:
self.__client = args['client']
self.__handle_server_url = args['handle_server_url']
self.__username = args['username']
self.__password = args['password']
self.__prefix = args['prefix']
self.__handleowner = args['handleowner']
self.__private_key = args['private_key']
self.__certificate_only = args['certificate_only']
self.__certificate_and_key = args['certificate_and_key']
self.__reverselookup_password = args['reverselookup_password']
self.__reverselookup_username = args['reverselookup_username']
self.__reverselookup_baseuri = args['reverselookup_baseuri']
self.__credentials_filename = args['credentials_filename']
self.__db_host = args['db_host']
self.__db_user = args['db_user']
self.__db_password = args['db_password']
self.__db_name = args['db_name']
self.__passphrase = args['passphrase']
# All the other args collected as "additional config":
self.__additional_config = self.__collect_additional_arguments(args, useful_args)
# Some checks:
self.__check_handle_syntax()
self.__check_file_existence()
if self.__check_client_existence():
if self.__client == 'db':
self.__check_if_enough_args_for_hs_auth_db(args)
elif self.__client == 'rest':
self.__check_if_enough_args_for_revlookup_auth(args)
self.__check_if_enough_args_for_hs_auth()
elif self.__client == 'batch':
if self.__private_key:
self.__check_if_enough_args_for_hs_auth_batch_pubkey(args)
else:
self.__check_if_enough_args_for_hs_auth_batch_seckey(args)
else:
msg = 'Client not provided or empty'
raise CredentialsFormatError(msg=msg)
def __check_client_existence(self):
if not self.__client:
return False
return True
def __check_if_enough_args_for_hs_auth_batch_seckey(self, args):
batch_args_seckey = ['username', 'password']
empty_args = []
for k in batch_args_seckey:
if not args[k]:
empty_args.append(k)
if empty_args:
msg = '(%s) are missing or empty' % empty_args
raise CredentialsFormatError(msg=msg)
def __check_if_enough_args_for_hs_auth_batch_pubkey(self, args):
batch_args_pubkey = ['username', 'private_key']
empty_args = []
for k in batch_args_pubkey:
if not args[k]:
empty_args.append(k)
if not self.__passphrase:
if self.__passphrase is not None:
empty_args.append('passphrase')
if empty_args:
msg = '(%s) are missing or empty' % empty_args
raise CredentialsFormatError(msg=msg)
def __check_if_enough_args_for_hs_auth_db(self, args):
db_args = ['db_host', 'db_user', 'db_password', 'db_name']
empty_args = []
for k in db_args:
if not args[k]:
empty_args.append(k)
if empty_args:
msg = '(%s) are missing or empty' % empty_args
raise CredentialsFormatError(msg=msg)
def __collect_additional_arguments(self, args, used_args):
temp_additional_config = {}
for argname in args.keys():
if argname not in used_args:
temp_additional_config[argname] = args[argname]
if len(temp_additional_config) > 0:
return temp_additional_config
else:
return None
def __check_if_enough_args_for_revlookup_auth(self, args):
user = args['reverselookup_username'] or args['username']
pw = args['reverselookup_password'] or args['password']
url = args['reverselookup_baseuri'] or args['handle_server_url']
if user and pw and url:
self.__reverselookup = True
self.__reverselookup_username = user
self.__reverselookup_password = pw
self.__reverselookup_baseuri = url
LOGGER.debug('Sufficient information given for reverselookup.')
else:
self.__reverselookup = False
def __check_handle_syntax(self):
if self.__handleowner:
pyhandle.utilhandle.check_handle_syntax_with_index(self.__handleowner)
if self.__username:
pyhandle.utilhandle.check_handle_syntax_with_index(self.__username)
def __check_file_existence(self):
if self.__certificate_only:
try:
self.__certificate_only = self.__get_path_and_check_file_existence(self.__certificate_only)
except ValueError as e:
                msg = '(certificate file): '+e.__str__()
raise CredentialsFormatError(msg=msg)
if self.__certificate_and_key:
try:
self.__certificate_and_key = self.__get_path_and_check_file_existence(self.__certificate_and_key)
except ValueError as e:
                msg = '(certificate and key file): '+e.__str__()
raise CredentialsFormatError(msg=msg)
if self.__private_key:
try:
self.__private_key = self.__get_path_and_check_file_existence(self.__private_key)
except ValueError as e:
msg = '(private key file): '+e.__str__()
raise CredentialsFormatError(msg=msg)
def __get_path_and_check_file_existence(self, path):
try:
path = util.get_absolute_path(path, self.__credentials_filename)
except ValueError: # not a valid path
thisdir = util.get_this_directory(self.__credentials_filename)
            msg = ('Please provide an absolute path or a path relative to '
                   'the location of the credentials file (%s), '
                   'starting with %s.' % (thisdir, os.path.curdir))
raise ValueError(msg)
if not os.path.isfile(path): # file does not exist
msg = 'The file was not found at the specified path: '+path
raise ValueError(msg)
return path
def __check_if_enough_args_for_hs_auth(self):
# Which authentication method?
authentication_method = None
# DB authentication
if self.__db_host and self.__db_user and self.__db_password and self.__db_name:
authentication_method = 'db_auth'
# Username and Password
if self.__username and self.__password:
authentication_method = 'user_password'
# Certificate file and Key file
if self.__certificate_only and self.__private_key:
authentication_method = 'auth_cert_2files'
# Certificate and Key in one file
if self.__certificate_and_key:
authentication_method = 'auth_cert_1file'
# None was provided:
if authentication_method is None:
if self.__reverselookup is True:
msg = ('Insufficient credentials for writing to handle '
'server, but sufficient credentials for searching.')
LOGGER.info(msg)
else:
msg = ''
if self.__username and not self.__password:
msg += 'Username was provided, but no password. '
elif self.__password and not self.__username:
msg += 'Password was provided, but no username. '
if self.__certificate_only and not self.__private_key:
msg += 'A client certificate was provided, but no private key. '
elif self.__private_key and not self.__certificate_only:
msg += 'A private key was provided, but no client certificate. '
if self.__reverselookup is None:
msg += 'Reverse lookup credentials not checked yet.'
elif self.__reverselookup is False:
msg += 'Insufficient credentials for searching.'
raise CredentialsFormatError(msg=msg)
def get_all_args(self):
# pylint: disable=missing-docstring
return self.__all_args
def get_client(self):
# pylint: disable=missing-docstring
return self.__client
def get_username(self):
# pylint: disable=missing-docstring
return self.__username
def get_password(self):
# pylint: disable=missing-docstring
return self.__password
def get_server_URL(self):
# pylint: disable=missing-docstring
return self.__handle_server_url
def get_prefix(self):
# pylint: disable=missing-docstring
return self.__prefix
def get_handleowner(self):
# pylint: disable=missing-docstring
return self.__handleowner
def get_config(self):
# pylint: disable=missing-docstring
return self.__additional_config
def get_path_to_private_key(self):
# pylint: disable=missing-docstring
return self.__private_key
def get_path_to_file_certificate(self):
# pylint: disable=missing-docstring
return self.__certificate_only or self.__certificate_and_key
def get_path_to_file_certificate_only(self):
# pylint: disable=missing-docstring
return self.__certificate_only
def get_path_to_file_certificate_and_key(self):
# pylint: disable=missing-docstring
return self.__certificate_and_key
def get_reverselookup_username(self):
# pylint: disable=missing-docstring
return self.__reverselookup_username
def get_reverselookup_password(self):
# pylint: disable=missing-docstring
return self.__reverselookup_password
def get_reverselookup_baseuri(self):
# pylint: disable=missing-docstring
return self.__reverselookup_baseuri
def get_db_host(self):
# pylint: disable=missing-docstring
return self.__db_host
def get_db_user(self):
# pylint: disable=missing-docstring
return self.__db_user
def get_db_password(self):
# pylint: disable=missing-docstring
return self.__db_password
def get_db_name(self):
# pylint: disable=missing-docstring
return self.__db_name
def get_key_passphrase(self):
# pylint: disable=missing-docstring
return self.__passphrase
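# --- Illustrative usage sketch (file name and prefix are hypothetical) ---
# Shows the documented load_from_JSON() path: the JSON file supplies the
# handle server URL, username, password and prefix, and any extra keys
# end up in get_config(). Passing the credentials on to an actual client
# is outside the scope of this module.
if __name__ == '__main__':
    creds = PIDClientCredentials.load_from_JSON('my_credentials.json')
    LOGGER.info('Writing handles under prefix %s as user %s',
                creds.get_prefix(), creds.get_username())
    print(creds.get_server_URL())
    print(creds.get_config())  # any additional key-value pairs, or None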
|
|
#!/usr/bin/env python
# cardinal_pythonlib/typing_helpers.py
"""
===============================================================================
Original code copyright (C) 2009-2021 Rudolf Cardinal ([email protected]).
This file is part of cardinal_pythonlib.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===============================================================================
**Methods and unusual types for type hints.**
"""
from abc import abstractmethod
import csv
from typing import (
Any,
Iterator,
List,
Optional,
Sequence,
Tuple,
Type,
TYPE_CHECKING,
TypeVar,
Union,
)
# =============================================================================
# Constants
# =============================================================================
T = TypeVar('T')
# =============================================================================
# with_typehint
# =============================================================================
def with_typehint(baseclass: Type[T]) -> Type[T]:
"""
Useful function to make mixins with type hints from a base class.
From
https://stackoverflow.com/questions/51930339/how-do-i-correctly-add-type-hints-to-mixin-classes.
Specimen usage:
.. code-block:: python
class MyMixin1(with_typehint(SomeBaseClass))):
# ...
""" # noqa
if TYPE_CHECKING:
return baseclass
return object
def with_typehints(*baseclasses: Type[T]) -> Type[T]:
"""
Useful function to make mixins with type hints from multiple base classes.
From
https://stackoverflow.com/questions/51930339/how-do-i-correctly-add-type-hints-to-mixin-classes.
Specimen usage:
.. code-block:: python
class MyMixin2(*with_typehints(SomeBaseClass, AnotherBaseClass))):
# ...
""" # noqa
if TYPE_CHECKING:
return baseclasses
return object
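# --- Illustrative sketch of with_typehint (hypothetical classes) ---
# At runtime the mixin really inherits from ``object``; only under
# TYPE_CHECKING does the checker treat it as a subclass of the named
# base, so ``self.greet()`` below is type-checked against _ExampleBase.
# The mixin is meant to be combined with the real base, as in
# _ExampleCombined.
class _ExampleBase(object):
    def greet(self) -> str:
        return "hello"


class _ExampleMixin(with_typehint(_ExampleBase)):
    def shout(self) -> str:
        return self.greet().upper()


class _ExampleCombined(_ExampleMixin, _ExampleBase):
    pass  # _ExampleCombined().shout() == "HELLO"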
# =============================================================================
# csv.writer
# =============================================================================
class CSVWriterType(object):
"""
Type hint for the result of ``csv.writer()``
See https://stackoverflow.com/questions/51264355/how-to-type-annotate-object-returned-by-csv-writer
""" # noqa
@abstractmethod
def writerow(self, row: List[str]) -> None:
pass
@abstractmethod
def writerows(self, rows: List[List[str]]) -> None:
pass
@property
@abstractmethod
def dialect(self) -> csv.Dialect:
pass
# =============================================================================
# Pep249DatabaseConnectionType
# =============================================================================
class Pep249DatabaseConnectionType(object):
"""
Type hint for a database connection compliant with PEP 249. See
https://www.python.org/dev/peps/pep-0249/.
Not supported:
- https://www.python.org/dev/peps/pep-0249/#optional-error-handling-extensions
- https://www.python.org/dev/peps/pep-0249/#optional-two-phase-commit-extensions
""" # noqa
@abstractmethod
def close(self) -> None:
"""
See https://www.python.org/dev/peps/pep-0249/#connection-objects
"""
pass
@abstractmethod
def commit(self) -> None:
"""
See https://www.python.org/dev/peps/pep-0249/#connection-objects
"""
pass
@abstractmethod
def rollback(self) -> None:
"""
See https://www.python.org/dev/peps/pep-0249/#connection-objects
"""
pass
@abstractmethod
def cursor(self) -> "Pep249DatabaseCursorType":
"""
See https://www.python.org/dev/peps/pep-0249/#connection-objects
"""
pass
@property
@abstractmethod
def messages(self) -> List[Tuple[Type, Any]]:
"""
See
https://www.python.org/dev/peps/pep-0249/#optional-db-api-extensions
"""
pass
# =============================================================================
# Pep249DatabaseConnectionType
# =============================================================================
_DATABASE_ROW_TYPE = Sequence[Any]
class Pep249DatabaseCursorType(object):
"""
Type hint for a database cursor compliant with PEP 249. See
https://www.python.org/dev/peps/pep-0249/#cursor-objects
Example, as per https://docs.python.org/3.6/library/sqlite3.html:
.. code-block:: python
import sqlite3
conn = sqlite3.connect(':memory:')
c = conn.cursor()
c.execute('''
CREATE TABLE stocks
(date text, trans text, symbol text, qty real, price real)
''')
c.execute('''
INSERT INTO stocks VALUES ('2006-01-05','BUY','RHAT',100,35.14)
''')
conn.commit()
c.execute("SELECT * FROM stocks")
print(repr(c.description))
help(c)
See also:
- https://www.psycopg.org/docs/cursor.html
"""
@abstractmethod
def __init__(self, *args, **kwargs) -> None:
pass
@abstractmethod
def __iter__(self) -> Iterator[_DATABASE_ROW_TYPE]:
"""
See
https://www.python.org/dev/peps/pep-0249/#optional-db-api-extensions
"""
pass
@abstractmethod
def __new__(cls, *args, **kwargs) -> "Pep249DatabaseCursorType":
pass
@abstractmethod
    def __next__(self) -> _DATABASE_ROW_TYPE:
pass
@property
@abstractmethod
def description(self) \
-> Optional[Sequence[Sequence[Any]]]:
"""
A sequence of column_description objects, where each column_description
describes one result column and has the following items:
- name: ``str``
- type_code: ``Optional[Type]``? Not sure.
- display_size: ``Optional[int]``
- internal_size: ``Optional[int]``
- precision: ``Optional[int]``
- scale: ``Optional[int]``
- null_ok: ``Optional[bool]``
The attribute is ``None`` for operations that don't return rows, and
for un-executed cursors.
"""
pass
@property
@abstractmethod
def rowcount(self) -> int:
"""
See https://www.python.org/dev/peps/pep-0249/#cursor-objects
"""
pass
@abstractmethod
def callproc(self, procname: str, *args, **kwargs) -> None:
"""
See https://www.python.org/dev/peps/pep-0249/#cursor-objects
"""
pass
@abstractmethod
def close(self) -> None:
"""
See https://www.python.org/dev/peps/pep-0249/#cursor-objects
"""
pass
@abstractmethod
def execute(self, operation: str, *args, **kwargs) -> None:
"""
See https://www.python.org/dev/peps/pep-0249/#cursor-objects
"""
pass
@abstractmethod
def executemany(self, operation: str, *args, **kwargs) -> None:
"""
See https://www.python.org/dev/peps/pep-0249/#cursor-objects
"""
pass
@abstractmethod
def fetchone(self) -> Optional[_DATABASE_ROW_TYPE]:
"""
See https://www.python.org/dev/peps/pep-0249/#cursor-objects
"""
pass
@abstractmethod
    def fetchmany(self, size: Optional[int] = None) -> Sequence[_DATABASE_ROW_TYPE]:
"""
See https://www.python.org/dev/peps/pep-0249/#cursor-objects
"""
pass
@abstractmethod
def fetchall(self) -> Sequence[_DATABASE_ROW_TYPE]:
"""
See https://www.python.org/dev/peps/pep-0249/#cursor-objects
"""
pass
@abstractmethod
def nextset(self) -> Optional[bool]:
"""
See https://www.python.org/dev/peps/pep-0249/#cursor-objects
"""
pass
@property
@abstractmethod
def arraysize(self) -> int:
"""
See https://www.python.org/dev/peps/pep-0249/#cursor-objects
"""
# read/write attribute; see below
pass
# noinspection PyUnresolvedReferences
@arraysize.setter
@abstractmethod
def arraysize(self, val: int) -> None:
"""
See https://www.python.org/dev/peps/pep-0249/#cursor-objects
"""
# https://stackoverflow.com/questions/35344209/python-abstract-property-setter-with-concrete-getter
pass
@abstractmethod
def setinputsizes(self, sizes: Sequence[Union[Type, int]]) -> None:
"""
See https://www.python.org/dev/peps/pep-0249/#cursor-objects
"""
pass
@abstractmethod
def setoutputsize(self, size: int, column: Optional[int]) -> None:
"""
See https://www.python.org/dev/peps/pep-0249/#cursor-objects
"""
pass
@property
@abstractmethod
def connection(self) -> Pep249DatabaseConnectionType:
"""
See
https://www.python.org/dev/peps/pep-0249/#optional-db-api-extensions
"""
pass
@property
@abstractmethod
def lastrowid(self) -> Optional[int]:
"""
See
https://www.python.org/dev/peps/pep-0249/#optional-db-api-extensions
"""
pass
@property
@abstractmethod
def rownumber(self) -> Optional[int]:
"""
See
https://www.python.org/dev/peps/pep-0249/#optional-db-api-extensions
"""
pass
@abstractmethod
def scroll(self, value: int, mode: str = 'relative') -> None:
"""
See
https://www.python.org/dev/peps/pep-0249/#optional-db-api-extensions
"""
pass
@property
@abstractmethod
def messages(self) -> List[Tuple[Type, Any]]:
"""
See
https://www.python.org/dev/peps/pep-0249/#optional-db-api-extensions
"""
pass
@abstractmethod
def next(self) -> _DATABASE_ROW_TYPE:
"""
See
https://www.python.org/dev/peps/pep-0249/#optional-db-api-extensions
"""
pass
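# --- Illustrative sketch: using the PEP 249 hints (hypothetical table) ---
# These classes are annotations only; a real DB-API object such as a
# sqlite3 cursor is not a subclass of them, so the hints serve readers
# and static analysis rather than runtime checks.
def _count_demo_rows(cursor: Pep249DatabaseCursorType) -> int:
    cursor.execute("SELECT COUNT(*) FROM demo")
    row = cursor.fetchone()
    return int(row[0]) if row is not None else 0


if __name__ == "__main__":
    import sqlite3
    conn = sqlite3.connect(":memory:")
    c = conn.cursor()
    c.execute("CREATE TABLE demo (x INTEGER)")
    c.execute("INSERT INTO demo VALUES (1)")
    conn.commit()
    print(_count_demo_rows(c))  # prints 1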
|
|
import weakref
import six
import itertools
from contextlib import contextmanager
from functools import partial
_MAPPING = {"&" : "&amp;", "'" : "&apos;", '"' : "&quot;", "<" : "&lt;", ">" : "&gt;"}
def xml_escape(text):
if text is None:
return ""
return "".join(_MAPPING.get(ch, ch) for ch in str(text))
class Htmlable(object):
__slots__ = []
def render_html(self):
raise NotImplementedError()
class HtmlElement(Htmlable):
__slots__ = ["doc", "tag", "attrs", "elements"]
MULTILINE = True
def __init__(self, doc, tag, elems, attrs):
self.doc = doc
self.tag = tag
self.attrs = attrs
self.elements = elems
def __enter__(self):
self.doc._push(self)
return self
def __exit__(self, t, v, tb):
self.doc._pop()
def _format_attrs(self):
attrs = []
for k, v in self.attrs.items():
if k.startswith("_") or v is None or v is False:
continue
k = k.rstrip("_").replace("_", "-")
if v is True:
attrs.append(xml_escape(k))
else:
attrs.append('%s="%s"' % (xml_escape(k), xml_escape(v)))
if attrs:
attrs = " " + " ".join(attrs)
else:
attrs = ""
return attrs
def render_html(self):
attrs = self._format_attrs()
if self.elements:
yield 0, self.MULTILINE, "<%s%s>" % (xml_escape(self.tag), attrs)
for elem in self.elements:
if elem is None:
continue
if isinstance(elem, Htmlable):
for level, nl, line in elem.render_html():
yield level + 1, nl, line
else:
yield 1, False, xml_escape(elem)
yield 0, self.MULTILINE, "</%s>" % (xml_escape(self.tag),)
else:
yield 0, self.MULTILINE, "<%s%s/>" % (xml_escape(self.tag), attrs)
class InlineHtmlElement(HtmlElement):
__slots__ = []
MULTILINE = False
class Raw(Htmlable):
__slots__ = ["text"]
def __init__(self, text):
self.text = str(text)
def render_html(self):
return [(-1, False, self.text)]
nbsp = Raw("&nbsp;")
copy = Raw("&copy;")
def escaped(val):
if isinstance(val, six.string_types):
val = ord(val[0])
return Raw("&#%04x;" % (val,))
class Comment(Htmlable):
__slots__ = ["lines"]
def __init__(self, lines):
self.lines = lines
def render_html(self):
if not self.lines:
return
if len(self.lines) == 1:
yield 0, True, "<!-- %s -->" % (xml_escape(self.lines[0]).replace("-->", "-- >"))
else:
yield 0, False, "<!-- "
for line in self.lines:
yield 0, True, xml_escape(line).replace("-->", "-- >")
yield 0, False, " -->"
class Selector(object):
__slots__ = ["parent", "names", "properties"]
def __init__(self, parent, names):
self.parent = parent
self.names = names
self.properties = {}
def __setitem__(self, name, value):
self.properties[name] = str(value)
def render_html(self):
nesting = [self.names]
node = self.parent
while node:
nesting.append(node.names)
node = node.parent
for parts in itertools.product(*reversed(nesting)):
parts = [(" " if p.strip()[0] not in "+.:>#[]()," else "") + p.strip() for p in parts]
yield 0, True, "%s {" % ("".join(parts).strip(),)
for key, val in self.properties.items():
yield 1, True, "%s: %s;" % (key.rstrip("_").replace("_", "-"), val)
yield 0, True, "}"
class CSS(Htmlable):
__slots__ = ["_curr", "_selectors"]
def __init__(self):
self._curr = None
self._selectors = []
@contextmanager
def __call__(self, *selectors):
sel = Selector(self._curr if self._curr else "", selectors)
self._selectors.append(sel)
prev = self._curr
self._curr = sel
try:
yield sel
finally:
self._curr = prev
def __setitem__(self, name, value):
self._curr[name] = value
def __bool__(self):
return bool(self._selectors)
__nonzero__ = __bool__
def render_html(self):
for sel in self._selectors:
for level, nl, line in sel.render_html():
yield level, nl, line
class HtmlDocument(object):
DOCTYPE = '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">'
__slots__ = ["__weakref__", "_root", "_stack", "_head_css", "_head", "_body"]
def __init__(self, xmlns = "http://www.w3.org/1999/xhtml"):
self._root = HtmlElement(weakref.proxy(self), "html", [], attrs = {"xmlns" : xmlns})
self._stack = [self._root]
self._head = None
self._body = None
self._head_css = None
def __str__(self):
return self.render()
def render(self, tabulator = "\t"):
lines = []
prev_nl = False
for level, nl, line in self._root.render_html():
if not prev_nl and not nl:
level = 0
lines.append("%s%s%s" % ("\n" if nl or prev_nl else "", tabulator * level, line))
prev_nl = nl
return self.DOCTYPE + "".join(lines)
def _push(self, elem):
self._stack.append(elem)
def _pop(self):
self._stack.pop(-1)
def text(self, *texts):
self._stack[-1].elements.extend(texts)
def attrs(self, **attrs):
self._stack[-1].attrs.update(attrs)
def raw(self, text):
self._stack[-1].elements.append(Raw(text))
def comment(self, *lines):
self._stack[-1].elements.append(Comment(lines))
def subelem(self, tag, *elems, **attrs):
elem = HtmlElement(weakref.proxy(self), tag, list(elems), attrs)
self._stack[-1].elements.append(elem)
return elem
def inline_subelem(self, tag, *elems, **attrs):
elem = InlineHtmlElement(weakref.proxy(self), tag, list(elems), attrs)
self._stack[-1].elements.append(elem)
return elem
def head_css(self):
if self._head_css is None:
self._head_css = CSS()
with self.head():
with self.style():
self._stack[-1].elements.append(self._head_css)
return self._head_css
def __getattr__(self, name):
if name.startswith("_"):
raise AttributeError(name)
return partial(self.subelem, name)
#===================================================================================================================
# Elements
#===================================================================================================================
def head(self):
if self._head is None:
self._head = self.subelem("head")
return self._head
def body(self, *texts, **attrs):
if self._body is None:
self._body = self.subelem("body", *texts, **attrs)
return self._body
def meta(self, **attrs):
return self.subelem("meta", **attrs)
def title(self, *texts):
return self.inline_subelem("title", *texts)
def base(self, **attrs):
return self.subelem("base", **attrs)
def link(self, **attrs):
return self.subelem("link", **attrs)
def style(self, type_ = "text/css", **attrs):
return self.subelem("style", type_ = type_, **attrs)
def script(self, type_ = "text/javascript", **attrs):
return self.subelem("script", None, type_ = type_, **attrs)
def link_css(self, href):
return self.link(href = href, type = "text/css", rel = "stylesheet")
def script_src(self, src, type_ = "text/javascript"):
return self.inline_subelem("script", None, type_ = type_, src = src)
def div(self, *texts, **attrs):
return self.subelem("div", *texts, **attrs)
def blockquote(self, *texts, **attrs):
return self.subelem("blockquote", *texts, **attrs)
def dl(self, *texts, **attrs):
return self.subelem("dl", *texts, **attrs)
def dt(self, *texts, **attrs):
return self.subelem("dt", *texts, **attrs)
def dd(self, *texts, **attrs):
return self.subelem("dd", *texts, **attrs)
def li(self, *texts, **attrs):
return self.subelem("li", *texts, **attrs)
def ul(self, *texts, **attrs):
return self.subelem("ul", *texts, **attrs)
def ol(self, *texts, **attrs):
return self.subelem("ol", *texts, **attrs)
def form(self, *texts, **attrs):
return self.subelem("form", *texts, **attrs)
def input(self, *texts, **attrs):
return self.subelem("input", *texts, **attrs)
def button(self, *texts, **attrs):
return self.subelem("button", *texts, **attrs)
def select(self, *texts, **attrs):
return self.subelem("select", *texts, **attrs)
def label(self, *texts, **attrs):
return self.subelem("label", *texts, **attrs)
def optgroup(self, *texts, **attrs):
return self.subelem("optgroup", *texts, **attrs)
def option(self, *texts, **attrs):
return self.subelem("option", *texts, **attrs)
def textarea(self, *texts, **attrs):
return self.subelem("textarea", *texts, **attrs)
def legend(self, *texts, **attrs):
return self.subelem("legend", *texts, **attrs)
def table(self, *texts, **attrs):
return self.subelem("table", *texts, **attrs)
def tr(self, *texts, **attrs):
return self.subelem("tr", *texts, **attrs)
def th(self, *texts, **attrs):
return self.subelem("th", *texts, **attrs)
def td(self, *texts, **attrs):
return self.subelem("td", *texts, **attrs)
def colgroup(self, *texts, **attrs):
return self.subelem("colgroup", *texts, **attrs)
def thead(self, *texts, **attrs):
return self.subelem("thead", *texts, **attrs)
def tbody(self, *texts, **attrs):
return self.subelem("tbody", *texts, **attrs)
def tfoot(self, *texts, **attrs):
return self.subelem("tfoot", *texts, **attrs)
def frame(self, *texts, **attrs):
return self.subelem("frame", *texts, **attrs)
def iframe(self, *texts, **attrs):
return self.subelem("iframe", *texts, **attrs)
def noframe(self, *texts, **attrs):
return self.subelem("noframe", *texts, **attrs)
def frameset(self, *texts, **attrs):
return self.subelem("frameset", *texts, **attrs)
def p(self, *texts, **attrs):
return self.subelem("p", *texts, **attrs)
def img(self, *texts, **attrs):
return self.inline_subelem("img", *texts, **attrs)
def pre(self, *texts, **attrs):
return self.inline_subelem("pre", *texts, **attrs)
def code(self, *texts, **attrs):
return self.inline_subelem("code", *texts, **attrs)
def span(self, *texts, **attrs):
return self.inline_subelem("span", *texts, **attrs)
def a(self, *texts, **attrs):
return self.inline_subelem("a", *texts, **attrs)
def b(self, *texts, **attrs):
return self.inline_subelem("b", *texts, **attrs)
def br(self, *texts, **attrs):
return self.inline_subelem("br", *texts, **attrs)
def hr(self, *texts, **attrs):
return self.inline_subelem("hr", *texts, **attrs)
def em(self, *texts, **attrs):
return self.inline_subelem("em", *texts, **attrs)
def strong(self, *texts, **attrs):
return self.inline_subelem("strong", *texts, **attrs)
def cite(self, *texts, **attrs):
return self.inline_subelem("cite", *texts, **attrs)
def i(self, *texts, **attrs):
return self.inline_subelem("i", *texts, **attrs)
def u(self, *texts, **attrs):
return self.inline_subelem("u", *texts, **attrs)
def sub(self, *texts, **attrs):
return self.inline_subelem("sub", *texts, **attrs)
def sup(self, *texts, **attrs):
return self.inline_subelem("sup", *texts, **attrs)
def big(self, *texts, **attrs):
return self.inline_subelem("big", *texts, **attrs)
def small(self, *texts, **attrs):
return self.inline_subelem("small", *texts, **attrs)
def h1(self, *texts, **attrs):
return self.inline_subelem("h1", *texts, **attrs)
def h2(self, *texts, **attrs):
return self.inline_subelem("h2", *texts, **attrs)
def h3(self, *texts, **attrs):
return self.inline_subelem("h3", *texts, **attrs)
def h4(self, *texts, **attrs):
return self.inline_subelem("h4", *texts, **attrs)
def h5(self, *texts, **attrs):
return self.inline_subelem("h5", *texts, **attrs)
def h6(self, *texts, **attrs):
return self.inline_subelem("h6", *texts, **attrs)
|
|
__author__ = 'minhtule'
from exception import ValidateException
from utility import *
import types
import re
import urllib
import random
import sys
REGEX_OBJECT_TYPE = type(re.compile(""))
class Parameter(object):
"""Parameter of http request specified by Google Analytics
Details at https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters
"""
VALUE_TYPE_TEXT = "text"
VALUE_TYPE_CURRENCY = "currency"
VALUE_TYPE_BOOLEAN = "boolean"
VALUE_TYPE_INTEGER = "integer"
def __init__(self, key, value, value_type, is_required=False, max_length=None,
valid_key_pattern=None, valid_value_pattern=None, valid_values=None):
self.__key = key
self.__value = value
self.__value_type = value_type
self.__is_required = is_required
self.__max_length = max_length
self.__valid_key_pattern = valid_key_pattern
self.__valid_value_pattern = valid_value_pattern
self.__valid_values = valid_values
self.validate_key()
self.validate_value()
# boolean values are internally stored as 0 or 1
if self.__value_type == self.VALUE_TYPE_BOOLEAN:
self.__value = int(self.__value)
@property
def key(self):
return self.__key
@property
def value(self):
return self.__value
@property
def value_type(self):
return self.__value_type
@property
def is_required(self):
return self.__is_required
@property
def max_length(self):
return self.__max_length
@property
def valid_key_pattern(self):
return self.__valid_key_pattern
@property
def valid_value_pattern(self):
return self.__valid_value_pattern
@property
def valid_values(self):
return self.__valid_values
def __str__(self):
return "%s=%s" % (self.__key, self.__value)
# Public methods
def url_format(self):
url_formatted_key = urllib.quote(self.key, "")
url_formatted_value = urllib.quote(str(self.value), "")
return "%s=%s" % (url_formatted_key, url_formatted_value)
def validate_key(self):
if self.valid_key_pattern and not self.valid_key_pattern.match(self.key):
raise ValidateException("%s's key has invalid format" % self.__class__.__name__)
def validate_value(self):
validate_func_name = "validate_%s" % self.value_type
validate_func = self.__getattribute__(validate_func_name)
if not validate_func():
raise ValidateException("%s's value must be of type %s" % (self.__class__.__name__, self.value_type))
if isinstance(self.valid_value_pattern, REGEX_OBJECT_TYPE) and not self.valid_value_pattern.match(self.value):
raise ValidateException("%s's value has invalid format" % self.__class__.__name__)
if self.valid_values and self.value not in self.valid_values:
raise ValidateException("%s has invalid value" % self.__class__.__name__)
def validate_text(self):
return isinstance(self.value, types.StringTypes) and not is_empty_string(self.value)
def validate_integer(self):
return isinstance(self.value, int)
def validate_currency(self):
return isinstance(self.value, float)
def validate_boolean(self):
return isinstance(self.value, bool)
###############################################################################
# GENERAL
###############################################################################
class ProtocolVersion(Parameter):
PROTOCOL_VERSION_PARAM_KEY = "v"
def __init__(self, value="1"):
super(ProtocolVersion, self).__init__(self.PROTOCOL_VERSION_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
is_required=True)
class TrackingID(Parameter):
TRACKING_ID_PARAM_KEY = "tid"
TRACKING_ID_VALID_VALUE_PATTERN = re.compile(r"^UA-[0-9]*-[0-9]*$")
def __init__(self, value):
super(TrackingID, self).__init__(self.TRACKING_ID_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
is_required=True,
valid_value_pattern=self.TRACKING_ID_VALID_VALUE_PATTERN)
class AnonymizeIP(Parameter):
ANONYMIZE_IP_PARAM_KEY = "aip"
def __init__(self, value=True):
super(AnonymizeIP, self).__init__(self.ANONYMIZE_IP_PARAM_KEY,
value,
self.VALUE_TYPE_BOOLEAN)
class QueueTime(Parameter):
QUEUE_TIME_PARAM_KEY = "qt"
def __init__(self, value):
super(QueueTime, self).__init__(self.QUEUE_TIME_PARAM_KEY,
value,
self.VALUE_TYPE_INTEGER)
class CacheBuster(Parameter):
CACHE_BUSTER_PARAM_KEY = "z"
def __init__(self):
value = str(random.randrange(sys.maxint))
super(CacheBuster, self).__init__(self.CACHE_BUSTER_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT)
###############################################################################
# VISITOR
###############################################################################
class ClientID(Parameter):
CLIENT_ID_PARAM_KEY = "cid"
def __init__(self, value):
super(ClientID, self).__init__(self.CLIENT_ID_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
is_required=True)
###############################################################################
# SESSION
###############################################################################
class SessionControl(Parameter):
SESSION_CONTROL_PARAM_KEY = "sc"
SESSION_CONTROL_VALID_VALUES = ["start", "end"]
def __init__(self, value):
super(SessionControl, self).__init__(self.SESSION_CONTROL_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
valid_values=self.SESSION_CONTROL_VALID_VALUES)
###############################################################################
# TRAFFIC SOURCES
###############################################################################
class DocumentReferrer(Parameter):
DOCUMENT_REFERRER_PARAM_KEY = "dr"
DOCUMENT_REFERRER_MAX_LENGTH = 2048
def __init__(self, value):
super(DocumentReferrer, self).__init__(self.DOCUMENT_REFERRER_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.DOCUMENT_REFERRER_MAX_LENGTH)
class CampaignName(Parameter):
CAMPAIGN_NAME_PARAM_KEY = "cn"
CAMPAIGN_NAME_MAX_LENGTH = 100
def __init__(self, value):
super(CampaignName, self).__init__(self.CAMPAIGN_NAME_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.CAMPAIGN_NAME_MAX_LENGTH)
class CampaignSource(Parameter):
CAMPAIGN_SOURCE_PARAM_KEY = "cs"
CAMPAIGN_SOURCE_MAX_LENGTH = 100
def __init__(self, value):
super(CampaignSource, self).__init__(self.CAMPAIGN_SOURCE_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.CAMPAIGN_SOURCE_MAX_LENGTH)
class CampaignMedium(Parameter):
CAMPAIGN_MEDIUM_PARAM_KEY = "cm"
CAMPAIGN_MEDIUM_MAX_LENGTH = 50
def __init__(self, value):
super(CampaignMedium, self).__init__(self.CAMPAIGN_MEDIUM_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.CAMPAIGN_MEDIUM_MAX_LENGTH)
class CampaignKeyword(Parameter):
CAMPAIGN_KEYWORD_PARAM_KEY = "ck"
CAMPAIGN_KEYWORD_MAX_LENGTH = 500
def __init__(self, value):
super(CampaignKeyword, self).__init__(self.CAMPAIGN_KEYWORD_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.CAMPAIGN_KEYWORD_MAX_LENGTH)
class CampaignContent(Parameter):
CAMPAIGN_CONTENT_PARAM_KEY = "cc"
CAMPAIGN_CONTENT_MAX_LENGTH = 500
def __init__(self, value):
super(CampaignContent, self).__init__(self.CAMPAIGN_CONTENT_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.CAMPAIGN_CONTENT_MAX_LENGTH)
class CampaignID(Parameter):
CAMPAIGN_ID_PARAM_KEY = "ci"
CAMPAIGN_ID_MAX_LENGTH = 100
def __init__(self, value):
super(CampaignID, self).__init__(self.CAMPAIGN_ID_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.CAMPAIGN_ID_MAX_LENGTH)
class GoogleAdWordsID(Parameter):
GOOGLE_ADWORDS_ID_PARAM_KEY = "gclid"
def __init__(self, value):
super(GoogleAdWordsID, self).__init__(self.GOOGLE_ADWORDS_ID_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT)
class GoogleDisplayAdsID(Parameter):
GOOGLE_DISPLAY_ADS_ID_PARAM_KEY = "dclid"
def __init__(self, value):
super(GoogleDisplayAdsID, self).__init__(self.GOOGLE_DISPLAY_ADS_ID_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT)
###############################################################################
# SYSTEM INFO
###############################################################################
class ScreenResolution(Parameter):
SCREEN_RESOLUTION_PARAM_KEY = "sr"
SCREEN_RESOLUTION_MAX_LENGTH = 20
def __init__(self, value):
super(ScreenResolution, self).__init__(self.SCREEN_RESOLUTION_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.SCREEN_RESOLUTION_MAX_LENGTH)
class ViewportSize(Parameter):
VIEWPORT_SIZE_PARAM_KEY = "vp"
VIEWPORT_SIZE_MAX_LENGTH = 20
def __init__(self, value):
super(ViewportSize, self).__init__(self.VIEWPORT_SIZE_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.VIEWPORT_SIZE_MAX_LENGTH)
class DocumentEncoding(Parameter):
DOCUMENT_ENCODING_PARAM_KEY = "de"
DOCUMENT_ENCODING_DEFAULT_VALUE = "UTF-8"
DOCUMENT_ENCODING_MAX_LENGTH = 20
def __init__(self, value):
super(DocumentEncoding, self).__init__(self.DOCUMENT_ENCODING_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.DOCUMENT_ENCODING_MAX_LENGTH)
class ScreenColors(Parameter):
SCREEN_COLORS_PARAM_KEY = "sd"
SCREEN_COLORS_MAX_LENGTH = 20
def __init__(self, value):
super(ScreenColors, self).__init__(self.SCREEN_COLORS_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.SCREEN_COLORS_MAX_LENGTH)
class UserLanguage(Parameter):
USER_LANGUAGE_PARAM_KEY = "ul"
USER_LANGUAGE_MAX_LENGTH = 20
def __init__(self, value):
super(UserLanguage, self).__init__(self.USER_LANGUAGE_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.USER_LANGUAGE_MAX_LENGTH)
class JavaEnabled(Parameter):
JAVA_ENABLED_PARAM_KEY = "je"
def __init__(self, value):
super(JavaEnabled, self).__init__(self.JAVA_ENABLED_PARAM_KEY,
value,
self.VALUE_TYPE_BOOLEAN)
class FlashVersion(Parameter):
FLASH_VERSION_PARAM_KEY = "fl"
def __init__(self, value):
super(FlashVersion, self).__init__(self.FLASH_VERSION_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=20)
###############################################################################
# HIT
###############################################################################
class HitType(Parameter):
HIT_TYPE_PARAM_KEY = "t"
HIT_TYPE_VALID_VALUES = ["pageview", "appview", "event", "transaction", "item", "social", "exception", "timing"]
def __init__(self, value):
super(HitType, self).__init__(self.HIT_TYPE_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
is_required=True,
valid_values=self.HIT_TYPE_VALID_VALUES)
class NonInteractionHit(Parameter):
NON_INTERACTION_HIT_PARAM_KEY = "ni"
def __init__(self, value):
super(NonInteractionHit, self).__init__(self.NON_INTERACTION_HIT_PARAM_KEY,
value,
self.VALUE_TYPE_BOOLEAN)
###############################################################################
# CONTENT INFORMATION
###############################################################################
class DocumentLocationURL(Parameter):
DOCUMENT_LOCATION_URL_PARAM_KEY = "dl"
DOCUMENT_LOCATION_URL_MAX_LENGTH = 2048
def __init__(self, value):
super(DocumentLocationURL, self).__init__(self.DOCUMENT_LOCATION_URL_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.DOCUMENT_LOCATION_URL_MAX_LENGTH)
class DocumentHostName(Parameter):
DOCUMENT_HOST_NAME_PARAM_KEY = "dh"
DOCUMENT_HOST_NAME_MAX_LENGTH = 100
def __init__(self, value):
super(DocumentHostName, self).__init__(self.DOCUMENT_HOST_NAME_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.DOCUMENT_HOST_NAME_MAX_LENGTH)
class DocumentPath(Parameter):
DOCUMENT_PATH_PARAM_KEY = "dp"
DOCUMENT_PATH_MAX_LENGTH = 2048
DOCUMENT_PATH_VALID_PATTERN = re.compile(r"^/.*")
def __init__(self, value):
super(DocumentPath, self).__init__(self.DOCUMENT_PATH_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.DOCUMENT_PATH_MAX_LENGTH,
valid_value_pattern=self.DOCUMENT_PATH_VALID_PATTERN)
class DocumentTitle(Parameter):
DOCUMENT_TITLE_PARAM_KEY = "dt"
DOCUMENT_TITLE_MAX_LENGTH = 1500
def __init__(self, value):
super(DocumentTitle, self).__init__(self.DOCUMENT_TITLE_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.DOCUMENT_TITLE_MAX_LENGTH)
class ContentDescription(Parameter):
CONTENT_DESCRIPTION_PARAM_KEY = "cd"
CONTENT_DESCRIPTION_MAX_LENGTH = 2048
def __init__(self, value):
super(ContentDescription, self).__init__(self.CONTENT_DESCRIPTION_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.CONTENT_DESCRIPTION_MAX_LENGTH)
###############################################################################
# APP TRACKING
###############################################################################
class ApplicationName(Parameter):
APPLICATION_NAME_PARAM_KEY = "an"
APPLICATION_NAME_MAX_LENGTH = 100
def __init__(self, value):
super(ApplicationName, self).__init__(self.APPLICATION_NAME_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.APPLICATION_NAME_MAX_LENGTH)
class ApplicationVersion(Parameter):
APPLICATION_VERSION_PARAM_KEY = "av"
APPLICATION_VERSION_MAX_LENGTH = 100
def __init__(self, value):
super(ApplicationVersion, self).__init__(self.APPLICATION_VERSION_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.APPLICATION_VERSION_MAX_LENGTH)
###############################################################################
# EVENT TRACKING
###############################################################################
class EventCategory(Parameter):
EVENT_CATEGORY_PARAM_KEY = "ec"
EVENT_CATEGORY_MAX_LENGTH = 150
def __init__(self, value):
super(EventCategory, self).__init__(self.EVENT_CATEGORY_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.EVENT_CATEGORY_MAX_LENGTH)
class EventAction(Parameter):
EVENT_ACTION_PARAM_KEY = "ea"
EVENT_ACTION_MAX_LENGTH = 500
def __init__(self, value):
super(EventAction, self).__init__(self.EVENT_ACTION_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.EVENT_ACTION_MAX_LENGTH)
class EventLabel(Parameter):
EVENT_LABEL_PARAM_KEY = "el"
EVENT_LABEL_MAX_LENGTH = 500
def __init__(self, value):
super(EventLabel, self).__init__(self.EVENT_LABEL_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.EVENT_LABEL_MAX_LENGTH)
class EventValue(Parameter):
EVENT_VALUE_PARAM_KEY = "ev"
def __init__(self, value):
super(EventValue, self).__init__(self.EVENT_VALUE_PARAM_KEY,
value,
self.VALUE_TYPE_INTEGER)
###############################################################################
# E-COMMERCE
###############################################################################
class TransactionID(Parameter):
TRANSACTION_ID_PARAM_KEY = "ti"
TRANSACTION_ID_MAX_LENGTH = 500
def __init__(self, value):
super(TransactionID, self).__init__(self.TRANSACTION_ID_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.TRANSACTION_ID_MAX_LENGTH)
class TransactionAffiliation(Parameter):
TRANSACTION_AFFILIATION_PARAM_KEY = "ta"
TRANSACTION_AFFILIATION_MAX_LENGTH = 500
def __init__(self, value):
super(TransactionAffiliation, self).__init__(self.TRANSACTION_AFFILIATION_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.TRANSACTION_AFFILIATION_MAX_LENGTH)
class TransactionRevenue(Parameter):
TRANSACTION_REVENUE_PARAM_KEY = "tr"
def __init__(self, value):
super(TransactionRevenue, self).__init__(self.TRANSACTION_REVENUE_PARAM_KEY,
value,
self.VALUE_TYPE_CURRENCY)
class TransactionShipping(Parameter):
TRANSACTION_SHIPPING_PARAM_KEY = "ts"
def __init__(self, value):
super(TransactionShipping, self).__init__(self.TRANSACTION_SHIPPING_PARAM_KEY,
value,
self.VALUE_TYPE_CURRENCY)
class TransactionTax(Parameter):
TRANSACTION_TAX_PARAM_KEY = "tt"
def __init__(self, value):
super(TransactionTax, self).__init__(self.TRANSACTION_TAX_PARAM_KEY,
value,
self.VALUE_TYPE_CURRENCY)
class ItemName(Parameter):
ITEM_NAME_PARAM_KEY = "in"
ITEM_NAME_MAX_LENGTH = 500
def __init__(self, value):
super(ItemName, self).__init__(self.ITEM_NAME_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.ITEM_NAME_MAX_LENGTH)
class ItemPrice(Parameter):
ITEM_PRICE_PARAM_KEY = "ip"
def __init__(self, value):
super(ItemPrice, self).__init__(self.ITEM_PRICE_PARAM_KEY,
value,
self.VALUE_TYPE_CURRENCY)
class ItemQuantity(Parameter):
ITEM_QUANTITY_PARAM_KEY = "iq"
def __init__(self, value):
super(ItemQuantity, self).__init__(self.ITEM_QUANTITY_PARAM_KEY,
value,
self.VALUE_TYPE_INTEGER)
class ItemCode(Parameter):
ITEM_CODE_PARAM_KEY = "ic"
ITEM_CODE_MAX_LENGTH = 500
def __init__(self, value):
super(ItemCode, self).__init__(self.ITEM_CODE_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.ITEM_CODE_MAX_LENGTH)
class ItemCategory(Parameter):
ITEM_CATEGORY_PARAM_KEY = "iv"
ITEM_CATEGORY_MAX_LENGTH = 500
def __init__(self, value):
super(ItemCategory, self).__init__(self.ITEM_CATEGORY_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.ITEM_CATEGORY_MAX_LENGTH)
class CurrencyCode(Parameter):
CURRENCY_CODE_PARAM_KEY = "cu"
CURRENCY_CODE_MAX_LENGTH = 10
# TODO check that the value conforms to ISO 4217 currency code
def __init__(self, value):
super(CurrencyCode, self).__init__(self.CURRENCY_CODE_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.CURRENCY_CODE_MAX_LENGTH)
###############################################################################
# SOCIAL INTERACTIONS
###############################################################################
class SocialNetwork(Parameter):
SOCIAL_NETWORK_PARAM_KEY = "sn"
SOCIAL_NETWORK_MAX_LENGTH = 50
def __init__(self, value):
super(SocialNetwork, self).__init__(self.SOCIAL_NETWORK_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.SOCIAL_NETWORK_MAX_LENGTH)
class SocialAction(Parameter):
SOCIAL_ACTION_PARAM_KEY = "sa"
SOCIAL_ACTION_MAX_LENGTH = 50
def __init__(self, value):
super(SocialAction, self).__init__(self.SOCIAL_ACTION_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.SOCIAL_ACTION_MAX_LENGTH)
class SocialActionTarget(Parameter):
SOCIAL_ACTION_TARGET_PARAM_KEY = "st"
SOCIAL_ACTION_TARGET_MAX_LENGTH = 2048
def __init__(self, value):
super(SocialActionTarget, self).__init__(self.SOCIAL_ACTION_TARGET_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.SOCIAL_ACTION_TARGET_MAX_LENGTH)
###############################################################################
# TIMING
###############################################################################
class UserTimingCategory(Parameter):
USER_TIMING_CATEGORY_PARAM_KEY = "utc"
USER_TIMING_CATEGORY_MAX_LENGTH = 150
def __init__(self, value):
super(UserTimingCategory, self).__init__(self.USER_TIMING_CATEGORY_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.USER_TIMING_CATEGORY_MAX_LENGTH)
class UserTimingVariableName(Parameter):
USER_TIMING_VARIABLE_NAME_PARAM_KEY = "utv"
USER_TIMING_VARIABLE_NAME_MAX_LENGTH = 500
def __init__(self, value):
super(UserTimingVariableName, self).__init__(self.USER_TIMING_VARIABLE_NAME_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.USER_TIMING_VARIABLE_NAME_MAX_LENGTH)
class UserTimingTime(Parameter):
USER_TIMING_TIME_PARAM_KEY = "utt"
def __init__(self, value):
super(UserTimingTime, self).__init__(self.USER_TIMING_TIME_PARAM_KEY,
value,
self.VALUE_TYPE_INTEGER)
class UserTimingLabel(Parameter):
USER_TIMING_LABEL_PARAM_KEY = "utl"
USER_TIMING_LABEL_MAX_LENGTH = 500
def __init__(self, value):
super(UserTimingLabel, self).__init__(self.USER_TIMING_LABEL_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.USER_TIMING_LABEL_MAX_LENGTH)
class UserPageLoadTime(Parameter):
USER_PAGE_LOAD_TIME_PARAM_KEY = "plt"
def __init__(self, value):
super(UserPageLoadTime, self).__init__(self.USER_PAGE_LOAD_TIME_PARAM_KEY,
value,
self.VALUE_TYPE_INTEGER)
class UserDNSTime(Parameter):
USER_DNS_TIME_PARAM_KEY = "dns"
def __init__(self, value):
super(UserDNSTime, self).__init__(self.USER_DNS_TIME_PARAM_KEY,
value,
self.VALUE_TYPE_INTEGER)
class PageDownloadTime(Parameter):
PAGE_DOWNLOAD_TIME_PARAM_KEY = "pdt"
def __init__(self, value):
super(PageDownloadTime, self).__init__(self.PAGE_DOWNLOAD_TIME_PARAM_KEY,
value,
self.VALUE_TYPE_INTEGER)
class RedirectResponseTime(Parameter):
REDIRECT_RESPONSE_TIME_PARAM_KEY = "rrt"
def __init__(self, value):
super(RedirectResponseTime, self).__init__(self.REDIRECT_RESPONSE_TIME_PARAM_KEY,
value,
self.VALUE_TYPE_INTEGER)
class TCPConnectTime(Parameter):
TCP_CONNECT_TIME_PARAM_KEY = "tcp"
def __init__(self, value):
super(TCPConnectTime, self).__init__(self.TCP_CONNECT_TIME_PARAM_KEY,
value,
self.VALUE_TYPE_INTEGER)
class ServerResponseTime(Parameter):
SERVER_RESPONSE_TIME_PARAM_KEY = "srt"
def __init__(self, value):
super(ServerResponseTime, self).__init__(self.SERVER_RESPONSE_TIME_PARAM_KEY,
value,
self.VALUE_TYPE_INTEGER)
###############################################################################
# EXCEPTIONS
###############################################################################
class ExceptionDescription(Parameter):
EXCEPTION_DESCRIPTION_PARAM_KEY = "exd"
EXCEPTION_DESCRIPTION_MAX_LENGTH = 150
def __init__(self, value):
super(ExceptionDescription, self).__init__(self.EXCEPTION_DESCRIPTION_PARAM_KEY,
value,
self.VALUE_TYPE_TEXT,
max_length=self.EXCEPTION_DESCRIPTION_MAX_LENGTH)
class IsExceptionFatal(Parameter):
IS_EXCEPTION_FATAL_PARAM_KEY = "exf"
def __init__(self, value=True):
super(IsExceptionFatal, self).__init__(self.IS_EXCEPTION_FATAL_PARAM_KEY,
value,
self.VALUE_TYPE_BOOLEAN)
###############################################################################
# CUSTOM METRIC
###############################################################################
class CustomDimension(Parameter):
CUSTOM_DIMENSION_VALID_KEY_PATTERN = re.compile(r"^cd[1-9][0-9]*$")
CUSTOM_DIMENSION_MAX_LENGTH = 150
def __init__(self, key, value):
super(CustomDimension, self).__init__(key,
value,
self.VALUE_TYPE_TEXT,
max_length=self.CUSTOM_DIMENSION_MAX_LENGTH,
valid_key_pattern=self.CUSTOM_DIMENSION_VALID_KEY_PATTERN)
@staticmethod
def key_for_index(index):
return "cd%d" % index
class CustomMetric(Parameter):
CUSTOM_METRIC_VALID_KEY_PATTERN = re.compile(r"^cm[1-9][0-9]*$")
def __init__(self, key, value):
super(CustomMetric, self).__init__(key,
value,
self.VALUE_TYPE_INTEGER,
valid_key_pattern=self.CUSTOM_METRIC_VALID_KEY_PATTERN)
@staticmethod
def key_for_index(index):
return "cm%d" % index
|
|
# Copyright 2012 by Eric Talevich. All rights reserved.
# This code is part of the Biopython distribution and governed by its license.
# Please see the LICENSE file that should have been included as part of this
# package.
"""Command-line wrapper for the tree inference program RAxML.
Derived from the help page for RAxML version 7.3 by Alexandros Stamatakis, but
should work for any version 7.X (and probably earlier for most options).
"""
__docformat__ = "restructuredtext en"
from Bio.Application import _Option, _Switch, AbstractCommandline
class RaxmlCommandline(AbstractCommandline):
"""Command-line wrapper for the tree inference program RAxML.
The required parameters are 'sequences' (-s), 'model' (-m) and 'name' (-n).
The parameter 'parsimony_seed' (-p) must also be set for RAxML, but if you
do not specify it, this wrapper will set the seed to 10000 for you.
Example:
>>> from Bio.Phylo.Applications import RaxmlCommandline
>>> raxml_cline = RaxmlCommandline(sequences="Tests/Phylip/interlaced2.phy",
... model="PROTCATWAG", name="interlaced2")
>>> print raxml_cline
raxmlHPC -m PROTCATWAG -n interlaced2 -p 10000 -s Tests/Phylip/interlaced2.phy
You would typically run the command line with raxml_cline() or via
the Python subprocess module, as described in the Biopython tutorial.
Citation:
Stamatakis A.
RAxML-VI-HPC: Maximum Likelihood-based Phylogenetic Analyses with
Thousands of Taxa and Mixed Models.
Bioinformatics 2006, 22(21):2688-2690.
Homepage: http://sco.h-its.org/exelixis/software.html
"""
def __init__(self, cmd='raxmlHPC', **kwargs):
self.parameters = [
_Option(['-a', 'weight_filename'],
"Name of a column weight file to assign individual weights "
"to each column of the alignment. Those weights must be "
"integers separated by any type and number of whitespaces "
"within a separate file.",
filename=True,
equate=False,
),
_Option(['-b', 'bootstrap_seed'],
"Random seed for bootstrapping.",
equate=False,
),
_Option(['-c', 'num_categories'],
"Number of distinct rate categories for RAxML when "
"evolution model is set to GTRCAT or GTRMIX."
"Individual per-site rates are categorized into this "
"many rate categories to accelerate computations. "
"Default: 25.",
equate=False,
),
_Switch(['-d', 'random_starting_tree'],
"Start ML optimization from random starting tree."
),
_Option(['-e', 'epsilon'],
"Set model optimization precision in log likelihood units "
"for final optimization of tree topology under MIX/MIXI "
"or GAMMA/GAMMAI."
"Default: 0.1 for models not using proportion of "
"invariant sites estimate; 0.001 for models using "
"proportion of invariant sites estimate.",
equate=False,
),
_Option(['-E', 'exclude_filename'],
"An exclude file name, containing a specification of "
"alignment positions you wish to exclude. Format is "
"similar to Nexus, the file shall contain entries like "
"'100-200 300-400'; to exclude a single column write, "
"e.g., '100-100'. If you use a mixed model, an "
"appropriately adapted model file will be written.",
filename=True,
equate=False,
),
_Option(['-f', 'algorithm'],
"""Select algorithm:
a: Rapid Bootstrap analysis and search for best-scoring ML
tree in one program run.
b: Draw bipartition information on a tree provided with '-t'
based on multiple trees (e.g. from a bootstrap) in a file
specified by '-z'.
c: Check if the alignment can be properly read by RAxML.
d: New rapid hill-climbing (DEFAULT).
e: Optimize model+branch lengths for given input tree under
GAMMA/GAMMAI only.
g: Compute per-site log likelihoods for one or more trees
passed via '-z' and write them to a file that can be read
by CONSEL.
h: Compute log likelihood test (SH-test) between best tree
passed via '-t' and a bunch of other trees passed via '-z'.
i: Perform a really thorough bootstrap, refinement of final
bootstrap tree under GAMMA and a more exhaustive algorithm.
j: Generate a bunch of bootstrapped alignment files from an
original alignment file.
m: Compare bipartitions between two bunches of trees passed
via '-t' and '-z' respectively. This will return the
Pearson correlation between all bipartitions found in the
two tree files. A file called
RAxML_bipartitionFrequencies.outputFileName will be
printed that contains the pair-wise bipartition
frequencies of the two sets.
n: Compute the log likelihood score of all trees contained
in a tree file provided by '-z' under GAMMA or
GAMMA+P-Invar.
o: Old and slower rapid hill-climbing.
p: Perform pure stepwise MP addition of new sequences to an
incomplete starting tree.
s: Split up a multi-gene partitioned alignment into the
respective subalignments.
t: Do randomized tree searches on one fixed starting tree.
w: Compute ELW test on a bunch of trees passed via '-z'.
x: Compute pair-wise ML distances, ML model parameters will
be estimated on an MP starting tree or a user-defined
tree passed via '-t', only allowed for GAMMA-based models
of rate heterogeneity.""",
checker_function=(lambda x:
isinstance(x, basestring) and len(x) == 1),
equate=False,
),
_Option(['-g', 'grouping_constraint'],
"File name of a multifurcating constraint tree. "
"this tree does not need to be comprehensive, i.e. "
"contain all taxa.",
filename=True,
equate=False,
),
_Option(['-i', 'rearrangements'],
"Initial rearrangement setting for the subsequent "
"application of topological changes phase.",
equate=False,
),
_Switch(['-j', 'checkpoints'],
"Write checkpoints (intermediate tree topologies)."
),
_Switch(['-k', 'bootstrap_branch_lengths'],
"Print bootstrapped trees with branch lengths. "
"The bootstraps will run a bit longer, because model "
"parameters will be optimized at the end of each run. "
"Use with CATMIX/PROTMIX or GAMMA/GAMMAI."
),
_Option(['-l', 'cluster_threshold'],
"Threshold for sequence similarity clustering. "
"RAxML will then print out an alignment to a file "
"called sequenceFileName.reducedBy.threshold that "
"only contains sequences <= the specified threshold "
"that must be between 0.0 and 1.0. RAxML uses the "
"QT-clustering algorithm to perform this task. "
"In addition, a file called "
"RAxML_reducedList.outputFileName will be written "
"that contains clustering information.",
equate=False,
),
_Option(['-L', 'cluster_threshold_fast'],
"Same functionality as '-l', but uses a less "
"exhaustive and thus faster clustering algorithm. "
"This is intended for very large datasets with more "
"than 20,000-30,000 sequences.",
equate=False,
),
_Option(['-m', 'model'],
"""Model of Nucleotide or Amino Acid Substitution:
NUCLEOTIDES:
GTRCAT : GTR + Optimization of substitution rates + Optimization of site-specific
evolutionary rates which are categorized into numberOfCategories distinct
rate categories for greater computational efficiency
if you do a multiple analysis with '-#' or '-N' but without bootstrapping the program
will use GTRMIX instead
GTRGAMMA : GTR + Optimization of substitution rates + GAMMA model of rate
heterogeneity (alpha parameter will be estimated)
GTRMIX : Inference of the tree under GTRCAT
and thereafter evaluation of the final tree topology under GTRGAMMA
GTRCAT_GAMMA : Inference of the tree with site-specific evolutionary rates.
However, here rates are categorized using the 4 discrete GAMMA rates.
Evaluation of the final tree topology under GTRGAMMA
GTRGAMMAI : Same as GTRGAMMA, but with estimate of proportion of invariable sites
GTRMIXI : Same as GTRMIX, but with estimate of proportion of invariable sites
GTRCAT_GAMMAI : Same as GTRCAT_GAMMA, but with estimate of proportion of invariable sites
AMINO ACIDS:
PROTCATmatrixName[F] : specified AA matrix + Optimization of substitution rates + Optimization of site-specific
evolutionary rates which are categorized into numberOfCategories distinct
rate categories for greater computational efficiency
if you do a multiple analysis with '-#' or '-N' but without bootstrapping the program
will use PROTMIX... instead
PROTGAMMAmatrixName[F] : specified AA matrix + Optimization of substitution rates + GAMMA model of rate
heterogeneity (alpha parameter will be estimated)
PROTMIXmatrixName[F] : Inference of the tree under specified AA matrix + CAT
and thereafter evaluation of the final tree topology under specified AA matrix + GAMMA
PROTCAT_GAMMAmatrixName[F] : Inference of the tree under specified AA matrix and site-specific evolutionary rates.
However, here rates are categorized using the 4 discrete GAMMA rates.
Evaluation of the final tree topology under specified AA matrix + GAMMA
PROTGAMMAImatrixName[F] : Same as PROTGAMMAmatrixName[F], but with estimate of proportion of invariable sites
PROTMIXImatrixName[F] : Same as PROTMIXmatrixName[F], but with estimate of proportion of invariable sites
PROTCAT_GAMMAImatrixName[F] : Same as PROTCAT_GAMMAmatrixName[F], but with estimate of proportion of invariable sites
Available AA substitution models: DAYHOFF, DCMUT, JTT, MTREV, WAG, RTREV, CPREV, VT, BLOSUM62, MTMAM, GTR
With the optional 'F' appendix you can specify if you want to use empirical base frequencies
Please note that for mixed models you can additionally specify the per-gene AA model in
the mixed model file (see manual for details)
""",
equate=False,
),
_Switch(['-M', 'partition_branch_lengths'],
"Switch on estimation of individual per-partition "
"branch lengths. Only has effect when used in "
"combination with 'partition_filename' ('-q'). "
"Branch lengths for individual partitions will be "
"printed to separate files. A weighted average of the "
"branch lengths is computed by using the respective "
"partition lengths. "
),
_Option(['-n', 'name'],
"Name used in the output files.",
filename=True,
equate=False,
),
_Option(['-o', 'outgroup'],
"Name of a single outgroup or a comma-separated list "
"of outgroups, eg '-o Rat' or '-o Rat,Mouse'. In case "
"that multiple outgroups are not monophyletic the "
"first name in the list will be selected as outgroup. "
"Don't leave spaces between taxon names!",
checker_function=lambda x: len(x.split()) == 1,
equate=False,
),
_Option(['-q', 'partition_filename'],
"File name containing the assignment of models to "
"alignment partitions for multiple models of "
"substitution. For the syntax of this file please "
"consult the RAxML manual.",
filename=True,
equate=False,
),
_Option(['-p', 'parsimony_seed'],
"Random number seed for the parsimony inferences. "
"This allows you to reproduce your results and will "
"help developers debug the program. This option HAS "
"NO EFFECT in the parallel MPI version.",
equate=False,
),
_Option(['-P', 'protein_model'],
"File name of a user-defined AA (Protein) substitution "
"model. This file must contain 420 entries, the first "
"400 being the AA substitution rates (this must be a "
"symmetric matrix) and the last 20 are the empirical "
"base frequencies.",
filename=True,
equate=False,
),
_Option(['-r', 'binary_constraint'],
"File name of a binary constraint tree. "
"This tree does not need to be comprehensive, i.e. "
"contain all taxa.",
filename=True,
equate=False,
),
_Option(['-s', 'sequences'],
"Name of the alignment data file, in PHYLIP format.",
filename=True,
equate=False,
),
_Option(['-t', 'starting_tree'],
"File name of a user starting tree, in Newick format.",
filename=True,
equate=False,
),
_Option(['-T', 'threads'],
"Number of threads to run. "
"PTHREADS VERSION ONLY! "
"Make sure to set this at most the number of CPUs "
"you have on your machine, otherwise, there will be "
"a huge performance decrease!",
equate=False,
),
_Option(['-u', 'num_bootstrap_searches'],
"Number of multiple bootstrap searches per replicate. "
"Use this to obtain better ML trees for each "
"replicate. Default: 1 ML search per bootstrap "
"replicate.",
equate=False,
),
_Switch(['-v', 'version'],
"Display version information."
),
_Option(['-w', 'working_dir'],
"Name of the working directory where RAxML will "
"write its output files. Default: current directory.",
filename=True,
),
_Option(['-x', 'rapid_bootstrap_seed'],
"Random seed for rapid bootstrapping.",
equate=False,
),
_Switch(['-y', 'parsimony'],
"Only compute a parsimony starting tree, then exit."
),
_Option(['-z', 'bipartition_filename'],
"Name of a file containing multiple trees, e.g. from "
"a bootstrap run, that shall be used to draw "
"bipartition values onto a tree provided with '-t'. "
"It can also be used to compute per-site log "
"likelihoods in combination with '-f g', and to read "
"a bunch of trees for a couple of other options "
"('-f h', '-f m', '-f n').",
filename=True,
equate=False,
),
_Option(['-N', '-#', 'num_replicates'],
"Number of alternative runs on distinct starting trees. "
"In combination with the '-b' option, this will invoke a "
"multiple boostrap analysis. "
"DEFAULT: 1 single analysis."
"Note that '-N' has been added as an alternative since "
"'-#' sometimes caused problems with certain MPI job "
"submission systems, since '-#' is often used to start "
"comments. "
,
equate=False,
),
]
AbstractCommandline.__init__(self, cmd, **kwargs)
# ENH: enforce -s, -n and -m
if not self.parsimony_seed:
self.parsimony_seed = 10000
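# Hedged usage sketch (not part of the original wrapper, guarded so importing the module
# stays side-effect free): build a command line and print it, mirroring the doctest above.
# The alignment path and run name are placeholders.
if __name__ == "__main__":
    example_cline = RaxmlCommandline(sequences="alignment.phy",
                                     model="PROTCATWAG",
                                     name="example_run")
    # parsimony_seed was not supplied, so __init__ above defaults it to 10000.
    print(example_cline)
    # Expected shape: raxmlHPC -m PROTCATWAG -n example_run -p 10000 -s alignment.phy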
|
|
# -*- coding: utf-8 -*- {{{
# vim: set fenc=utf-8 ft=python sw=4 ts=4 sts=4 et:
#
# Copyright (c) 2015, Battelle Memorial Institute
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of the FreeBSD Project.
#
# This material was prepared as an account of work sponsored by an
# agency of the United States Government. Neither the United States
# Government nor the United States Department of Energy, nor Battelle,
# nor any of their employees, nor any jurisdiction or organization
# that has cooperated in the development of these materials, makes
# any warranty, express or implied, or assumes any legal liability
# or responsibility for the accuracy, completeness, or usefulness or
# any information, apparatus, product, software, or process disclosed,
# or represents that its use would not infringe privately owned rights.
#
# Reference herein to any specific commercial product, process, or
# service by trade name, trademark, manufacturer, or otherwise does
# not necessarily constitute or imply its endorsement, recommendation,
# or favoring by the United States Government or any agency thereof,
# or Battelle Memorial Institute. The views and opinions of authors
# expressed herein do not necessarily state or reflect those of the
# United States Government or any agency thereof.
#
# PACIFIC NORTHWEST NATIONAL LABORATORY
# operated by BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
# under Contract DE-AC05-76RL01830
#}}}
from datetime import datetime, timedelta
import time
from time import strftime
import os
import sys
import httplib, urllib
import json
import requests
import xml.etree.ElementTree as ET
from requests import ConnectionError
from volttron.platform.agent import BaseAgent, PublishMixin, periodic
from volttron.platform.agent import utils, matching
from volttron.platform.messaging import headers as headers_mod, topics
import settings
from settings import DEBUG as DEBUG
requests.adapters.DEFAULT_RETRIES = 5
date_format = "%s000.0"
readable_format = "%m/%d/%Y %H:%M:%S"
def SMDSAgent(config_path, **kwargs):
config = utils.load_config(config_path)
def get_config(name):
try:
    # Prefer a keyword-argument override; fall back to the config file value.
    return kwargs.pop(name)
except KeyError:
    return config[name]
agent_id = get_config('agentid')
time_window_minutes = get_config('time_window_minutes')
rtu_path = {
'campus': get_config('campus'),
'building': get_config('building'),
'unit': get_config('unit'),
}
class Agent(PublishMixin, BaseAgent):
'''This agent grabs a day's worth of data for a Catalyst's Data points
out of the historian. It then sends the data on to an application
in the cloud.
'''
def __init__(self, **kwargs):
super(Agent, self).__init__(**kwargs)
self._raw_air_temp = None
self._raw_fan_speed = None
self._raw_unit_power = None
def setup(self):
self._agent_id = get_config('agentid')
self._service_url = get_config('service_url')
self._provider_id = get_config('provider_id')
self._unit_power_chan = get_config('unit_power_chan')
self._outdoor_temp_chan = get_config('outdoor_temp_chan')
self._fan_supply_chan = get_config('fan_supply_chan')
self._campusid = get_config('campus')
self._buildingid = get_config('building')
self._deviceid = get_config('unit')
# self._time_window_minutes = int(self.config['time_window_minutes'])
self._backlog_hours = get_config('backlog_hours')
# Always call the base class setup()
super(Agent, self).setup()
self.setup_topics()
self._catching_up = True
self._last_update = datetime.now() - timedelta(hours = self._backlog_hours)
self._query_end_time = None
self.publish_requests()
def setup_topics(self):
self.request_temptopic = topics.ARCHIVER_REQUEST(point='OutsideAirTemperature', **rtu_path)
self.request_powertopic = topics.ARCHIVER_REQUEST(point='UnitPower', **rtu_path)
self.request_fantopic = topics.ARCHIVER_REQUEST(point='SupplyFanSpeed', **rtu_path)
@matching.match_headers({headers_mod.TO: agent_id})
@matching.match_exact(topics.ARCHIVER_RESPONSE(point='OutsideAirTemperature', **rtu_path))
def on_temp_response(self, topic, headers, message, match):
'''Method for dealing with temp data from smap'''
if DEBUG:
print "Topic: {topic}, Headers: {headers}, Message: {message}".format(
topic=topic, headers=headers, message=message)
self._raw_air_temp = message[0]
self.go_if_ready()
@matching.match_exact(topics.ARCHIVER_RESPONSE(point='UnitPower', **rtu_path))
@matching.match_headers({headers_mod.TO: agent_id})
def on_unit_power(self, topic, headers, message, match):
'''Method for dealing with power data from smap'''
if DEBUG:
print "Topic: {topic}, Headers: {headers}, Message: {message}".format(
topic=topic, headers=headers, message=message)
self._raw_unit_power = message[0]
self.go_if_ready()
@matching.match_headers({headers_mod.TO: agent_id})
@matching.match_exact(topics.ARCHIVER_RESPONSE(point='SupplyFanSpeed', **rtu_path))
def on_fan_speed(self, topic, headers, message, match):
'''Method for dealing with fan data from smap'''
if DEBUG:
print "Topic: {topic}, Headers: {headers}, Message: {message}".format(
topic=topic, headers=headers, message=message)
self._raw_fan_speed = message[0]
self.go_if_ready()
def go_if_ready(self):
if (self._raw_air_temp != None and self._raw_fan_speed != None and self._raw_unit_power != None):
message = self.convert_raw()
worked = self.post_data(message)
if (worked):
self._raw_air_temp = None
self._raw_fan_speed = None
self._raw_unit_power = None
self._last_update = self._query_end_time
if self._catching_up:
# self.publish_requests
self.timer(1, self.publish_requests)
def make_dataset(self, message, channelid, units):
# The archiver reply arrives as a string-encoded list of (timestamp, value) pairs.
points = eval(message)
values = []
if DEBUG:
    print len(points)
if len(points) >= 1:
    for data in points:
values.append({"Utc": "/Date({})/".format(str(int(data[0]))),
"Val": data[1]})
return {"ChannelId": channelid,
"Units": units,
"Values": values}
def convert_raw(self):
dataset = []
dataset.append(self.make_dataset(self._raw_air_temp,
self._provider_id+"/"+self._outdoor_temp_chan,
"DegreesF"))
dataset.append(self.make_dataset(self._raw_fan_speed,
self._provider_id+"/"+self._fan_supply_chan,
"%"))
dataset.append(self.make_dataset(self._raw_unit_power,
self._provider_id+"/"+self._unit_power_chan,
"kW"))
providerid = self._provider_id
reply = {"ProviderId" : providerid,
"Datalogs" : dataset}
# reply = json.dumps(reply).replace('/','\/')
if DEBUG:
print json.dumps(reply, sort_keys=True,
indent=4, separators=(',', ': '))
return reply
# Periodically get data and push to cloud service
@periodic(time_window_minutes * 60)
def publish_requests(self):
'''Publish lookup requests to the ArchiverAgent
'''
now = datetime.now()
if (now - self._last_update) > timedelta (minutes = time_window_minutes):
self._catching_up = True
self._query_end_time = self._last_update + timedelta (minutes = time_window_minutes)
else:
self._catching_up = False
self._query_end_time = now
# if DEBUG:
#Print a readable time range
start = self._last_update.strftime(readable_format)
end = (self._query_end_time).strftime(readable_format)
print '({start}, {end})'.format(start=start, end=end)
start = self._last_update.strftime(date_format)
end = (self._query_end_time).strftime(date_format)
if DEBUG:
print '({start}, {end})'.format(start=start, end=end)
headers = {headers_mod.FROM: agent_id, headers_mod.TO: 'ArchiverAgent'}
self.publish(self.request_temptopic,headers,'({start}, {end})'.format(start=start, end=end))
self.publish(self.request_powertopic,headers,'({start}, {end})'.format(start=start, end=end))
self.publish(self.request_fantopic,headers,'({start}, {end})'.format(start=start, end=end))
def post_data(self, params):
post_data=json.dumps(params)
headers_post = {'Content-Type': 'application/json',
'User-Agent' : 'RTUNetwork',
'Accept': 'application/json', 'Connection':'close'}
response = None
done = False
tries = 0
while not done and tries < 5:
try:
response = requests.post(self._service_url, data=post_data, headers=headers_post)
done = True
except ConnectionError as e:
print '{}: {}: {}'.format(str(tries), str(e), post_data)
tries += 1
worked = False
if response is not None and response.content:
root = ET.fromstring(response.content)
is_error = root.find('{http://schemas.datacontract.org/2004/07/RestServiceWebRoles}IsError').text
transaction_id = root.find('{http://schemas.datacontract.org/2004/07/RestServiceWebRoles}TransactionId').text
worked = is_error.lower() == 'false' and int(transaction_id) > 0
else:
worked = False
return worked
Agent.__name__ = 'SMDSAgent'
return Agent(**kwargs)
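# Hedged standalone sketch (never called by the agent): how post_data above interprets the
# service reply by pulling IsError and TransactionId out of the XML body. The namespace
# matches the one used in post_data; the payload in the docstring example is made up.
def _parse_service_reply_example(content):
    """Return True when the reply reports no error and a positive transaction id.
    Example with a fabricated payload:
    >>> _parse_service_reply_example(
    ...     '<Reply xmlns="http://schemas.datacontract.org/2004/07/RestServiceWebRoles">'
    ...     '<IsError>false</IsError><TransactionId>17</TransactionId></Reply>')
    True
    """
    ns = '{http://schemas.datacontract.org/2004/07/RestServiceWebRoles}'
    root = ET.fromstring(content)
    is_error = root.find(ns + 'IsError').text
    transaction_id = root.find(ns + 'TransactionId').text
    return is_error.lower() == 'false' and int(transaction_id) > 0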
def main(argv=sys.argv):
'''Main method called by the eggsecutable.'''
utils.default_main(SMDSAgent,
description='SMDS Agent',
argv=argv)
if __name__ == '__main__':
'''Entry point for scripts.'''
try:
sys.exit(main())
except KeyboardInterrupt:
pass
|
|
from __future__ import unicode_literals
import boto
import boto3
from boto.exception import SQSError
from boto.sqs.message import RawMessage, Message
import requests
import sure # noqa
import time
from moto import mock_sqs
from tests.helpers import requires_boto_gte
@mock_sqs
def test_create_queue():
conn = boto.connect_sqs('the_key', 'the_secret')
conn.create_queue("test-queue", visibility_timeout=60)
all_queues = conn.get_all_queues()
all_queues[0].name.should.equal("test-queue")
all_queues[0].get_timeout().should.equal(60)
@mock_sqs
def test_create_queues_in_multiple_region():
west1_conn = boto.sqs.connect_to_region("us-west-1")
west1_conn.create_queue("test-queue")
west2_conn = boto.sqs.connect_to_region("us-west-2")
west2_conn.create_queue("test-queue")
list(west1_conn.get_all_queues()).should.have.length_of(1)
list(west2_conn.get_all_queues()).should.have.length_of(1)
@mock_sqs
def test_get_queue():
conn = boto.connect_sqs('the_key', 'the_secret')
conn.create_queue("test-queue", visibility_timeout=60)
queue = conn.get_queue("test-queue")
queue.name.should.equal("test-queue")
queue.get_timeout().should.equal(60)
nonexisting_queue = conn.get_queue("nonexisting_queue")
nonexisting_queue.should.be.none
@mock_sqs
def test_get_queue_with_prefix():
conn = boto.connect_sqs('the_key', 'the_secret')
conn.create_queue("prefixa-queue")
conn.create_queue("prefixb-queue")
conn.create_queue("test-queue")
conn.get_all_queues().should.have.length_of(3)
queue = conn.get_all_queues("test-")
queue.should.have.length_of(1)
queue[0].name.should.equal("test-queue")
@mock_sqs
def test_delete_queue():
conn = boto.connect_sqs('the_key', 'the_secret')
queue = conn.create_queue("test-queue", visibility_timeout=60)
conn.get_all_queues().should.have.length_of(1)
queue.delete()
conn.get_all_queues().should.have.length_of(0)
queue.delete.when.called_with().should.throw(SQSError)
@mock_sqs
def test_set_queue_attribute():
conn = boto.connect_sqs('the_key', 'the_secret')
conn.create_queue("test-queue", visibility_timeout=60)
queue = conn.get_all_queues()[0]
queue.get_timeout().should.equal(60)
queue.set_attribute("VisibilityTimeout", 45)
queue = conn.get_all_queues()[0]
queue.get_timeout().should.equal(45)
@mock_sqs
def test_send_message():
conn = boto.connect_sqs('the_key', 'the_secret')
queue = conn.create_queue("test-queue", visibility_timeout=60)
queue.set_message_class(RawMessage)
body_one = 'this is a test message'
body_two = 'this is another test message'
queue.write(queue.new_message(body_one))
queue.write(queue.new_message(body_two))
messages = conn.receive_message(queue, number_messages=2)
messages[0].get_body().should.equal(body_one)
messages[1].get_body().should.equal(body_two)
@mock_sqs
def test_send_message_with_xml_characters():
conn = boto.connect_sqs('the_key', 'the_secret')
queue = conn.create_queue("test-queue", visibility_timeout=60)
queue.set_message_class(RawMessage)
body_one = '< & >'
queue.write(queue.new_message(body_one))
messages = conn.receive_message(queue, number_messages=1)
messages[0].get_body().should.equal(body_one)
@requires_boto_gte("2.28")
@mock_sqs
def test_send_message_with_attributes():
conn = boto.connect_sqs('the_key', 'the_secret')
queue = conn.create_queue("test-queue", visibility_timeout=60)
queue.set_message_class(RawMessage)
body = 'this is a test message'
message = queue.new_message(body)
message_attributes = {
'test.attribute_name': {'data_type': 'String', 'string_value': 'attribute value'},
'test.binary_attribute': {'data_type': 'Binary', 'binary_value': 'binary value'},
'test.number_attribute': {'data_type': 'Number', 'string_value': 'string value'}
}
message.message_attributes = message_attributes
queue.write(message)
messages = conn.receive_message(queue)
messages[0].get_body().should.equal(body)
for name, value in message_attributes.items():
dict(messages[0].message_attributes[name]).should.equal(value)
@mock_sqs
def test_send_message_with_delay():
conn = boto.connect_sqs('the_key', 'the_secret')
queue = conn.create_queue("test-queue", visibility_timeout=60)
queue.set_message_class(RawMessage)
body_one = 'this is a test message'
body_two = 'this is another test message'
queue.write(queue.new_message(body_one), delay_seconds=60)
queue.write(queue.new_message(body_two))
queue.count().should.equal(1)
messages = conn.receive_message(queue, number_messages=2)
assert len(messages) == 1
message = messages[0]
assert message.get_body().should.equal(body_two)
queue.count().should.equal(0)
@mock_sqs
def test_message_becomes_inflight_when_received():
conn = boto.connect_sqs('the_key', 'the_secret')
queue = conn.create_queue("test-queue", visibility_timeout=2)
queue.set_message_class(RawMessage)
body_one = 'this is a test message'
queue.write(queue.new_message(body_one))
queue.count().should.equal(1)
messages = conn.receive_message(queue, number_messages=1)
queue.count().should.equal(0)
assert len(messages) == 1
# Wait
time.sleep(3)
queue.count().should.equal(1)
@mock_sqs
def test_change_message_visibility():
conn = boto.connect_sqs('the_key', 'the_secret')
queue = conn.create_queue("test-queue", visibility_timeout=2)
queue.set_message_class(RawMessage)
body_one = 'this is another test message'
queue.write(queue.new_message(body_one))
queue.count().should.equal(1)
messages = conn.receive_message(queue, number_messages=1)
assert len(messages) == 1
queue.count().should.equal(0)
messages[0].change_visibility(2)
# Wait
time.sleep(1)
# Message is not visible
queue.count().should.equal(0)
time.sleep(2)
# Message now becomes visible
queue.count().should.equal(1)
messages = conn.receive_message(queue, number_messages=1)
messages[0].delete()
queue.count().should.equal(0)
@mock_sqs
def test_message_attributes():
conn = boto.connect_sqs('the_key', 'the_secret')
queue = conn.create_queue("test-queue", visibility_timeout=2)
queue.set_message_class(RawMessage)
body_one = 'this is another test message'
queue.write(queue.new_message(body_one))
queue.count().should.equal(1)
messages = conn.receive_message(queue, number_messages=1)
queue.count().should.equal(0)
assert len(messages) == 1
message_attributes = messages[0].attributes
assert message_attributes.get('ApproximateFirstReceiveTimestamp')
assert int(message_attributes.get('ApproximateReceiveCount')) == 1
assert message_attributes.get('SentTimestamp')
assert message_attributes.get('SenderId')
@mock_sqs
def test_read_message_from_queue():
conn = boto.connect_sqs()
queue = conn.create_queue('testqueue')
queue.set_message_class(RawMessage)
body = 'foo bar baz'
queue.write(queue.new_message(body))
message = queue.read(1)
message.get_body().should.equal(body)
@mock_sqs
def test_queue_length():
conn = boto.connect_sqs('the_key', 'the_secret')
queue = conn.create_queue("test-queue", visibility_timeout=60)
queue.set_message_class(RawMessage)
queue.write(queue.new_message('this is a test message'))
queue.write(queue.new_message('this is another test message'))
queue.count().should.equal(2)
@mock_sqs
def test_delete_message():
conn = boto.connect_sqs('the_key', 'the_secret')
queue = conn.create_queue("test-queue", visibility_timeout=60)
queue.set_message_class(RawMessage)
queue.write(queue.new_message('this is a test message'))
queue.write(queue.new_message('this is another test message'))
queue.count().should.equal(2)
messages = conn.receive_message(queue, number_messages=1)
assert len(messages) == 1
messages[0].delete()
queue.count().should.equal(1)
messages = conn.receive_message(queue, number_messages=1)
assert len(messages) == 1
messages[0].delete()
queue.count().should.equal(0)
@mock_sqs
def test_send_batch_operation():
conn = boto.connect_sqs('the_key', 'the_secret')
queue = conn.create_queue("test-queue", visibility_timeout=60)
# See https://github.com/boto/boto/issues/831
queue.set_message_class(RawMessage)
queue.write_batch([
("my_first_message", 'test message 1', 0),
("my_second_message", 'test message 2', 0),
("my_third_message", 'test message 3', 0),
])
messages = queue.get_messages(3)
messages[0].get_body().should.equal("test message 1")
# Test that pulling more messages doesn't break anything
messages = queue.get_messages(2)
@requires_boto_gte("2.28")
@mock_sqs
def test_send_batch_operation_with_message_attributes():
conn = boto.connect_sqs('the_key', 'the_secret')
queue = conn.create_queue("test-queue", visibility_timeout=60)
queue.set_message_class(RawMessage)
message_tuple = ("my_first_message", 'test message 1', 0, {'name1': {'data_type': 'String', 'string_value': 'foo'}})
queue.write_batch([message_tuple])
messages = queue.get_messages()
messages[0].get_body().should.equal("test message 1")
for name, value in message_tuple[3].items():
dict(messages[0].message_attributes[name]).should.equal(value)
@mock_sqs
def test_delete_batch_operation():
conn = boto.connect_sqs('the_key', 'the_secret')
queue = conn.create_queue("test-queue", visibility_timeout=60)
conn.send_message_batch(queue, [
("my_first_message", 'test message 1', 0),
("my_second_message", 'test message 2', 0),
("my_third_message", 'test message 3', 0),
])
messages = queue.get_messages(2)
queue.delete_message_batch(messages)
queue.count().should.equal(1)
@mock_sqs
def test_sqs_method_not_implemented():
requests.post.when.called_with("https://sqs.amazonaws.com/?Action=[foobar]").should.throw(NotImplementedError)
@mock_sqs
def test_queue_attributes():
conn = boto.connect_sqs('the_key', 'the_secret')
queue_name = 'test-queue'
visibility_timeout = 60
queue = conn.create_queue(queue_name, visibility_timeout=visibility_timeout)
attributes = queue.get_attributes()
attributes['QueueArn'].should.look_like(
'arn:aws:sqs:sqs.us-east-1:123456789012:%s' % queue_name)
attributes['VisibilityTimeout'].should.look_like(str(visibility_timeout))
attribute_names = queue.get_attributes().keys()
attribute_names.should.contain('ApproximateNumberOfMessagesNotVisible')
attribute_names.should.contain('MessageRetentionPeriod')
attribute_names.should.contain('ApproximateNumberOfMessagesDelayed')
attribute_names.should.contain('MaximumMessageSize')
attribute_names.should.contain('CreatedTimestamp')
attribute_names.should.contain('ApproximateNumberOfMessages')
attribute_names.should.contain('ReceiveMessageWaitTimeSeconds')
attribute_names.should.contain('DelaySeconds')
attribute_names.should.contain('VisibilityTimeout')
attribute_names.should.contain('LastModifiedTimestamp')
attribute_names.should.contain('QueueArn')
@mock_sqs
def test_change_message_visibility_on_invalid_receipt():
conn = boto.connect_sqs('the_key', 'the_secret')
queue = conn.create_queue("test-queue", visibility_timeout=1)
queue.set_message_class(RawMessage)
queue.write(queue.new_message('this is another test message'))
queue.count().should.equal(1)
messages = conn.receive_message(queue, number_messages=1)
assert len(messages) == 1
original_message = messages[0]
queue.count().should.equal(0)
time.sleep(2)
queue.count().should.equal(1)
messages = conn.receive_message(queue, number_messages=1)
assert len(messages) == 1
original_message.change_visibility.when.called_with(100).should.throw(SQSError)
@mock_sqs
def test_change_message_visibility_on_visible_message():
conn = boto.connect_sqs('the_key', 'the_secret')
queue = conn.create_queue("test-queue", visibility_timeout=1)
queue.set_message_class(RawMessage)
queue.write(queue.new_message('this is another test message'))
queue.count().should.equal(1)
messages = conn.receive_message(queue, number_messages=1)
assert len(messages) == 1
original_message = messages[0]
queue.count().should.equal(0)
time.sleep(2)
queue.count().should.equal(1)
original_message.change_visibility.when.called_with(100).should.throw(SQSError)
@mock_sqs
def test_purge_action():
conn = boto.sqs.connect_to_region("us-east-1")
queue = conn.create_queue('new-queue')
queue.write(queue.new_message('this is another test message'))
queue.count().should.equal(1)
queue.purge()
queue.count().should.equal(0)
@mock_sqs
def test_delete_message_after_visibility_timeout():
VISIBILITY_TIMEOUT = 1
conn = boto.sqs.connect_to_region("us-east-1")
new_queue = conn.create_queue('new-queue', visibility_timeout=VISIBILITY_TIMEOUT)
m1 = Message()
m1.set_body('Message 1!')
new_queue.write(m1)
assert new_queue.count() == 1
m1_retrieved = new_queue.read()
time.sleep(VISIBILITY_TIMEOUT + 1)
m1_retrieved.delete()
assert new_queue.count() == 0
"""
boto3
"""
@mock_sqs
def test_boto3_message_send():
sqs = boto3.resource('sqs', region_name='us-east-1')
queue = sqs.create_queue(QueueName="blah")
queue.send_message(MessageBody="derp")
messages = queue.receive_messages()
messages.should.have.length_of(1)
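# Hedged addition (not part of the original suite): a boto3 receive/delete round trip for
# the same kind of mocked queue as the test above, using only documented boto3 SQS
# resource calls. The queue name and message body are arbitrary.
@mock_sqs
def test_boto3_receive_and_delete_sketch():
    sqs = boto3.resource('sqs', region_name='us-east-1')
    queue = sqs.create_queue(QueueName="sketch-queue")
    queue.send_message(MessageBody="derp")
    messages = queue.receive_messages(MaxNumberOfMessages=1)
    messages.should.have.length_of(1)
    messages[0].body.should.equal("derp")
    messages[0].delete()
    # Once the only message is deleted, a further receive should return nothing.
    queue.receive_messages().should.have.length_of(0)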
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from selenium.webdriver.common import by
import selenium.webdriver.support.ui as Support
from openstack_dashboard.test.integration_tests.regions import baseregion
from openstack_dashboard.test.integration_tests.regions import exceptions
from openstack_dashboard.test.integration_tests.regions import menus
class FieldFactory(baseregion.BaseRegion):
"""Factory for creating form field objects."""
FORM_FIELDS_TYPES = set()
def make_form_field(self):
for form_type in self.FORM_FIELDS_TYPES:
if self._is_element_present(*form_type._element_locator):
return form_type(self.driver, self.conf, self.src_elem)
raise exceptions.UnknownFormFieldTypeException()
@classmethod
def register_field_cls(cls, field_class, base_classes=None):
"""Register new field class.
Add the new field class and remove all of its base classes from the
set of registered classes, since only leaf classes should be offered
by the factory.
"""
cls.FORM_FIELDS_TYPES.add(field_class)
cls.FORM_FIELDS_TYPES -= set(base_classes)
class MetaBaseFormFieldRegion(type):
"""Register form field class in FieldFactory."""
def __init__(cls, name, bases, dct):
FieldFactory.register_field_cls(cls, bases)
super(MetaBaseFormFieldRegion, cls).__init__(name, bases, dct)
@six.add_metaclass(MetaBaseFormFieldRegion)
class BaseFormFieldRegion(baseregion.BaseRegion):
"""Base class for form fields classes."""
_label_locator = None
_element_locator = None
@property
def label(self):
return self._get_element(*self._label_locator)
@property
def element(self):
return self._get_element(*self._element_locator)
@property
def name(self):
return self.element.get_attribute('name')
def is_required(self):
classes = self.element.get_attribute('class')
return 'required' in classes
def is_displayed(self):
return self.element.is_displayed()
class CheckBoxFormFieldRegion(BaseFormFieldRegion):
"""Checkbox field."""
_element_locator = (by.By.CSS_SELECTOR,
'div > label > input[type=checkbox]')
def is_marked(self):
return self.element.is_selected()
def mark(self):
if not self.is_marked():
self.element.click()
def unmark(self):
if self.is_marked():
self.element.click()
class ProjectPageCheckBoxFormFieldRegion(BaseFormFieldRegion):
"""Checkbox field for Project-page."""
_element_locator = (by.By.CSS_SELECTOR,
'div > input[type=checkbox]')
def is_marked(self):
return self.element.is_selected()
def mark(self):
if not self.is_marked():
self.element.click()
def unmark(self):
if self.is_marked():
self.element.click()
class ChooseFileFormFieldRegion(BaseFormFieldRegion):
"""Choose file field."""
_element_locator = (by.By.CSS_SELECTOR, 'div > input[type=file]')
def choose(self, path):
self.element.send_keys(path)
class BaseTextFormFieldRegion(BaseFormFieldRegion):
_element_locator = None
@property
def text(self):
return self.element.text
@text.setter
def text(self, text):
self._fill_field_element(text, self.element)
class TextInputFormFieldRegion(BaseTextFormFieldRegion):
"""Text input box."""
_element_locator = (by.By.CSS_SELECTOR, 'div > input[type=text],'
'div > input[type=None]')
class FileInputFormFieldRegion(BaseFormFieldRegion):
"""Text input box."""
_element_locator = (by.By.CSS_SELECTOR, 'div > input[type=file]')
@property
def path(self):
return self.element.text
@path.setter
def path(self, path):
# clear does not work on this kind of element
# because it is not user editable
self.element.send_keys(path)
class PasswordInputFormFieldRegion(BaseTextFormFieldRegion):
"""Password text input box."""
_element_locator = (by.By.CSS_SELECTOR, 'div > input[type=password]')
class EmailInputFormFieldRegion(BaseTextFormFieldRegion):
"""Email text input box."""
_element_locator = (by.By.ID, 'id_email')
class TextAreaFormFieldRegion(BaseTextFormFieldRegion):
"""Multi-line text input box."""
_element_locator = (by.By.CSS_SELECTOR, 'div > textarea')
class IntegerFormFieldRegion(BaseFormFieldRegion):
"""Integer input box."""
_element_locator = (by.By.CSS_SELECTOR, 'div > input[type=number]')
@property
def value(self):
return self.element.get_attribute("value")
@value.setter
def value(self, value):
self._fill_field_element(value, self.element)
class SelectFormFieldRegion(BaseFormFieldRegion):
"""Select box field."""
_element_locator = (by.By.CSS_SELECTOR, 'div > select')
def is_displayed(self):
return self.element._el.is_displayed()
@property
def element(self):
select = self._get_element(*self._element_locator)
return Support.Select(select)
@property
def values(self):
results = []
for option in self.element.all_selected_options:
results.append(option.get_attribute('value'))
return results
@property
def name(self):
return self.element._el.get_attribute('name')
@property
def text(self):
return self.element.first_selected_option.text
@text.setter
def text(self, text):
self.element.select_by_visible_text(text)
@property
def value(self):
return self.element.first_selected_option.get_attribute('value')
@value.setter
def value(self, value):
self.element.select_by_value(value)
class BaseFormRegion(baseregion.BaseRegion):
"""Base class for forms."""
_submit_locator = (by.By.CSS_SELECTOR, '*.btn.btn-primary')
_cancel_locator = (by.By.CSS_SELECTOR, '*.btn.cancel')
_default_form_locator = (by.By.CSS_SELECTOR, 'div.modal-dialog')
def __init__(self, driver, conf, src_elem=None):
"""In most cases forms can be located through _default_form_locator,
so specifying source element can be skipped.
"""
if src_elem is None:
# a temporary self.src_elem must be set so that self._get_element can work
self.src_elem = driver
src_elem = self._get_element(*self._default_form_locator)
super(BaseFormRegion, self).__init__(driver, conf, src_elem)
@property
def submit(self):
return self._get_element(*self._submit_locator)
@property
def cancel(self):
return self._get_element(*self._cancel_locator)
class FormRegion(BaseFormRegion):
"""Standard form."""
_header_locator = (by.By.CSS_SELECTOR, 'div.modal-header > h3')
_side_info_locator = (by.By.CSS_SELECTOR, 'div.right')
_fields_locator = (by.By.CSS_SELECTOR, 'fieldset > div.form-group')
_input_locator = (by.By.CSS_SELECTOR, 'input,select,textarea')
# private methods
def __init__(self, driver, conf, src_elem, form_field_names):
super(FormRegion, self).__init__(driver, conf, src_elem)
self.form_field_names = form_field_names
self._init_form_fields()
# protected methods
def _init_form_fields(self):
self._init_dynamic_properties(self.form_field_names,
self._get_form_fields)
def _get_form_fields(self):
fields_els = self._get_elements(*self._fields_locator)
form_fields = {}
try:
self._turn_off_implicit_wait()
for elem in fields_els:
field_factory = FieldFactory(self.driver, self.conf, elem)
field = field_factory.make_form_field()
form_fields[field.name] = field
finally:
self._turn_on_implicit_wait()
return form_fields
def set_field_values(self, data):
"""Set fields values
data - {field_name: field_value, field_name: field_value ...}
"""
for field_name in data:
field = getattr(self, field_name, None)
# The requested form field does not exist
if field is None:
raise AttributeError("Unknown form field name.")
value = data[field_name]
# if None - default value is left in field
if value is not None:
# all text fields
if hasattr(field, "text"):
field.text = value
# file upload field
elif hasattr(field, "path"):
field.path = value
# integers fields
elif hasattr(field, "value"):
field.value = value
# properties
@property
def header(self):
"""Form header."""
return self._get_element(*self._header_locator)
@property
def sideinfo(self):
"""Right part of form, usually contains description."""
return self._get_element(*self._side_info_locator)
@property
def fields(self):
"""List of all fields that form contains."""
return self._get_form_fields()
class TabbedFormRegion(FormRegion):
"""Forms that are divided with tabs.
As example is taken form under the
the Project/Network/Networks/Create Network, on initialization form needs
to have form field names divided into tuples, that represents the tabs
and the fields located under them.
Usage:
form_field_names = (("network_name", "admin_state"),
("create_subnet", "subnet_name", "network_address",
"ip_version", "gateway_ip", "disable_gateway"),
("enable_dhcp", "allocation_pools", "dns_name_servers",
"host_routes"))
form = TabbedFormRegion(self.driver, self.conf, None, form_field_names)
form.network_name.text = "test_network_name"
"""
_submit_locator = (by.By.CSS_SELECTOR, '*.btn.btn-primary[type=submit]')
_side_info_locator = (by.By.CSS_SELECTOR, "td.help_text")
_fields_locator = (by.By.CSS_SELECTOR, "div.form-group")
class GetFieldsMethod(object):
def __init__(self, get_fields_method, tab_index, switch_tab_method):
self.get_fields = get_fields_method
self.tab_index = tab_index
self.switch_to_tab = switch_tab_method
def __call__(self, *args, **kwargs):
self.switch_to_tab(self.tab_index)
fields = self.get_fields()
if isinstance(fields, dict):
return dict((key, field) for (key, field)
            in fields.items() if field.is_displayed())
else:
return [field for field in fields if field.is_displayed()]
@property
def tabs(self):
return menus.TabbedMenuRegion(self.driver, self.conf)
def _init_form_fields(self):
for index, tab_names in enumerate(self.form_field_names):
get_fields = self.GetFieldsMethod(self._get_form_fields, index,
self.tabs.switch_to)
self._init_dynamic_properties(tab_names, get_fields)
class DateFormRegion(BaseFormRegion):
"""Form that queries data to table that is regularly below the form,
typical example is located on Project/Compute/Overview page.
"""
_from_field_locator = (by.By.CSS_SELECTOR, 'input#id_start')
_to_field_locator = (by.By.CSS_SELECTOR, 'input#id_end')
@property
def from_date(self):
return self._get_element(*self._from_field_locator)
@property
def to_date(self):
return self._get_element(*self._to_field_locator)
def query(self, start, end):
self._set_from_field(start)
self._set_to_field(end)
self.submit.click()
def _set_from_field(self, value):
self._fill_field_element(value, self.from_date)
def _set_to_field(self, value):
self._fill_field_element(value, self.to_date)
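# Hedged standalone sketch of the registration pattern implemented by FieldFactory and
# MetaBaseFormFieldRegion above: each concrete field class registers itself and displaces
# its bases, so only leaf classes remain candidates for make_form_field(). The _Sketch*
# names are illustrative only and are not used elsewhere in this module.
_SKETCH_REGISTRY = set()
class _SketchMeta(type):
    def __init__(cls, name, bases, dct):
        _SKETCH_REGISTRY.add(cls)
        _SKETCH_REGISTRY.difference_update(bases)
        super(_SketchMeta, cls).__init__(name, bases, dct)
@six.add_metaclass(_SketchMeta)
class _SketchBaseField(object):
    pass
class _SketchCheckboxField(_SketchBaseField):
    pass
# At this point _SKETCH_REGISTRY == {_SketchCheckboxField}: the base class has been
# displaced by its subclass, mirroring FieldFactory.register_field_cls.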
|
|
#!/usr/bin/env python
# Standard library imports
import argparse
import datetime
import gzip
import json
import logging
import os
import pickle
import sys
from collections import OrderedDict
from functools import partial
from itertools import count, groupby
from urllib.request import Request, urlopen
# External imports
import dateutil.parser
import dateutil.tz
logging.basicConfig(level=logging.INFO)
API_PARAMS = {
'base_url': 'https://api.github.com/repos',
'owner': 'bokeh',
'repo': 'bokeh',
}
IGNORE_ISSUE_TYPE = {
'type: discussion',
'type: tracker',
}
LOG_SECTION = OrderedDict([ # issue type label -> log section heading
('type: bug', 'bugfixes'),
('type: feature', 'features'),
('type: task', 'tasks'),
])
ISSUES_SORT_KEY = lambda issue: (issue_section_order(issue), int(issue['number']))
ISSUES_BY_SECTION = lambda issue: issue_section(issue)
#######################################
# Object Storage
#######################################
def save_object(filename, obj):
"""Compresses and pickles given object to the given filename."""
logging.info('saving {}...'.format(filename))
try:
with gzip.GzipFile(filename, 'wb') as f:
f.write(pickle.dumps(obj, 1))
except Exception as e:
logging.error('save failure: {}'.format(e))
raise
def load_object(filename):
"""Unpickles and decompresses the given filename and returns the created object."""
logging.info('loading {}...'.format(filename))
try:
with gzip.GzipFile(filename, 'rb') as f:
buf = b''
while True:
    data = f.read()
    if not data:
        break
    buf += data
return pickle.loads(buf)
except Exception as e:
logging.error('load failure: {}'.format(e))
raise
#######################################
# Issues
#######################################
def issue_section_order(issue):
"""Returns the section order for the given issue."""
try:
return list(LOG_SECTION.values()).index(issue_section(issue))
except Exception:
return -1
def issue_completed(issue):
"""Returns True iff this issue is has been resolved as completed."""
labels = issue.get('labels', [])
return any(label['name'] == 'reso: completed' for label in labels)
def issue_section(issue):
"""Returns the section heading for the issue, or None if this issue should be ignored."""
labels = issue.get('labels', [])
for label in labels:
if not label['name'].startswith('type: '):
continue
if label['name'] in LOG_SECTION:
return LOG_SECTION[label['name']]
elif label['name'] in IGNORE_ISSUE_TYPE:
return None
else:
logging.warning('unknown issue type: "{}" for: {}'.format(label['name'], issue_line(issue)))
return None
def issue_tags(issue):
"""Returns list of tags for this issue."""
labels = issue.get('labels', [])
return [label['name'].replace('tag: ', '') for label in labels if label['name'].startswith('tag: ')]
def closed_issue(issue, after=None):
"""Returns True iff this issue was closed after given date. If after not given, only checks if issue is closed."""
if issue['state'] == 'closed':
if after is None or parse_timestamp(issue['closed_at']) > after:
return True
return False
def relevent_issue(issue, after):
"""Returns True iff this issue is something we should show in the changelog."""
return (closed_issue(issue, after) and
issue_completed(issue) and
issue_section(issue))
def relevant_issues(issues, after):
"""Yields relevant closed issues (closed after a given datetime) given a list of issues."""
logging.info('finding relevant issues after {}...'.format(after))
seen = set()
for issue in issues:
if relevent_issue(issue, after) and issue['title'] not in seen:
seen.add(issue['title'])
yield issue
def closed_issues(issues, after):
"""Yields closed issues (closed after a given datetime) given a list of issues."""
logging.info('finding closed issues after {}...'.format(after))
seen = set()
for issue in issues:
if closed_issue(issue, after) and issue['title'] not in seen:
seen.add(issue['title'])
yield issue
def all_issues(issues):
"""Yields unique set of issues given a list of issues."""
logging.info('finding issues...')
seen = set()
for issue in issues:
if issue['title'] not in seen:
seen.add(issue['title'])
yield issue
#######################################
# GitHub API
#######################################
def get_labels_url():
"""Returns github API URL for querying labels."""
return '{base_url}/{owner}/{repo}/labels'.format(**API_PARAMS)
def get_issues_url(page, after):
"""Returns github API URL for querying tags."""
template = '{base_url}/{owner}/{repo}/issues?state=closed&per_page=100&page={page}&since={after}'
return template.format(page=page, after=after.isoformat(), **API_PARAMS)
def get_tags_url():
"""Returns github API URL for querying tags."""
return '{base_url}/{owner}/{repo}/tags'.format(**API_PARAMS)
def parse_timestamp(timestamp):
"""Parse ISO8601 timestamps given by github API."""
dt = dateutil.parser.parse(timestamp)
return dt.astimezone(dateutil.tz.tzutc())
def read_url(url):
"""Reads given URL as JSON and returns data as loaded python object."""
logging.debug('reading {url} ...'.format(url=url))
token = os.environ.get("BOKEH_GITHUB_API_TOKEN")
headers = {}
if token:
headers['Authorization'] = 'token %s' % token
request = Request(url, headers=headers)
response = urlopen(request).read()
return json.loads(response.decode("UTF-8"))
def query_tags():
"""Hits the github API for repository tags and returns the data."""
return read_url(get_tags_url())
def query_issues(page, after):
"""Hits the github API for a single page of closed issues and returns the data."""
return read_url(get_issues_url(page, after))
def query_all_issues(after):
"""Hits the github API for all closed issues after the given date, returns the data."""
page = count(1)
data = []
while True:
page_data = query_issues(next(page), after)
if not page_data:
break
data.extend(page_data)
return data
def dateof(tag_name, tags):
"""Given a list of tags, returns the datetime of the tag with the given name; Otherwise None."""
for tag in tags:
if tag['name'] == tag_name:
commit = read_url(tag['commit']['url'])
return parse_timestamp(commit['commit']['committer']['date'])
return None
def get_data(query_func, load_data=False, save_data=False):
"""Gets data from query_func, optionally saving that data to a file; or loads data from a file."""
if hasattr(query_func, '__name__'):
func_name = query_func.__name__
elif hasattr(query_func, 'func'):
func_name = query_func.func.__name__
pickle_file = '{}.pickle'.format(func_name)
if load_data:
data = load_object(pickle_file)
else:
data = query_func()
if save_data:
save_object(pickle_file, data)
return data
#######################################
# Validation
#######################################
def check_issue(issue, after):
have_warnings = False
labels = issue.get('labels', [])
if 'pull_request' in issue:
if not any(label['name'].startswith('status: ') for label in labels):
logging.warning('pull request without status label: {}'.format(issue_line(issue)))
have_warnings = True
else:
if not any(label['name'].startswith('type: ') for label in labels):
if not any(label['name']=="reso: duplicate" for label in labels):
logging.warning('issue with no type label: {}'.format(issue_line((issue))))
have_warnings = True
if closed_issue(issue, after):
if not any(label['name'].startswith('reso: ') for label in labels):
if not any(label['name'] in IGNORE_ISSUE_TYPE for label in labels):
logging.warning('closed issue with no reso label: {}'.format(issue_line(issue)))
have_warnings = True
return have_warnings
def check_issues(issues, after=None):
"""Checks issues for BEP 1 compliance."""
issues = closed_issues(issues, after) if after else all_issues(issues)
issues = sorted(issues, key=ISSUES_SORT_KEY)
have_warnings = False
for section, issue_group in groupby(issues, key=ISSUES_BY_SECTION):
for issue in issue_group:
have_warnings |= check_issue(issue, after)
return have_warnings
#######################################
# Changelog
#######################################
def issue_line(issue):
"""Returns log line for given issue."""
template = '#{number} {tags}{title}'
tags = issue_tags(issue)
params = {
'title': issue['title'].capitalize().rstrip('.'),
'number': issue['number'],
'tags': ' '.join('[{}]'.format(tag) for tag in tags) + (' ' if tags else '')
}
return template.format(**params)
def generate_changelog(issues, after, heading, rtag=False):
"""Prints out changelog."""
relevent = relevant_issues(issues, after)
relevent = sorted(relevent, key=ISSUES_BY_SECTION)
def write(func, endofline="", append=""):
func(heading + '\n' + '-' * 20 + endofline)
for section, issue_group in groupby(relevent, key=ISSUES_BY_SECTION):
func(' * {}:'.format(section) + endofline)
for issue in reversed(list(issue_group)):
func(' - {}'.format(issue_line(issue)) + endofline)
func(endofline + append)
if rtag is not False:
with open("../CHANGELOG", "r+") as f:
content = f.read()
f.seek(0)
write(f.write, '\n', content)
else:
write(print)
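# Hedged illustration (not called anywhere in this script): one way a caller might combine
# the helpers above to print just the bugfix entries, mirroring the grouping loop inside
# generate_changelog. The 'bugfixes' heading comes from LOG_SECTION above.
def example_print_bugfix_section(issues, after):
    """Print only the bugfix entries for issues closed after `after` (a sketch)."""
    ordered = sorted(relevant_issues(issues, after), key=ISSUES_BY_SECTION)
    for section, issue_group in groupby(ordered, key=ISSUES_BY_SECTION):
        if section == 'bugfixes':
            for issue in issue_group:
                print(' - {}'.format(issue_line(issue)))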
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Creates a bokeh changelog using the github API.')
limit_group = parser.add_mutually_exclusive_group(required=True)
limit_group.add_argument('-d', '--since-date', metavar='DATE',
help='select issues that occurred after the given ISO8601 date')
limit_group.add_argument('-p', '--since-tag', metavar='TAG',
help='select issues that occurred after the given git tag')
parser.add_argument('-c', '--check', action='store_true', default=False,
help='check closed issues for BEP 1 compliance')
parser.add_argument('-r', '--release-tag', metavar='RELEASE',
help='the proposed new release tag.\n'
'NOTE: this will automatically write the output to the CHANGELOG')
data_group = parser.add_mutually_exclusive_group()
data_group.add_argument('-s', '--save-data', action='store_true', default=False,
help='save api query result data; useful for testing')
data_group.add_argument('-l', '--load-data', action='store_true', default=False,
help='load api data from previously saved data; useful for testing')
args = parser.parse_args()
if args.since_tag:
tags = get_data(query_tags, load_data=args.load_data, save_data=args.save_data)
after = dateof(args.since_tag, tags)
heading = 'Since {:>14}:'.format(args.since_tag)
elif args.since_date:
after = dateutil.parser.parse(args.since_date)
after = after.replace(tzinfo=dateutil.tz.tzlocal())
heading = 'Since {:>14}:'.format(after.date().isoformat())
issues = get_data(partial(query_all_issues, after), load_data=args.load_data, save_data=args.save_data)
if args.check:
have_warnings = check_issues(issues)
if have_warnings:
sys.exit(1)
sys.exit(0)
if args.release_tag:
heading = '{} {:>8}:'.format(str(datetime.date.today()), args.release_tag)
generate_changelog(issues, after, heading, args.release_tag)
else:
generate_changelog(issues, after, heading)
|
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'CopyContract.check_result_info'
db.add_column(u'build_copycontract', 'check_result_info',
self.gf('django.db.models.fields.files.FileField')(max_length=100, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'CopyContract.check_result_info'
db.delete_column(u'build_copycontract', 'check_result_info')
models = {
'build.building': {
'Meta': {'object_name': 'Building'},
'address': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'approve_status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'area': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'area_cmp': ('django.db.models.fields.IntegerField', [], {'default': '1', 'null': 'True'}),
'build_state': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'build_year': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'cad_num': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'db_index': 'True'}),
'cad_passport': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'cad_sum': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'clinic': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'complete_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'contract': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['build.Contract']", 'null': 'True', 'blank': 'True'}),
'developer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Developer']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'driveway_num': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'driveways': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'electric_supply': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'entrance_door': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'finish_year': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2018, 12, 31, 0, 0)'}),
'flat_num': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'flats_amount': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'floor': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'floors': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'gas_supply': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'hallway': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Hallway']", 'null': 'True', 'blank': 'True'}),
'heating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'hot_water_supply': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'internal_doors': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'is_balcony': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_clother_drying': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_dustbin_area': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_heat_boiler': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_intercom': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_loggia': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_parking': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_playground': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_routes': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_water_boiler': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'kindergarden': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'kitchen': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Kitchen']", 'null': 'True', 'blank': 'True'}),
'market': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'mo': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mo.MO']"}),
'mo_fond_doc_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'mo_fond_doc_num': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'ownership_doc_num': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'ownership_num': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
'ownership_year': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'payment_perspective': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'planing_floor': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'public_transport': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'readiness': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'room': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Room']", 'null': 'True', 'blank': 'True'}),
'school': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'start_year': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 1, 1, 0, 0)'}),
'state': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'water_removal': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'water_settlement': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'wc': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.WC']", 'null': 'True', 'blank': 'True'}),
'window_constructions': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
},
'build.contract': {
'Meta': {'object_name': 'Contract'},
'address': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'area': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'check_result_info': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'creation_form': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True'}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'developer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Developer']", 'null': 'True', 'blank': 'True'}),
'docs': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['build.ContractDocuments']", 'null': 'True', 'blank': 'True'}),
'finish_year': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2018, 12, 31, 0, 0)'}),
'flats_amount': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'has_trouble_docs': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mo': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mo.MO']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True'}),
'num': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'period_of_payment': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True'}),
'start_year': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 1, 1, 0, 0)'}),
'summ_mo_money': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'summ_without_mo_money': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'summa': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'summa_fed': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'summa_reg': ('django.db.models.fields.FloatField', [], {'null': 'True'})
},
'build.contractdocuments': {
'Meta': {'object_name': 'ContractDocuments'},
'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mun_contracts': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
'build.copybuilding': {
'Meta': {'object_name': 'CopyBuilding'},
'address': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'approve_status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'area': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'area_cmp': ('django.db.models.fields.IntegerField', [], {'default': '1', 'null': 'True'}),
'build_state': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'build_year': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'cad_num': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
'cad_passport': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'cad_sum': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'clinic': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'complete_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'contract': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['build.Contract']", 'null': 'True'}),
'developer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Developer']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'driveways': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'electric_supply': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'entrance_door': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'finish_year': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2018, 12, 31, 0, 0)'}),
'flat_num': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'flats_amount': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'floor': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'floors': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'gas_supply': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'hallway': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Hallway']", 'null': 'True', 'blank': 'True'}),
'heating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'hot_water_supply': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'internal_doors': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'is_balcony': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_clother_drying': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_dustbin_area': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_heat_boiler': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_intercom': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_loggia': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_parking': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_playground': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_routes': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_water_boiler': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'kindergarden': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'kitchen': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Kitchen']", 'null': 'True', 'blank': 'True'}),
'market': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'mo': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mo.MO']", 'null': 'True', 'blank': 'True'}),
'mo_fond_doc_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'mo_fond_doc_num': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'ownership_doc_num': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'ownership_num': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
'ownership_year': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'payment_perspective': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'planing_floor': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'public_transport': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'readiness': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'room': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Room']", 'null': 'True', 'blank': 'True'}),
'school': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'start_year': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 1, 1, 0, 0)'}),
'state': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'water_removal': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'water_settlement': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'wc': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.WC']", 'null': 'True', 'blank': 'True'}),
'window_constructions': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
},
'build.copycontract': {
'Meta': {'object_name': 'CopyContract'},
'address': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'area': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'check_result_info': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'creation_form': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True'}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'developer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Developer']", 'null': 'True', 'blank': 'True'}),
'docs': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['build.ContractDocuments']", 'null': 'True', 'blank': 'True'}),
'finish_year': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2018, 12, 31, 0, 0)'}),
'flats_amount': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'has_trouble_docs': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mo': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mo.MO']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True'}),
'num': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'period_of_payment': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True'}),
'start_year': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 1, 1, 0, 0)'}),
'summ_mo_money': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'summ_without_mo_money': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'summa': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'summa_fed': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'summa_reg': ('django.db.models.fields.FloatField', [], {'null': 'True'})
},
'build.ground': {
'Meta': {'object_name': 'Ground'},
'address': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'approve_status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'area': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'area_cmp': ('django.db.models.fields.IntegerField', [], {'default': '1', 'null': 'True'}),
'build_state': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'build_year': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'cad_num': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '2048', 'db_index': 'True'}),
'cad_passport': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'cad_sum': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'clinic': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'complete_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'contract': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['build.Contract']", 'null': 'True', 'blank': 'True'}),
'developer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Developer']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'driveways': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'electric_supply': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'entrance_door': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'finish_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'finish_year': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2018, 12, 31, 0, 0)'}),
'flats_amount': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'floor': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'floors': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'gas_supply': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'hallway': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Hallway']", 'null': 'True', 'blank': 'True'}),
'heating': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'hot_water_supply': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'internal_doors': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'is_balcony': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_clother_drying': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_dustbin_area': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_heat_boiler': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_intercom': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_loggia': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_parking': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_playground': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_routes': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_water_boiler': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'kindergarden': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'kitchen': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Kitchen']", 'null': 'True', 'blank': 'True'}),
'market': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'mo': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mo.MO']"}),
'mo_fond_doc_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'mo_fond_doc_num': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'ownership_doc_num': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'ownership_num': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
'ownership_year': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'payment_perspective': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'planing_floor': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'public_transport': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'readiness': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'room': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Room']", 'null': 'True', 'blank': 'True'}),
'school': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'start_year': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 1, 1, 0, 0)'}),
'state': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'water_removal': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'water_settlement': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'wc': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.WC']", 'null': 'True', 'blank': 'True'}),
'window_constructions': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
},
'core.basehallway': {
'Meta': {'object_name': 'BaseHallway'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'core.basekitchen': {
'Meta': {'object_name': 'BaseKitchen'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sink_with_mixer': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'})
},
'core.baseroom': {
'Meta': {'object_name': 'BaseRoom'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'core.basewc': {
'Meta': {'object_name': 'BaseWC'},
'bath_with_mixer': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_toilet': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'is_tower_dryer': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'sink_with_mixer': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'})
},
'core.developer': {
'Meta': {'object_name': 'Developer'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
'boss_position': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'face_list': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'})
},
u'core.hallway': {
'Meta': {'object_name': 'Hallway', '_ormbases': ['core.BaseHallway']},
u'basehallway_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseHallway']", 'unique': 'True', 'primary_key': 'True'}),
'ceiling': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'floor': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'wall': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
},
u'core.kitchen': {
'Meta': {'object_name': 'Kitchen', '_ormbases': ['core.BaseKitchen']},
u'basekitchen_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseKitchen']", 'unique': 'True', 'primary_key': 'True'}),
'ceiling': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'floor': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'stove': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'wall': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
},
u'core.room': {
'Meta': {'object_name': 'Room', '_ormbases': ['core.BaseRoom']},
u'baseroom_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseRoom']", 'unique': 'True', 'primary_key': 'True'}),
'ceiling': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'floor': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'wall': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
},
u'core.wc': {
'Meta': {'object_name': 'WC', '_ormbases': ['core.BaseWC']},
u'basewc_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseWC']", 'unique': 'True', 'primary_key': 'True'}),
'ceiling': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'floor': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'separate': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'wall': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'wc_ceiling': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'wc_floor': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'wc_wall': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
},
'mo.mo': {
'Meta': {'object_name': 'MO'},
'common_amount': ('django.db.models.fields.FloatField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'common_economy': ('django.db.models.fields.FloatField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'common_fed_amount': ('django.db.models.fields.FloatField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'common_percentage': ('django.db.models.fields.FloatField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'common_reg_amount': ('django.db.models.fields.FloatField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'common_spent': ('django.db.models.fields.FloatField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'creation_form': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'max_length': '24', 'null': 'True', 'blank': 'True'}),
'flats_amount': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'has_trouble': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'home_fed_orphans': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'home_orphans': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'home_reg_orphans': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '2048'}),
'planing_home_orphans': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['build']
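# --- Usage note (not part of the generated migration above) -----------------
# With South, this schema migration would typically be applied or reverted via
# manage.py. The migration number below is a placeholder, since this file's own
# number is not shown here:
#
#   ./manage.py migrate build                 # forwards(): adds build_copycontract.check_result_info
#   ./manage.py migrate build 00XX_previous   # backwards(): drops the column again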
|
|
"""
A cmdset holds a set of commands available to the object or to other
objects near it. All the commands a player can give (look, @create etc)
are stored as the default cmdset on the player object and managed using the
CmdHandler object (see cmdhandler.py).
The power of having command sets in CmdSets like this is that CmdSets
can be merged together according to individual rules to create a new
on-the-fly CmdSet that is some combination of the
previous ones. Their functionality borrows to a large extent from mathematical
set theory, so it should not be much of a problem to understand.
See CmdHandler for practical examples on how to apply cmdsets
together to create interesting in-game effects.
"""
from django.utils.translation import ugettext as _
from src.utils.utils import inherits_from, is_iter
__all__ = ("CmdSet",)
class _CmdSetMeta(type):
"""
This metaclass makes some minor on-the-fly convenience fixes to
the cmdset class.
"""
def __init__(mcs, *args, **kwargs):
"""
Fixes some things in the cmdclass
"""
# by default we key the cmdset the same as the
# name of its class.
if not hasattr(mcs, 'key') or not mcs.key:
mcs.key = mcs.__name__
mcs.path = "%s.%s" % (mcs.__module__, mcs.__name__)
        if not isinstance(mcs.key_mergetypes, dict):
mcs.key_mergetypes = {}
super(_CmdSetMeta, mcs).__init__(*args, **kwargs)
class CmdSet(object):
"""
This class describes a unique cmdset that understands priorities. CmdSets
can be merged and made to perform various set operations on each other.
    CmdSets have priorities that affect which of their commands gets used when sets are merged.
    In the examples below, cmdset A always has higher priority than cmdset B.
key - the name of the cmdset. This can be used on its own for game operations
mergetype (partly from Set theory):
Union - The two command sets are merged so that as many
commands as possible of each cmdset ends up in the
merged cmdset. Same-name commands are merged by
priority. This is the most common default.
Ex: A1,A3 + B1,B2,B4,B5 = A1,B2,A3,B4,B5
Intersect - Only commands found in *both* cmdsets
(i.e. which have same names) end up in the merged
cmdset, with the higher-priority cmdset replacing the
lower one. Ex: A1,A3 + B1,B2,B4,B5 = A1
Replace - The commands of this cmdset completely replaces
the lower-priority cmdset's commands, regardless
of if same-name commands exist.
Ex: A1,A3 + B1,B2,B4,B5 = A1,A3
Remove - This removes the relevant commands from the
lower-priority cmdset completely. They are not
                 replaced with anything, so this in effect uses the
high-priority cmdset as a filter to affect the
low-priority cmdset.
Ex: A1,A3 + B1,B2,B4,B5 = B2,B4,B5
Note: Commands longer than 2 characters and starting
                with double underscores, like '__noinput_command',
                are considered 'system commands' and are
                exempt from all merge operations - they are
ALWAYS included across mergers and only affected
if same-named system commands replace them.
    priority - Cmdsets are always merged in pairs, so that
the higher set's mergetype is applied to the
lower-priority cmdset. Default commands have priority 0,
high-priority ones like Exits and Channels have 10 and 9. Priorities
can be negative as well to give default commands preference.
duplicates - determines what happens when two sets of equal
                 priority merge. By default, the first of them in the
                 merger (i.e. A above) automatically takes
                 precedence. But if duplicates is True, the
result will be a merger with more than one of each
name match. This will usually lead to the player
receiving a multiple-match error higher up the road,
but can be good for things like cmdsets on non-player
objects in a room, to allow the system to warn that
more than one 'ball' in the room has the same 'kick'
command defined on it, so it may offer a chance to
select which ball to kick ... Allowing duplicates
                 only makes sense for Union and Intersect; the setting
is ignored for the other mergetypes.
    key_mergetypes (dict) - allows the cmdset to define a unique
mergetype for particular cmdsets. Format is
{CmdSetkeystring:mergetype}. Priorities still apply.
             Example: {'Myevilcmdset': 'Replace'}, which would make
sure for this set to always use 'Replace' on
Myevilcmdset no matter what overall mergetype this set
has.
no_objs - don't include any commands from nearby objects
when searching for suitable commands
no_exits - ignore the names of exits when matching against
commands
    no_channels - ignore the names of channels when matching against
commands (WARNING- this is dangerous since the
player can then not even ask staff for help if
something goes wrong)
"""
__metaclass__ = _CmdSetMeta
key = "Unnamed CmdSet"
mergetype = "Union"
priority = 0
duplicates = False
key_mergetypes = {}
no_exits = False
no_objs = False
no_channels = False
permanent = False
errmessage = ""
# pre-store properties to duplicate straight off
to_duplicate = ("key", "cmdsetobj", "no_exits", "no_objs", "no_channels", "permanent",
"mergetype", "priority", "duplicates", "errmessage")
def __init__(self, cmdsetobj=None, key=None):
"""
Creates a new CmdSet instance.
cmdsetobj - this is the database object to which this particular
instance of cmdset is related. It is often a player but may also be a
regular object.
"""
if key:
self.key = key
self.commands = []
self.system_commands = []
self.actual_mergetype = self.mergetype
self.cmdsetobj = cmdsetobj
# initialize system
self.at_cmdset_creation()
self._contains_cache = {}
# Priority-sensitive merge operations for cmdsets
def _union(self, cmdset_a, cmdset_b):
"C = A U B. CmdSet A is assumed to have higher priority"
cmdset_c = cmdset_a._duplicate()
# we make copies, not refs by use of [:]
cmdset_c.commands = cmdset_a.commands[:]
if cmdset_a.duplicates and cmdset_a.priority == cmdset_b.priority:
cmdset_c.commands.extend(cmdset_b.commands)
else:
            cmdset_c.commands.extend([cmd for cmd in cmdset_b if cmd not in cmdset_a])
return cmdset_c
def _intersect(self, cmdset_a, cmdset_b):
"C = A (intersect) B. A is assumed higher priority"
cmdset_c = cmdset_a._duplicate()
if cmdset_a.duplicates and cmdset_a.priority == cmdset_b.priority:
for cmd in [cmd for cmd in cmdset_a if cmd in cmdset_b]:
cmdset_c.add(cmd)
cmdset_c.add(cmdset_b.get(cmd))
else:
cmdset_c.commands = [cmd for cmd in cmdset_a if cmd in cmdset_b]
return cmdset_c
def _replace(self, cmdset_a, cmdset_b):
"C = A + B where the result is A."
cmdset_c = cmdset_a._duplicate()
cmdset_c.commands = cmdset_a.commands[:]
return cmdset_c
def _remove(self, cmdset_a, cmdset_b):
"C = A + B, where B is filtered by A"
cmdset_c = cmdset_a._duplicate()
        cmdset_c.commands = [cmd for cmd in cmdset_b if cmd not in cmdset_a]
return cmdset_c
def _instantiate(self, cmd):
"""
        Checks that the object is an instantiated command
        and not, say, a command class. If it is a class,
        instantiate it. Other types, like strings, are passed through.
"""
try:
return cmd()
except TypeError:
return cmd
def _duplicate(self):
"""
Returns a new cmdset with the same settings as this one
(no actual commands are copied over)
"""
cmdset = CmdSet()
for key, val in ((key, getattr(self, key)) for key in self.to_duplicate):
if val != getattr(cmdset, key):
# only copy if different from default; avoid turning class-vars into instance vars
setattr(cmdset, key, val)
cmdset.key_mergetypes = self.key_mergetypes.copy()
return cmdset
#cmdset = self.__class__()
#cmdset.__dict__.update(dict((key, val) for key, val in self.__dict__.items() if key in self.to_duplicate))
#cmdset.key_mergetypes = self.key_mergetypes.copy() #copy.deepcopy(self.key_mergetypes)
#return cmdset
def __str__(self):
"""
Show all commands in cmdset when printing it.
"""
return ", ".join([str(cmd) for cmd in sorted(self.commands, key=lambda o:o.key)])
def __iter__(self):
"""
Allows for things like 'for cmd in cmdset':
"""
return iter(self.commands)
def __contains__(self, othercmd):
"""
Returns True if this cmdset contains the given command (as defined
by command name and aliases). This allows for things like 'if cmd in cmdset'
"""
ret = self._contains_cache.get(othercmd)
        if ret is None:
ret = othercmd in self.commands
self._contains_cache[othercmd] = ret
return ret
def __add__(self, cmdset_b):
"""
Merge this cmdset (A) with another cmdset (B) using the + operator,
C = A + B
Here, we (by convention) say that 'A is merged onto B to form
C'. The actual merge operation used in the 'addition' depends
on which priorities A and B have. The one of the two with the
highest priority will apply and give its properties to C. In
the case of a tie, A takes priority and replaces the
        same-named commands in B unless A has the 'duplicates' variable
set (which means both sets' commands are kept).
"""
# It's okay to merge with None
if not cmdset_b:
return self
sys_commands_a = self.get_system_cmds()
sys_commands_b = cmdset_b.get_system_cmds()
if self.priority >= cmdset_b.priority:
# A higher or equal priority than B
# preserve system __commands
sys_commands = sys_commands_a + [cmd for cmd in sys_commands_b if cmd not in sys_commands_a]
mergetype = self.key_mergetypes.get(cmdset_b.key, self.mergetype)
if mergetype == "Intersect":
cmdset_c = self._intersect(self, cmdset_b)
elif mergetype == "Replace":
cmdset_c = self._replace(self, cmdset_b)
elif mergetype == "Remove":
cmdset_c = self._remove(self, cmdset_b)
else: # Union
cmdset_c = self._union(self, cmdset_b)
cmdset_c.no_channels = self.no_channels
cmdset_c.no_exits = self.no_exits
cmdset_c.no_objs = self.no_objs
else:
# B higher priority than A
            # preserve system __commands
sys_commands = sys_commands_b + [cmd for cmd in sys_commands_a if cmd not in sys_commands_b]
mergetype = cmdset_b.key_mergetypes.get(self.key, cmdset_b.mergetype)
if mergetype == "Intersect":
cmdset_c = self._intersect(cmdset_b, self)
elif mergetype == "Replace":
cmdset_c = self._replace(cmdset_b, self)
elif mergetype == "Remove":
                cmdset_c = self._remove(cmdset_b, self)
else: # Union
cmdset_c = self._union(cmdset_b, self)
cmdset_c.no_channels = cmdset_b.no_channels
cmdset_c.no_exits = cmdset_b.no_exits
cmdset_c.no_objs = cmdset_b.no_objs
# we store actual_mergetype since key_mergetypes
# might be different from the main mergetype.
# This is used for diagnosis.
cmdset_c.actual_mergetype = mergetype
# return the system commands to the cmdset
cmdset_c.add(sys_commands)
return cmdset_c
def add(self, cmd):
"""
Add a command, a list of commands or a cmdset to this cmdset.
Note that if cmd already exists in set,
it will replace the old one (no priority checking etc
at this point; this is often used to overload
default commands).
If cmd is another cmdset class or -instance, the commands
        of that command set are added to this one, as if they were part
        of the original cmdset definition. No merging or priority checks
        are made; rather, later-added commands will simply replace
existing ones to make a unique set.
"""
if inherits_from(cmd, "src.commands.cmdset.CmdSet"):
# cmd is a command set so merge all commands in that set
# to this one. We raise a visible error if we created
# an infinite loop (adding cmdset to itself somehow)
try:
cmd = self._instantiate(cmd)
except RuntimeError:
string = "Adding cmdset %(cmd)s to %(class)s lead to an infinite loop. When adding a cmdset to another, "
string += "make sure they are not themself cyclically added to the new cmdset somewhere in the chain."
raise RuntimeError(_(string) % {"cmd":cmd, "class":self.__class__})
cmds = cmd.commands
elif is_iter(cmd):
cmds = [self._instantiate(c) for c in cmd]
else:
cmds = [self._instantiate(cmd)]
commands = self.commands
system_commands = self.system_commands
for cmd in cmds:
# add all commands
if not hasattr(cmd, 'obj'):
cmd.obj = self.cmdsetobj
try:
ic = commands.index(cmd)
commands[ic] = cmd # replace
except ValueError:
commands.append(cmd)
# extra run to make sure to avoid doublets
self.commands = list(set(commands))
#print "In cmdset.add(cmd):", self.key, cmd
# add system_command to separate list as well,
# for quick look-up
if cmd.key.startswith("__"):
try:
ic = system_commands.index(cmd)
system_commands[ic] = cmd # replace
except ValueError:
system_commands.append(cmd)
def remove(self, cmd):
"""
Remove a command instance from the cmdset.
cmd can be either a cmd instance or a key string.
"""
cmd = self._instantiate(cmd)
self.commands = [oldcmd for oldcmd in self.commands if oldcmd != cmd]
def get(self, cmd):
"""
Return the command in this cmdset that matches the
given command. cmd may be either a command instance or
a key string.
"""
cmd = self._instantiate(cmd)
for thiscmd in self.commands:
if thiscmd == cmd:
return thiscmd
def count(self):
"Return number of commands in set"
return len(self.commands)
def get_system_cmds(self):
"""
Return system commands in the cmdset, defined as
commands starting with double underscore __.
        These are exempt from merge operations.
"""
return self.system_commands
#return [cmd for cmd in self.commands if cmd.key.startswith('__')]
def make_unique(self, caller):
"""
        This is an unsafe method meant to clean out a cmdset of
        doublet commands after it has been created. It is useful
        for cmdsets inherited through the cmdhandler, where
        object-based cmdsets are always added twice. Doublets will
        be weeded out with preference given to commands defined on caller,
        otherwise on a first-come-first-served basis.
"""
unique = {}
for cmd in self.commands:
if cmd.key in unique:
ocmd = unique[cmd.key]
if (hasattr(cmd, 'obj') and cmd.obj == caller) and not \
(hasattr(ocmd, 'obj') and ocmd.obj == caller):
unique[cmd.key] = cmd
else:
unique[cmd.key] = cmd
self.commands = unique.values()
def get_all_cmd_keys_and_aliases(self, caller=None):
"""
Returns a list of all command keys and aliases
available in this cmdset. If caller is given, the
        commands are checked for access on the "call" type
before being returned.
"""
names = []
        if caller:
            for cmd in self.commands:
                if cmd.access(caller):
                    names.extend(cmd._keyaliases)
        else:
            for cmd in self.commands:
                names.extend(cmd._keyaliases)
return names
def at_cmdset_creation(self):
"""
Hook method - this should be overloaded in the inheriting
class, and should take care of populating the cmdset
by use of self.add().
"""
pass
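# --- Example sketch (not part of the module above) ---------------------------
# A minimal illustration of the Union merge described in the CmdSet docstring,
# using the '+' operator implemented by CmdSet.__add__. _FakeCmd is an
# assumption: a stand-in for a real command object that only provides what the
# merge logic above actually touches (a key, equality/hashing by key, and an
# 'obj' slot that CmdSet.add() fills in).
class _FakeCmd(object):
    def __init__(self, key):
        self.key = key
        self._keyaliases = [key]
    def __eq__(self, other):
        return self.key == getattr(other, "key", None)
    def __hash__(self):
        return hash(self.key)
    def __str__(self):
        return self.key
class _DefaultCmdSet(CmdSet):
    "Low-priority set, e.g. the commands always available to a character."
    key = "DefaultSet"
    priority = 0
    def at_cmdset_creation(self):
        self.add([_FakeCmd("look"), _FakeCmd("get")])
class _CombatCmdSet(CmdSet):
    "Higher-priority set that temporarily overlays the default one."
    key = "CombatSet"
    priority = 1
    mergetype = "Union"  # same-name commands from the higher-priority set win
    def at_cmdset_creation(self):
        self.add([_FakeCmd("look"), _FakeCmd("attack")])
# merged = _CombatCmdSet() + _DefaultCmdSet()
# print(merged)                    # -> attack, get, look
# print(merged.actual_mergetype)   # -> Union
# Setting _CombatCmdSet.mergetype to "Remove" would instead strip the
# same-name commands from the default set, leaving only "get".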
|
|
"""Test :mod:`letsencrypt.display.util`."""
import os
import unittest
import mock
from letsencrypt.display import util as display_util
class DisplayT(unittest.TestCase):
"""Base class for both utility classes."""
# pylint: disable=too-few-public-methods
def setUp(self):
self.choices = [("First", "Description1"), ("Second", "Description2")]
self.tags = ["tag1", "tag2", "tag3"]
self.tags_choices = [("1", "tag1"), ("2", "tag2"), ("3", "tag3")]
def visual(displayer, choices):
"""Visually test all of the display functions."""
displayer.notification("Random notification!")
displayer.menu("Question?", choices,
ok_label="O", cancel_label="Can", help_label="??")
displayer.menu("Question?", [choice[1] for choice in choices],
ok_label="O", cancel_label="Can", help_label="??")
displayer.input("Input Message")
displayer.yesno("YesNo Message", yes_label="Yessir", no_label="Nosir")
displayer.checklist("Checklist Message", [choice[0] for choice in choices])
class NcursesDisplayTest(DisplayT):
"""Test ncurses display.
Since this is mostly a wrapper, it might be more helpful to test the actual
dialog boxes. The test_visual function will actually display the various
    boxes, but requires the user to do the verification. If something seems amiss,
    please use the test_visual function to debug it, since the automatic tests rely
on too much mocking.
"""
def setUp(self):
super(NcursesDisplayTest, self).setUp()
self.displayer = display_util.NcursesDisplay()
self.default_menu_options = {
"choices": self.choices,
"ok_label": "OK",
"cancel_label": "Cancel",
"help_button": False,
"help_label": "",
"width": display_util.WIDTH,
"height": display_util.HEIGHT,
"menu_height": display_util.HEIGHT-6,
}
@mock.patch("letsencrypt.display.util.dialog.Dialog.msgbox")
def test_notification(self, mock_msgbox):
"""Kind of worthless... one liner."""
self.displayer.notification("message")
self.assertEqual(mock_msgbox.call_count, 1)
@mock.patch("letsencrypt.display.util.dialog.Dialog.menu")
def test_menu_tag_and_desc(self, mock_menu):
mock_menu.return_value = (display_util.OK, "First")
ret = self.displayer.menu("Message", self.choices)
mock_menu.assert_called_with("Message", **self.default_menu_options)
self.assertEqual(ret, (display_util.OK, 0))
@mock.patch("letsencrypt.display.util.dialog.Dialog.menu")
def test_menu_tag_and_desc_cancel(self, mock_menu):
mock_menu.return_value = (display_util.CANCEL, "")
ret = self.displayer.menu("Message", self.choices)
mock_menu.assert_called_with("Message", **self.default_menu_options)
self.assertEqual(ret, (display_util.CANCEL, -1))
@mock.patch("letsencrypt.display.util.dialog.Dialog.menu")
def test_menu_desc_only(self, mock_menu):
mock_menu.return_value = (display_util.OK, "1")
ret = self.displayer.menu("Message", self.tags, help_label="More Info")
self.default_menu_options.update(
choices=self.tags_choices, help_button=True, help_label="More Info")
mock_menu.assert_called_with("Message", **self.default_menu_options)
self.assertEqual(ret, (display_util.OK, 0))
@mock.patch("letsencrypt.display.util.dialog.Dialog.menu")
def test_menu_desc_only_help(self, mock_menu):
mock_menu.return_value = (display_util.HELP, "2")
ret = self.displayer.menu("Message", self.tags, help_label="More Info")
self.assertEqual(ret, (display_util.HELP, 1))
@mock.patch("letsencrypt.display.util.dialog.Dialog.menu")
def test_menu_desc_only_cancel(self, mock_menu):
mock_menu.return_value = (display_util.CANCEL, "")
ret = self.displayer.menu("Message", self.tags, help_label="More Info")
self.assertEqual(ret, (display_util.CANCEL, -1))
@mock.patch("letsencrypt.display.util."
"dialog.Dialog.inputbox")
def test_input(self, mock_input):
self.displayer.input("message")
self.assertEqual(mock_input.call_count, 1)
@mock.patch("letsencrypt.display.util.dialog.Dialog.yesno")
def test_yesno(self, mock_yesno):
mock_yesno.return_value = display_util.OK
self.assertTrue(self.displayer.yesno("message"))
mock_yesno.assert_called_with(
"message", display_util.HEIGHT, display_util.WIDTH,
yes_label="Yes", no_label="No")
@mock.patch("letsencrypt.display.util."
"dialog.Dialog.checklist")
def test_checklist(self, mock_checklist):
self.displayer.checklist("message", self.tags)
choices = [
(self.tags[0], "", True),
(self.tags[1], "", True),
(self.tags[2], "", True),
]
mock_checklist.assert_called_with(
"message", width=display_util.WIDTH, height=display_util.HEIGHT,
choices=choices)
# def test_visual(self):
# visual(self.displayer, self.choices)
class FileOutputDisplayTest(DisplayT):
"""Test stdout display.
Most of this class has to deal with visual output. In order to test how the
functions look to a user, uncomment the test_visual function.
"""
def setUp(self):
super(FileOutputDisplayTest, self).setUp()
self.mock_stdout = mock.MagicMock()
self.displayer = display_util.FileDisplay(self.mock_stdout)
def test_notification_no_pause(self):
self.displayer.notification("message", 10, False)
string = self.mock_stdout.write.call_args[0][0]
self.assertTrue("message" in string)
def test_notification_pause(self):
with mock.patch("__builtin__.raw_input", return_value="enter"):
self.displayer.notification("message")
self.assertTrue("message" in self.mock_stdout.write.call_args[0][0])
@mock.patch("letsencrypt.display.util."
"FileDisplay._get_valid_int_ans")
def test_menu(self, mock_ans):
mock_ans.return_value = (display_util.OK, 1)
ret = self.displayer.menu("message", self.choices)
self.assertEqual(ret, (display_util.OK, 0))
def test_input_cancel(self):
with mock.patch("__builtin__.raw_input", return_value="c"):
code, _ = self.displayer.input("message")
        self.assertEqual(code, display_util.CANCEL)
def test_input_normal(self):
with mock.patch("__builtin__.raw_input", return_value="domain.com"):
code, input_ = self.displayer.input("message")
self.assertEqual(code, display_util.OK)
self.assertEqual(input_, "domain.com")
def test_yesno(self):
with mock.patch("__builtin__.raw_input", return_value="Yes"):
self.assertTrue(self.displayer.yesno("message"))
with mock.patch("__builtin__.raw_input", return_value="y"):
self.assertTrue(self.displayer.yesno("message"))
with mock.patch("__builtin__.raw_input", side_effect=["maybe", "y"]):
self.assertTrue(self.displayer.yesno("message"))
with mock.patch("__builtin__.raw_input", return_value="No"):
self.assertFalse(self.displayer.yesno("message"))
with mock.patch("__builtin__.raw_input", side_effect=["cancel", "n"]):
self.assertFalse(self.displayer.yesno("message"))
with mock.patch("__builtin__.raw_input", return_value="a"):
self.assertTrue(self.displayer.yesno("msg", yes_label="Agree"))
@mock.patch("letsencrypt.display.util.FileDisplay.input")
def test_checklist_valid(self, mock_input):
mock_input.return_value = (display_util.OK, "2 1")
code, tag_list = self.displayer.checklist("msg", self.tags)
self.assertEqual(
(code, set(tag_list)), (display_util.OK, set(["tag1", "tag2"])))
@mock.patch("letsencrypt.display.util.FileDisplay.input")
def test_checklist_miss_valid(self, mock_input):
mock_input.side_effect = [
(display_util.OK, "10"),
(display_util.OK, "tag1 please"),
(display_util.OK, "1")
]
ret = self.displayer.checklist("msg", self.tags)
self.assertEqual(ret, (display_util.OK, ["tag1"]))
@mock.patch("letsencrypt.display.util.FileDisplay.input")
def test_checklist_miss_quit(self, mock_input):
mock_input.side_effect = [
(display_util.OK, "10"),
(display_util.CANCEL, "1")
]
ret = self.displayer.checklist("msg", self.tags)
self.assertEqual(ret, (display_util.CANCEL, []))
def test_scrub_checklist_input_valid(self):
# pylint: disable=protected-access
indices = [
["1"],
["1", "2", "1"],
["2", "3"],
]
exp = [
set(["tag1"]),
set(["tag1", "tag2"]),
set(["tag2", "tag3"]),
]
for i, list_ in enumerate(indices):
set_tags = set(
self.displayer._scrub_checklist_input(list_, self.tags))
self.assertEqual(set_tags, exp[i])
def test_scrub_checklist_input_invalid(self):
# pylint: disable=protected-access
indices = [
["0"],
["4"],
["tag1"],
["1", "tag1"],
["2", "o"]
]
for list_ in indices:
self.assertEqual(
self.displayer._scrub_checklist_input(list_, self.tags), [])
def test_print_menu(self):
# pylint: disable=protected-access
# This is purely cosmetic... just make sure there aren't any exceptions
self.displayer._print_menu("msg", self.choices)
self.displayer._print_menu("msg", self.tags)
def test_wrap_lines(self):
# pylint: disable=protected-access
msg = ("This is just a weak test{0}"
"This function is only meant to be for easy viewing{0}"
"Test a really really really really really really really really "
"really really really really long line...".format(os.linesep))
text = self.displayer._wrap_lines(msg)
self.assertEqual(text.count(os.linesep), 3)
def test_get_valid_int_ans_valid(self):
# pylint: disable=protected-access
with mock.patch("__builtin__.raw_input", return_value="1"):
self.assertEqual(
self.displayer._get_valid_int_ans(1), (display_util.OK, 1))
ans = "2"
with mock.patch("__builtin__.raw_input", return_value=ans):
self.assertEqual(
self.displayer._get_valid_int_ans(3),
(display_util.OK, int(ans)))
def test_get_valid_int_ans_invalid(self):
# pylint: disable=protected-access
answers = [
["0", "c"],
["4", "one", "C"],
["c"],
]
for ans in answers:
with mock.patch("__builtin__.raw_input", side_effect=ans):
self.assertEqual(
self.displayer._get_valid_int_ans(3),
(display_util.CANCEL, -1))
# def test_visual(self):
# self.displayer = display_util.FileDisplay(sys.stdout)
# visual(self.displayer, self.choices)
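# --- Manual visual check (sketch, not part of the test suite) ----------------
# The docstrings above suggest using the commented-out test_visual hooks to
# eyeball the dialogs. An equivalent standalone helper might look like this;
# 'sys' is imported locally because this module does not import it at the top.
def _manual_visual_check():
    import sys
    choices = [("First", "Description1"), ("Second", "Description2")]
    visual(display_util.FileDisplay(sys.stdout), choices)
# _manual_visual_check()  # uncomment to walk through the FileDisplay prompts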
class SeparateListInputTest(unittest.TestCase):
"""Test Module functions."""
def setUp(self):
self.exp = ["a", "b", "c", "test"]
@classmethod
def _call(cls, input_):
from letsencrypt.display.util import separate_list_input
return separate_list_input(input_)
def test_commas(self):
self.assertEqual(self._call("a,b,c,test"), self.exp)
def test_spaces(self):
self.assertEqual(self._call("a b c test"), self.exp)
def test_both(self):
self.assertEqual(self._call("a, b, c, test"), self.exp)
def test_mess(self):
actual = [
self._call(" a , b c \t test"),
self._call(",a, ,, , b c test "),
self._call(",,,,, , a b,,, , c,test"),
]
for act in actual:
self.assertEqual(act, self.exp)
class PlaceParensTest(unittest.TestCase):
@classmethod
def _call(cls, label): # pylint: disable=protected-access
from letsencrypt.display.util import _parens_around_char
return _parens_around_char(label)
def test_single_letter(self):
self.assertEqual("(a)", self._call("a"))
def test_multiple(self):
self.assertEqual("(L)abel", self._call("Label"))
self.assertEqual("(y)es please", self._call("yes please"))
if __name__ == "__main__":
unittest.main() # pragma: no cover
|
|
import tornado.web
import tornado.escape
import os
import logging
import tempfile
import tornado.locale
import tornado.ioloop  # IOLoop.instance() is used by the handlers below
import auth
import datetime
import hashlib
import re
from database import KeySearchTimeout
LOGGER = logging.getLogger("tornado.application")
SUBMISSION_ERROR_OK = 0
SUBMISSION_ERROR_FILE_COUNT_INCONSISTENT = -1
SUBMISSION_ERROR_NOT_AUTHENTICATED = -2
SUBMISSION_ERROR_FILE_TOO_LARGE = -3
SUBMISSION_ERROR_NO_DATA = -4
SUBMISSION_ERROR_SSL_ENFORCED_BUT_NOT_USED = -5
SUBMISSION_ERROR_BAD_METHOD = -6
SUBMISSION_ERROR_LOGIN_REQUIRED = -7
SUBMISSION_ERROR_BAD_INVOCATION = -8
SUBMISSION_ERROR_KS_TIMEOUT = -9
SUBMISSION_ERROR_NO_KEY = -10
SUBMISSION_ERROR_KEY_DOES_NOT_EXIST = -11
SUBMISSION_ERROR_UNDEFINED = -9999
ETAG_CACHE = {}
class SubmissionRejected(Exception):
def __init__(self, msg, code):
self.user_info = {
"error": msg,
"code": code,
}
super(SubmissionRejected, self).__init__(msg)
class u2606Handler(tornado.web.RequestHandler):
"""This base class provides TL and TLU, shortcuts for localization."""
def TLU(self, *args, **kwargs):
"""Return a translated string in the user's locale inferred by Tornado,
falling back to u2606 locale if it can't."""
if self.application.hb_config["ignore_browser_locale"]:
return self.TL(*args, **kwargs)
locale = self.get_browser_locale(self.application.hb_config["language"])
return locale.translate(*args, **kwargs)
def TL(self, *args, **kwargs):
"""Return a translated string in the u2606 locale"""
return self.application.hb_locale.translate(*args, **kwargs)
def emit_error_formatted(self, error):
LOGGER.info(self.TL("Request from {0} ended in failure: {1} ({2})")
.format(self.request.remote_ip, error["error"], error["code"]))
if self.request.headers.get("u2606WantsJSONResponse", "NO") == "YES":
self.write(error)
else:
try:
self.write(self.render_template("error_{0}.html".format(error["code"]), error))
except IOError:
self.write(self.render_template("error_generic.html", error))
def set_default_headers(self):
self.set_header("Server", "u2606")
def render_template(self, __hb_name, values=None, **kw):
kw.update(self.get_template_namepace())
return self.application.hb_template(__hb_name, values, **kw)
def get_template_namepace(self):
return {"TL": self.TL, "TLU": self.TLU}
class UploadHandler(u2606Handler):
SUPPORTED_METHODS = ["GET", "POST", "OPTIONS"]
def set_cors_headers(self):
self.set_header("Access-Control-Allow-Origin", "*")
self.set_header("Access-Control-Allow-Headers", "u2606WantsJSONResponse")
# http methods
def options(self):
self.set_cors_headers()
print("lel")
self.finish()
def get(self):
self.set_status(400)
self.emit_error_formatted({
"error": self.TLU("cannot get an upload handler"),
"code": SUBMISSION_ERROR_BAD_METHOD
})
self.finish()
@tornado.web.asynchronous
def post(self):
self.set_cors_headers()
if (self.application.hb_config["enforce_ssl_for_uploads"]
and self.request.protocol != "https"):
self.set_status(400)
self.emit_error_formatted({
"error": self.TLU("SSL is required to upload files."),
"code": SUBMISSION_ERROR_SSL_ENFORCED_BUT_NOT_USED
})
            self.finish()
            return
        self.process_request()
# utilities
def alloc_filesystem_storage(self):
in_ = os.path.abspath(self.application.hb_config["scratch_directory"])
if not os.path.exists(in_):
os.makedirs(in_, mode=0o700)
scratch_file = tempfile.NamedTemporaryFile(prefix="__u2606_temp_", dir=in_, delete=0)
return (scratch_file.name, scratch_file)
def move_fs_object_into_place(self, from_name, to_name):
final_dest = os.path.abspath(self.application.hb_config["uploads_directory"])
if not os.path.exists(final_dest):
os.makedirs(final_dest, mode=0o777)
final_name = os.path.join(final_dest, to_name)
try:
os.rename(from_name, final_name)
except OSError as e:
LOGGER.error(self.TL("something bad happened while final-copying an object"), exc_info=1)
os.remove(from_name)
raise SubmissionRejected(str(e), SUBMISSION_ERROR_UNDEFINED)
def insert_object_into_database(self, isa, original_filename, from_, mime_type=None):
a_password = self.application.hb_delete_pass_gen.next_key()
ext = original_filename[original_filename.rfind(".") + 1:].lower()
the_object = {
"isa": isa,
"origin_ip": from_,
"delete_password": auth.pass_blob(a_password),
"original_filename": original_filename,
"file_extension": ext,
"mime_type": mime_type or "application/octet-stream",
"timestamp": datetime.datetime.now()
}
try:
key = self.application.hb_coordinator.insert_object(
the_object,
self.application.hb_public_key_gen
)
except KeySearchTimeout:
LOGGER.error(self.TL("key alloc failed!"))
raise SubmissionRejected(self.TLU("failed to find a key in time, try again please"),
SUBMISSION_ERROR_KS_TIMEOUT)
except Exception as e:
LOGGER.error(self.TL("something bad happened while inserting an object"), exc_info=1)
raise SubmissionRejected(str(e), SUBMISSION_ERROR_UNDEFINED)
return (key, a_password)
def finish_with_commit_result(self, success, exception, retval):
if success:
self.write({
"publicKey": retval[0],
"publicFilename": retval[1],
"deletionPassword": retval[2],
"error": self.TLU("Upload complete."),
"code": SUBMISSION_ERROR_OK,
})
else:
try:
self.emit_error_formatted(exception.user_info)
except AttributeError:
self.emit_error_formatted({
"error": self.TL("unknown error"),
"code": SUBMISSION_ERROR_UNDEFINED
})
self.finish()
class GeneralServeHandler(u2606Handler):
#@tornado.web.asynchronous
#def get(self, url):
# self.resolve_url(jailed_path[len(jail_dir):])
# #self.application.hb_dispatch(self.resolve_url, (jailed_path[len(jail_dir):],))
@tornado.web.asynchronous
def get(self, url):
#jail_dir = os.path.abspath(self.application.hb_config["static_root"])
#real_path = os.path.abspath(os.path.join(jail_dir, url.lstrip("/")))
real_url = re.sub("[/]+", "/", url)
#real_url = url
if self.try_serve_uploaded_object(real_url):
return
else:
self.write_404()
self.finish()
def try_serve_uploaded_object(self, obj_name):
fex = obj_name.lstrip("/").rsplit(".", 1)
if len(fex) < 2:
fex.append("")
obj = self.application.hb_coordinator.get_object_record_for_key(fex[0])
if not obj:
return 0
canonical_filename = os.path.join(os.path.abspath(self.application.hb_config["uploads_directory"]),
"{0}.{1}".format(fex[0], obj.file_extension))
if os.path.isfile(canonical_filename):
renderer = self.application.instantiate_renderer_for_isa(obj, canonical_filename)
if renderer:
ioloop = tornado.ioloop.IOLoop.instance()
self.application.hb_dispatch(renderer.render_object, (self, ioloop, fex[0], fex[1]))
else:
self.set_header("Content-Type", obj.mime_type or "application/octet-stream")
self.serve_regular_file(canonical_filename)
return 1
else:
return 0
def serve_regular_file(self, abs_path):
etag_cmp = self.request.headers.get("If-None-Match", None)
if etag_cmp and self._compute_etag(abs_path) == etag_cmp:
self.set_status(304) # their cache is still valid
self.finish()
return 1
try:
the_file = open(abs_path, "rb")
except (IOError, OSError):
self.set_status(500)
self.emit_error_formatted({
"error": self.TLU("The file you requested can't be shown at the moment."),
"code": 500
})
self.finish()
return 1
the_file.seek(0, 2)
self.set_header("Content-Length", str(the_file.tell()))
self.set_header("ETag", self._compute_etag(abs_path))
the_file.seek(0)
#self.read_and_serve_bytes_from_file(the_file)
self.application.hb_dispatch(self._read_and_serve_file_obj, (the_file,))
def _read_and_serve_file_obj(self, file_obj):
ioloop = tornado.ioloop.IOLoop.instance()
while 1:
block = file_obj.read(4096 ** 2)
if not block:
break
else:
#self.write(block)
ioloop.add_callback(self.write, block)
#ioloop.add_callback(self.flush)
# close the file once the loop is done so the descriptor isn't leaked
file_obj.close()
ioloop.add_callback(self.finish)
#self.finish()
def _compute_etag(self, path):
"""Not to be confused with the tornado subclass method.
This is for serve_regular_file only."""
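# The tag is derived from the file's mtime and path (cheap to compute, and it
# changes whenever the file is replaced) rather than from a hash of the file
# contents; the commented-out block below shows the content-hashing alternative.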
dm = os.path.getmtime(path)
cache_entry = ETAG_CACHE.get(path, (0, ""))
if dm != cache_entry[0]:
# we need to recalculate
hash_ = hashlib.sha1("{0}:{1}".format(dm, path).encode("utf8"))
#try:
# f = open(path, "rb")
#except IOError as e:
# LOGGER.warn("Computing ETag for {0}: can't open file: {1}"
# .format(path, str(e)))
# return None # this will never be a valid etag
#while 1:
# block = f.read(4096 ** 2)
# if not block:
# break
# hash_.update(block)
digest = hash_.hexdigest()
ETAG_CACHE[path] = (dm, digest)
return digest
else:
return cache_entry[1]
def write_404(self):
self.set_status(404)
self.emit_error_formatted({
"error": self.TLU("File not found."),
"code": 404
})
|
|
#!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit-test code for logtools"""
import os
import sys
import unittest
import logging
from tempfile import mkstemp
from datetime import datetime
from StringIO import StringIO
from operator import itemgetter
from logtools import (filterbots, logfilter, geoip, logsample, logsample_weighted,
logparse, urlparse, logmerge, logplot, qps, sumstat)
from logtools.parsers import *
from logtools import logtools_config, interpolate_config, AttrDict
logging.basicConfig(level=logging.INFO)
class ConfigurationTestCase(unittest.TestCase):
def testInterpolation(self):
self.assertEqual(1, interpolate_config(1, 'bogus_sec', 'bogus_key'))
self.assertRaises(KeyError, interpolate_config, None, 'bogus_sec', 'bogus_key')
class URLParseTestCase(unittest.TestCase):
def setUp(self):
self.rows = [
"http://www.mydomain.com/my/path/myfile?myparam1=myval1&myparam2=myval2",
"http://www.mydomain2.com",
"http://www.mydomain3.com/home",
"http://fun.com/index.php?home"
]
def testUrlParse(self):
i=0
for row in urlparse(StringIO('\n'.join(self.rows)+'\n'), part='netloc'):
i+=1
self.assertEquals(i, len(self.rows), \
"Number of rows output is not equal to input size")
def testMultipleQueryParams(self):
url = "http://www.mydomain.com/my/path/myfile?myparam1=myval1&myparam2=myval2"
for row in urlparse(StringIO(url+"\n"), part='query', query_params='myparam1,myparam2'):
self.assertEquals(row[0], 'myval1', "Returned query param value was not as expected: %s" % \
row)
class ParsingTestCase(unittest.TestCase):
def setUp(self):
self.clf_rows = [
'127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326',
'127.0.0.2 - jay [10/Oct/2000:13:56:12 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326'
]
self.json_rows = [
'{"key1":"val1","key2":true,"key3":31337,"key4":null,"nested_key":[{"nested_key_1":"2"}]}'
]
self.uwsgi_rows = [
"[pid: 11216|app: 0|req: 2680/5864] 24.218.159.119 () {40 vars in 957 bytes} [Thu Jun 13 22:29:59 2013] GET /my/uri/path/?param_id=52&token=s61048gkje_l001z => generated 1813 bytes in 11 msecs (HTTP/1.1 200) 2 headers in 73 bytes (1 switches on core 0)",
"[pid: 11217|app: 0|req: 3064/5865] 10.18.50.145 () {34 vars in 382 bytes} [Thu Jun 13 22:30:00 2013] GET / => generated 8264 bytes in 9 msecs (HTTP/1.1 200) 2 headers in 73 bytes (1 switches on core 0)"
]
def testJSONParser(self):
parser = JSONParser()
for logrow in self.json_rows:
parsed = parser(logrow)
self.assertNotEquals(parsed, None, "Could not parse line: %s" % str(logrow))
def testAccessLog(self):
parser = AccessLog()
parser.set_format(format='%h %l %u %t "%r" %>s %b')
self.assertRaises(ValueError, parser, 'example for invalid format')
for logrow in self.clf_rows:
parsed = parser(logrow)
self.assertNotEquals(parsed, None, "Could not parse line: %s" % str(logrow))
def testCommonLogFormat(self):
parser = CommonLogFormat()
self.assertRaises(ValueError, parser, 'example for invalid format')
for logrow in self.clf_rows:
parsed = parser(logrow)
self.assertNotEquals(parsed, None, "Could not parse line: %s" % str(logrow))
def testuWSGIParser(self):
parser = uWSGIParser()
for logrow in self.uwsgi_rows:
parsed = parser(logrow)
self.assertNotEquals(parsed, None, "Could not parse line: %s" % logrow)
def testLogParse(self):
options = AttrDict({'parser': 'CommonLogFormat', 'field': 4, 'header': False})
fh = StringIO('\n'.join(self.clf_rows))
output = [l for l in logparse(options, None, fh)]
self.assertEquals(len(output), len(self.clf_rows), "Output size was not equal to input size!")
def testMultiKeyGetter(self):
parser = CommonLogFormat()
func = multikey_getter_gen(parser, keys=(1,2), is_indices=True)
fh = StringIO('\n'.join(self.clf_rows))
output = [func(l) for l in fh]
self.assertEquals(len(output), len(self.clf_rows), "Output size was not equal to input size!")
class FilterBotsTestCase(unittest.TestCase):
def setUp(self):
self.options = AttrDict({
"reverse": False,
"unescape": False,
"printlines": False,
"ip_ua_re": "^(?P<ip>.*?) - USER_AGENT:'(?P<ua>.*?)'",
"bots_ips": StringIO("\n".join([
"6.6.6.6"
]) + "\n"),
"bots_ua": StringIO("\n".join([
"## Example comment ##",
"Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)",
"ssearch_bot/Nutch-1.0 (sSearch Crawler; http://www.semantissimo.de)",
"r'.*crawler'",
"s'MSIECrawler)'",
"p'DotSpotsBot'",
"p'Java/'"
]) + "\n")
})
self.fh = StringIO(
"127.0.0.1 - USER_AGENT:'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)' - ...\n" \
"255.255.255.255 - USER_AGENT:'Mozilla' - ...\n" \
"1.1.1.1 - USER_AGENT:'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; MSIECrawler)'\n" \
"2.2.2.2 - USER_AGENT:'Mozilla/4.0 (compatible; MSIE 6.0; Windows 98; Win 9x 4.90; .NET CLR 1.1.4322; MSIECrawler)'\n" \
"3.3.3.3 - USER_AGENT:'DotSpotsBot/0.2 (crawler; support at dotspots.com)'\n" \
"4.4.4.4 - USER_AGENT:'inagist.com url crawler'\n" \
"5.5.5.5 - USER_AGENT:'Java/1.6.0_18'\n" \
"6.6.6.6 - USER_AGENT:'ssearch_bot/Nutch-1.0 (sSearch Crawler; http://www.semantissimo.de)'\n"
)
self.json_fh = StringIO(
'''{"timestamp":"2010\/09\/01 00:00:01","user_agent":"Mozilla\/5.0 (compatible; Googlebot\/2.1; +http:\/\/www.google.com\/bot.html)","user_ip":"66.249.71.108"}\n''' \
'''{"timestamp":"2010\/10\/01 11:00:01","user_agent":"Mozilla\/5.0 (compatible; Googlebot\/2.1; +http:\/\/www.google.com\/bot.html)","user_ip":"66.249.71.109"}\n''' \
'''{"timestamp":"2010\/09\/01 00:00:01","user_agent":"Mozilla\/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.9.1.11) Gecko\/20100701 Firefox\/3.5.11 (.NET CLR 3.5.30729)","user_ip":"100.100.1.100"}\n''' \
'''{"timestamp":"2010\/10\/01 00:00:01","user_agent":"Mozilla\/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.9.1.11) Gecko\/20100701 Firefox\/3.5.11 (.NET CLR 3.5.30729)","user_ip":"6.6.6.6"}\n''' \
)
def testParserFiltering(self):
json_options = self.options
json_options['parser'] = 'JSONParser'
json_options['ip_ua_fields'] = 'ua:user_agent,ip:user_ip'
i=0
for l in filterbots(fh=self.json_fh, **json_options):
i+=1
self.assertEquals(i, 1, "filterbots output size different than expected: %s" % str(i))
def testRegExpFiltering(self):
i=0
for l in filterbots(fh=self.fh, **self.options):
i+=1
self.assertEquals(i, 1, "filterbots output size different than expected: %s" % str(i))
class GeoIPTestCase(unittest.TestCase):
def setUp(self):
self.options = AttrDict({ 'ip_re': '^(.*?) -' })
self.fh = StringIO(
"127.0.0.1 - USER_AGENT:'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)' - ...\n" \
"255.255.255.255 - USER_AGENT:'Mozilla' - ...\n" \
"74.125.225.48 - USER_AGENT:'IE' - ...\n" \
"65.55.175.254 - USER_AGENT:'IE' - ...\n"
)
def testGeoIP(self):
try:
import GeoIP
except ImportError:
print >> sys.stderr, "GeoIP Python package not available - skipping geoip unittest."
return
output = [(geocode, ip, line) for geocode, ip, line in geoip(fh=self.fh, **self.options)]
self.assertEquals(len(output), 2, "Output size was different than expected: %s" % str(len(output)))
def testFilter(self):
"""Test GeoIP filtering functionality"""
try:
import GeoIP
except ImportError:
print >> sys.stderr, "GeoIP Python package not available - skipping geoip unittest."
return
# Check positive filter
self.options['filter'] = 'United States'
output = [(geocode, ip, line) for geocode, ip, line in geoip(fh=self.fh, **self.options)]
self.assertEquals(len(output), 2, "Output size was different than expected: %s" % str(len(output)))
# Check negative filter
self.options['filter'] = 'India'
output = [(geocode, ip, line) for geocode, ip, line in geoip(fh=self.fh, **self.options)]
self.assertEquals(len(output), 0, "Output size was different than expected: %s" % str(len(output)))
class SamplingTestCase(unittest.TestCase):
def setUp(self):
self.options = AttrDict({ 'num_samples': 1 })
self.weighted_opts = AttrDict({
'num_samples': 5,
'field': 1,
'delimiter': ' '
})
self.fh = StringIO("\n".join([
'5 five', '1 one', '300 threehundred', '500 fivehundred',
'0 zero', '-1 minusone', '670 sixhundredseventy', '1000 thousand',
'22 twentytwo', '80 eighty', '3 three'
]))
def testUniformSampling(self):
output = [r for r in logsample(fh=self.fh, **self.options)]
self.assertEquals(len(output), self.options.num_samples,
"logsample output size different than expected: %s" % len(output))
def testWeightedSampling(self):
output = [(k, r) for k, r in logsample_weighted(fh=self.fh, **self.weighted_opts)]
self.assertEquals(len(output), self.weighted_opts.num_samples,
"logsample output size different than expected: %s" % len(output))
class FilterTestCase(unittest.TestCase):
"""Unit-test for the logfilter functionality"""
def setUp(self):
self.testset = StringIO("\n".join([
"AA word",
"word AA word",
"word AA",
"AA",
"aa word",
"wordAA",
"AAword",
"wordAAword",
"CC DD word"
])+"\n")
self.exp_emitted_wb = 4
self.exp_emitted = 1
self.blacklist = StringIO("\n".join([
'AA',
'bb',
'CC DD'
])+"\n")
def testACWB(self):
"""Aho-Corasick-based matching with Word Boundaries"""
lines = 0
for l in logfilter(self.testset, blacklist=self.blacklist, field=1, delimiter="\t",
with_acora=True, ignorecase=False,
word_boundaries=True):
#print l
lines += 1
self.assertEquals(lines, self.exp_emitted_wb, "Number of lines emitted was not as expected: %s (Expected: %s)" %
(lines, self.exp_emitted_wb))
def testAC(self):
"""Aho-Corasick-based matching"""
lines = 0
for l in logfilter(self.testset, blacklist=self.blacklist, field=1, delimiter="\t",
with_acora=True, ignorecase=False,
word_boundaries=False):
#print l
lines += 1
self.assertEquals(lines, self.exp_emitted, "Number of lines emitted was not as expected: %s (Expected: %s)" %
(lines, self.exp_emitted))
def testRE(self):
"""Regular Expression-based matching"""
lines = 0
for l in logfilter(self.testset, blacklist=self.blacklist, field=1, delimiter="\t",
with_acora=False, ignorecase=False,
word_boundaries=False):
#print l
lines += 1
self.assertEquals(lines, self.exp_emitted, "Number of lines emitted was not as expected: %s (Expected: %s)" %
(lines, self.exp_emitted))
def testREWB(self):
"""Regular Expression-based matching with Word Boundaries"""
lines = 0
for l in logfilter(self.testset, blacklist=self.blacklist, field=1, delimiter="\t",
with_acora=False, ignorecase=False,
word_boundaries=True):
#print l
lines += 1
self.assertEquals(lines, self.exp_emitted_wb, "Number of lines emitted was not as expected: %s (Expected: %s)" %
(lines, self.exp_emitted_wb))
class MergeTestCase(unittest.TestCase):
def setUp(self):
self.tempfiles = [mkstemp(), mkstemp(), mkstemp()]
self.args = [fname for fh, fname in self.tempfiles]
def tearDown(self):
"""Cleanup temporary files created by test"""
for fh, fname in self.tempfiles:
os.remove(fname)
def testNumericMerge(self):
os.write(self.tempfiles[0][0], "\n".join(['1 one', '5 five', '300 threehundred',
'500 fivehundred']))
os.write(self.tempfiles[1][0], "\n".join(['-1 minusone', '0 zero',
'670 sixhundredseventy' ,'1000 thousand']))
os.write(self.tempfiles[2][0], "\n".join(['3 three', '22 twentytwo', '80 eighty']))
options = AttrDict({'delimiter': ' ', 'field': 1, 'numeric': True })
output = [(k, l) for k, l in logmerge(options, self.args)]
self.assertEquals(len(output), 11, "Output size was not equal to input size!")
self.assertEquals(map(itemgetter(0), output), sorted(map(lambda x: int(x[0]), output)),
"Output was not numerically sorted!")
def testDateMerge(self):
os.write(self.tempfiles[0][0], "\n".join(['2010/01/12 07:00:00,one', '2010/01/12 08:00:00,five',
'2010/01/13 10:00:00,threehundred']))
os.write(self.tempfiles[1][0], "\n".join(['2010/01/12 07:30:00,one', '2010/01/12 08:10:00,five',
'2010/01/12 21:00:00,threehundred']))
os.write(self.tempfiles[2][0], "\n".join(['2010/01/11 05:33:03,one', '2010/01/12 03:10:00,five',
'2010/01/21 22:00:00,threehundred']))
dateformat = '%Y/%m/%d %H:%M:%S'
options = AttrDict({'delimiter': ',', 'field': 1, 'datetime': True, 'dateformat': dateformat })
output = [(k, l) for k, l in logmerge(options, self.args)]
self.assertEquals(len(output), 9, "Output size was not equal to input size!")
self.assertEquals(map(itemgetter(0), output), sorted(map(itemgetter(0), output)),
"Output was not time sorted!")
def testLexicalMerge(self):
os.write(self.tempfiles[0][0], "\n".join(['1 one', '300 threehundred', '5 five',
'500 fivehundred']))
os.write(self.tempfiles[1][0], "\n".join(['-1 minusone', '0 zero', '1000 thousand',
'670 sixhundredseventy']))
os.write(self.tempfiles[2][0], "\n".join(['22 twentytwo', '3 three',
'80 eighty']))
options = AttrDict({ 'delimiter': ' ', 'field': 1, 'numeric': False })
output = [(k, l) for k, l in logmerge(options, self.args)]
self.assertEquals(len(output), 11, "Output size was not equal to input size!")
self.assertEquals(map(itemgetter(0), output), sorted(map(itemgetter(0), output)),
"Output was not lexically sorted!")
class QPSTestCase(unittest.TestCase):
def setUp(self):
self.options = AttrDict({
"ignore": True,
"dt_re": r'^\[(.*?)\]',
"dateformat": "%d/%b/%Y:%H:%M:%S -0700",
"window_size": 15
})
self.fh = StringIO(
'[10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" \n' \
'[10/Oct/2000:13:55:38 -0700] "GET /apache_pb.gif HTTP/1.0" \n' \
'[10/Oct/2000:13:56:59 -0700] "GET /apache_pb.gif HTTP/1.0" \n' \
'[10/Oct/2000:13:57:01 -0700] "GET /apache_pb.gif HTTP/1.0" \n' \
'[11/Oct/2000:14:01:00 -0700] "GET /apache_pb.gif HTTP/1.0" \n' \
'[11/Oct/2000:14:01:13 -0700] "GET /apache_pb.gif HTTP/1.0" \n' \
'[11/Oct/2000:14:01:14 -0700] "GET /apache_pb.gif HTTP/1.0" \n'
)
def testQps(self):
blocks=0
qs=[]
for q in qps(fh=self.fh, **self.options):
blocks+=1
qs.append(q)
self.assertEquals(blocks, 3, "qps output size different than expected: %s" % str(blocks))
class PlotTestCase(unittest.TestCase):
def setUp(self):
self.fh = StringIO("\n".join([
'5 five', '1 one', '300 threehundred', '500 fivehundred',
'0 zero', '-1 minusone', '670 sixhundredseventy', '1000 thousand',
'22 twentytwo', '80 eighty', '3 three'
]))
def testGChart(self):
try:
import pygooglechart
except ImportError:
print >> sys.stderr, "pygooglechart Python package not available - skipping logplot gchart unittest."
return
options = AttrDict({
'backend': 'gchart',
'output': False,
'limit': 10,
'field': 1,
'delimiter': ' ',
'legend': True,
'width': 600,
'height': 300
})
chart = None
for plot_type in ('pie', 'line'):
self.fh.seek(0)
options['type'] = plot_type
chart = logplot(options, None, self.fh)
self.assertNotEquals(chart, None, "logplot returned None. Expected a Plot object")
# Should raise ValueError here due to fh being at EOF
self.assertRaises(ValueError, logplot, options, None, self.fh)
tmp_fh, tmp_fname = mkstemp()
chart.download(tmp_fname)
os.remove(tmp_fname)
class SumstatTestCase(unittest.TestCase):
def setUp(self):
self.data = StringIO('\n'.join([
'500 val1',
'440 val2',
'320 val3',
'85 val4',
'13 val5'
]))
self.avg = 271.6
self.N = 1358
self.M = 5
def testSumstat(self):
stat = sumstat(fh=self.data, delimiter=' ', reverse=True)
self.assertEquals(stat['M'], self.M)
self.assertEquals(stat['N'], self.N)
self.assertEquals(stat['avg'], self.avg)
if __name__ == "__main__":
unittest.main()
|
|
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tarfile
from urllib import quote, unquote
from xml.sax import saxutils
from time import time
from eventlet import sleep
import zlib
from swift.common.swob import Request, HTTPBadGateway, \
HTTPCreated, HTTPBadRequest, HTTPNotFound, HTTPUnauthorized, HTTPOk, \
HTTPPreconditionFailed, HTTPRequestEntityTooLarge, HTTPNotAcceptable, \
HTTPLengthRequired, HTTPException, HTTPServerError, wsgify
from swift.common.utils import json, get_logger, register_swift_info
from swift.common import constraints
from swift.common.http import HTTP_UNAUTHORIZED, HTTP_NOT_FOUND, HTTP_CONFLICT
class CreateContainerError(Exception):
def __init__(self, msg, status_int, status):
self.status_int = status_int
self.status = status
Exception.__init__(self, msg)
ACCEPTABLE_FORMATS = ['text/plain', 'application/json', 'application/xml',
'text/xml']
def get_response_body(data_format, data_dict, error_list):
"""
Returns a properly formatted response body according to format. Handles
json and xml, otherwise will return text/plain. Note: xml response does not
include xml declaration.
:params data_format: resulting format
:params data_dict: generated data about results.
:params error_list: list of quoted filenames that failed
"""
if data_format == 'application/json':
data_dict['Errors'] = error_list
return json.dumps(data_dict)
if data_format and data_format.endswith('/xml'):
output = '<delete>\n'
for key in sorted(data_dict):
xml_key = key.replace(' ', '_').lower()
output += '<%s>%s</%s>\n' % (xml_key, data_dict[key], xml_key)
output += '<errors>\n'
output += '\n'.join(
['<object>'
'<name>%s</name><status>%s</status>'
'</object>' % (saxutils.escape(name), status) for
name, status in error_list])
output += '</errors>\n</delete>\n'
return output
output = ''
for key in sorted(data_dict):
output += '%s: %s\n' % (key, data_dict[key])
output += 'Errors:\n'
output += '\n'.join(
['%s, %s' % (name, status)
for name, status in error_list])
return output
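# Illustrative example (not part of the original module): for
#   get_response_body('application/json', {'Number Deleted': 1}, [['obj1', '404 Not Found']])
# the returned body would be a JSON document along the lines of
#   {"Number Deleted": 1, "Errors": [["obj1", "404 Not Found"]]}
# while the text/plain fallback renders "Number Deleted: 1" followed by an
# "Errors:" listing.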
def pax_key_to_swift_header(pax_key):
if (pax_key == u"SCHILY.xattr.user.mime_type" or
pax_key == u"LIBARCHIVE.xattr.user.mime_type"):
return "Content-Type"
elif pax_key.startswith(u"SCHILY.xattr.user.meta."):
useful_part = pax_key[len(u"SCHILY.xattr.user.meta."):]
return "X-Object-Meta-" + useful_part.encode("utf-8")
elif pax_key.startswith(u"LIBARCHIVE.xattr.user.meta."):
useful_part = pax_key[len(u"LIBARCHIVE.xattr.user.meta."):]
return "X-Object-Meta-" + useful_part.encode("utf-8")
else:
# You can get things like atime/mtime/ctime or filesystem ACLs in
# pax headers; those aren't really user metadata. The same goes for
# other, non-user metadata.
return None
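# For example, a pax header key of u"SCHILY.xattr.user.meta.color" becomes the
# Swift header "X-Object-Meta-color", the mime_type keys become "Content-Type",
# and anything else (timestamps, filesystem ACLs, ...) maps to None and is
# ignored by the extraction loop below.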
class Bulk(object):
"""
Middleware that will do many operations on a single request.
Extract Archive:
Expand tar files into a swift account. Request must be a PUT with the
query parameter ?extract-archive=format specifying the format of archive
file. Accepted formats are tar, tar.gz, and tar.bz2.
For a PUT to the following url:
/v1/AUTH_Account/$UPLOAD_PATH?extract-archive=tar.gz
UPLOAD_PATH is where the files will be expanded to. UPLOAD_PATH can be a
container, a pseudo-directory within a container, or an empty string. The
destination of a file in the archive will be built as follows:
/v1/AUTH_Account/$UPLOAD_PATH/$FILE_PATH
Where FILE_PATH is the file name from the listing in the tar file.
If the UPLOAD_PATH is an empty string, containers will be auto created
accordingly and files in the tar that would not map to any container (files
in the base directory) will be ignored.
Only regular files will be uploaded. Empty directories, symlinks, etc will
not be uploaded.
The response from bulk operations functions differently from other swift
responses. This is because a short request body sent from the client could
result in many operations on the proxy server and precautions need to be
made to prevent the request from timing out due to lack of activity. To
this end, the client will always receive a 200 OK response, regardless of
the actual success of the call. The body of the response must be parsed to
determine the actual success of the operation. In addition to this the
client may receive zero or more whitespace characters prepended to the
actual response body while the proxy server is completing the request.
The format of the response body defaults to text/plain but can be either
json or xml depending on the Accept header. Acceptable formats are
text/plain, application/json, application/xml, and text/xml. An example
body is as follows:
{"Response Status": "201 Created",
"Response Body": "",
"Errors": [],
"Number Files Created": 10}
If all valid files were uploaded successfully the Response Status will be
201 Created. If any files failed to be created the response code
corresponds to the subrequest's error. Possible codes are 400, 401, 502 (on
server errors), etc. In both cases the response body will specify the
number of files successfully uploaded and a list of the files that failed.
There are proxy logs created for each file (which becomes a subrequest) in
the tar. The subrequest's proxy log will have a swift.source set to "EA";
the log's content length will reflect the unzipped size of the file. If
double proxy-logging is used the leftmost logger will not have a
swift.source set and the content length will reflect the size of the
payload sent to the proxy (the unexpanded size of the tar.gz).
Bulk Delete:
Will delete multiple objects or containers from their account with a
single request. Responds to POST requests with query parameter
?bulk-delete set. The request url is your storage url. The Content-Type
should be set to text/plain. The body of the POST request will be a
newline separated list of url encoded objects to delete. You can delete
10,000 (configurable) objects per request. The objects specified in the
POST request body must be URL encoded and in the form:
/container_name/obj_name
or for a container (which must be empty at time of delete)
/container_name
The response is similar to extract archive as in every response will be a
200 OK and you must parse the response body for actual results. An example
response is:
{"Number Not Found": 0,
"Response Status": "200 OK",
"Response Body": "",
"Errors": [],
"Number Deleted": 6}
If all items were successfully deleted (or did not exist), the Response
Status will be 200 OK. If any failed to delete, the response code
corresponds to the subrequest's error. Possible codes are 400, 401, 502 (on
server errors), etc. In all cases the response body will specify the number
of items successfully deleted, not found, and a list of those that failed.
The return body will be formatted in the way specified in the request's
Accept header. Acceptable formats are text/plain, application/json,
application/xml, and text/xml.
There are proxy logs created for each object or container (which becomes a
subrequest) that is deleted. The subrequest's proxy log will have a
swift.source set to "BD" and a content length of 0. If double
proxy-logging is used the leftmost logger will not have a
swift.source set and the content length will reflect the size of the
payload sent to the proxy (the list of objects/containers to be deleted).
"""
def __init__(self, app, conf, max_containers_per_extraction=10000,
max_failed_extractions=1000, max_deletes_per_request=10000,
max_failed_deletes=1000, yield_frequency=10, retry_count=0,
retry_interval=1.5, logger=None):
self.app = app
self.logger = logger or get_logger(conf, log_route='bulk')
self.max_containers = max_containers_per_extraction
self.max_failed_extractions = max_failed_extractions
self.max_failed_deletes = max_failed_deletes
self.max_deletes_per_request = max_deletes_per_request
self.yield_frequency = yield_frequency
self.retry_count = retry_count
self.retry_interval = retry_interval
self.max_path_length = constraints.MAX_OBJECT_NAME_LENGTH \
+ constraints.MAX_CONTAINER_NAME_LENGTH + 2
def create_container(self, req, container_path):
"""
Checks if the container exists and if not try to create it.
:params container_path: an unquoted path to a container to be created
:returns: True if created container, False if container exists
:raises: CreateContainerError when unable to create container
"""
new_env = req.environ.copy()
new_env['PATH_INFO'] = container_path
new_env['swift.source'] = 'EA'
new_env['REQUEST_METHOD'] = 'HEAD'
head_cont_req = Request.blank(container_path, environ=new_env)
resp = head_cont_req.get_response(self.app)
if resp.is_success:
return False
if resp.status_int == 404:
new_env = req.environ.copy()
new_env['PATH_INFO'] = container_path
new_env['swift.source'] = 'EA'
new_env['REQUEST_METHOD'] = 'PUT'
create_cont_req = Request.blank(container_path, environ=new_env)
resp = create_cont_req.get_response(self.app)
if resp.is_success:
return True
raise CreateContainerError(
"Create Container Failed: " + container_path,
resp.status_int, resp.status)
def get_objs_to_delete(self, req):
"""
Will populate objs_to_delete with data from request input.
:params req: a Swob request
:returns: a list of the contents of req.body when separated by newline.
:raises: HTTPException on failures
"""
line = ''
data_remaining = True
objs_to_delete = []
if req.content_length is None and \
req.headers.get('transfer-encoding', '').lower() != 'chunked':
raise HTTPLengthRequired(request=req)
while data_remaining:
if '\n' in line:
obj_to_delete, line = line.split('\n', 1)
obj_to_delete = obj_to_delete.strip()
objs_to_delete.append(
{'name': unquote(obj_to_delete)})
else:
data = req.body_file.read(self.max_path_length)
if data:
line += data
else:
data_remaining = False
obj_to_delete = line.strip()
if obj_to_delete:
objs_to_delete.append(
{'name': unquote(obj_to_delete)})
if len(objs_to_delete) > self.max_deletes_per_request:
raise HTTPRequestEntityTooLarge(
'Maximum Bulk Deletes: %d per request' %
self.max_deletes_per_request)
if len(line) > self.max_path_length * 2:
raise HTTPBadRequest('Invalid File Name')
return objs_to_delete
def handle_delete_iter(self, req, objs_to_delete=None,
user_agent='BulkDelete', swift_source='BD',
out_content_type='text/plain'):
"""
A generator that can be assigned to a swob Response's app_iter which,
when iterated over, will delete the objects specified in request body.
Will occasionally yield whitespace while request is being processed.
When the request is completed will yield a response body that can be
parsed to determine success. See above documentation for details.
:params req: a swob Request
:params objs_to_delete: a list of dictionaries that specifies the
objects to be deleted. If None, uses self.get_objs_to_delete to
query request.
"""
last_yield = time()
separator = ''
failed_files = []
resp_dict = {'Response Status': HTTPOk().status,
'Response Body': '',
'Number Deleted': 0,
'Number Not Found': 0}
try:
if not out_content_type:
raise HTTPNotAcceptable(request=req)
if out_content_type.endswith('/xml'):
yield '<?xml version="1.0" encoding="UTF-8"?>\n'
try:
vrs, account, _junk = req.split_path(2, 3, True)
except ValueError:
raise HTTPNotFound(request=req)
incoming_format = req.headers.get('Content-Type')
if incoming_format and \
not incoming_format.startswith('text/plain'):
# For now only accept newline separated object names
raise HTTPNotAcceptable(request=req)
if objs_to_delete is None:
objs_to_delete = self.get_objs_to_delete(req)
failed_file_response = {'type': HTTPBadRequest}
req.environ['eventlet.minimum_write_chunk_size'] = 0
for obj_to_delete in objs_to_delete:
if last_yield + self.yield_frequency < time():
separator = '\r\n\r\n'
last_yield = time()
yield ' '
obj_name = obj_to_delete['name']
if not obj_name:
continue
if len(failed_files) >= self.max_failed_deletes:
raise HTTPBadRequest('Max delete failures exceeded')
if obj_to_delete.get('error'):
if obj_to_delete['error']['code'] == HTTP_NOT_FOUND:
resp_dict['Number Not Found'] += 1
else:
failed_files.append([quote(obj_name),
obj_to_delete['error']['message']])
continue
delete_path = '/'.join(['', vrs, account,
obj_name.lstrip('/')])
if not constraints.check_utf8(delete_path):
failed_files.append([quote(obj_name),
HTTPPreconditionFailed().status])
continue
new_env = req.environ.copy()
new_env['PATH_INFO'] = delete_path
del(new_env['wsgi.input'])
new_env['CONTENT_LENGTH'] = 0
new_env['REQUEST_METHOD'] = 'DELETE'
new_env['HTTP_USER_AGENT'] = \
'%s %s' % (req.environ.get('HTTP_USER_AGENT'), user_agent)
new_env['swift.source'] = swift_source
self._process_delete(delete_path, obj_name, new_env, resp_dict,
failed_files, failed_file_response)
if failed_files:
resp_dict['Response Status'] = \
failed_file_response['type']().status
elif not (resp_dict['Number Deleted'] or
resp_dict['Number Not Found']):
resp_dict['Response Status'] = HTTPBadRequest().status
resp_dict['Response Body'] = 'Invalid bulk delete.'
except HTTPException as err:
resp_dict['Response Status'] = err.status
resp_dict['Response Body'] = err.body
except Exception:
self.logger.exception('Error in bulk delete.')
resp_dict['Response Status'] = HTTPServerError().status
yield separator + get_response_body(out_content_type,
resp_dict, failed_files)
def handle_extract_iter(self, req, compress_type,
out_content_type='text/plain'):
"""
A generator that can be assigned to a swob Response's app_iter which,
when iterated over, will extract and PUT the objects pulled from the
request body. Will occasionally yield whitespace while request is being
processed. When the request is completed will yield a response body
that can be parsed to determine success. See above documentation for
details.
:params req: a swob Request
:params compress_type: specifying the compression type of the tar.
Accepts '', 'gz', or 'bz2'
"""
resp_dict = {'Response Status': HTTPCreated().status,
'Response Body': '', 'Number Files Created': 0}
failed_files = []
last_yield = time()
separator = ''
containers_accessed = set()
try:
if not out_content_type:
raise HTTPNotAcceptable(request=req)
if out_content_type.endswith('/xml'):
yield '<?xml version="1.0" encoding="UTF-8"?>\n'
if req.content_length is None and \
req.headers.get('transfer-encoding',
'').lower() != 'chunked':
raise HTTPLengthRequired(request=req)
try:
vrs, account, extract_base = req.split_path(2, 3, True)
except ValueError:
raise HTTPNotFound(request=req)
extract_base = extract_base or ''
extract_base = extract_base.rstrip('/')
tar = tarfile.open(mode='r|' + compress_type,
fileobj=req.body_file)
failed_response_type = HTTPBadRequest
req.environ['eventlet.minimum_write_chunk_size'] = 0
containers_created = 0
while True:
if last_yield + self.yield_frequency < time():
separator = '\r\n\r\n'
last_yield = time()
yield ' '
tar_info = next(tar)
if tar_info is None or \
len(failed_files) >= self.max_failed_extractions:
break
if tar_info.isfile():
obj_path = tar_info.name
if obj_path.startswith('./'):
obj_path = obj_path[2:]
obj_path = obj_path.lstrip('/')
if extract_base:
obj_path = extract_base + '/' + obj_path
if '/' not in obj_path:
continue # ignore base level file
destination = '/'.join(
['', vrs, account, obj_path])
container = obj_path.split('/', 1)[0]
if not constraints.check_utf8(destination):
failed_files.append(
[quote(obj_path[:self.max_path_length]),
HTTPPreconditionFailed().status])
continue
if tar_info.size > constraints.MAX_FILE_SIZE:
failed_files.append([
quote(obj_path[:self.max_path_length]),
HTTPRequestEntityTooLarge().status])
continue
container_failure = None
if container not in containers_accessed:
cont_path = '/'.join(['', vrs, account, container])
try:
if self.create_container(req, cont_path):
containers_created += 1
if containers_created > self.max_containers:
raise HTTPBadRequest(
'More than %d containers to create '
'from tar.' % self.max_containers)
except CreateContainerError as err:
# the object PUT to this container still may
# succeed if acls are set
container_failure = [
quote(cont_path[:self.max_path_length]),
err.status]
if err.status_int == HTTP_UNAUTHORIZED:
raise HTTPUnauthorized(request=req)
except ValueError:
failed_files.append([
quote(obj_path[:self.max_path_length]),
HTTPBadRequest().status])
continue
tar_file = tar.extractfile(tar_info)
new_env = req.environ.copy()
new_env['REQUEST_METHOD'] = 'PUT'
new_env['wsgi.input'] = tar_file
new_env['PATH_INFO'] = destination
new_env['CONTENT_LENGTH'] = tar_info.size
new_env['swift.source'] = 'EA'
new_env['HTTP_USER_AGENT'] = \
'%s BulkExpand' % req.environ.get('HTTP_USER_AGENT')
create_obj_req = Request.blank(destination, new_env)
for pax_key, pax_value in tar_info.pax_headers.items():
header_name = pax_key_to_swift_header(pax_key)
if header_name:
# Both pax_key and pax_value are unicode
# strings; the key is already UTF-8 encoded, but
# we still have to encode the value.
create_obj_req.headers[header_name] = \
pax_value.encode("utf-8")
resp = create_obj_req.get_response(self.app)
containers_accessed.add(container)
if resp.is_success:
resp_dict['Number Files Created'] += 1
else:
if container_failure:
failed_files.append(container_failure)
if resp.status_int == HTTP_UNAUTHORIZED:
failed_files.append([
quote(obj_path[:self.max_path_length]),
HTTPUnauthorized().status])
raise HTTPUnauthorized(request=req)
if resp.status_int // 100 == 5:
failed_response_type = HTTPBadGateway
failed_files.append([
quote(obj_path[:self.max_path_length]),
resp.status])
if failed_files:
resp_dict['Response Status'] = failed_response_type().status
elif not resp_dict['Number Files Created']:
resp_dict['Response Status'] = HTTPBadRequest().status
resp_dict['Response Body'] = 'Invalid Tar File: No Valid Files'
except HTTPException as err:
resp_dict['Response Status'] = err.status
resp_dict['Response Body'] = err.body
except (tarfile.TarError, zlib.error) as tar_error:
resp_dict['Response Status'] = HTTPBadRequest().status
resp_dict['Response Body'] = 'Invalid Tar File: %s' % tar_error
except Exception:
self.logger.exception('Error in extract archive.')
resp_dict['Response Status'] = HTTPServerError().status
yield separator + get_response_body(
out_content_type, resp_dict, failed_files)
def _process_delete(self, delete_path, obj_name, env, resp_dict,
failed_files, failed_file_response, retry=0):
delete_obj_req = Request.blank(delete_path, env)
resp = delete_obj_req.get_response(self.app)
if resp.status_int // 100 == 2:
resp_dict['Number Deleted'] += 1
elif resp.status_int == HTTP_NOT_FOUND:
resp_dict['Number Not Found'] += 1
elif resp.status_int == HTTP_UNAUTHORIZED:
failed_files.append([quote(obj_name),
HTTPUnauthorized().status])
elif resp.status_int == HTTP_CONFLICT and \
self.retry_count > 0 and self.retry_count > retry:
retry += 1
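# back off before retrying; with the default interval of 1.5 the waits grow
# as 1.5s, 2.25s, ... (retry_interval ** retry)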
sleep(self.retry_interval ** retry)
self._process_delete(delete_path, obj_name, env, resp_dict,
failed_files, failed_file_response,
retry)
else:
if resp.status_int // 100 == 5:
failed_file_response['type'] = HTTPBadGateway
failed_files.append([quote(obj_name), resp.status])
@wsgify
def __call__(self, req):
extract_type = req.params.get('extract-archive')
resp = None
if extract_type is not None and req.method == 'PUT':
archive_type = {
'tar': '', 'tar.gz': 'gz',
'tar.bz2': 'bz2'}.get(extract_type.lower().strip('.'))
if archive_type is not None:
resp = HTTPOk(request=req)
out_content_type = req.accept.best_match(ACCEPTABLE_FORMATS)
if out_content_type:
resp.content_type = out_content_type
resp.app_iter = self.handle_extract_iter(
req, archive_type, out_content_type=out_content_type)
else:
resp = HTTPBadRequest("Unsupported archive format")
if 'bulk-delete' in req.params and req.method in ['POST', 'DELETE']:
resp = HTTPOk(request=req)
out_content_type = req.accept.best_match(ACCEPTABLE_FORMATS)
if out_content_type:
resp.content_type = out_content_type
resp.app_iter = self.handle_delete_iter(
req, out_content_type=out_content_type)
return resp or self.app
def filter_factory(global_conf, **local_conf):
conf = global_conf.copy()
conf.update(local_conf)
max_containers_per_extraction = \
int(conf.get('max_containers_per_extraction', 10000))
max_failed_extractions = int(conf.get('max_failed_extractions', 1000))
max_deletes_per_request = int(conf.get('max_deletes_per_request', 10000))
max_failed_deletes = int(conf.get('max_failed_deletes', 1000))
yield_frequency = int(conf.get('yield_frequency', 10))
retry_count = int(conf.get('delete_container_retry_count', 0))
retry_interval = 1.5
register_swift_info(
'bulk_upload',
max_containers_per_extraction=max_containers_per_extraction,
max_failed_extractions=max_failed_extractions)
register_swift_info(
'bulk_delete',
max_deletes_per_request=max_deletes_per_request,
max_failed_deletes=max_failed_deletes)
def bulk_filter(app):
return Bulk(
app, conf,
max_containers_per_extraction=max_containers_per_extraction,
max_failed_extractions=max_failed_extractions,
max_deletes_per_request=max_deletes_per_request,
max_failed_deletes=max_failed_deletes,
yield_frequency=yield_frequency,
retry_count=retry_count,
retry_interval=retry_interval)
return bulk_filter
|
|
import json
import keyword
import textwrap
import re
prologue_template = """
# XXX This code file has been automatically generated. Do not edit it.
from .base import Resource, register_resource
__all__ = []
"""
def generate_prologue():
print(prologue_template)
first_cap_re = re.compile('(.)([A-Z][a-z]+)')
all_cap_re = re.compile('([a-z0-9])([A-Z])')
def convert_camel_to_snake(name):
s1 = first_cap_re.sub(r'\1_\2', name)
return all_cap_re.sub(r'\1_\2', s1).lower()
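# e.g. convert_camel_to_snake('DeploymentConfigSpec') -> 'deployment_config_spec'
# and convert_camel_to_snake('apiVersion') -> 'api_version'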
def map_property_name(name):
if keyword.iskeyword(name):
name = name + '_'
return convert_camel_to_snake(name)
# The schema data appears to be providing incorrect data about what
# fields are required. Appears that the schema is really tracking what
# properties the server will always fill in with data, rather than
# properly indicating what should be provided. Override these for now.
_required_property_overrides = {
'v1.BuildConfig': { 'status': False },
'v1.BuildSource': { 'secrets': False },
'v1.DeploymentConfig': { 'status': False },
'v1.DeploymentConfigSpec': { 'test': False, 'strategy': False },
'v1.ImageStream': { 'spec': False },
'v1.Route': { 'status': False },
'v1.RouteSpec': { 'host': False },
'v1.RouteTargetReference': { 'weight': False },
'v1.TagReference': { 'annotations': False, 'generation': False }
}
def required_properties(type_name, definition):
required = definition.get('required', [])
new_required = []
overrides = _required_property_overrides.get(type_name, {})
for name, flag in overrides.items():
if flag:
new_required.append(name)
for name in required:
if overrides.get(name, True):
new_required.append(name)
return sorted(new_required)
# The schema data in some cases provides the incorrect type for a
# property. Override these for now.
_property_type_overrides = {
'v1.Capabilities' : { 'drop': 'type:string' },
'v1.PersistentVolumeClaimSpec' : { 'accessModes': 'type:string' }
}
# A class definition is generated for each resource from the model in
# the schema data. These will all be added to a global registry so that,
# when decoding JSON, a definition can automatically be mapped to the
# correct type.
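# A rough sketch of the output emitted for a hypothetical model named
# 'v1.Example' with a single required 'metadata' property (illustrative only,
# not produced verbatim by this script):
#
#     @register_resource
#     class v1_Example(Resource):
#         __kind__ = 'v1.Example'
#         __fields__ = {'metadata': 'metadata'}
#         __types__ = {'metadata': 'v1.ObjectMeta'}
#         __required__ = set(['metadata'])
#         metadata = None # v1.ObjectMeta (required)
#         def __init__(self, *, metadata, **_kwargs_):
#             self.metadata = metadata
#             super().__init__(**_kwargs_)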
def generate_resources(models):
types = []
for type_name, definition in sorted(models):
print()
class_name = '%s' % type_name.replace('.', '_').replace('*', '')
types.append(class_name)
print('@register_resource')
print('class %s(Resource):' % class_name)
print()
if definition.get('description'):
print(' """%s"""' % '\n'.join(textwrap.wrap(
definition['description'], width=70, initial_indent='',
subsequent_indent=' ')))
print()
print(' __kind__ = %s' % repr(type_name))
print()
print(' __fields__ = {')
for property_name in sorted(definition['properties'].keys()):
print(' %r: %r,' % (map_property_name(property_name),
property_name))
print(' }')
print()
refs = {}
for property_name, details in sorted(definition['properties'].items()):
if '$ref' in details:
override = _property_type_overrides.get(type_name, {}).get(
property_name, None)
if override:
if override.startswith('$ref:'):
refs[property_name] = override.split(':')[1]
elif override.startswith('type:'):
pass
else:
refs[property_name] = details['$ref']
elif ('type' in details and details['type'] == 'array' and
'$ref' in details['items']):
override = _property_type_overrides.get(type_name, {}).get(
property_name, None)
if override:
if override.startswith('$ref:'):
refs[property_name] = override.split(':')[1]
elif override.startswith('type:'):
pass
else:
refs[property_name] = details['items']['$ref']
print(' __types__ = {')
for property_name, property_type_name in sorted(refs.items()):
print(' %r: %r,' % (map_property_name(property_name),
property_type_name))
print(' }')
print()
required = required_properties(type_name, definition)
if required:
print(' __required__ = set([')
for property_name in sorted(required):
print(' %r,' % map_property_name(property_name))
print(' ])')
else:
print(' __required__ = set()')
print()
for property_name, details in sorted(definition['properties'].items()):
property_type = details.get('type')
if not property_type:
property_type = details.get('$ref', '???')
print(' %s = None # %s' % (map_property_name(property_name),
property_type), end='')
if property_name in required:
print(' (required)')
else:
print()
print()
print(' def __init__(self', end='')
if required:
print(', *', end='')
for required_name in sorted(required):
print(', %s' % map_property_name(required_name), end='')
print(', **_kwargs_):')
for property_name, details in sorted(definition['properties'].items()):
if property_name not in required:
if details.get('type') == 'array':
print(' self.%s = []' % map_property_name(property_name))
if 'kind' in definition['properties']:
print()
print(' self.kind = %s' % repr(type_name.split('.')[-1]))
if 'apiVersion' in definition['properties']:
try:
api_version, _ = type_name.split('.')
except ValueError:
pass
else:
if api_version != 'unversioned':
print()
print(' self.api_version = %r' % api_version)
if required:
print()
for required_name in sorted(required):
print(' self.%s = %s' % (map_property_name(
required_name), map_property_name(required_name)))
print()
print(' super().__init__(**_kwargs_)')
print()
print('__all__.extend(%r)' % types)
generate_prologue()
schema = json.loads(open('schemas/openshift-v1-api.json').read())
generate_resources(schema['models'].items())
schema = json.loads(open('schemas/openshift-v1-oapi.json').read())
generate_resources(schema['models'].items())
|
|
"""Curve Interpolators"""
from __future__ import absolute_import, division, print_function
import numpy as np
from scipy.interpolate import PPoly, CubicSpline, interp1d
from scipy.linalg import solve_banded, solve
from numpy import repeat, prod, arange
from numpy.matlib import repmat
from six import string_types
class CurveInterpolator(object):
def __init__(self, *args, **kwargs):
pass
def __call__(self, x, *args, **kwargs):
raise NotImplementedError("CurveInterpolator is an abstract base class.")
class NodeInterpolator(CurveInterpolator):
"""Interpolation on curves defined by node points.
Interpolated value, y(x) is linear in node values y_i(x_i),
i.e. y(x) = sum_i a_i(x) * y_i(x_i).
Weights depend on Interpolation Scheme, and must be calibrated.
"""
def __init__(self, x, y, interpolator, *args, **kwargs):
super(NodeInterpolator, self).__init__()
self.x = x
self.y = y
self.n_nodes = len(x)
if self.n_nodes != len(y):
raise ValueError("Length of x ({}) differs from length of y ({})"
.format(self.n_nodes, len(y)))
self._fn = interpolator(x, y, *args, **kwargs)
def __call__(self, x, *args, **kwargs):
return self._fn(x, *args, **kwargs)
class PiecewiseLinear(CurveInterpolator):
"""Piecewise Linear NodeInterpolator.
Parameters
----------
x : array_like, shape (n,)
1-d array containing values of the independent variable.
Values must be real, finite and in strictly increasing order.
y : array_like
Array containing values of the dependent variable. It can have
arbitrary number of dimensions, but the length along `axis` (see below)
must match the length of `x`. Values must be finite.
axis : int, optional
Axis along which `y` is assumed to be varying. Meaning that for
``x[i]`` the corresponding values are ``np.take(y, i, axis=axis)``.
Default is 0.
extrapolate : 2-tuple, optional
Condition to be applied above and below bounds of x.
The first value applies to values below x[0].
The second value applies to values above x[-1].
* None: No extrapolation. NaN is returned.
* 'clamped': The first derivative at the curve's ends is zero.
Equivalent to a value of 0.0.
* 'natural': The derivative beyond the last point is equal to the
derivative of the closest interval.
* A number: derivative outside bound.
Attributes
----------
x : ndarray, shape (n,)
Breakpoints. The same `x` which was passed to the constructor.
y: ndarray, shape (n, ...)
Methods
-------
__call__
node_derivative
See Also
--------
CubicSplineWithNodeSens, scipy.interpolate
Examples
--------
In this example, we use PiecewiseLinear to interpolate a sampled sinusoid.
>>> from pennies.market.interpolate import PiecewiseLinear
>>> import matplotlib.pyplot as plt
>>> x = np.arange(10)
>>> y = np.sin(x)
>>> interp = PiecewiseLinear(x, y)
>>> xs = np.arange(-0.5, 9.6, 0.1)
>>> p = plt.figure(figsize=(6.5, 4))
>>> p = plt.plot(x, y, 'o', label='data')
>>> p = plt.plot(xs, np.sin(xs), label='true')
>>> p = plt.plot(xs, interp(xs), label='piecewise linear')
>>> p = plt.legend(loc='lower left', ncol=3)
>>> p = plt.xlim(-0.5, 9.5)
>>> #plt.show()
"""
def __init__(self, x, y, extrapolate=('clamped', 'clamped')):
super(PiecewiseLinear, self).__init__(x, y)
self.x = x
self.y = y
self.n_nodes = len(x)
if self.n_nodes != len(y):
raise ValueError("Length of x ({}) differs from length of y ({})"
.format(self.n_nodes, len(y)))
self.extrapolate = extrapolate
# Store slopes (or recompute for each __call__?)
self.slopes = np.zeros((self.n_nodes + 1,) + y.shape[1:])
# Interpolation.
self.slopes[1:-1] = (y[1:] - y[:-1]) / (x[1:] - x[:-1])
# Extrapolation: Low
if self.extrapolate[0] == 'clamped' or self.extrapolate[0] == 0:
self.slopes[0] = 0.0
elif self.extrapolate[0] is None:
self.slopes[0] = np.NaN
elif self.extrapolate[0] == 'natural':
self.slopes[0] = self.slopes[1]
elif isinstance(self.extrapolate[0], (int, float, np.number)):
self.slopes[0] = self.extrapolate[0]
else:
raise ValueError('1st member of extrapolate kwarg must be one of '
'natural, clamped, None, or a value for the slope')
# Extrapolation: High
if self.extrapolate[1] == 'clamped' or self.extrapolate[1] == 0:
self.slopes[-1] = 0.0
elif self.extrapolate[1] is None:
self.slopes[-1] = np.NaN
elif self.extrapolate[1] == 'natural':
self.slopes[-1] = self.slopes[-2]
elif isinstance(self.extrapolate[1], (int, float, np.number)):
self.slopes[-1] = self.extrapolate[1]
else:
raise ValueError('2nd member of extrapolate kwarg must be one of '
'natural, clamped, None, or a value for the slope')
def __call__(self, x, *args, **kwargs):
""" Estimate y, given x."""
i = np.digitize(x, self.x)
return (self.y[np.maximum(i-1, 0)] +
self.slopes[i] * (x - self.x[np.maximum(i-1, 0)]))
def node_derivative(self, x):
"""Sensitivity of y(x) to a unit move in each node's y-value
Parameters
----------
x : array-like
Points to evaluate the interpolant at.
Returns
-------
dy(x)/dy_i : array-like
Array of shape x.shape + (n_nodes,) giving, for each evaluation
point, the sensitivity of y(x) to each node value y_i.
"""
x = np.asarray(x)
ndim = x.ndim
x_is_num = ndim == 0
if x_is_num:
x = np.array([x])
ndim = 1
idx = np.digitize(x, self.x) # n+1 possible values
# apply min/max to ensure out-of-bounds isn't triggered by interp
# this also happens to produce the correct result for 'natural' extrap
idx_safe = np.maximum(np.minimum(idx, self.n_nodes-1), 1) # values in 1..n-1
y_deriv = np.zeros(x.shape + (self.n_nodes,))
inv_dx = 1.0 / (self.x[idx_safe] - self.x[idx_safe - 1])
weight_right = ((x - self.x[idx_safe - 1]) * inv_dx).ravel()
weight_left = ((self.x[idx_safe] - x) * inv_dx).ravel()
# the following produces the indices in form [[x0], [x1],..[xlast]]
# where x0 is an array of the indices to the 0'th axis of each point
# http://docs.scipy.org/doc/numpy/user/basics.indexing.html#indexing-multi-dimensional-arrays
indices = [repmat(repeat(range(x.shape[i]), prod(x.shape[i + 1:])),
1, int(prod(x.shape[:i])))
for i in range(ndim)]
idx_flat = idx_safe.ravel()
y_deriv[indices + [idx_flat]] = weight_right.ravel()
y_deriv[indices + [idx_flat - 1]] = weight_left.ravel()
# Extrapolation below
if self.extrapolate[0] == 'natural':
pass
elif (self.extrapolate[0] == 'clamped' or
np.isscalar(self.extrapolate[0])): # Slope is fixed
extrap = idx == 0
y_deriv[extrap, 0] = 1.
y_deriv[extrap, 1:] = 0
elif self.extrapolate[0] is None:
y_deriv[idx == 0, ...] = np.NaN
else:
raise ValueError('1st member of extrapolate kwarg must be one of '
'natural, clamped, None, or a scalar slope value')
# Extrapolation above
if self.extrapolate[1] == 'natural':
pass
elif (self.extrapolate[1] == 'clamped' or
np.isscalar(self.extrapolate[1])): # Slope is fixed
extrap = idx == self.n_nodes
y_deriv[extrap, -1] = 1.
y_deriv[extrap, :-1] = 0
elif self.extrapolate[1] is None:
extrap = idx == self.n_nodes
y_deriv[extrap, -1] = np.NaN
else:
raise ValueError('2nd member of extrapolate kwarg must be one of '
'natural, clamped, None, or a scalar slope value')
if x_is_num:
y_deriv = y_deriv[0,...]
return y_deriv
class CubicSplineWithNodeSens(PPoly):
"""Cubic spline data interpolator.
Interpolate data with a piecewise cubic polynomial which is twice
continuously differentiable [1]_. The result is represented as a `PPoly`
instance with breakpoints matching the given data.
Parameters
----------
x : array_like, shape (n,)
1-d array containing values of the independent variable.
Values must be real, finite and in strictly increasing order.
y : array_like
Array containing values of the dependent variable. It can have
arbitrary number of dimensions, but the length along `axis` (see below)
must match the length of `x`. Values must be finite.
axis : int, optional
Axis along which `y` is assumed to be varying. Meaning that for
``x[i]`` the corresponding values are ``np.take(y, i, axis=axis)``.
Default is 0.
bc_type : string or 2-tuple, optional
Boundary condition type. Two additional equations, given by the
boundary conditions, are required to determine all coefficients of
polynomials on each segment [2]_.
If `bc_type` is a string, then the specified condition will be applied
at both ends of a spline. Available conditions are:
* 'not-a-knot' (default): The first and second segment at a curve end
are the same polynomial. It is a good default when there is no
information on boundary conditions.
* 'periodic': The interpolated function is assumed to be periodic
of period ``x[-1] - x[0]``. The first and last value of `y` must be
identical: ``y[0] == y[-1]``. This boundary condition will result in
``y'[0] == y'[-1]`` and ``y''[0] == y''[-1]``.
* 'clamped': The first derivative at the curve's ends is zero. Assuming
a 1D `y`, ``bc_type=((1, 0.0), (1, 0.0))`` is the same condition.
* 'natural': The second derivative at the curve's ends is zero. Assuming
a 1D `y`, ``bc_type=((2, 0.0), (2, 0.0))`` is the same condition.
If `bc_type` is a 2-tuple, the first and the second value will be
applied at the curve start and end respectively. The tuple values can
be one of the previously mentioned strings (except 'periodic') or a
tuple `(order, deriv_values)` allowing to specify arbitrary
derivatives at curve ends:
* `order`: the derivative order, 1 or 2.
* `deriv_value`: array_like containing derivative values, shape must
be the same as `y`, excluding `axis` dimension. For example, if `y`
is 1D, then `deriv_value` must be a scalar. If `y` is 3D with the
shape (n0, n1, n2) and axis=2, then `deriv_value` must be 2D
and have the shape (n0, n1).
extrapolate : {bool, 'periodic', None}, optional
If bool, determines whether to extrapolate to out-of-bounds points
based on first and last intervals, or to return NaNs. If 'periodic',
periodic extrapolation is used. If None (default), `extrapolate` is
set to 'periodic' for ``bc_type='periodic'`` and to True otherwise.
Attributes
----------
x : ndarray, shape (n,)
Breakpoints. The same `x` which was passed to the constructor.
c : ndarray, shape (4, n-1, ...)
Coefficients of the polynomials on each segment. The trailing
dimensions match the dimensions of `y`, excluding `axis`. For example,
if `y` is 1-d, then ``c[k, i]`` is a coefficient for
``(x-x[i])**(3-k)`` on the segment between ``x[i]`` and ``x[i+1]``.
axis : int
Interpolation axis. The same `axis` which was passed to the
constructor.
Methods
-------
__call__
derivative
antiderivative
integrate
roots
See Also
--------
Akima1DInterpolator
PchipInterpolator
PPoly
Notes
-----
Parameters `bc_type` and `extrapolate` work independently, i.e. the former
controls only construction of a spline, and the latter only evaluation.
When a boundary condition is 'not-a-knot' and n = 2, it is replaced by
a condition that the first derivative is equal to the linear interpolant
slope. When both boundary conditions are 'not-a-knot' and n = 3, the
solution is sought as a parabola passing through given points.
    When 'not-a-knot' boundary conditions are applied to both ends, the
resulting spline will be the same as returned by `splrep` (with ``s=0``)
and `InterpolatedUnivariateSpline`, but these two methods use a
representation in B-spline basis.
.. versionadded:: 0.18.0
Examples
--------
In this example the cubic spline is used to interpolate a sampled sinusoid.
>>> from pennies.market.interpolate import CubicSplineWithNodeSens
>>> import matplotlib.pyplot as plt
>>> x = np.arange(10)
>>> y = np.sin(x)
>>> cs = CubicSplineWithNodeSens(x, y)
>>> xs = np.arange(-0.5, 9.6, 0.1)
>>> p = plt.figure(figsize=(6.5, 4))
>>> p = plt.plot(x, y, 'o', label='data')
>>> p = plt.plot(xs, np.sin(xs), label='true')
>>> p = plt.plot(xs, cs(xs), label="S")
>>> p = plt.xlim(-0.5, 9.5)
>>> p = plt.legend(loc='lower left', ncol=3)
>>> #plt.show()
The second example is the interpolation of a polynomial y = x**3 on the
    interval 0 <= x <= 1. A cubic spline can represent this function exactly.
To achieve that we need to specify values and first derivatives at
endpoints of the interval. Note that y' = 3 * x**2 and thus y'(0) = 0 and
y'(1) = 3.
>>> cs = CubicSplineWithNodeSens([0, 0.5, 1], [0, 0.125, 1], bc_type=((1, 0), (1, 3)))
>>> x = np.linspace(0, 1)
>>> np.allclose(x**3, cs(x))
True
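    The feature that distinguishes this class from a plain `CubicSpline` is
    `node_derivative`, which returns the sensitivity of the interpolated
    value to a bump in each node's y value. Continuing the previous example:
    >>> sens = cs.node_derivative(0.75)  # shape (3,): one entry per node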
References
----------
.. [1] `Cubic Spline Interpolation
<https://en.wikiversity.org/wiki/Cubic_Spline_Interpolation>`_
on Wikiversity.
.. [2] Carl de Boor, "A Practical Guide to Splines", Springer-Verlag, 1978.
"""
def __init__(self, x, y, axis=0, bc_type='clamped', extrapolate=None):
x, y = map(np.asarray, (x, y))
if np.issubdtype(x.dtype, np.complexfloating):
raise ValueError("`x` must contain real values.")
if np.issubdtype(y.dtype, np.complexfloating):
dtype = complex
else:
dtype = float
y = y.astype(dtype, copy=False)
axis = axis % y.ndim
if x.ndim != 1:
raise ValueError("`x` must be 1-dimensional.")
if x.shape[0] < 2:
raise ValueError("`x` must contain at least 2 elements.")
if x.shape[0] != y.shape[axis]:
raise ValueError("The length of `y` along `axis`={0} doesn't "
"match the length of `x`".format(axis))
if not np.all(np.isfinite(x)):
raise ValueError("`x` must contain only finite values.")
if not np.all(np.isfinite(y)):
raise ValueError("`y` must contain only finite values.")
dx = np.diff(x)
if np.any(dx <= 0):
raise ValueError("`x` must be strictly increasing sequence.")
n = x.shape[0]
y = np.rollaxis(y, axis)
bc, y = self._validate_bc(bc_type, y, y.shape[1:], axis)
if extrapolate is None:
if bc[0] == 'periodic':
extrapolate = 'periodic'
else:
extrapolate = True
dxr = dx.reshape([dx.shape[0]] + [1] * (y.ndim - 1))
slope = np.diff(y, axis=0) / dxr
# If bc is 'not-a-knot' this change is just a convention.
# If bc is 'periodic' then we already checked that y[0] == y[-1],
# and the spline is just a constant, we handle this case in the same
# way by setting the first derivatives to slope, which is 0.
if n == 2:
if bc[0] in ['not-a-knot', 'periodic']:
bc[0] = (1, slope[0])
if bc[1] in ['not-a-knot', 'periodic']:
bc[1] = (1, slope[0])
# This is a very special case, when both conditions are 'not-a-knot'
# and n == 3. In this case 'not-a-knot' can't be handled regularly
# as both conditions are identical. We handle this case by
# constructing a parabola passing through given points.
if n == 3 and bc[0] == 'not-a-knot' and bc[1] == 'not-a-knot':
A = np.zeros((3, 3)) # This is a standard matrix.
b = np.empty((3,) + y.shape[1:], dtype=y.dtype)
A[0, 0] = 1
A[0, 1] = 1
A[1, 0] = dx[1]
A[1, 1] = 2 * (dx[0] + dx[1])
A[1, 2] = dx[0]
A[2, 1] = 1
A[2, 2] = 1
b[0] = 2 * slope[0]
b[1] = 3 * (dxr[0] * slope[1] + dxr[1] * slope[0])
b[2] = 2 * slope[1]
s = solve(A, b, overwrite_a=False, overwrite_b=False,
check_finite=False)
else:
# Find derivative values at each x[i] by solving a tridiagonal
# system.
A = np.zeros((3, n)) # This is a banded matrix representation.
b = np.empty((n,) + y.shape[1:], dtype=y.dtype)
# Filling the system for i=1..n-2
# (x[i-1] - x[i]) * s[i-1] +\
# 2 * ((x[i] - x[i-1]) + (x[i+1] - x[i])) * s[i] +\
# (x[i] - x[i-1]) * s[i+1] =\
# 3 * ((x[i+1] - x[i])*(y[i] - y[i-1])/(x[i] - x[i-1]) +\
# (x[i] - x[i-1])*(y[i+1] - y[i])/(x[i+1] - x[i]))
A[1, 1:-1] = 2 * (dx[:-1] + dx[1:]) # The diagonal
A[0, 2:] = dx[:-1] # The upper diagonal
A[-1, :-2] = dx[1:] # The lower diagonal
b[1:-1] = 3 * (dxr[1:] * slope[:-1] + dxr[:-1] * slope[1:])
bc_start, bc_end = bc
if bc_start == 'periodic':
# Due to the periodicity, and because y[-1] = y[0], the linear
# system has (n-1) unknowns/equations instead of n:
A = A[:, 0:-1]
A[1, 0] = 2 * (dx[-1] + dx[0])
A[0, 1] = dx[-1]
b = b[:-1]
# Also, due to the periodicity, the system is not tri-diagonal.
# We need to compute a "condensed" matrix of shape (n-2, n-2).
# See http://www.cfm.brown.edu/people/gk/chap6/node14.html for
# more explanations.
# The condensed matrix is obtained by removing the last column
# and last row of the (n-1, n-1) system matrix. The removed
# values are saved in scalar variables with the (n-1, n-1)
# system matrix indices forming their names:
a_m1_0 = dx[-2] # lower left corner value: A[-1, 0]
a_m1_m2 = dx[-1]
a_m1_m1 = 2 * (dx[-1] + dx[-2])
a_m2_m1 = dx[-2]
a_0_m1 = dx[0]
b[0] = 3 * (dxr[0] * slope[-1] + dxr[-1] * slope[0])
b[-1] = 3 * (dxr[-1] * slope[-2] + dxr[-2] * slope[-1])
Ac = A[:, :-1]
b1 = b[:-1]
b2 = np.zeros_like(b1)
b2[0] = -a_0_m1
b2[-1] = -a_m2_m1
# s1 and s2 are the solutions of (n-2, n-2) system
s1 = solve_banded((1, 1), Ac, b1, overwrite_ab=False,
overwrite_b=False, check_finite=False)
s2 = solve_banded((1, 1), Ac, b2, overwrite_ab=False,
overwrite_b=False, check_finite=False)
# computing the s[n-2] solution:
s_m1 = ((b[-1] - a_m1_0 * s1[0] - a_m1_m2 * s1[-1]) /
(a_m1_m1 + a_m1_0 * s2[0] + a_m1_m2 * s2[-1]))
# s is the solution of the (n, n) system:
s = np.empty((n,) + y.shape[1:], dtype=y.dtype)
s[:-2] = s1 + s_m1 * s2
s[-2] = s_m1
s[-1] = s[0]
else:
if bc_start == 'not-a-knot':
A[1, 0] = dx[1]
A[0, 1] = x[2] - x[0]
d = x[2] - x[0]
b[0] = ((dxr[0] + 2*d) * dxr[1] * slope[0] +
dxr[0]**2 * slope[1]) / d
elif bc_start[0] == 1:
A[1, 0] = 1
A[0, 1] = 0
b[0] = bc_start[1]
elif bc_start[0] == 2:
A[1, 0] = 2 * dx[0]
A[0, 1] = dx[0]
b[0] = -0.5 * bc_start[1] * dx[0]**2 + 3 * (y[1] - y[0])
if bc_end == 'not-a-knot':
A[1, -1] = dx[-2]
A[-1, -2] = x[-1] - x[-3]
d = x[-1] - x[-3]
b[-1] = ((dxr[-1]**2*slope[-2] +
(2*d + dxr[-1])*dxr[-2]*slope[-1]) / d)
elif bc_end[0] == 1:
A[1, -1] = 1
A[-1, -2] = 0
b[-1] = bc_end[1]
elif bc_end[0] == 2:
A[1, -1] = 2 * dx[-1]
A[-1, -2] = dx[-1]
b[-1] = 0.5 * bc_end[1] * dx[-1]**2 + 3 * (y[-1] - y[-2])
s = solve_banded((1, 1), A, b, overwrite_ab=False,
overwrite_b=False, check_finite=False)
# Compute coefficients in PPoly form.
t = (s[:-1] + s[1:] - 2 * slope) / dxr
c = np.empty((4, n - 1) + y.shape[1:], dtype=t.dtype)
c[0] = t / dxr
c[1] = (slope - s[:-1]) / dxr - t
c[2] = s[:-1]
c[3] = y[:-1]
super(CubicSplineWithNodeSens, self).__init__(c, x, extrapolate=extrapolate)
self.axis = axis
# Compute y-derivatives at nodes
if n < 3:
            raise NotImplementedError('node_derivatives requires at least 3 x values')
else:
'''
At this point A and b have been constructed with boundary conditions.
A: stays the same.
b: b becomes a matrix, d(rhs_i) / dy_j
'''
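            # Sketch of the reasoning: the node slopes s solve the banded
            # system A @ s = b, where A depends only on x while b is a
            # function of the node values y. Differentiating that system with
            # respect to each y_j (the fixed-derivative boundary rows have
            # constant right-hand sides) gives A @ (ds/dy_j) = db/dy_j, so the
            # slope sensitivities are obtained by re-solving the same banded
            # system with the Jacobian of b as the right-hand side (rhs below).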
if y.ndim > 1: # TODO - Confirm solution when y has more than 1 axis
raise NotImplementedError(
"Solution of node_derivatives currently only allows 1D y")
# Find vector of cross-derivative values, d/dy_j (dy(x_i)/dx)
# Take derivative of Linear system to compute node derivatives
# A is the same as before. New RHS is tridiagonal.
rhs = np.zeros((n, n)) # TODO: Test for other dimensionalities
# obtain diagonal indices for internal points
ij_diag = tuple([np.diag_indices(n - 2)[i] + 1 for i in range(2)])
minus_over_plus = 3 * dx[:-1] / dx[1:]
plus_over_minus = 3 * dx[1:] / dx[:-1]
rhs[ij_diag] = plus_over_minus - minus_over_plus # The diagonal
rhs[ij_diag[0], ij_diag[1] + 1] = minus_over_plus # upper diagonal # Confirm (+). Confirm slice
rhs[ij_diag[0], ij_diag[1] - 1] = -plus_over_minus # lower diagonal # Confirm (-). Confirm slice
if bc_start[0] == 1:
rhs[0, 0] = 0
elif bc_start[0] == 2:
raise NotImplementedError('bc_start not implemented. '
'We only handle fixed 1st derivatives.')
# Below is the boundary condition for dy/dx|x_0
# b[0] = -0.5 * bc_start[1] * dx[0] ** 2 + 3 * (y[1] - y[0])
else:
raise NotImplementedError('bc_start not implemented. '
'We only handle fixed 1st derivatives.')
if bc_end[0] == 1:
rhs[-1, -1] = 0
elif bc_end[0] == 2:
raise NotImplementedError('bc_end not implemented. '
'We only handle fixed 1st derivatives.')
# Below is the boundary condition for dy/dx|x_{n-1}
# b[-1] = 0.5 * bc_end[1] * dx[-1] ** 2 + 3 * (y[-1] - y[-2])
else:
raise NotImplementedError('bc_end not implemented. '
'We only handle fixed 1st derivatives.')
d2ydydx = solve_banded((1, 1), A, rhs, overwrite_ab=False,
overwrite_b=False, check_finite=False)
# The y_derivative dq(x)/dy_j
# Create an additional vector Piecewise Polynomial
# The PPoly weights are very similar to q(x), both fcns of x, y, y'
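            # Sketch: on segment i the spline is a cubic in (x - x_i) whose
            # coefficients c[0..3] above are built from y, the slopes s and
            # dx. Differentiating each coefficient with respect to y_j (using
            # d2ydydx and the fact that slope_i depends only on y_i, y_{i+1})
            # yields another piecewise cubic in x; the `d` array below stores
            # those derivative coefficients for evaluation by PPoly.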
inv_dx = 1 / dx
inv_dx_rhs = inv_dx.reshape([dx.shape[0]] + [1] * (rhs.ndim - 1))
d2_sum = (d2ydydx[:-1] + d2ydydx[1:])
d = np.zeros((4, n - 1) + rhs.shape[1:], dtype=t.dtype)
# Start with portion common to all j
d[0] = (inv_dx_rhs**2) * d2_sum
d[1] = -inv_dx_rhs * (d2_sum + d2ydydx[:-1])
d[2] = d2ydydx[:-1]
# Adjust when j==i: dq_i / dy_i
ij_diag = np.diag_indices(n-1) + y.shape[1:]
d[0][ij_diag] += 2.0 * inv_dx**3
d[1][ij_diag] -= 3.0 * inv_dx**2
d[3][ij_diag] += 1.0
# Adjust when j=i+1: dq_i / dy_{i+1}
idx_upper = (ij_diag[0], ij_diag[1] + 1)
d[0][idx_upper] -= 2.0 * inv_dx**3
d[1][idx_upper] += 3.0 * inv_dx**2
self._ysens = PPoly(d, x, extrapolate=extrapolate)
def node_derivative(self, x, nodes=None):
"""Sensitivity of y(x) to a unit move in each node's y value
Parameters
----------
x : array-like
Points to evaluate the interpolant at.
        nodes : slice, optional
            Subset of nodes to restrict the sensitivities to. Not yet
            implemented; must be left as None.
        Returns
        -------
        dy(x)/dy_j : array-like
            Sensitivity of the interpolated value at each point of `x` to a
            unit move in each node's y value. The leading dimensions follow
            the shape of `x`; the final axis runs over the nodes.
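        Examples
        --------
        A minimal sketch of the intended interpretation; the node values are
        illustrative:
        >>> xn = np.arange(5.0)
        >>> cs = CubicSplineWithNodeSens(xn, np.sin(xn))
        >>> sens = cs.node_derivative(2.25)
        >>> # sens[j] approximates (cs_bumped(2.25) - cs(2.25)) / eps when the
        >>> # j-th node value is bumped by a small eps and the spline is refit.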
"""
# TODO: Should we also allow user to select nodes???
if nodes is not None:
raise NotImplementedError('nodes kwarg not yet implemented.')
else:
return self._ysens(x)
@staticmethod
def _validate_bc(bc_type, y, expected_deriv_shape, axis):
"""Validate and prepare boundary conditions.
Returns
-------
validated_bc : 2-tuple
Boundary conditions for a curve start and end.
y : ndarray
y casted to complex dtype if one of the boundary conditions has
complex dtype.
"""
if isinstance(bc_type, string_types):
if bc_type == 'periodic':
if not np.allclose(y[0], y[-1], rtol=1e-15, atol=1e-15):
raise ValueError(
"The first and last `y` point along axis {} must "
"be identical (within machine precision) when "
"bc_type='periodic'.".format(axis))
bc_type = (bc_type, bc_type)
else:
if len(bc_type) != 2:
raise ValueError("`bc_type` must contain 2 elements to "
"specify start and end conditions.")
if 'periodic' in bc_type:
raise ValueError("'periodic' `bc_type` is defined for both "
"curve ends and cannot be used with other "
"boundary conditions.")
validated_bc = []
for bc in bc_type:
if isinstance(bc, string_types):
if bc == 'clamped':
validated_bc.append((1, np.zeros(expected_deriv_shape)))
elif bc == 'natural':
validated_bc.append((2, np.zeros(expected_deriv_shape)))
elif bc in ['not-a-knot', 'periodic']:
validated_bc.append(bc)
else:
raise ValueError("bc_type={} is not allowed.".format(bc))
else:
try:
deriv_order, deriv_value = bc
except Exception:
raise ValueError("A specified derivative value must be "
"given in the form (order, value).")
if deriv_order not in [1, 2]:
raise ValueError("The specified derivative order must "
"be 1 or 2.")
deriv_value = np.asarray(deriv_value)
if deriv_value.shape != expected_deriv_shape:
raise ValueError(
"`deriv_value` shape {} is not the expected one {}."
.format(deriv_value.shape, expected_deriv_shape))
if np.issubdtype(deriv_value.dtype, np.complexfloating):
y = y.astype(complex, copy=False)
validated_bc.append((deriv_order, deriv_value))
return validated_bc, y
if __name__ == '__main__':
import matplotlib.pyplot as plt
def f(x):
return np.sin(x)
nodes_x = 0.5 * np.arange(10) # Linearly spaced
nodes_y = f(nodes_x)
# nodes_y = np.stack((nodes_y, nodes_y**2), axis=1) TODO - Work on 2 lines case
# -------------------------------------------------------------------------
# PIECEWISE LINEAR
# -------------------------------------------------------------------------
pl = PiecewiseLinear(nodes_x, nodes_y, extrapolate=('clamped', 'natural'))
x1 = -0.05
#x1 = np.array([-0.05, 0.5])
#x1 = np.array([[-0.05, 0.5], [4.5, 4.75]])
y1 = pl(x1)
f_of_x1 = f(x1)
sens_extrap = pl.node_derivative(x1)
sens_number = pl.node_derivative(2.25)
sens_1d = pl.node_derivative(np.array([-0.05, 0.5]))
sens_2d = pl.node_derivative(np.array([[-0.05, 0.5], [4.5, 4.75]]))
xs = np.linspace(nodes_x[0] - 0.05, nodes_x[-1] + 0.5, num=100)
pl_derivs = pl.node_derivative(xs)
plt.figure(figsize=(6.5, 4))
plt.title('y(x) sensitivity to a shift of x={}'.format(nodes_x[5]))
plt.plot(xs, pl_derivs[:, 5], 'o')
# -------------------------------------------------------------------------
# CUBIC SPLINE
# -------------------------------------------------------------------------
cs_sens = CubicSplineWithNodeSens(nodes_x, nodes_y, bc_type='clamped')
cs = CubicSpline(nodes_x, nodes_y, bc_type='clamped')
# 'clamped' sets 1st derivative to 0.
# 'natural' sets 2nd derivative to 0.
print('Derivative of y(x) to shift in y values of all nodes')
y_sens = cs_sens.node_derivative(x1) # Evaluation node_derivatives at x1
print('y_sens({}) = {}'.format(x1, y_sens))
# Test results manually bumping each node
y_shift_at_x1 = []
y_base = cs(x1)
for i in range(len(nodes_x)):
y_shift = np.array(nodes_y)
shift = 0.001
y_shift[i] += shift
cs_shift = CubicSpline(nodes_x, y_shift, bc_type='clamped')
y_x1 = cs_shift(x1)
y_shift_at_x1.append((y_x1 - y_base) / shift)
print('bump, recalc at {}: {}'.format(x1, y_shift_at_x1))
plt.figure(figsize=(6.5, 4))
plt.plot(nodes_x, y_sens, 'o', label='my solution')
plt.plot(nodes_x, y_shift_at_x1, '^', label='bump, recalc')
plt.figure()
plt.plot(nodes_x, y_sens - y_shift_at_x1, 'x')
# Test that sensitivities when x is a node are 0,0,...,1,..0
print('y_sens({}) = {}'.format(nodes_x[4], cs_sens.node_derivative(nodes_x[4])))
print('y_sens({}) = {}'.format(nodes_x[-1], cs_sens.node_derivative(nodes_x[-1])))
xs = np.linspace(nodes_x[0] - 0.05, nodes_x[-1] + 0.5, num=100)
y_deriv = cs_sens.node_derivative(xs)
plt.figure(figsize=(6.5, 4))
plt.title('y(x) sensitivity to a shift of x={}'.format(nodes_x[5]))
plt.plot(xs, y_deriv[:, 5], 'o')
# Show Spline
plt.figure(figsize=(6.5, 4))
plt.plot(nodes_x, nodes_y, 'o', label='data')
plt.plot(xs, f(xs), label='true')
# our cubic vs scipy.interpolate
plt.plot(xs, cs(xs), label="scipy")
plt.plot(xs, cs_sens(xs), label="sens")
# our piecewise linear vs scipy.interpolate
plt.plot(xs, pl(xs), label='linear')
interp_linear = interp1d(nodes_x, nodes_y, bounds_error=False, fill_value='extrapolate')
plt.plot(xs, interp_linear(xs), label='scipy linear')
from matplotlib.font_manager import FontProperties
fontP = FontProperties()
fontP.set_size('small')
plt.legend(loc='lower right', prop=fontP)
|
|
"""
Atomic data.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from collections import defaultdict
from astropy.table import Table
from astropy.units import angstrom, s, Quantity
import numpy as np
import os
__all__ = ['Ar', 'Transition', 'AtomDat']
data_path = os.path.abspath(__file__).rsplit('/', 1)[0] + '/data'
atomdat = None
Ar = dict(H=1.00794,
He=4.002602,
C=12.0107,
N=14.0067,
O=15.9994,
Mg=24.3050,
Al=26.9815386,
Si=28.0855,
P=30.973762,
S=32.065,
Ca=40.078,
Fe=55.845,
Ti=47.867,
Zn=65.38,
Cr=51.9961)
def get_atomdat():
"""
Function to cache atom.dat.
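    A minimal sketch of the caching behaviour:
    >>> atom = get_atomdat()   # parses atom.dat on the first call
    >>> atom is get_atomdat()  # later calls return the cached instance
    True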
"""
global atomdat
if atomdat is None:
atomdat = AtomDat()
return atomdat
class Transition(object):
"""
An atomic transition.
Parameters
----------
parent : str
Name of the ion, molecule or isotope, e.g. `HI`.
wavelength : float
Rest frame wavelength (Angstrom).
osc : float
Oscillator strength (dimensionless).
gamma : float
Gamma parameter (s^-1).
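    Examples
    --------
    A minimal sketch; the numbers are illustrative values for H I
    Lyman-alpha rather than values read from atom.dat:
    >>> t = Transition('HI', 1215.6701, 0.4164, 6.265e8)
    >>> t.name
    'HI 1215.67'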
"""
def __init__(self, parent, wavelength, osc, gamma):
self.name = '{0} {1:.2f}'.format(parent, wavelength)
self.parent = parent
self.wavelength = wavelength * angstrom
self.osc = Quantity(osc)
self.gamma = gamma / s
def __repr__(self):
return 'Transition({0})'.format(self.name)
class AtomDat(defaultdict):
"""
Atomic transition data from a VPFIT-style atom.dat file.
Parameters
----------
filename : str, optional
The name of the atom.dat-style file. If not given, then the version
bundled with this package is used.
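    Examples
    --------
    A hedged sketch of typical use; which keys exist depends on the
    atom.dat file that is read:
    >>> atom = AtomDat()
    >>> civ = atom['CIV']    # list of Transition objects for C IV
    >>> table = atom.table   # the same data as an astropy Table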
"""
def __init__(self, filename=None):
if filename is None:
filename = '{0}/atom.dat'.format(data_path)
if filename.endswith('.gz'):
import gzip
fh = gzip.open(filename, 'rb')
else:
fh = open(filename, 'rb')
specials = ('??', '__', '>>', '<<', '<>')
super(AtomDat, self).__init__(list)
rows = []
for line in fh:
line = line.decode()
if not line[0].isupper() and line[:2] not in specials:
continue
identifier = line[:7].replace(' ', '')
wavelength, osc, gamma = [
float(item) for item in line[7:].split()[:3]]
transition = Transition(identifier, wavelength, osc, gamma)
self[identifier].append(transition)
rows.append((identifier, wavelength, osc, gamma))
self.table = Table(
rows=rows, names=['ID', 'WAVELENGTH', 'OSC', 'GAMMA'])
self.table['WAVELENGTH'].unit = angstrom
self.table['GAMMA'].unit = 1 / s
fh.close()
def get_transition(self, name):
"""
Gets information on a given transition.
Parameters
----------
name : str
Name of the transition, something like `HI 1215`.
Returns
-------
transition : `igmtools.data.atomic.Transition`
Transition data.
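        Examples
        --------
        A minimal sketch (the matched wavelength comes from atom.dat):
        >>> atom = get_atomdat()
        >>> lya = atom.get_transition('HI 1215')  # nearest H I line to 1215 A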
"""
i = 0
name = name.strip()
if name[:4] in set(['H2J0', 'H2J1', 'H2J2', 'H2J3', 'H2J4', 'H2J5',
'H2J6', 'H2J7', 'COJ0', 'COJ1', 'COJ2', 'COJ3',
'COJ4', 'COJ5']):
i = 4
elif name[:3] == 'C3I':
i = 3
else:
while i < len(name) and (name[i].isalpha() or name[i] == '*'):
i += 1
identifier = name[:i]
        # Get all transition wavelengths and sorted indices:
wavelengths = np.array(
[item.wavelength.value for item in self[identifier]])
isort = wavelengths.argsort()
try:
wavelength = float(name[i:])
        except ValueError:
            raise ValueError('Possible transitions for {0}:\n'
                             ' {1}'.format(identifier, self[identifier]))
index = np.searchsorted(wavelengths[isort], wavelength)
if index == len(wavelengths):
index -= 1
else:
difference1 = np.abs(
np.array(self[identifier])[isort][index].wavelength.value -
wavelength)
difference2 = np.abs(
np.array(self[identifier])[isort][index - 1].wavelength.value -
wavelength)
if difference2 < difference1:
index -= 1
transition = np.array(self[identifier])[isort][index]
return transition
|
|
# -*- coding: utf-8 -*-
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import yaml
from oslo_serialization import jsonutils
from nailgun.db.sqlalchemy.models import NodeBondInterface
from nailgun import consts
from nailgun.test.base import BaseIntegrationTest
from nailgun.utils import reverse
class TestAssignmentHandlers(BaseIntegrationTest):
def _assign_roles(self, assignment_data, expect_errors=False):
return self.app.post(
reverse(
'NodeAssignmentHandler',
kwargs={'cluster_id': self.cluster.id}
),
jsonutils.dumps(assignment_data),
headers=self.default_headers,
expect_errors=expect_errors
)
def test_assignment(self):
self.env.create(
cluster_kwargs={"api": True},
nodes_kwargs=[
{
"cluster_id": None,
"api": True
}
]
)
self.cluster = self.env.clusters[0]
node = self.env.nodes[0]
assignment_data = [
{
"id": node.id,
"roles": ['controller']
}
]
resp = self._assign_roles(assignment_data)
self.assertEqual(200, resp.status_code)
self.assertEqual(node.cluster, self.cluster)
self.datadiff(
node.pending_roles,
assignment_data[0]["roles"]
)
resp = self._assign_roles(assignment_data, True)
self.assertEqual(400, resp.status_code)
def test_unassignment(self):
cluster = self.env.create(
cluster_kwargs={"api": True},
nodes_kwargs=[{}]
)
node = self.env.nodes[0]
# correct unassignment
resp = self.app.post(
reverse(
'NodeUnassignmentHandler',
kwargs={'cluster_id': cluster['id']}
),
jsonutils.dumps([{'id': node.id}]),
headers=self.default_headers
)
self.assertEqual(200, resp.status_code)
self.assertEqual(node.cluster, None)
self.assertEqual(node.pending_roles, [])
# Test with invalid node ids
for node_id in (0, node.id + 50):
resp = self.app.post(
reverse(
'NodeUnassignmentHandler',
kwargs={'cluster_id': cluster['id']}
),
jsonutils.dumps([{'id': node_id}]),
headers=self.default_headers,
expect_errors=True
)
self.assertEqual(400, resp.status_code)
# Test with invalid cluster id
resp = self.app.post(
reverse(
'NodeUnassignmentHandler',
kwargs={'cluster_id': cluster['id'] + 5}
),
jsonutils.dumps([{'id': node.id}]),
headers=self.default_headers,
expect_errors=True
)
self.assertEqual(resp.status_code, 404)
# Test with wrong cluster id
self.env.create(
cluster_kwargs={"api": True},
nodes_kwargs=[{}]
)
resp = self.app.post(
reverse(
'NodeUnassignmentHandler',
kwargs={'cluster_id': cluster['id']}
),
jsonutils.dumps([{'id': self.env.clusters[1].nodes[0].id}]),
headers=self.default_headers,
expect_errors=True
)
self.assertEqual(resp.status_code, 400)
def test_unassignment_after_deploy(self):
cluster = self.env.create(
nodes_kwargs=[{}]
)
node = self.env.nodes[0]
node.status = 'error'
self.db.commit()
resp = self.app.post(
reverse(
'NodeUnassignmentHandler',
kwargs={'cluster_id': cluster['id']}
),
jsonutils.dumps([{'id': node.id}]),
headers=self.default_headers
)
self.assertEqual(resp.status_code, 200)
self.assertEqual(node.pending_deletion, True)
def test_assigment_with_invalid_cluster(self):
node = self.env.create_node(api=False)
resp = self.app.post(
reverse(
'NodeAssignmentHandler',
kwargs={'cluster_id': '9999'}
),
jsonutils.dumps([{
'id': node.id,
'roles': ['controller']
}]),
headers=self.default_headers,
expect_errors=True
)
        self.assertEqual(404, resp.status_code)
def test_assign_conflicting_roles(self):
self.env.create(
cluster_kwargs={"api": True},
nodes_kwargs=[
{
"cluster_id": None,
"api": True
}
]
)
self.cluster = self.env.clusters[0]
node = self.env.nodes[0]
assignment_data = [
{
"id": node.id,
"roles": ['controller', 'compute']
}
]
resp = self._assign_roles(assignment_data, True)
        self.assertEqual(400, resp.status_code)
def test_assign_conflicting_all_role(self):
ROLE = yaml.safe_load("""
name: test_role
meta:
name: "Some plugin role"
description: "Some description"
conflicts: "*"
volumes_roles_mapping:
- id: os
allocate_size: all
""")
release = self.env.create_release()
resp = self.env.create_role(release.id, ROLE)
self.env.create(
cluster_kwargs={
"api": True,
"release_id": release.id
},
nodes_kwargs=[
{
"cluster_id": None,
"api": True
}
]
)
self.cluster = self.env.clusters[0]
node = self.env.nodes[0]
assignment_data = [
{
"id": node.id,
"roles": ['controller', 'test_role']
}
]
resp = self._assign_roles(assignment_data, True)
        self.assertEqual(400, resp.status_code, resp.body)
assignment_data[0]["roles"] = ['test_role']
resp = self._assign_roles(assignment_data)
        self.assertEqual(200, resp.status_code, resp.body)
def test_add_node_with_cluster_network_template(self):
net_template = {
"adv_net_template": {
"default": {
"network_assignments": {
"management": {
"ep": "br-mgmt"
},
"storage": {
"ep": "br-storage"
},
"public": {
"ep": "br-ex"
},
"private": {
"ep": "br-prv"
},
"fuelweb_admin": {
"ep": "br-fw-admin"
}
},
"templates_for_node_role": {
"controller": [
"common"
]
},
"nic_mapping": {
"default": {
"if4": "eth3",
"if1": "eth0",
"if2": "eth1",
"if3": "eth2"
}
},
"network_scheme": {
"common": {
"endpoints": [
"br-mgmt"
],
"transformations": [
{
"action": "add-br",
"name": "br-mgmt"
},
{
"action": "add-port",
"bridge": "br-mgmt",
"name": "<% if2 %>"
}
],
"roles": {
"management": "br-mgmt"
}
}
}
}
}
}
cluster = self.env.create_cluster(
api=False,
net_provider=consts.CLUSTER_NET_PROVIDERS.neutron
)
cluster.release.version = '1111-7.0'
cluster.network_config.configuration_template = net_template
node = self.env.create_node()
assignment_data = [
{
"id": node.id,
"roles": ['controller']
}
]
self.app.post(
reverse(
'NodeAssignmentHandler',
kwargs={'cluster_id': cluster.id}
),
jsonutils.dumps(assignment_data),
headers=self.default_headers
)
net_scheme = node.network_template['templates']['common']
self.assertNotEqual({}, node.network_template)
        self.assertEqual(['br-mgmt'], net_scheme['endpoints'])
        self.assertEqual({'management': 'br-mgmt'}, net_scheme['roles'])
# The order of transformations matters
self.assertIn('add-br', net_scheme['transformations'][0].values())
self.assertIn('add-port', net_scheme['transformations'][1].values())
        self.assertEqual('eth1', net_scheme['transformations'][1]['name'])
class TestClusterStateUnassignment(BaseIntegrationTest):
def test_delete_bond_and_networks_state_on_unassignment(self):
"""Test verifies that
1. bond configuration will be deleted
2. network unassigned from node interfaces
when node unnasigned from cluster
"""
cluster = self.env.create(
nodes_kwargs=[{}]
)
node = self.env.nodes[0]
node.bond_interfaces.append(
NodeBondInterface(name='ovs-bond0',
slaves=node.nic_interfaces))
self.db.flush()
resp = self.app.post(
reverse(
'NodeUnassignmentHandler',
kwargs={'cluster_id': cluster['id']}
),
jsonutils.dumps([{'id': node.id}]),
headers=self.default_headers
)
self.assertEqual(resp.status_code, 200)
self.assertEqual(node.bond_interfaces, [])
for interface in node.interfaces:
self.assertEqual(interface.assigned_networks_list, [])
|
|
"""Test all functions related to the basic accessory implementation.
This includes tests for all mock object types.
"""
from unittest.mock import Mock, patch
import pytest
from homeassistant.components.homekit.accessories import (
HomeAccessory,
HomeBridge,
HomeDriver,
)
from homeassistant.components.homekit.const import (
ATTR_DISPLAY_NAME,
ATTR_INTEGRATION,
ATTR_VALUE,
BRIDGE_MODEL,
BRIDGE_NAME,
BRIDGE_SERIAL_NUMBER,
CHAR_FIRMWARE_REVISION,
CHAR_HARDWARE_REVISION,
CHAR_MANUFACTURER,
CHAR_MODEL,
CHAR_NAME,
CHAR_SERIAL_NUMBER,
CONF_LINKED_BATTERY_CHARGING_SENSOR,
CONF_LINKED_BATTERY_SENSOR,
CONF_LOW_BATTERY_THRESHOLD,
MANUFACTURER,
SERV_ACCESSORY_INFO,
)
from homeassistant.const import (
ATTR_BATTERY_CHARGING,
ATTR_BATTERY_LEVEL,
ATTR_ENTITY_ID,
ATTR_HW_VERSION,
ATTR_MANUFACTURER,
ATTR_MODEL,
ATTR_SERVICE,
ATTR_SW_VERSION,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
__version__,
__version__ as hass_version,
)
from homeassistant.helpers.event import TRACK_STATE_CHANGE_CALLBACKS
from tests.common import async_mock_service
async def test_accessory_cancels_track_state_change_on_stop(hass, hk_driver):
"""Ensure homekit state changed listeners are unsubscribed on reload."""
entity_id = "sensor.accessory"
hass.states.async_set(entity_id, None)
acc = HomeAccessory(
hass, hk_driver, "Home Accessory", entity_id, 2, {"platform": "isy994"}
)
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
):
await acc.run()
assert len(hass.data[TRACK_STATE_CHANGE_CALLBACKS][entity_id]) == 1
await acc.stop()
assert entity_id not in hass.data[TRACK_STATE_CHANGE_CALLBACKS]
async def test_home_accessory(hass, hk_driver):
"""Test HomeAccessory class."""
entity_id = "sensor.accessory"
entity_id2 = "light.accessory_that_exceeds_the_maximum_maximum_maximum_maximum_maximum_maximum_maximum_allowed_length"
hass.states.async_set(entity_id, None)
hass.states.async_set(entity_id2, STATE_UNAVAILABLE)
await hass.async_block_till_done()
acc = HomeAccessory(
hass, hk_driver, "Home Accessory", entity_id, 2, {"platform": "isy994"}
)
assert acc.hass == hass
assert acc.display_name == "Home Accessory"
assert acc.aid == 2
assert acc.available is True
assert acc.category == 1 # Category.OTHER
assert len(acc.services) == 1
serv = acc.services[0] # SERV_ACCESSORY_INFO
assert serv.display_name == SERV_ACCESSORY_INFO
assert serv.get_characteristic(CHAR_NAME).value == "Home Accessory"
assert serv.get_characteristic(CHAR_MANUFACTURER).value == "Isy994"
assert serv.get_characteristic(CHAR_MODEL).value == "Sensor"
assert serv.get_characteristic(CHAR_SERIAL_NUMBER).value == "sensor.accessory"
acc2 = HomeAccessory(hass, hk_driver, "Home Accessory", entity_id2, 3, {})
serv = acc2.services[0] # SERV_ACCESSORY_INFO
assert serv.get_characteristic(CHAR_NAME).value == "Home Accessory"
assert serv.get_characteristic(CHAR_MANUFACTURER).value == f"{MANUFACTURER} Light"
assert serv.get_characteristic(CHAR_MODEL).value == "Light"
assert (
serv.get_characteristic(CHAR_SERIAL_NUMBER).value
== "light.accessory_that_exceeds_the_maximum_maximum_maximum_maximum"
)
acc3 = HomeAccessory(
hass,
hk_driver,
"Home Accessory that exceeds the maximum maximum maximum maximum maximum maximum length",
entity_id2,
3,
{
ATTR_MODEL: "Awesome Model that exceeds the maximum maximum maximum maximum maximum maximum length",
ATTR_MANUFACTURER: "Lux Brands that exceeds the maximum maximum maximum maximum maximum maximum length",
ATTR_SW_VERSION: "0.4.3 that exceeds the maximum maximum maximum maximum maximum maximum length",
ATTR_INTEGRATION: "luxe that exceeds the maximum maximum maximum maximum maximum maximum length",
},
)
assert acc3.available is False
serv = acc3.services[0] # SERV_ACCESSORY_INFO
assert (
serv.get_characteristic(CHAR_NAME).value
== "Home Accessory that exceeds the maximum maximum maximum maximum "
)
assert (
serv.get_characteristic(CHAR_MANUFACTURER).value
== "Lux Brands that exceeds the maximum maximum maximum maximum maxi"
)
assert (
serv.get_characteristic(CHAR_MODEL).value
== "Awesome Model that exceeds the maximum maximum maximum maximum m"
)
assert (
serv.get_characteristic(CHAR_SERIAL_NUMBER).value
== "light.accessory_that_exceeds_the_maximum_maximum_maximum_maximum"
)
assert serv.get_characteristic(CHAR_FIRMWARE_REVISION).value == "0.4.3"
acc4 = HomeAccessory(
hass,
hk_driver,
"Home Accessory that exceeds the maximum maximum maximum maximum maximum maximum length",
entity_id2,
3,
{
ATTR_MODEL: "Awesome Model that exceeds the maximum maximum maximum maximum maximum maximum length",
ATTR_MANUFACTURER: "Lux Brands that exceeds the maximum maximum maximum maximum maximum maximum length",
ATTR_SW_VERSION: "will_not_match_regex",
ATTR_INTEGRATION: "luxe that exceeds the maximum maximum maximum maximum maximum maximum length",
},
)
assert acc4.available is False
serv = acc4.services[0] # SERV_ACCESSORY_INFO
assert (
serv.get_characteristic(CHAR_NAME).value
== "Home Accessory that exceeds the maximum maximum maximum maximum "
)
assert (
serv.get_characteristic(CHAR_MANUFACTURER).value
== "Lux Brands that exceeds the maximum maximum maximum maximum maxi"
)
assert (
serv.get_characteristic(CHAR_MODEL).value
== "Awesome Model that exceeds the maximum maximum maximum maximum m"
)
assert (
serv.get_characteristic(CHAR_SERIAL_NUMBER).value
== "light.accessory_that_exceeds_the_maximum_maximum_maximum_maximum"
)
assert serv.get_characteristic(CHAR_FIRMWARE_REVISION).value == hass_version
hass.states.async_set(entity_id, "on")
await hass.async_block_till_done()
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
await acc.run()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
hass.states.async_remove(entity_id)
await hass.async_block_till_done()
assert mock_async_update_state.call_count == 1
with pytest.raises(NotImplementedError):
acc.async_update_state("new_state")
# Test model name from domain
entity_id = "test_model.demo"
hass.states.async_set(entity_id, None)
await hass.async_block_till_done()
acc = HomeAccessory(hass, hk_driver, "test_name", entity_id, 2, None)
serv = acc.services[0] # SERV_ACCESSORY_INFO
assert serv.get_characteristic(CHAR_MODEL).value == "Test Model"
async def test_accessory_with_missing_basic_service_info(hass, hk_driver):
"""Test HomeAccessory class."""
entity_id = "sensor.accessory"
hass.states.async_set(entity_id, "on")
acc = HomeAccessory(
hass,
hk_driver,
"Home Accessory",
entity_id,
3,
{
ATTR_MODEL: None,
ATTR_MANUFACTURER: None,
ATTR_SW_VERSION: None,
ATTR_INTEGRATION: None,
},
)
serv = acc.get_service(SERV_ACCESSORY_INFO)
assert serv.get_characteristic(CHAR_NAME).value == "Home Accessory"
assert serv.get_characteristic(CHAR_MANUFACTURER).value == "Home Assistant Sensor"
assert serv.get_characteristic(CHAR_MODEL).value == "Sensor"
assert serv.get_characteristic(CHAR_SERIAL_NUMBER).value == entity_id
assert serv.get_characteristic(CHAR_FIRMWARE_REVISION).value == hass_version
assert isinstance(acc.to_HAP(), dict)
async def test_accessory_with_hardware_revision(hass, hk_driver):
"""Test HomeAccessory class with hardware revision."""
entity_id = "sensor.accessory"
hass.states.async_set(entity_id, "on")
acc = HomeAccessory(
hass,
hk_driver,
"Home Accessory",
entity_id,
3,
{
ATTR_MODEL: None,
ATTR_MANUFACTURER: None,
ATTR_SW_VERSION: None,
ATTR_HW_VERSION: "1.2.3",
ATTR_INTEGRATION: None,
},
)
acc.driver = hk_driver
serv = acc.get_service(SERV_ACCESSORY_INFO)
assert serv.get_characteristic(CHAR_NAME).value == "Home Accessory"
assert serv.get_characteristic(CHAR_MANUFACTURER).value == "Home Assistant Sensor"
assert serv.get_characteristic(CHAR_MODEL).value == "Sensor"
assert serv.get_characteristic(CHAR_SERIAL_NUMBER).value == entity_id
assert serv.get_characteristic(CHAR_FIRMWARE_REVISION).value == hass_version
assert serv.get_characteristic(CHAR_HARDWARE_REVISION).value == "1.2.3"
assert isinstance(acc.to_HAP(), dict)
async def test_battery_service(hass, hk_driver, caplog):
"""Test battery service."""
entity_id = "homekit.accessory"
hass.states.async_set(entity_id, None, {ATTR_BATTERY_LEVEL: 50})
await hass.async_block_till_done()
acc = HomeAccessory(hass, hk_driver, "Battery Service", entity_id, 2, None)
assert acc._char_battery.value == 0
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 2
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
await acc.run()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_battery.value == 50
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 2
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
hass.states.async_set(entity_id, None, {ATTR_BATTERY_LEVEL: 15})
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_battery.value == 15
assert acc._char_low_battery.value == 1
assert acc._char_charging.value == 2
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
hass.states.async_set(entity_id, None, {ATTR_BATTERY_LEVEL: "error"})
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_battery.value == 15
assert acc._char_low_battery.value == 1
assert acc._char_charging.value == 2
assert "ERROR" not in caplog.text
# Test charging
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
hass.states.async_set(
entity_id, None, {ATTR_BATTERY_LEVEL: 10, ATTR_BATTERY_CHARGING: True}
)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
):
acc = HomeAccessory(hass, hk_driver, "Battery Service", entity_id, 2, None)
assert acc._char_battery.value == 0
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 2
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
await acc.run()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_battery.value == 10
assert acc._char_low_battery.value == 1
assert acc._char_charging.value == 1
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
):
hass.states.async_set(
entity_id, None, {ATTR_BATTERY_LEVEL: 100, ATTR_BATTERY_CHARGING: False}
)
await hass.async_block_till_done()
assert acc._char_battery.value == 100
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 0
async def test_linked_battery_sensor(hass, hk_driver, caplog):
"""Test battery service with linked_battery_sensor."""
entity_id = "homekit.accessory"
linked_battery = "sensor.battery"
hass.states.async_set(entity_id, "open", {ATTR_BATTERY_LEVEL: 100})
hass.states.async_set(linked_battery, 50, None)
await hass.async_block_till_done()
acc = HomeAccessory(
hass,
hk_driver,
"Battery Service",
entity_id,
2,
{CONF_LINKED_BATTERY_SENSOR: linked_battery},
)
assert acc.linked_battery_sensor == linked_battery
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
await acc.run()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_battery.value == 50
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 2
hass.states.async_set(linked_battery, 10, None)
await hass.async_block_till_done()
assert acc._char_battery.value == 10
assert acc._char_low_battery.value == 1
# Ignore battery change on entity if it has linked_battery
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
):
hass.states.async_set(entity_id, "open", {ATTR_BATTERY_LEVEL: 90})
await hass.async_block_till_done()
assert acc._char_battery.value == 10
# Test none numeric state for linked_battery
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
):
hass.states.async_set(linked_battery, "error", None)
await hass.async_block_till_done()
assert acc._char_battery.value == 10
assert "ERROR" not in caplog.text
# Test charging & low battery threshold
hass.states.async_set(linked_battery, 20, {ATTR_BATTERY_CHARGING: True})
await hass.async_block_till_done()
acc = HomeAccessory(
hass,
hk_driver,
"Battery Service",
entity_id,
2,
{CONF_LINKED_BATTERY_SENSOR: linked_battery, CONF_LOW_BATTERY_THRESHOLD: 50},
)
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
await acc.run()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_battery.value == 20
assert acc._char_low_battery.value == 1
assert acc._char_charging.value == 1
hass.states.async_set(linked_battery, 100, {ATTR_BATTERY_CHARGING: False})
await hass.async_block_till_done()
assert acc._char_battery.value == 100
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 0
hass.states.async_remove(linked_battery)
await hass.async_block_till_done()
assert acc._char_battery.value == 100
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 0
async def test_linked_battery_charging_sensor(hass, hk_driver, caplog):
"""Test battery service with linked_battery_charging_sensor."""
entity_id = "homekit.accessory"
linked_battery_charging_sensor = "binary_sensor.battery_charging"
hass.states.async_set(entity_id, "open", {ATTR_BATTERY_LEVEL: 100})
hass.states.async_set(linked_battery_charging_sensor, STATE_ON, None)
await hass.async_block_till_done()
acc = HomeAccessory(
hass,
hk_driver,
"Battery Service",
entity_id,
2,
{CONF_LINKED_BATTERY_CHARGING_SENSOR: linked_battery_charging_sensor},
)
assert acc.linked_battery_charging_sensor == linked_battery_charging_sensor
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
await acc.run()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_battery.value == 100
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 1
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
hass.states.async_set(linked_battery_charging_sensor, STATE_OFF, None)
await acc.run()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_charging.value == 0
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
hass.states.async_set(linked_battery_charging_sensor, STATE_ON, None)
await acc.run()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_charging.value == 1
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
hass.states.async_remove(linked_battery_charging_sensor)
await acc.run()
await hass.async_block_till_done()
assert acc._char_charging.value == 1
async def test_linked_battery_sensor_and_linked_battery_charging_sensor(
hass, hk_driver, caplog
):
"""Test battery service with linked_battery_sensor and a linked_battery_charging_sensor."""
entity_id = "homekit.accessory"
linked_battery = "sensor.battery"
linked_battery_charging_sensor = "binary_sensor.battery_charging"
hass.states.async_set(entity_id, "open", {ATTR_BATTERY_LEVEL: 100})
hass.states.async_set(linked_battery, 50, None)
hass.states.async_set(linked_battery_charging_sensor, STATE_ON, None)
await hass.async_block_till_done()
acc = HomeAccessory(
hass,
hk_driver,
"Battery Service",
entity_id,
2,
{
CONF_LINKED_BATTERY_SENSOR: linked_battery,
CONF_LINKED_BATTERY_CHARGING_SENSOR: linked_battery_charging_sensor,
},
)
assert acc.linked_battery_sensor == linked_battery
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
await acc.run()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_battery.value == 50
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 1
hass.states.async_set(linked_battery_charging_sensor, STATE_OFF, None)
await hass.async_block_till_done()
assert acc._char_battery.value == 50
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 0
hass.states.async_remove(linked_battery_charging_sensor)
await hass.async_block_till_done()
assert acc._char_battery.value == 50
assert acc._char_low_battery.value == 0
assert acc._char_charging.value == 0
async def test_missing_linked_battery_charging_sensor(hass, hk_driver, caplog):
"""Test battery service with linked_battery_charging_sensor that is mapping to a missing entity."""
entity_id = "homekit.accessory"
linked_battery_charging_sensor = "binary_sensor.battery_charging"
hass.states.async_set(entity_id, "open", {ATTR_BATTERY_LEVEL: 100})
await hass.async_block_till_done()
acc = HomeAccessory(
hass,
hk_driver,
"Battery Service",
entity_id,
2,
{CONF_LINKED_BATTERY_CHARGING_SENSOR: linked_battery_charging_sensor},
)
assert acc.linked_battery_charging_sensor is None
# Make sure we don't throw if the linked_battery_charging_sensor
# is removed
hass.states.async_remove(linked_battery_charging_sensor)
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
):
await acc.run()
await hass.async_block_till_done()
# Make sure we don't throw if the entity_id
# is removed
hass.states.async_remove(entity_id)
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
):
await acc.run()
await hass.async_block_till_done()
async def test_missing_linked_battery_sensor(hass, hk_driver, caplog):
"""Test battery service with missing linked_battery_sensor."""
entity_id = "homekit.accessory"
linked_battery = "sensor.battery"
hass.states.async_set(entity_id, "open")
await hass.async_block_till_done()
acc = HomeAccessory(
hass,
hk_driver,
"Battery Service",
entity_id,
2,
{CONF_LINKED_BATTERY_SENSOR: linked_battery},
)
assert not acc.linked_battery_sensor
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
await acc.run()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert not acc.linked_battery_sensor
assert acc._char_battery is None
assert acc._char_low_battery is None
assert acc._char_charging is None
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
hass.states.async_remove(entity_id)
await acc.run()
await hass.async_block_till_done()
assert not acc.linked_battery_sensor
assert acc._char_battery is None
assert acc._char_low_battery is None
assert acc._char_charging is None
async def test_battery_appears_after_startup(hass, hk_driver, caplog):
"""Test battery level appears after homekit is started."""
entity_id = "homekit.accessory"
hass.states.async_set(entity_id, None, {})
await hass.async_block_till_done()
acc = HomeAccessory(hass, hk_driver, "Accessory without battery", entity_id, 2, {})
assert acc._char_battery is None
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
) as mock_async_update_state:
await acc.run()
await hass.async_block_till_done()
state = hass.states.get(entity_id)
mock_async_update_state.assert_called_with(state)
assert acc._char_battery is None
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
):
hass.states.async_set(entity_id, None, {ATTR_BATTERY_LEVEL: 15})
await hass.async_block_till_done()
assert acc._char_battery is None
with patch(
"homeassistant.components.homekit.accessories.HomeAccessory.async_update_state"
):
hass.states.async_remove(entity_id)
await hass.async_block_till_done()
assert acc._char_battery is None
async def test_call_service(hass, hk_driver, events):
"""Test call_service method."""
entity_id = "homekit.accessory"
hass.states.async_set(entity_id, None)
await hass.async_block_till_done()
acc = HomeAccessory(hass, hk_driver, "Home Accessory", entity_id, 2, {})
call_service = async_mock_service(hass, "cover", "open_cover")
test_domain = "cover"
test_service = "open_cover"
test_value = "value"
acc.async_call_service(
test_domain, test_service, {ATTR_ENTITY_ID: entity_id}, test_value
)
await hass.async_block_till_done()
assert len(events) == 1
assert events[0].data == {
ATTR_ENTITY_ID: acc.entity_id,
ATTR_DISPLAY_NAME: acc.display_name,
ATTR_SERVICE: test_service,
ATTR_VALUE: test_value,
}
assert len(call_service) == 1
assert call_service[0].domain == test_domain
assert call_service[0].service == test_service
assert call_service[0].data == {ATTR_ENTITY_ID: entity_id}
def test_home_bridge(hk_driver):
"""Test HomeBridge class."""
bridge = HomeBridge("hass", hk_driver, BRIDGE_NAME)
assert bridge.hass == "hass"
assert bridge.display_name == BRIDGE_NAME
assert bridge.category == 2 # Category.BRIDGE
assert len(bridge.services) == 2
serv = bridge.services[0] # SERV_ACCESSORY_INFO
assert serv.display_name == SERV_ACCESSORY_INFO
assert serv.get_characteristic(CHAR_NAME).value == BRIDGE_NAME
assert serv.get_characteristic(CHAR_FIRMWARE_REVISION).value == __version__
assert serv.get_characteristic(CHAR_MANUFACTURER).value == MANUFACTURER
assert serv.get_characteristic(CHAR_MODEL).value == BRIDGE_MODEL
assert serv.get_characteristic(CHAR_SERIAL_NUMBER).value == BRIDGE_SERIAL_NUMBER
bridge = HomeBridge("hass", hk_driver, "test_name")
assert bridge.display_name == "test_name"
assert len(bridge.services) == 2
serv = bridge.services[0] # SERV_ACCESSORY_INFO
# setup_message
bridge.setup_message()
def test_home_driver():
"""Test HomeDriver class."""
ip_address = "127.0.0.1"
port = 51826
path = ".homekit.state"
pin = b"123-45-678"
with patch("pyhap.accessory_driver.AccessoryDriver.__init__") as mock_driver:
driver = HomeDriver(
"hass",
"entry_id",
"name",
"title",
address=ip_address,
port=port,
persist_file=path,
)
mock_driver.assert_called_with(address=ip_address, port=port, persist_file=path)
driver.state = Mock(pincode=pin, paired=False)
xhm_uri_mock = Mock(return_value="X-HM://0")
driver.accessory = Mock(display_name="any", xhm_uri=xhm_uri_mock)
# pair
with patch("pyhap.accessory_driver.AccessoryDriver.pair") as mock_pair, patch(
"homeassistant.components.homekit.accessories.async_dismiss_setup_message"
) as mock_dissmiss_msg:
driver.pair("client_uuid", "client_public", b"1")
mock_pair.assert_called_with("client_uuid", "client_public", b"1")
mock_dissmiss_msg.assert_called_with("hass", "entry_id")
# unpair
with patch("pyhap.accessory_driver.AccessoryDriver.unpair") as mock_unpair, patch(
"homeassistant.components.homekit.accessories.async_show_setup_message"
) as mock_show_msg:
driver.unpair("client_uuid")
mock_unpair.assert_called_with("client_uuid")
mock_show_msg.assert_called_with("hass", "entry_id", "title (any)", pin, "X-HM://0")
|
|
# -*- coding: utf-8 -*-
"""
"""
from collections import OrderedDict
import os
import subprocess
import librosa
import numpy as np
import pandas as pd
import sklearn as sk
import sklearn.model_selection
import skimage as skim
import skimage.measure
import skimage.morphology
import skimage.restoration
from tqdm import tqdm
import xml.etree.ElementTree
from echonet.datasets.dataset import Dataset
from echonet.utils.generics import generate_delta, load_audio, to_one_hot
from IPython.core.debugger import Tracer
class BirdCLEF2016(Dataset):
"""
"""
def __init__(self, data_dir, work_dir, downsample=True):
super().__init__(data_dir, work_dir)
self.DOWNSAMPLE = downsample
self.SEGMENT_LENGTH = 500
self.BANDS = 180
self.WITH_DELTA = False
self.FMAX = 16000
self.FFT = 2205
self.HOP = 441
self._resample_recordings()
self._parse_recordings()
self._generate_spectrograms()
if self.DOWNSAMPLE:
self.SEGMENT_LENGTH //= 2
self.BANDS //= 3
self.class_count = len(self.encoder.classes_)
self._split_dataset()
self.train_meta = self.meta[self.meta['fold'] == 'train']
self.validation_data.meta = self.meta[self.meta['fold'] == 'validation']
self.test_data.meta = self.meta[self.meta['fold'] == 'test']
self._train_size = len(self.recordings[self.recordings['fold'] == 'train'])
self._validation_size = len(self.recordings[self.recordings['fold'] == 'validation'])
self._test_size = len(self.recordings[self.recordings['fold'] == 'test'])
self._populate(self.validation_data)
self._populate(self.test_data)
def _resample_recordings(self):
src_dir = self.data_dir + 'TrainingSet/wav/'
for recording in tqdm(sorted(os.listdir(src_dir))):
if os.path.isfile(src_dir + recording):
wav_in = src_dir + recording
wav_out = self.work_dir + recording
if not os.path.isfile(wav_out):
subprocess.call(['sox', '-S', wav_in, '-r', '44100', '-b', '16', wav_out])
def _parse_recordings(self):
if os.path.isfile(self.work_dir + 'BirdCLEF2016.csv'):
self.recordings = pd.read_csv(self.work_dir + 'BirdCLEF2016.csv')
self.encoder = sk.preprocessing.LabelEncoder()
self.encoder.fit(self.recordings['birdclass'].values)
else:
self.recordings = []
src_dir = self.data_dir + 'TrainingSet/xml/'
for recording in tqdm(sorted(os.listdir(src_dir))):
root = xml.etree.ElementTree.parse(src_dir + recording).getroot()
data = {
'filename': recording[:-4] + '.wav',
'birdclass': root.find('ClassId').text,
'species': root.find('Species').text,
'genus': root.find('Genus').text,
'family': root.find('Family').text,
'background': root.find('BackgroundSpecies').text
}
if data['background'] is None:
data['background'] = ''
columns = ['filename', 'birdclass', 'species', 'genus', 'family', 'background']
row = pd.DataFrame(data, columns=columns, index=[0])
self.recordings.append(row)
self.recordings = pd.concat(self.recordings, ignore_index=True)
self.encoder = sk.preprocessing.LabelEncoder()
self.encoder.fit(self.recordings['birdclass'].values)
self.recordings['target'] = self.encoder.transform(self.recordings['birdclass'].values)
self.recordings.to_csv(self.work_dir + 'BirdCLEF2016.csv', index=False)
def _generate_spectrograms(self):
if os.path.isfile(self.work_dir + 'BirdCLEF2016-clips.csv'):
self.meta = pd.read_csv(self.work_dir + 'BirdCLEF2016-clips.csv')
else:
self.meta = []
for row in tqdm(self.recordings.itertuples(), total=len(self.recordings)):
self.meta.extend(self._split_recording(row))
self.meta = pd.concat(self.meta, ignore_index=True)
self.meta.to_csv(self.work_dir + 'BirdCLEF2016-clips.csv', index=False)
def _split_recording(self, row):
audio = load_audio(self.work_dir + row.filename, 44100)
spec = librosa.feature.melspectrogram(audio, sr=44100, n_fft=self.FFT, fmax=self.FMAX,
hop_length=self.HOP, n_mels=self.BANDS)
freqs = librosa.core.mel_frequencies(n_mels=self.BANDS, fmax=self.FMAX)
spec = librosa.core.perceptual_weighting(spec, freqs, ref_power=np.max)
spec = self._enhance_spectrogram(spec)
mask = skim.morphology.dilation(spec, selem=np.ones((3, 40))) > 0
mask[:10, :] = False
clip_list = []
counter = 0
current = []
window_size = 25
w = 0
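        # Scan the spectrogram in fixed-width windows: windows where the
        # dilated energy mask fires are accumulated into the current clip,
        # and the first silent window flushes that clip to disk via _save.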
while w * window_size < np.shape(spec)[1]:
window = slice(w * window_size, (w + 1) * window_size)
if np.any(mask[:, window]):
current.append(spec[:, window])
elif len(current):
clip_list.append(self._save(np.concatenate(current, axis=1), row, counter))
counter += 1
current = []
w += 1
if len(current):
clip_list.append(self._save(np.concatenate(current, axis=1), row, counter))
return clip_list
def _enhance_spectrogram(self, spec):
spec = (spec + 60.0) / 15.0 # quasi-normalization
np.clip(spec, 0, 5, out=spec)
spec = (spec ** 2 - 6.0) / 6.0
spec = skim.restoration.denoise_tv_chambolle(spec, weight=0.1)
spec = ((spec - np.min(spec)) / np.max(spec - np.min(spec)) - 0.5) * 2.0
spec += 0.5
spec[spec > 0] *= 2
spec = ((spec - np.min(spec)) / np.max(spec - np.min(spec)) - 0.5) * 2.0
return spec
def _save(self, clip, row, counter):
reduced_clip = skim.measure.block_reduce(clip, block_size=(3, 2), func=np.mean)
np.save(self.work_dir + row.filename + '.spec{}.npy'.format(counter),
clip.astype('float16'), allow_pickle=False)
np.save(self.work_dir + row.filename + '.spec{}.ds.npy'.format(counter),
reduced_clip.astype('float16'), allow_pickle=False)
data = OrderedDict([
('filename', row.filename + '.spec{}.npy'.format(counter)),
('target', row.target),
('recording', row.filename),
('birdclass', row.birdclass),
('species', row.species),
('genus', row.genus),
('family', row.family),
('background', '' if pd.isnull(row.background) else row.background)
])
return pd.DataFrame(data, columns=data.keys(), index=[0])
def _split_dataset(self):
"""Splits the dataset into training/validation/testing folds
Stratified split with shuffling:
- 75% of recordings go to training
- 12.5% validation
- 12.5% testing
"""
splitter = sklearn.model_selection.StratifiedShuffleSplit
quarter = splitter(n_splits=1, test_size=0.25, random_state=20161013)
half = splitter(n_splits=1, test_size=0.5, random_state=20161013)
train_split = quarter.split(self.recordings['filename'], self.recordings['target'])
train_idx, holdout_idx = list(train_split)[0]
holdout_split = half.split(self.recordings.loc[holdout_idx, 'filename'],
self.recordings.loc[holdout_idx, 'target'])
validation_idx, test_idx = list(holdout_split)[0]
self.recordings.loc[train_idx, 'fold'] = 'train'
self.recordings.loc[holdout_idx[validation_idx], 'fold'] = 'validation'
self.recordings.loc[holdout_idx[test_idx], 'fold'] = 'test'
right = self.recordings[['filename', 'fold']].rename(columns={'filename': 'recording'})
self.meta = pd.merge(self.meta, right, on='recording')
@property
def input_shape(self):
return 1 + self.WITH_DELTA, self.BANDS, self.SEGMENT_LENGTH
@property
def train_size(self):
return self._train_size
@property
def train_segments(self):
return len(self.train_meta)
@property
def validation_size(self):
return self._validation_size
@property
def validation_segments(self):
return len(self.validation_data.meta)
@property
def test_size(self):
return self._test_size
@property
def test_segments(self):
return len(self.test_data.meta)
def to_categories(self, targets):
return self.encoder.classes_[targets]
def to_targets(self, categories):
return self.encoder.transform(categories)
def test(self, model):
return self._score(model, self.test_data)
def validate(self, model):
return self._score(model, self.validation_data)
def _populate(self, data):
X, y, meta = [], [], []
for row in tqdm(data.meta.itertuples(), total=len(data.meta)):
values = dict(zip(row._fields[1:], row[1:]))
columns = row._fields[1:]
rows = []
for _ in range(2): # multiply segment variants for prediction
X.append(self._extract_segment(row.filename))
y.append(row.target)
rows.append(pd.DataFrame(values, columns=columns, index=[0]))
meta.extend(rows)
X = np.stack(X)
y = to_one_hot(np.array(y), self.class_count)
meta = pd.concat(meta, ignore_index=True)
if self.data_mean is None:
self.data_mean = np.mean(X)
self.data_std = np.std(X)
X -= self.data_mean
X /= self.data_std
data.X = X
data.y = y
data.meta = meta
def iterbatches(self, batch_size):
itrain = super()._iterrows(self.train_meta)
while True:
X, y = [], []
for i in range(batch_size):
row = next(itrain)
X.append(self._extract_segment(row.filename))
y.append(row.target)
X = np.stack(X)
y = to_one_hot(np.array(y), self.class_count)
X -= self.data_mean
X /= self.data_std
yield X, y
def _extract_segment(self, filename):
if self.DOWNSAMPLE:
spec = np.load(self.work_dir + filename[:-4] + '.ds.npy').astype('float32')
else:
spec = np.load(self.work_dir + filename).astype('float32')
spec = spec[:, :-1] # trim border artifacts
if np.shape(spec)[1] >= self.SEGMENT_LENGTH:
offset = self.RandomState.randint(0, np.shape(spec)[1] - self.SEGMENT_LENGTH + 1)
spec = spec[:, offset:offset + self.SEGMENT_LENGTH]
else:
offset = self.RandomState.randint(0, self.SEGMENT_LENGTH - np.shape(spec)[1] + 1)
overlay = np.zeros((self.BANDS, self.SEGMENT_LENGTH)) - 1.0
overlay[:, offset:offset + np.shape(spec)[1]] = spec
spec = overlay
if self.WITH_DELTA:
delta = generate_delta(spec)
return np.stack([spec, delta])
else:
return np.stack([spec])
def _score(self, model, data):
predictions = pd.DataFrame(model.predict(data.X))
results = pd.concat([data.meta[['recording', 'target']], predictions], axis=1)
results = results.groupby('recording').aggregate('mean').reset_index()
results['predicted'] = np.argmax(results.iloc[:, 2:].values, axis=1)
return np.sum(results['predicted'] == results['target']) / len(results)
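# Usage sketch (illustrative; the dataset class these methods belong to is defined
# earlier in this module, and the class/constructor names below are assumptions):
#
#     dataset = BirdCLEF2016(...)                  # hypothetical constructor call
#     batches = dataset.iterbatches(batch_size=32) # infinite training generator
#     X, y = next(batches)                         # X: batch of segments, y: one-hot targets
#     accuracy = dataset.validate(model)           # mean per-recording accuracy of `model`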
|
|
from luigi import IntParameter
from xlrd import open_workbook
from xlrd.xldate import xldate_as_tuple
from lib.timespan import get_timespan
from tasks.base_tasks import ColumnsTask, TableTask, TempTableTask, RepoFileUnzipTask
from tasks.util import shell
from tasks.meta import current_session, OBSColumn
from collections import OrderedDict
from tasks.us.ny.nyc.columns import NYCColumns
from tasks.us.ny.nyc.tags import NYCTags
from tasks.poi import POIColumns
from datetime import datetime
from lib.logger import get_logger
import os
LOGGER = get_logger(__name__)
class DownloadPermitIssuanceMonthly(RepoFileUnzipTask):
month = IntParameter()
year = IntParameter()
URL = 'https://www1.nyc.gov/assets/buildings/foil/per{month}{year}excel.zip'
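    # e.g. month=3, year=16 formats to .../per0316excel.zip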
def get_url(self):
return self.URL.format(month=('0' + str(self.month))[-2:],
year=('0' + str(self.year))[-2:])
class PermitIssuanceXLS2TempTableTask(TempTableTask):
month = IntParameter()
year = IntParameter()
def requires(self):
return DownloadPermitIssuanceMonthly(year=self.year, month=self.month)
def run(self):
book = open_workbook(os.path.join(
self.input().path,
'per{}{}.xls'.format(
('0' + str(self.month))[-2:], ('0' + str(self.year))[-2:])),
formatting_info=True)
sheet = book.sheets()[0]
session = current_session()
allvals = []
for rownum, row in enumerate(sheet.get_rows()):
if rownum == 2:
coldefs = ['"{}" VARCHAR'.format(cell.value.replace('"', '').strip()) for cell in row if cell.value]
session.execute('CREATE TABLE {output} ({coldefs})'.format(
coldefs=', '.join(coldefs),
output=self.output().table
))
elif rownum > 2:
# Have to escape colons as they are interpreted as symbols otherwise
vals = []
for cell in row:
# type 0 is empty
if cell.ctype == 0:
pass
# type 2 is numeric, which is always a float, even if it
# should be an integer
elif cell.ctype == 2:
if cell.value == int(cell.value):
vals.append(str(int(cell.value)))
else:
vals.append(str(cell.value))
# type 3 is date
elif cell.ctype == 3:
vals.append("'{}-{}-{}'".format(*xldate_as_tuple(cell.value, 0)))
# everything else just pass in as unicode string, unless
# it's blank, in which case send in NULL
else:
if cell.value:
vals.append("'{}'".format(str(cell.value)
.replace(":", "::")
.replace("'", "''")))
else:
vals.append('NULL')
# Kill occasional erroneous blank last column
if vals[-1] == "NULL":
vals = vals[0:-1]
if len(vals) < len(coldefs):
vals.extend(["NULL"] * (len(coldefs) - len(vals)))
if len(vals) != len(coldefs):
LOGGER.error('FIXME: cannot parse year %s month %s row %s',
self.year, self.month, rownum)
continue
allvals.append(', '.join(vals))
try:
session.execute('INSERT INTO {output} VALUES ({allvals})'.format(
output=self.output().table,
allvals='), ('.join(allvals)
))
except Exception as err:
print(err)
raise
class PermitColumns(ColumnsTask):
def requires(self):
return {
'tags': NYCTags(),
}
def version(self):
return 4
def columns(self):
#nyc = self.input()['nyc']
#poi = self.input()['poi']
return OrderedDict([
('job_num', OBSColumn(
type='TEXT',
weight=1,
name='Department of Buildings Job Number'
)),
('job_doc_num', OBSColumn(
type='TEXT',
weight=1,
name='Department of Buildings Job document number'
)),
('job_type', OBSColumn(
type='TEXT',
weight=1,
name='Job Type'
)),
('self_cert', OBSColumn(
type='TEXT',
weight=1,
name='Self-cert'
)),
('bldg_type', OBSColumn(
type='Text',
weight=1,
name='Building Type',
)),
('residential', OBSColumn(
type='Text',
weight=1,
name='Residential',
)),
('special_dist_1', OBSColumn(
type='Text',
weight=1,
name='Special district 1',
)),
('special_dist_2', OBSColumn(
type='Text',
weight=1,
name='Special district 2',
)),
('work_type', OBSColumn(
type='Text',
weight=1,
name='Work Type',
extra={
'categories': {
'BL': 'Boiler',
'CC': 'Curb Cut',
'CH': 'Chute',
'EQ': 'Construction Equipment',
'AL': 'Alteration',
'DM': 'Demolition & Removal',
'FP': 'Fire Suppression',
'FS': 'Fuel Storage',
'MH': 'Mechanical/HVAC',
'SD': 'Standpipe',
'FN': 'Fence',
'SP': 'Sprinkler',
'SF': 'Scaffold',
'EA': 'Earthwork Only',
'OT': 'Other-General Construction, Partitions, '
'Marquees, BPP (Builder Pavement Plan), etc.',
'NB': 'New Building',
'EW': 'Equipment Work',
'SG': 'Sign',
'FA': 'Fire Alarm',
'FB': 'Fuel Burning',
'AR': 'Architectural',
'FO': 'Foundation',
'ME': 'Mechanical',
'NP': 'No Plans',
'PL': 'Plumbing',
'SH': 'Sidewalk Shed',
'ST': 'Structural',
'ZO': 'Zoning',
}
}
)),
('permit_status', OBSColumn(
type='Text',
weight=1,
name='Permit Status',
)),
('filing_status', OBSColumn(
type='Text',
weight=1,
name='Filing Status',
)),
('permit_type', OBSColumn(
type='Text',
weight=1,
name='Permit Type',
extra={
'categories': {
'BL': 'Boiler',
'CC': 'Curb Cut',
'CH': 'Chute',
'EQ': 'Construction Equipment',
'AL': 'Alteration',
'DM': 'Demolition & Removal',
'FP': 'Fire Suppression',
'FS': 'Fuel Storage',
'MH': 'Mechanical/HVAC',
'SD': 'Standpipe',
'FN': 'Fence',
'SP': 'Sprinkler',
'SF': 'Scaffold',
'EA': 'Earthwork Only',
'OT': 'Other-General Construction, Partitions, '
'Marquees, BPP (Builder Pavement Plan), etc.',
'NB': 'New Building',
'EW': 'Equipment Work',
'SG': 'Sign',
'FA': 'Fire Alarm',
'FB': 'Fuel Burning',
'AR': 'Architectural',
'FO': 'Foundation',
'ME': 'Mechanical',
'NP': 'No Plans',
'PL': 'Plumbing',
'SH': 'Sidewalk Shed',
'ST': 'Structural',
'ZO': 'Zoning',
}
}
)),
('permit_sequence', OBSColumn(
type='Numeric',
weight=1,
name='Permit Sequence Number',
)),
('permit_subtype', OBSColumn(
type='Text',
name='Permit Subtype',
weight=1,
extra={
'categories': {
'BL': 'Boiler',
'CC': 'Curb Cut',
'CH': 'Chute',
'EQ': 'Construction Equipment',
'AL': 'Alteration',
'DM': 'Demolition & Removal',
'FP': 'Fire Suppression',
'FS': 'Fuel Storage',
'MH': 'Mechanical/HVAC',
'SD': 'Standpipe',
'FN': 'Fence',
'SP': 'Sprinkler',
'SF': 'Scaffold',
'EA': 'Earthwork Only',
'OT': 'Other-General Construction, Partitions, '
'Marquees, BPP (Builder Pavement Plan), etc.',
'NB': 'New Building',
'EW': 'Equipment Work',
'SG': 'Sign',
'FA': 'Fire Alarm',
'FB': 'Fuel Burning',
'AR': 'Architectural',
'FO': 'Foundation',
'ME': 'Mechanical',
'NP': 'No Plans',
'PL': 'Plumbing',
'SH': 'Sidewalk Shed',
'ST': 'Structural',
'ZO': 'Zoning',
}
}
)),
('oil_gas', OBSColumn(
type='Text',
weight=1,
name='Oil gas',
)),
('site_fill', OBSColumn(
type='Text',
weight=1,
name='Site fill',
)),
('filing_date', OBSColumn(
type='Date',
weight=1,
name='Filing date',
)),
('issuance_date', OBSColumn(
type='Date',
weight=1,
name='Issuance date',
)),
('expiration_date', OBSColumn(
type='Date',
weight=1,
name='Expiration date',
)),
('job_start_date', OBSColumn(
type='Date',
weight=1,
name='Job start date',
)),
('permittee_first_last_name', OBSColumn(
type='Text',
weight=1,
name='Permittee first & last name',
)),
('permittee_business_name', OBSColumn(
type='Text',
weight=1,
name='Permittee business name',
)),
('permittee_phone', OBSColumn(
type='Text',
weight=1,
name='Permittee phone',
)),
('permittee_license_type', OBSColumn(
type='Text',
weight=1,
name='Permittee license type',
)),
('permittee_license_number', OBSColumn(
type='Text',
weight=1,
name='Permittee license number',
)),
('permittee_other_title', OBSColumn(
type='Text',
weight=1,
name='Permittee Other Title',
)),
('acts_as_superintendent', OBSColumn(
type='Text',
weight=1,
                name='Acts as superintendent'
)),
('hic_license', OBSColumn(
type='Text',
weight=1,
name='HIC License',
)),
('site_safety_mgrs_name', OBSColumn(
type='Text',
weight=1,
name="Site Safety Manager's name",
)),
('site_safety_mgr_business_name', OBSColumn(
type='Text',
weight=1,
name="Site Safety Manager's Business Name",
)),
('superintendent_first_last_name', OBSColumn(
type='Text',
weight=1,
name="Superintendent first & last name",
)),
('superintendent_business_name', OBSColumn(
type='Text',
weight=1,
name="Superintent business name",
)),
('owner_business_type', OBSColumn(
type='Text',
weight=1,
name="Owner's business type",
)),
('non_profit', OBSColumn(
type='Text',
weight=1,
name="Non-Profit",
)),
('owner_business_name', OBSColumn(
type='Text',
weight=1,
name="Owner's business name",
)),
('owner_first_last_name', OBSColumn(
type='Text',
weight=1,
name="Owner's first and last name",
)),
('owner_house_street', OBSColumn(
type='Text',
weight=1,
name="Owner's house street",
)),
('city_state_zip', OBSColumn(
type='Text',
weight=1,
name='City, state and zip',
)),
('owner_phone_number', OBSColumn(
type='Text',
weight=1,
name="Owner's phone number",
)),
])
def tags(self, input_, col_key, col):
return [input_['tags']['nyc']]
class PermitIssuance(TableTask):
def version(self):
return 3
def requires(self):
data_tables = {}
now = datetime.now()
for year in range(3, 18):
# 2003 only has from March onwards but we skip it because of
# different schema -- no Self-Cert
# 2004 onwards seems similar but no "Site Fill"
if year < 10:
continue
# current year, only grab to prior month
elif year == now.year - 2000:
months = range(1, now.month)
# grab all months
else:
months = range(1, 13)
for month in months:
data_tables[year, month] = \
PermitIssuanceXLS2TempTableTask(year=year, month=month)
return {
'poi_columns': POIColumns(),
'nyc_columns': NYCColumns(),
'permit_columns': PermitColumns(),
'data': data_tables,
}
def columns(self):
input_ = self.input()
poi = input_['poi_columns']
nyc = input_['nyc_columns']
permit = input_['permit_columns']
return OrderedDict([
('bbl', nyc['bbl']),
('borough', nyc['borough']),
('bin', nyc['bin']),
('house_number', poi['house_number']),
('street_name', poi['street_name']),
('job_num', permit['job_num']),
('job_doc_num', permit['job_doc_num']),
('job_type', permit['job_type']),
('self_cert', permit['self_cert']),
('block', nyc['block']),
('lot', nyc['lot']),
('cd', nyc['cd']),
('zip', poi['postal_code']),
('bldg_type', permit['bldg_type']),
('residential', permit['residential']),
('special_dist_1', permit['special_dist_1']),
('special_dist_2', permit['special_dist_2']),
('work_type', permit['work_type']),
('permit_status', permit['permit_status']),
('filing_status', permit['filing_status']),
('permit_type', permit['permit_type']),
('permit_sequence', permit['permit_sequence']),
('permit_subtype', permit['permit_subtype']),
('oil_gas', permit['oil_gas']),
('site_fill', permit['site_fill']),
('filing_date', permit['filing_date']),
('issuance_date', permit['issuance_date']),
('expiration_date', permit['expiration_date']),
('job_start_date', permit['job_start_date']),
('permittee_first_last_name', permit['permittee_first_last_name']),
('permittee_business_name', permit['permittee_business_name']),
('permittee_phone', permit['permittee_phone']),
('permittee_license_type', permit['permittee_license_type']),
('permittee_license_number', permit['permittee_license_number']),
('permittee_other_title', permit['permittee_other_title']),
('acts_as_superintendent', permit['acts_as_superintendent']),
('hic_license', permit['hic_license']),
('site_safety_mgrs_name', permit['site_safety_mgrs_name']),
('site_safety_mgr_business_name', permit['site_safety_mgr_business_name']),
('superintendent_first_last_name', permit['superintendent_first_last_name']),
('superintendent_business_name', permit['superintendent_business_name']),
('owner_business_type', permit['owner_business_type']),
('non_profit', permit['non_profit']),
('owner_business_name', permit['owner_business_name']),
('owner_first_last_name', permit['owner_first_last_name']),
('owner_house_street', permit['owner_house_street']),
('city_state_zip', permit['city_state_zip']),
('owner_phone_number', permit['owner_phone_number']),
])
def table_timespan(self):
return get_timespan('current')
def populate(self):
input_ = self.input()
session = current_session()
for yearmonth, data in input_['data'].items():
year, month = yearmonth
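            # The first SELECT expression below assembles the BBL key: a one-digit
            # borough code, a zero-padded 5-digit block and a zero-padded 4-digit
            # lot, e.g. Brooklyn ('3'), block 123, lot 45 -> '3001230045'.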
try:
session.execute('''
INSERT INTO {output}
SELECT CASE SUBSTR(LOWER("Borough"), 1, 5)
WHEN 'state' THEN '5'
WHEN 'queen' THEN '4'
WHEN 'brook' THEN '3'
WHEN 'manha' THEN '1'
WHEN 'bronx' THEN '2'
ELSE NULL
END || LPAD("Block", 5, '0') || LPAD("Lot", 4, '0'),
"Borough"::Text,
"Bin #"::Text,
"House #"::Text,
"Street Name"::Text,
"Job #"::Text,
"Job doc. #"::Text,
"Job Type"::Text,
"Self-Cert"::Text,
"Block"::Text,
"Lot"::Text,
"Community Board"::Text,
"Zip Code"::Text,
"Bldg Type"::Text,
"Residential"::Text,
"Special District 1"::Text,
"Special District 2"::Text,
"Work Type"::Text,
"Permit Status"::Text,
"Filing Status"::Text,
"Permit Type"::Text,
"Permit Sequence #"::Numeric,
"Permit Subtype"::Text,
"Oil Gas"::Text,
"Site Fill"::Text,
NullIf("Filing Date", '//0')::Date,
NullIf("Issuance Date", '//0')::Date,
NullIf("Expiration Date", '//0')::Date,
NullIf(NullIf(NullIf(NullIf("Job Start Date",
'0'), '20070009'), '//0'), '4553451R')::Date,
"Permittee's First & Last Name"::Text,
"Permittee's Business Name"::Text,
"Permittee's Phone #"::Text,
"Permittee's License Type"::Text,
"Permittee's License #"::Text,
"Permittee's Other Title"::Text,
"Acts As Superintendent"::Text,
"HIC License"::Text,
"Site Safety Mgr's Name"::Text,
"Site Safety Mgr Business Name"::Text,
"Superintendent First & Last Name"::Text,
"Superintendent Business Name"::Text,
"Owner's Business Type"::Text,
"Non-Profit"::Text,
"Owner's Business Name"::Text,
"Owner's First & Last Name"::Text,
"Owner's House Street"::Text,
"City, State, Zip"::Text,
"Owner's Phone #"::Text
FROM {intable}
WHERE "Borough" IS NOT NULL
'''.format(output=self.output().table,
intable=data.table))
except Exception as err:
LOGGER.error('%s', err)
session.rollback()
raise
session.execute('''
create index on {output} (bbl)
'''.format(output=self.output().table))
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_datafs
----------------------------------
Tests for `datafs` module.
"""
from __future__ import absolute_import
import pytest
from datafs._compat import u
from tests.resources import prep_manager
import os
import tempfile
import hashlib
import random
from six import b
try:
PermissionError
except NameError:
class PermissionError(NameError):
pass
def get_counter():
'''
    Counts up from a random starting point to ensure we don't have name collisions
'''
counter = random.randint(0, 10000)
while True:
yield counter
counter += 1
counter = get_counter()
@pytest.yield_fixture(scope='function')
def archive(api):
'''
Create a temporary archive for use in testing
'''
test_id = next(counter)
archive_name = 'test_archive_{}'.format(test_id)
var = api.create(
archive_name,
metadata=dict(description='My test data archive #{}'.format(test_id)))
try:
yield var
finally:
var.delete()
string_tests = [
'',
'another test',
'9872387932487913874031713470304',
os.linesep.join(['ajfdsaion', 'daf', 'adfadsffdadsf'])]
def update_and_hash(arch, contents):
'''
Save contents to archive ``arch`` and return the DataAPI's hash value
'''
f = tempfile.NamedTemporaryFile(delete=False)
try:
f.write(contents)
f.close()
apihash = arch.api.hash_file(f.name)['checksum']
arch.update(f.name)
finally:
os.remove(f.name)
return apihash
@pytest.mark.parametrize('contents', string_tests)
def test_hashfuncs(archive, contents):
'''
Run through a number of iterations of the hash functions
'''
contents = u(contents)
direct = hashlib.md5(contents.encode('utf-8')).hexdigest()
apihash = update_and_hash(archive, contents)
assert direct == apihash, 'Manual hash "{}" != api hash "{}"'.format(
direct, apihash)
msg = (
'Manual hash "{}"'.format(direct) +
' != archive hash "{}"'.format(archive.get_latest_hash()))
assert direct == archive.get_latest_hash(), msg
# Try uploading the same file
apihash = update_and_hash(archive, contents)
assert direct == apihash, 'Manual hash "{}" != api hash "{}"'.format(
direct, apihash)
msg = (
'Manual hash "{}"'.format(direct) +
' != archive hash "{}"'.format(archive.get_latest_hash()))
assert direct == archive.get_latest_hash(), msg
# Update and test again!
contents = u((os.linesep).join(
[contents, contents, 'line 3!' + contents]))
direct = hashlib.md5(contents.encode('utf-8')).hexdigest()
apihash = update_and_hash(archive, contents)
with archive.open('rb') as f:
current = f.read()
msg = 'Latest updates "{}" != archive contents "{}"'.format(
contents, current)
assert contents == current, msg
assert direct == apihash, 'Manual hash "{}" != api hash "{}"'.format(
direct, apihash)
msg = (
'Manual hash "{}"'.format(direct) +
' != archive hash "{}"'.format(archive.get_latest_hash()))
assert direct == archive.get_latest_hash(), msg
# Update and test a different way!
contents = u((os.linesep).join([contents, 'more!!!', contents]))
direct = hashlib.md5(contents.encode('utf-8')).hexdigest()
with archive.open('wb+') as f:
f.write(b(contents))
with archive.open('rb') as f2:
current = f2.read()
msg = 'Latest updates "{}" != archive contents "{}"'.format(
contents, current)
assert contents == current, msg
msg = (
'Manual hash "{}"'.format(direct) +
' != archive hash "{}"'.format(archive.get_latest_hash()))
assert direct == archive.get_latest_hash(), msg
def test_create_archive(api):
archive_name = 'test_recreation_error'
api.create(archive_name, metadata={'testval': 'my test value'})
var = api.get_archive(archive_name)
testval = var.get_metadata()['testval']
with pytest.raises(KeyError) as excinfo:
api.create(archive_name)
assert "already exists" in str(excinfo.value)
api.create(
archive_name,
metadata={
'testval': 'a different test value'},
raise_on_err=False)
var = api.get_archive(archive_name)
assert testval == var.get_metadata()[
'testval'], "Test archive was incorrectly updated!"
var.update_metadata({'testval': 'a different test value'})
msg = "Test archive was not updated!"
assert var.get_metadata()['testval'] == 'a different test value', msg
# Test archive deletion
var.delete()
with pytest.raises(KeyError):
api.get_archive(archive_name)
def test_api_locks(api, local_auth):
api.lock_manager()
api.lock_authorities()
with pytest.raises((PermissionError, NameError)):
with prep_manager('mongo') as manager:
api.attach_manager(manager)
with pytest.raises((PermissionError, NameError)):
api.attach_authority('auth', local_auth)
def test_log_with_various_messages(api):
'''
Test :py:meth:`~datafs.core.data_archive.DataArchive.log` stability
Addresses :issue:`232` - log fails on versions with no message
'''
arch = api.create('test/archive1.txt')
arch.log()
with arch.open('w+') as f:
f.write(u('hello 1'))
arch.log()
with arch.open('w+', message='hello') as f:
f.write(u('hello 2'))
arch.log()
with arch.open('w+', message=4) as f:
f.write(u('hello 3'))
arch.log()
with arch.open('w+', message=lambda x: x**2) as f:
f.write(u('hello 4'))
arch.log()
with arch.open('w+', message=None) as f:
f.write(u('hello 5'))
arch.log()
|
|
"""HTML utilities suitable for global use."""
from __future__ import unicode_literals
import re
import warnings
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_str, force_text
from django.utils.functional import allow_lazy
from django.utils.http import RFC3986_GENDELIMS, RFC3986_SUBDELIMS
from django.utils.safestring import SafeData, SafeText, mark_safe
from django.utils.six.moves.urllib.parse import (
parse_qsl, quote, unquote, urlencode, urlsplit, urlunsplit,
)
from django.utils.text import normalize_newlines
from .html_parser import HTMLParseError, HTMLParser
# Configuration for urlize() function.
TRAILING_PUNCTUATION = ['.', ',', ':', ';', '.)', '"', '\'', '!']
WRAPPING_PUNCTUATION = [('(', ')'), ('<', '>'), ('[', ']'), ('&lt;', '&gt;'), ('"', '"'), ('\'', '\'')]
# List of possible strings used for bullets in bulleted lists.
DOTS = ['&middot;', '*', '\u2022', '&#149;', '&bull;', '&#8226;']
unencoded_ampersands_re = re.compile(r'&(?!(\w+|#\d+);)')
word_split_re = re.compile(r'(\s+)')
simple_url_re = re.compile(r'^https?://\[?\w', re.IGNORECASE)
simple_url_2_re = re.compile(r'^www\.|^(?!http)\w[^@]+\.(com|edu|gov|int|mil|net|org)($|/.*)$', re.IGNORECASE)
simple_email_re = re.compile(r'^\S+@\S+\.\S+$')
link_target_attribute_re = re.compile(r'(<a [^>]*?)target=[^\s>]+')
html_gunk_re = re.compile(
r'(?:<br clear="all">|<i><\/i>|<b><\/b>|<em><\/em>|<strong><\/strong>|'
'<\/?smallcaps>|<\/?uppercase>)', re.IGNORECASE)
hard_coded_bullets_re = re.compile(
r'((?:<p>(?:%s).*?[a-zA-Z].*?</p>\s*)+)' % '|'.join(re.escape(x)
for x in DOTS), re.DOTALL)
trailing_empty_content_re = re.compile(r'(?:<p>(?:&nbsp;|\s|<br \/>)*?</p>\s*)+\Z')
def escape(text):
"""
Returns the given text with ampersands, quotes and angle brackets encoded
for use in HTML.
This function always escapes its input, even if it's already escaped and
marked as such. This may result in double-escaping. If this is a concern,
use conditional_escape() instead.
"""
    return mark_safe(force_text(text).replace('&', '&amp;').replace('<', '&lt;')
                     .replace('>', '&gt;').replace('"', '&quot;').replace("'", '&#39;'))
escape = allow_lazy(escape, six.text_type, SafeText)
_js_escapes = {
ord('\\'): '\\u005C',
ord('\''): '\\u0027',
ord('"'): '\\u0022',
ord('>'): '\\u003E',
ord('<'): '\\u003C',
ord('&'): '\\u0026',
ord('='): '\\u003D',
ord('-'): '\\u002D',
ord(';'): '\\u003B',
ord('\u2028'): '\\u2028',
ord('\u2029'): '\\u2029'
}
# Escape every ASCII character with a value less than 32.
_js_escapes.update((ord('%c' % z), '\\u%04X' % z) for z in range(32))
def escapejs(value):
"""Hex encodes characters for use in JavaScript strings."""
return mark_safe(force_text(value).translate(_js_escapes))
escapejs = allow_lazy(escapejs, six.text_type, SafeText)
def conditional_escape(text):
"""
Similar to escape(), except that it doesn't operate on pre-escaped strings.
This function relies on the __html__ convention used both by Django's
SafeData class and by third-party libraries like markupsafe.
"""
if hasattr(text, '__html__'):
return text.__html__()
else:
return escape(text)
def format_html(format_string, *args, **kwargs):
"""
Similar to str.format, but passes all arguments through conditional_escape,
and calls 'mark_safe' on the result. This function should be used instead
of str.format or % interpolation to build up small HTML fragments.
"""
args_safe = map(conditional_escape, args)
kwargs_safe = {k: conditional_escape(v) for (k, v) in six.iteritems(kwargs)}
return mark_safe(format_string.format(*args_safe, **kwargs_safe))
def format_html_join(sep, format_string, args_generator):
"""
A wrapper of format_html, for the common case of a group of arguments that
need to be formatted using the same format string, and then joined using
'sep'. 'sep' is also passed through conditional_escape.
'args_generator' should be an iterator that returns the sequence of 'args'
that will be passed to format_html.
Example:
format_html_join('\n', "<li>{} {}</li>", ((u.first_name, u.last_name)
for u in users))
"""
return mark_safe(conditional_escape(sep).join(
format_html(format_string, *tuple(args))
for args in args_generator))
def linebreaks(value, autoescape=False):
"""Converts newlines into <p> and <br />s."""
value = normalize_newlines(value)
paras = re.split('\n{2,}', value)
if autoescape:
paras = ['<p>%s</p>' % escape(p).replace('\n', '<br />') for p in paras]
else:
paras = ['<p>%s</p>' % p.replace('\n', '<br />') for p in paras]
return '\n\n'.join(paras)
linebreaks = allow_lazy(linebreaks, six.text_type)
class MLStripper(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.reset()
self.fed = []
def handle_data(self, d):
self.fed.append(d)
def handle_entityref(self, name):
self.fed.append('&%s;' % name)
def handle_charref(self, name):
self.fed.append('&#%s;' % name)
def get_data(self):
return ''.join(self.fed)
def _strip_once(value):
"""
Internal tag stripping utility used by strip_tags.
"""
s = MLStripper()
try:
s.feed(value)
except HTMLParseError:
return value
try:
s.close()
except HTMLParseError:
return s.get_data() + s.rawdata
else:
return s.get_data()
def strip_tags(value):
"""Returns the given HTML with all tags stripped."""
# Note: in typical case this loop executes _strip_once once. Loop condition
# is redundant, but helps to reduce number of executions of _strip_once.
while '<' in value and '>' in value:
new_value = _strip_once(value)
if new_value == value:
# _strip_once was not able to detect more tags
break
value = new_value
return value
strip_tags = allow_lazy(strip_tags)
def remove_tags(html, tags):
"""Returns the given HTML with given tags removed."""
warnings.warn(
"django.utils.html.remove_tags() and the removetags template filter "
"are deprecated. Consider using the bleach library instead.",
RemovedInDjango20Warning, stacklevel=3
)
tags = [re.escape(tag) for tag in tags.split()]
tags_re = '(%s)' % '|'.join(tags)
starttag_re = re.compile(r'<%s(/?>|(\s+[^>]*>))' % tags_re, re.U)
endtag_re = re.compile('</%s>' % tags_re)
html = starttag_re.sub('', html)
html = endtag_re.sub('', html)
return html
remove_tags = allow_lazy(remove_tags, six.text_type)
def strip_spaces_between_tags(value):
"""Returns the given HTML with spaces between tags removed."""
return re.sub(r'>\s+<', '><', force_text(value))
strip_spaces_between_tags = allow_lazy(strip_spaces_between_tags, six.text_type)
def strip_entities(value):
"""Returns the given HTML with all entities (&something;) stripped."""
warnings.warn(
"django.utils.html.strip_entities() is deprecated.",
RemovedInDjango20Warning, stacklevel=2
)
return re.sub(r'&(?:\w+|#\d+);', '', force_text(value))
strip_entities = allow_lazy(strip_entities, six.text_type)
def smart_urlquote(url):
"Quotes a URL if it isn't already quoted."
def unquote_quote(segment):
segment = unquote(force_str(segment))
# Tilde is part of RFC3986 Unreserved Characters
# http://tools.ietf.org/html/rfc3986#section-2.3
# See also http://bugs.python.org/issue16285
segment = quote(segment, safe=RFC3986_SUBDELIMS + RFC3986_GENDELIMS + str('~'))
return force_text(segment)
# Handle IDN before quoting.
try:
scheme, netloc, path, query, fragment = urlsplit(url)
except ValueError:
# invalid IPv6 URL (normally square brackets in hostname part).
return unquote_quote(url)
try:
netloc = netloc.encode('idna').decode('ascii') # IDN -> ACE
except UnicodeError: # invalid domain part
return unquote_quote(url)
if query:
# Separately unquoting key/value, so as to not mix querystring separators
# included in query values. See #22267.
query_parts = [(unquote(force_str(q[0])), unquote(force_str(q[1])))
for q in parse_qsl(query, keep_blank_values=True)]
# urlencode will take care of quoting
query = urlencode(query_parts)
path = unquote_quote(path)
fragment = unquote_quote(fragment)
return urlunsplit((scheme, netloc, path, query, fragment))
def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False):
"""
Converts any URLs in text into clickable links.
Works on http://, https://, www. links, and also on links ending in one of
the original seven gTLDs (.com, .edu, .gov, .int, .mil, .net, and .org).
Links can have trailing punctuation (periods, commas, close-parens) and
leading punctuation (opening parens) and it'll still do the right thing.
If trim_url_limit is not None, the URLs in the link text longer than this
limit will be truncated to trim_url_limit-3 characters and appended with
an ellipsis.
If nofollow is True, the links will get a rel="nofollow" attribute.
If autoescape is True, the link text and URLs will be autoescaped.
"""
safe_input = isinstance(text, SafeData)
def trim_url(x, limit=trim_url_limit):
if limit is None or len(x) <= limit:
return x
return '%s...' % x[:max(0, limit - 3)]
def unescape(text, trail):
"""
If input URL is HTML-escaped, unescape it so as we can safely feed it to
smart_urlquote. For example:
        http://example.com?x=1&amp;y=&lt;2&gt; => http://example.com?x=1&y=<2>
"""
unescaped = (text + trail).replace(
            '&amp;', '&').replace('&lt;', '<').replace(
            '&gt;', '>').replace('&quot;', '"').replace('&#39;', "'")
if trail and unescaped.endswith(trail):
# Remove trail for unescaped if it was not consumed by unescape
unescaped = unescaped[:-len(trail)]
elif trail == ';':
# Trail was consumed by unescape (as end-of-entity marker), move it to text
text += trail
trail = ''
return text, unescaped, trail
words = word_split_re.split(force_text(text))
for i, word in enumerate(words):
if '.' in word or '@' in word or ':' in word:
# Deal with punctuation.
lead, middle, trail = '', word, ''
for punctuation in TRAILING_PUNCTUATION:
if middle.endswith(punctuation):
middle = middle[:-len(punctuation)]
trail = punctuation + trail
for opening, closing in WRAPPING_PUNCTUATION:
if middle.startswith(opening):
middle = middle[len(opening):]
lead = lead + opening
# Keep parentheses at the end only if they're balanced.
if (middle.endswith(closing)
and middle.count(closing) == middle.count(opening) + 1):
middle = middle[:-len(closing)]
trail = closing + trail
# Make URL we want to point to.
url = None
nofollow_attr = ' rel="nofollow"' if nofollow else ''
if simple_url_re.match(middle):
middle, middle_unescaped, trail = unescape(middle, trail)
url = smart_urlquote(middle_unescaped)
elif simple_url_2_re.match(middle):
middle, middle_unescaped, trail = unescape(middle, trail)
url = smart_urlquote('http://%s' % middle_unescaped)
elif ':' not in middle and simple_email_re.match(middle):
local, domain = middle.rsplit('@', 1)
try:
domain = domain.encode('idna').decode('ascii')
except UnicodeError:
continue
url = 'mailto:%s@%s' % (local, domain)
nofollow_attr = ''
# Make link.
if url:
trimmed = trim_url(middle)
if autoescape and not safe_input:
lead, trail = escape(lead), escape(trail)
trimmed = escape(trimmed)
middle = '<a href="%s"%s>%s</a>' % (url, nofollow_attr, trimmed)
words[i] = mark_safe('%s%s%s' % (lead, middle, trail))
else:
if safe_input:
words[i] = mark_safe(word)
elif autoescape:
words[i] = escape(word)
elif safe_input:
words[i] = mark_safe(word)
elif autoescape:
words[i] = escape(word)
return ''.join(words)
urlize = allow_lazy(urlize, six.text_type)
def avoid_wrapping(value):
"""
Avoid text wrapping in the middle of a phrase by adding non-breaking
spaces where there previously were normal spaces.
"""
return value.replace(" ", "\xa0")
|
|
r"""Functions for nuclear and neutron beta decay effective couplings and $Ft$ values."""
from math import pi, log, sqrt
import flavio
from flavio.config import config
from flavio.physics.betadecays.common import wc_eff
from flavio.physics.ckm import get_ckm
from flavio.physics.taudecays.taulnunu import GFeff
from flavio.physics import elements
from flavio.classes import Observable, Prediction
import re
def xi(C, MF, MGT):
r"""Correlation coefficient $\xi$ as function of the effective couplings
`C`, the Fermi matrix element `MF` and the Gamow-Teller matrix element
`MGT`."""
# eq. (15) of arXiv:1803.08732
# note that C_i' = C_i
flavio.citations.register("Gonzalez-Alonso:2018omy")
return 2 * (abs(MF)**2 * (abs(C['V'])**2 + abs(C['S'])**2)
+ abs(MGT)**2 * (abs(C['A'])**2 + abs(C['T'])**2))
def a_xi(C, MF, MGT):
r"""Correlation coefficients $a\xi$ as function of the effective couplings
`C`, the Fermi matrix element `MF` and the Gamow-Teller matrix element
`MGT`."""
# eq. (16) of arXiv:1803.08732
# note that C_i' = C_i
flavio.citations.register("Gonzalez-Alonso:2018omy")
return 2 * (abs(MF)**2 * (abs(C['V'])**2 - abs(C['S'])**2)
- 1 / 3 * abs(MGT)**2 * (abs(C['A'])**2 - abs(C['T'])**2))
def a(C, MF, MGT):
r"""Correlation coefficient $a$ as function of the effective couplings
`C`, the Fermi matrix element `MF` and the Gamow-Teller matrix element
`MGT`."""
return a_xi(C, MF, MGT) / xi(C, MF, MGT)
def b_xi(C, MF, MGT, alpha, Z, s):
r"""Correlation coefficients $b\xi$ as function of the effective couplings
`C`, the Fermi matrix element `MF`, the Gamow-Teller matrix element
`MGT`, the fine structure constant `alpha`, and the nucleon charge `Z`. The sign `s` is + for the electron and - for the positron."""
# eq. (17) of arXiv:1803.08732
# note that C_i' = C_i
flavio.citations.register("Gonzalez-Alonso:2018omy")
gamma = sqrt(1 - alpha**2 * Z**2)
return s * 2 * gamma * 2 * (abs(MF)**2 * (C['V'] * C['S'].conjugate()).real
+ abs(MGT)**2 * (C['A'] * C['T'].conjugate()).real)
def dl(Jp, J):
"""Kronecker's delta"""
if Jp == J:
return 1
else:
return 0
def la(Jp, J):
"""Eq. (A1)"""
if Jp == J - 1:
return 1
elif Jp == J:
return 1 / (J + 1)
elif Jp == J + 1:
return -J / (J + 1)
else:
raise ValueError("Invalid input for function `la`")
def A_xi(C, MF, MGT, J, Jf, s):
r"""Correlation coefficients $A\xi$ as function of the effective couplings
`C`, the Fermi matrix element `MF`, the Gamow-Teller matrix element
`MGT`, and the angular momenta of initial and final state nuclei,
`J` and `Jf`. The sign `s` is + for the electron and - for the
positron."""
# note that C_i' = C_i
return 2 * (s * abs(MGT)**2 * la(Jf, J) * (abs(C['T'])**2 - abs(C['A'])**2)
+ dl(Jf, J) * abs(MF) * abs(MGT) * sqrt(J / (J + 1))
* (2 * C['S'] * C['T'].conjugate()
- 2 * C['V'] * C['A'].conjugate())).real
def B_xi(C, MF, MGT, J, Jf, me_E, s):
r"""Correlation coefficients $B\xi$ as function of the effective couplings
`C`, the Fermi matrix element `MF`, the Gamow-Teller matrix element
`MGT`, and the angular momenta of initial and final state nuclei,
`J` and `Jf`. `me_E` is the ratio of electron mass and energy.
The sign `s` is + for the electron and - for the positron."""
# note that C_i' = C_i
return 2 * (abs(MGT)**2 * la(Jf, J) * (me_E * 2 * C['T'] * C['A'].conjugate()
+ s * (abs(C['T'])**2 + abs(C['A'])**2))
- dl(Jf, J) * abs(MF) * abs(MGT) * sqrt(J / (J + 1))
* ((2 * C['S'] * C['T'].conjugate()
+ 2 * C['V'] * C['A'].conjugate())
+ s * me_E * (2 * C['S'] * C['A'].conjugate()
+ 2 * C['V'] * C['T'].conjugate()))).real
def D_xi(C, MF, MGT, J, Jf):
r"""Correlation coefficients $D\xi$ as function of the effective couplings
`C`, the Fermi matrix element `MF`, the Gamow-Teller matrix element
`MGT`, and the angular momenta of initial and final state nuclei,
    `J` and `Jf`."""
# note that C_i' = C_i
return 2 * (dl(Jf, J) * abs(MF) * abs(MGT) * sqrt(J / (J + 1))
* (2 * C['S'] * C['T'].conjugate()
- 2 * C['V'] * C['A'].conjugate())).imag
def R_xi(C, MF, MGT, J, Jf, s):
r"""Correlation coefficients $R\xi$ as function of the effective couplings
`C`, the Fermi matrix element `MF`, the Gamow-Teller matrix element
`MGT`, and the angular momenta of initial and final state nuclei,
`J` and `Jf`. The sign `s` is + for the electron and - for the positron."""
# note that C_i' = C_i
return 2 * (s * abs(MGT)**2 * la(Jf, J) * 2 * C['T'] * C['A'].conjugate()
+ dl(Jf, J) * abs(MF) * abs(MGT) * sqrt(J / (J + 1))
* (2 * C['S'] * C['A'].conjugate()
- 2 * C['V'] * C['T'].conjugate())).imag
def b(C, MF, MGT, alpha, Z, s):
r"""Correlation coefficient $b$ as function of the effective couplings
`C`, the Fermi matrix element `MF`, the Gamow-Teller matrix element
    `MGT`, the fine structure constant `alpha`, and the nucleon charge `Z`.
    The sign `s` is + for the electron and - for the positron."""
return b_xi(C, MF, MGT, alpha, Z, s) / xi(C, MF, MGT)
def A(C, MF, MGT, J, Jf, s):
r"""Correlation coefficient $A$ as function of the effective couplings
`C`, the Fermi matrix element `MF`, the Gamow-Teller matrix element
`MGT`, and the angular momenta of initial and final state nuclei,
`J` and `Jf`. The sign `s` is + for the electron and - for the
positron."""
return A_xi(C, MF, MGT, J, Jf, s) / xi(C, MF, MGT)
def B(C, MF, MGT, J, Jf, me_E, s):
r"""Correlation coefficient $B$ as function of the effective couplings
`C`, the Fermi matrix element `MF`, the Gamow-Teller matrix element
`MGT`, and the angular momenta of initial and final state nuclei,
`J` and `Jf`. `me_E` is the ratio of electron mass and energy.
The sign `s` is + for the electron and - for the positron."""
return B_xi(C, MF, MGT, J, Jf, me_E, s) / xi(C, MF, MGT)
def D(C, MF, MGT, J, Jf):
r"""Correlation coefficient $D$ as function of the effective couplings
`C`, the Fermi matrix element `MF`, the Gamow-Teller matrix element
`MGT`, and the angular momenta of initial and final state nuclei,
`J` and `Jf`."""
return D_xi(C, MF, MGT, J, Jf) / xi(C, MF, MGT)
def R(C, MF, MGT, J, Jf, s):
r"""Correlation coefficient $R$ as function of the effective couplings
`C`, the Fermi matrix element `MF`, the Gamow-Teller matrix element
`MGT`, and the angular momenta of initial and final state nuclei,
`J` and `Jf`. The sign `s` is + for the electron and - for the positron."""
return R_xi(C, MF, MGT, J, Jf, s) / xi(C, MF, MGT)
def K(par):
me = par['m_e']
return 2 * pi**3 * log(2) / me**5
# <me/E> from Table 4 of arXiv:1803.08732
nuclei_superallowed = {
# superallowed 0+->0+
'10C': {'Z': 6, '<me/E>': 0.619, 'tex': r'{}^{10}\text{C}'},
'14O': {'Z': 8, '<me/E>': 0.438, 'tex': r'{}^{14}\text{O}'},
'22Mg': {'Z': 12, '<me/E>': 0.310, 'tex': r'{}^{22}\text{Mg}'},
    '26mAl': {'Z': 13, '<me/E>': 0.300, 'tex': r'{}^{26m}\text{Al}'},
'34Cl': {'Z': 17, '<me/E>': 0.234, 'tex': r'{}^{34}\text{Cl}'},
'34Ar': {'Z': 18, '<me/E>': 0.212, 'tex': r'{}^{34}\text{Ar}'},
'38mK': {'Z': 19, '<me/E>': 0.213, 'tex': r'{}^{38m}\text{K}'},
'38Ca': {'Z': 20, '<me/E>': 0.195, 'tex': r'{}^{38}\text{Ca}'},
'42Sc': {'Z': 21, '<me/E>': 0.201, 'tex': r'{}^{42}\text{Sc}'},
'46V': {'Z': 23, '<me/E>': 0.183, 'tex': r'{}^{46}\text{V}'},
'50Mn': {'Z': 25, '<me/E>': 0.169, 'tex': r'{}^{50}\text{Mn}'},
'54Co': {'Z': 27, '<me/E>': 0.157, 'tex': r'{}^{54}\text{Co}'},
'62Ga': {'Z': 31, '<me/E>': 0.141, 'tex': r'{}^{62}\text{Ga}'},
'74Rb': {'Z': 37, '<me/E>': 0.125, 'tex': r'{}^{74}\text{Rb}'},
}
def Ft_superallowed(par, wc_obj, A):
r"""Corrected $\mathcal{F}t$ value of the beta decay of isotope `A`."""
MF = sqrt(2)
MGT = 0
Z = nuclei_superallowed[A]['Z']
scale = config['renormalization scale']['betadecay']
C = wc_eff(par, wc_obj, scale, nu='e')
Xi = xi(C, MF, MGT)
B = b(C, MF, MGT, par['alpha_e'], Z, s=-1) # s=-1 for beta+ decay
me_E = nuclei_superallowed[A]['<me/E>']
Vud = get_ckm(par)[0, 0]
GF = GFeff(wc_obj, par)
pre = GF / sqrt(2) * Vud
ddRp = par['delta_deltaRp_Z2'] * Z**2 # relative uncertainty on \delta R' (universal)
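    # Corrected Ft value for a pure Fermi 0+ -> 0+ transition (M_F = sqrt(2), M_GT = 0):
    # Ft = (1 + delta_R') * K / ( xi * (1 + b * <me/E>) * |G_F V_ud / sqrt(2)|^2 )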
return (1 + ddRp) * K(par) / Xi * 1 / (1 + B * me_E) / abs(pre)**2
class NeutronObservable:
def __init__(self, wc_obj, par, me_E):
self.wc_obj = wc_obj
self.par = par
self.me_E = me_E
self.MF = 1
self.MGT = sqrt(3)
self.scale = config['renormalization scale']['betadecay']
self.C = wc_eff(par, wc_obj, self.scale, nu='e')
self.s = 1 # electron e- in final state
self.Z = 0
self.alpha = par['alpha_e']
self.J = 1 / 2
self.Jf = 1 / 2
def xi(self):
return xi(self.C, self.MF, self.MGT)
def a(self):
return a(self.C, self.MF, self.MGT)
def b(self):
return b(self.C, self.MF, self.MGT, self.alpha, self.Z, self.s)
def A(self):
return A(self.C, self.MF, self.MGT, self.J, self.Jf, self.s)
def B(self):
return B(self.C, self.MF, self.MGT, self.J, self.Jf, self.me_E, self.s)
def D(self):
return D(self.C, self.MF, self.MGT, self.J, self.Jf)
def R(self):
return R(self.C, self.MF, self.MGT, self.J, self.Jf, self.s)
class Neutron_tau(NeutronObservable):
def __init__(self, wc_obj, par, me_E):
super().__init__(wc_obj, par, me_E)
def __call__(self):
Vud = get_ckm(self.par)[0, 0]
GF = GFeff(self.wc_obj, self.par)
pre = GF / sqrt(2) * Vud
ft = K(self.par) / self.xi() * 1 / (1 + self.b() * self.me_E) / abs(pre)**2
fn = self.par['f_n']
Rp = self.par['deltaRp_n']
return ft / log(2) / fn / (1 + Rp)
class Neutron_corr(NeutronObservable):
def __init__(self, wc_obj, par, me_E, coeff):
super().__init__(wc_obj, par, me_E)
self.coeff = coeff
def __call__(self):
if self.coeff == 'a':
return self.a()
elif self.coeff == 'atilde':
return self.a() / (1 + self.b() * self.me_E)
if self.coeff == 'b':
return self.b()
elif self.coeff == 'A':
return self.A()
elif self.coeff == 'Atilde':
return self.A() / (1 + self.b() * self.me_E)
elif self.coeff == 'B':
return self.B()
elif self.coeff == 'Btilde':
return self.B() / (1 + self.b() * self.me_E)
elif self.coeff == 'lambdaAB':
_A = self.A()
_B = self.B()
return (_A - _B) / (_A + _B)
elif self.coeff == 'D':
return self.D()
elif self.coeff == 'R':
return self.R()
# Closures for prediction instances
def Ft_fct(A):
def _(wc_obj, par):
return Ft_superallowed(par, wc_obj, A)
return _
def get_daughter(nuclide):
r"""Get the symbol and tex code of the daughter nuclide."""
A = re.search(r'\d+', nuclide).group()
symbol = re.search(r'[A-Z].*', nuclide).group()
Z = elements.Z(symbol)
daughter_symbol = elements.symbol(Z - 1)
return {'name': '{}{}'.format(A, daughter_symbol),
'tex': r'{{}}^{{{}}}\text{{{}}}'.format(A, daughter_symbol)}
# Observable and Prediction instances
for _A, _Ad in nuclei_superallowed.items():
Dd = get_daughter(_A)
_process_tex = _Ad['tex'] + r"\to " + Dd['tex'] + r"\,e^+\nu_e"
_process_taxonomy = r'Process :: Nucleon decays :: Beta decays :: Superallowed $0^+\to 0^+$ decays :: $' + _process_tex + r"$"
_obs_name = "Ft(" + _A + ")"
_obs = Observable(_obs_name)
_obs.set_description(r"$\mathcal Ft$ value of $" + _Ad['tex'] + r"$ beta decay")
_obs.tex = r"$\mathcal{F}t(" + _Ad['tex'] + r")$"
_obs.add_taxonomy(_process_taxonomy)
Prediction(_obs_name, Ft_fct(_A))
_process_tex = r"n\to p^+ e^-\bar\nu_e"
_process_taxonomy = r'Process :: Nucleon decays :: Beta decays :: Neutron decay :: $' + _process_tex + r"$"
_obs_name = "tau_n"
_obs = Observable(_obs_name, arguments=['me_E'])
_obs.set_description(r"Neutron lifetime")
_obs.tex = r"$\tau_n$"
_obs.add_taxonomy(_process_taxonomy)
func = lambda wc_obj, par, me_E: Neutron_tau(wc_obj, par, me_E)()
Prediction(_obs_name, func)
# coefficients that don't depend on me/E
coeffs = {
'a': 'a_n',
'A': 'A_n',
'D': 'D_n',
'R': 'R_n',
}
# coefficients that depend on me/E
coeffs_mE = {
'atilde': r'\tilde{a}_n',
'b': 'b_n',
'Atilde': r'\tilde{A}_n',
'B': 'B_n', 'Btilde': r'\tilde{B}_n',
'lambdaAB': r'\lambda_{AB}',
}
def make_obs_neutron_corr(coeff, tex, me_E=False):
_process_tex = r"n\to p^+ e^-\bar\nu_e"
_process_taxonomy = r'Process :: Nucleon decays :: Beta decays :: Neutron decay :: $' + _process_tex + r"$"
_obs_name = coeff + "_n"
if me_E:
_obs = Observable(_obs_name, arguments=['me_E'])
else:
_obs = Observable(_obs_name)
_obs.set_description(r"Correlation coefficient $" + tex + r"$ in neutron beta decay")
_obs.tex = r"$" + tex + r"$"
_obs.add_taxonomy(_process_taxonomy)
if me_E:
func = lambda wc_obj, par, me_E: Neutron_corr(wc_obj, par, me_E, coeff)()
else:
func = lambda wc_obj, par: Neutron_corr(wc_obj, par, None, coeff)()
Prediction(_obs_name, func)
for coeff, tex in coeffs.items():
    make_obs_neutron_corr(coeff, tex, me_E=False)
for coeff, tex in coeffs_mE.items():
    make_obs_neutron_corr(coeff, tex, me_E=True)
|
|
# Copyright (c) 2017, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
import unittest
from stix.test import EntityTestCase, assert_warnings
from stix.utils import silence_warnings
from stix.common.related import (
RelatedCampaign, RelatedCampaignRef, RelatedIdentity, RelatedCOA,
RelatedPackage, RelatedPackageRef, RelatedExploitTarget, RelatedIncident,
RelatedIndicator, RelatedObservable, RelatedThreatActor, RelatedTTP,
RelatedPackageRefs, RelatedPackages, RelatedReports, RelatedReport
)
class RelatedReportTests(EntityTestCase, unittest.TestCase):
klass = RelatedReport
_full_dict = {
'confidence': {'value': {'value': "Medium", 'xsi:type':'stixVocabs:HighMediumLowVocab-1.0'}},
'information_source': {
'description': "Source of the relationship",
},
'relationship': "Associated",
'report': {
'id': 'example:bar-1',
'version': '1.0',
'header': {
'title': 'Test'
}
}
}
class RelatedReportsTests(EntityTestCase, unittest.TestCase):
klass = RelatedReports
_full_dict = {
'scope': 'inclusive',
'related_reports': [
{
'confidence': {'value': {'value': "Medium", 'xsi:type':'stixVocabs:HighMediumLowVocab-1.0'}},
'information_source': {
'description': "Source of the relationship",
},
'relationship': "Associated",
'report': {
'id': 'example:bar-1',
'version': '1.2',
'header': {
'title': 'Test'
}
}
},
{
'confidence': {'value': {'value': "Medium", 'xsi:type':'stixVocabs:HighMediumLowVocab-1.0'}},
'information_source': {
'description': "Source of the relationship",
},
'relationship': "Associated",
'report': {
'id': 'example:bar-2',
'version': '1.2',
'header': {
'title': 'Test'
}
}
}
]
}
class RelatedPackageRefsTests(EntityTestCase, unittest.TestCase):
klass = RelatedPackageRefs
_full_dict = {
'packages': [
{
'idref': "example:foo-1",
'timestamp': "2014-01-31T06:14:46",
'confidence': {'value': {'value': "Medium", 'xsi:type':'stixVocabs:HighMediumLowVocab-1.0'}},
'information_source': {
'description': "Source of the relationship",
},
'relationship': "Associated"
},
{
'idref': "example:foo--2",
'timestamp': "2014-01-31T06:14:46",
'confidence': {'value': {'value': "Medium", 'xsi:type':'stixVocabs:HighMediumLowVocab-1.0'}},
'information_source': {
'description': "Source of the relationship",
},
'relationship': "Associated"
}
]
}
@silence_warnings
def test_add_stix_package(self):
from stix.core import STIXPackage
l = RelatedPackageRefs()
l.append(STIXPackage())
self.assertEqual(1, len(l))
@silence_warnings
def test_add_bad_type(self):
from stix.indicator import Indicator
l = RelatedPackageRefs()
self.assertRaises(
TypeError,
l.append,
Indicator()
)
@assert_warnings
def test_deprecated_warning(self):
from stix.core import STIXPackage
l = RelatedPackageRefs()
l.append(STIXPackage())
class RelatedPackageRefTests(EntityTestCase, unittest.TestCase):
klass = RelatedPackageRef
_full_dict = {
'idref': "example:Campaign-133",
'timestamp': "2014-01-31T06:14:46",
'confidence': {'value': {'value': "Medium", 'xsi:type':'stixVocabs:HighMediumLowVocab-1.0'}},
'information_source': {
'description': "Source of the relationship",
},
'relationship': "Associated",
}
class RelatedCampaignTests(EntityTestCase, unittest.TestCase):
klass = RelatedCampaign
_full_dict = {
'confidence': {'value': {'value': "Medium", 'xsi:type':'stixVocabs:HighMediumLowVocab-1.0'}},
'information_source': {
'description': "Source of the relationship",
},
'relationship': "Associated",
'campaign': {
'id': 'example:bar-1',
'title': 'Test'
}
}
class RelatedIndicatorTests(EntityTestCase, unittest.TestCase):
klass = RelatedIndicator
_full_dict = {
'confidence': {'value': {'value': "Medium", 'xsi:type':'stixVocabs:HighMediumLowVocab-1.0'}},
'information_source': {
'description': "Source of the relationship",
},
'relationship': "Associated",
'indicator': {
'id': 'example:bar-1',
'title': 'Test'
}
}
class RelatedIncidentTests(EntityTestCase, unittest.TestCase):
klass = RelatedIncident
_full_dict = {
'confidence': {'value': {'value': "Medium", 'xsi:type':'stixVocabs:HighMediumLowVocab-1.0'}},
'information_source': {
'description': "Source of the relationship",
},
'relationship': "Associated",
'incident': {
'id': 'example:bar-1',
'title': 'Test'
}
}
class RelatedExploitTargetTests(EntityTestCase, unittest.TestCase):
klass = RelatedExploitTarget
_full_dict = {
'confidence': {'value': {'value': "Medium", 'xsi:type':'stixVocabs:HighMediumLowVocab-1.0'}},
'information_source': {
'description': "Source of the relationship",
},
'relationship': "Associated",
'exploit_target': {
'id': 'example:bar-1',
'title': 'Test'
}
}
class RelatedThreatActorTests(EntityTestCase, unittest.TestCase):
klass = RelatedThreatActor
_full_dict = {
'confidence': {'value': {'value': "Medium", 'xsi:type':'stixVocabs:HighMediumLowVocab-1.0'}},
'information_source': {
'description': "Source of the relationship",
},
'relationship': "Associated",
'threat_actor': {
'id': 'example:bar-1',
'title': 'Test'
}
}
class RelatedCOATests(EntityTestCase, unittest.TestCase):
klass = RelatedCOA
_full_dict = {
'confidence': {'value': {'value': "Medium", 'xsi:type':'stixVocabs:HighMediumLowVocab-1.0'}},
'information_source': {
'description': "Source of the relationship",
},
'relationship': "Associated",
'course_of_action': {
'id': 'example:bar-1',
'title': 'Test'
}
}
class RelatedTTPTests(EntityTestCase, unittest.TestCase):
klass = RelatedTTP
_full_dict = {
#'confidence': {'value': {'value': "Medium", 'xsi:type':'stixVocabs:HighMediumLowVocab-1.0'}},
'information_source': {
'description': "Source of the relationship",
},
#'relationship': "Associated",
# 'ttp': {
# 'id': 'example:bar-1',
# 'title': 'Test'
# }
}
class RelatedIdentityTests(EntityTestCase, unittest.TestCase):
klass = RelatedIdentity
_full_dict = {
'confidence': {'value': {'value': "Medium", 'xsi:type':'stixVocabs:HighMediumLowVocab-1.0'}},
'information_source': {
'description': "Source of the relationship",
},
'relationship': "Associated",
'identity': {
'id': 'example:bar-1',
'name': 'Test'
}
}
class RelatedObservableTests(EntityTestCase, unittest.TestCase):
klass = RelatedObservable
_full_dict = {
'confidence': {'value': {'value': "Medium", 'xsi:type':'stixVocabs:HighMediumLowVocab-1.0'}},
'information_source': {
'description': "Source of the relationship",
},
'relationship': "Associated",
'observable': {
'id': 'example:bar-1',
'title': 'Test'
}
}
class RelatedPackageTests(EntityTestCase, unittest.TestCase):
klass = RelatedPackage
_full_dict = {
'confidence': {'value': {'value': "Medium", 'xsi:type':'stixVocabs:HighMediumLowVocab-1.0'}},
'information_source': {
'description': "Source of the relationship",
},
'relationship': "Associated",
'package': {
'id': 'example:bar-1',
'version': '1.2',
'stix_header': {
'title': 'Test'
}
}
}
class RelatedPackagesTests(EntityTestCase, unittest.TestCase):
klass = RelatedPackages
_full_dict = {
'scope': 'inclusive',
'related_packages': [
{
'confidence': {'value': {'value': "Medium", 'xsi:type':'stixVocabs:HighMediumLowVocab-1.0'}},
'information_source': {
'description': "Source of the relationship",
},
'relationship': "Associated",
'package': {
'id': 'example:bar-1',
'version': '1.2',
'stix_header': {
'title': 'Test'
}
}
},
{
'confidence': {'value': {'value': "Medium", 'xsi:type':'stixVocabs:HighMediumLowVocab-1.0'}},
'information_source': {
'description': "Source of the relationship",
},
'relationship': "Associated",
'package': {
'id': 'example:bar-2',
'version': '1.2',
'stix_header': {
'title': 'Test'
}
}
}
]
}
class RelatedCampaignRefTests(EntityTestCase, unittest.TestCase):
klass = RelatedCampaignRef
_full_dict = {
'confidence': {'value': {'value': "Medium", 'xsi:type':'stixVocabs:HighMediumLowVocab-1.0'}},
'information_source': {
'description': "Source of the relationship",
},
'relationship': "Associated",
'campaign': {
'idref': "example:foo-1",
'timestamp': "2014-01-31T06:14:46",
'names': ["foo", "bar"]
}
}
if __name__ == "__main__":
unittest.main()
|
|
# -*- coding: utf8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import operator
from django.core.urlresolvers import reverse
import django.utils.text
from django.utils.translation import ugettext_lazy as _
import horizon.forms
from openstack_dashboard.api import base as api_base
from tuskar_ui import api
from tuskar_ui.infrastructure.flavors import utils
from tuskar_ui.infrastructure.overview import views
from tuskar_ui.utils import metering
from tuskar_boxes.overview import forms
MATCHING_DEPLOYMENT_MODE = utils.matching_deployment_mode()
NODE_STATE_ICON = {
api.node.DISCOVERING_STATE: 'fa-search',
api.node.DISCOVERED_STATE: 'fa-search-plus',
api.node.DISCOVERY_FAILED_STATE: 'fa-search-minus',
api.node.MAINTENANCE_STATE: 'fa-exclamation-triangle',
api.node.FREE_STATE: 'fa-minus',
api.node.PROVISIONING_STATE: 'fa-spinner fa-spin',
api.node.PROVISIONED_STATE: 'fa-check',
api.node.DELETING_STATE: 'fa-spinner fa-spin',
api.node.PROVISIONING_FAILED_STATE: 'fa-exclamation-circle',
None: 'fa-question',
}
def flavor_nodes(request, flavor, exact_match=True):
"""Lists all nodes that match the given flavor.
If exact_match is True, only nodes that match exactly will be listed.
Otherwise, all nodes that have at least the required resources will
be listed.
"""
if exact_match:
matches = operator.eq
else:
matches = operator.ge
for node in api.node.Node.list(request, maintenance=False):
if all(matches(*pair) for pair in (
(int(node.cpus or 0), int(flavor.vcpus or 0)),
(int(node.memory_mb or 0), int(flavor.ram or 0)),
(int(node.local_gb or 0), int(flavor.disk or 0)),
(node.cpu_arch, flavor.cpu_arch),
)):
yield node
def node_role(request, node):
try:
resource = api.heat.Resource.get_by_node(request, node)
except LookupError:
return None
return resource.role
def _node_data(request, nodes):
for node in nodes:
role = node_role(request, node)
yield {
'uuid': node.uuid,
'role_name': role.name if role else '',
'role_slug': django.utils.text.slugify(role.name) if role else '',
'node_title': unicode(_("{0} node").format(role.name.title())
if role else _("Free node")),
'state': node.state,
'state_slug': django.utils.text.slugify(unicode(node.state)),
'state_icon': NODE_STATE_ICON.get(node.state,
NODE_STATE_ICON[None]),
'cpu_arch': node.cpu_arch,
'cpus': node.cpus,
'memory_mb': node.memory_mb,
'local_gb': node.local_gb,
}
def _flavor_data(request, flavors, flavor_roles):
for flavor in flavors:
nodes = list(_node_data(request,
flavor_nodes(request, flavor,
MATCHING_DEPLOYMENT_MODE)))
roles = flavor_roles.get(flavor.name, [])
if nodes or roles:
# Don't list empty flavors
yield {
'name': flavor.name,
'vcpus': flavor.vcpus,
'ram': flavor.ram,
'disk': flavor.disk,
'cpu_arch': flavor.cpu_arch,
'nodes': nodes,
'roles': roles,
}
class IndexView(views.IndexView):
template_name = "tuskar_boxes/overview/index.html"
form_class = forms.EditPlan
def get_data(self, request, context, *args, **kwargs):
data = super(IndexView, self).get_data(request, context,
*args, **kwargs)
nodes = list(_node_data(
request, api.node.Node.list(request, maintenance=False),
))
nodes.sort(key=lambda node: node.get('role_name'))
nodes.reverse()
data['nodes'] = nodes
if not data['stack']:
flavors = api.flavor.Flavor.list(self.request)
if not MATCHING_DEPLOYMENT_MODE:
# In the POC mode, only one flavor is allowed.
flavors = flavors[:1]
flavors.sort(key=lambda np: (np.vcpus, np.ram, np.disk))
roles = data['roles']
free_roles = []
flavor_roles = {}
for role in roles:
if 'form' in data:
role['flavor_field'] = data['form'][role['id'] + '-flavor']
flavor = role['role'].flavor(data['plan'])
if flavor and flavor.name in [f.name for f in flavors]:
role['flavor_name'] = flavor.name
flavor_roles.setdefault(flavor.name, []).append(role)
else:
role['flavor_name'] = ''
field = role.get('flavor_field')
if field:
field.initial = 0
free_roles.append(role)
role['is_valid'] = role[
'role'].is_valid_for_deployment(data['plan'])
data['free_roles'] = free_roles
flavor_data = list(
_flavor_data(self.request, flavors, flavor_roles))
data['flavors'] = flavor_data
data['no_flavor_nodes'] = [
node for node in nodes
if not any(node in d['nodes'] for d in flavor_data)
]
else:
distribution = collections.Counter()
for node in nodes:
distribution[node['role_name']] += 1
for role in data['roles']:
if nodes:
role['distribution'] = int(
float(distribution[role['name']]) / len(nodes) * 100)
else:
role['distribution'] = 0
if api_base.is_service_enabled(request, 'metering'):
for role in data['roles']:
role['graph_url'] = (
reverse('horizon:infrastructure:roles:performance',
args=[role['id']]) + '?' +
metering.url_part('hardware.cpu.load.1min', False) +
'&date_options=0.041666'
)
return data
def get_progress_update(self, request, data):
out = super(IndexView, self).get_progress_update(request, data)
out['nodes'] = data.get('nodes', [])
return out
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
context['header_actions'] = [{
'name': _('Edit Global Configuration'),
'show_name': True,
'url': reverse('horizon:infrastructure:overview:config'),
'icon': 'fa-pencil',
'ajax_modal': True,
}, {
'name': _('Register Nodes'),
'show_name': True,
'url': reverse('horizon:infrastructure:nodes:register'),
'icon': 'fa-plus',
'ajax_modal': True,
}]
return context
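# --- Illustrative sketch (not part of the original module) ---
# The deployed-stack branch of IndexView.get_data() above computes each
# role's share of the nodes as a whole-number percentage. This standalone
# sketch shows the same Counter-based calculation with made-up role names.
def _distribution_example():
    import collections
    role_names = ['controller', 'compute', 'compute', 'compute']
    distribution = collections.Counter(role_names)
    total = len(role_names)
    return {name: int(float(count) / total * 100)
            for name, count in distribution.items()}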
class GlobalServiceConfigView(horizon.forms.ModalFormView):
form_class = forms.GlobalServiceConfig
template_name = "tuskar_boxes/overview/global_service_config.html"
submit_label = _("Save Configuration")
def get_success_url(self):
return reverse('horizon:infrastructure:overview:index')
|
|
# Copyright 2016 The Eyra Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# File author/s:
# Matthias Petursson <[email protected]>
# Robert Kjaran <[email protected]>
import redis
import datetime
import json
#: Relative imports
from util import log
from . import config # get our dict with qc module names & qc module functions
from . import celery_config
class QcError(Exception):
"""QcError
==========
Trouble in paradise. Raised if QC experienced a critical error.
"""
pass
class QcHandler(object):
"""QcHandler
============
Class for handling quality control reporting.
Its only public method is :meth:`getReport`. See its docstring for
details.
Use the config.py file to adjust which modules you want to be active
in the QC.
Usage:
>>> qc = QcHandler(app)
>>> qc.getReport(1)
{'sessionId': 1, 'status': 'started', 'modules':{}}
... wait ...
>>> qc.getReport(1)
{"sessionId": 1,
"status": "processing",
"modules" {
"marosijo" : {
"totalStats": {"accuracy": [0.0;1.0]"},
"perRecordingStats": [{"recordingId": ...,
"stats": {"accuracy": [0.0;1.0]}}]}
},
...
}
}
"""
def __init__(self, app, dbHandler):
"""Initialise a QC handler
config.activeModules should be a dict describing the QC modules to use;
each entry provides a 'name' and a 'processFn' (see config.py).
app.config['CELERY_CLASS_POINTER'] should be a function pointer to
the instance of the celery class created in app from celery_handler.py
"""
self.modules = {module['name'] : module['processFn'] \
for k, module in config.activeModules.items()}
self.dbHandler = dbHandler # grab database handler from app to handle MySQL database operations
self.redis = redis.StrictRedis(
host=celery_config.const['host'],
port=celery_config.const['port'],
db=celery_config.const['backend_db'])
def _updateRecordingsList(self, session_id) -> None:
"""
Update the list of recordings for this session(_id) in
the redis datastore. Query the MySQL database and write
out the recordings there (for this session) to the redis datastore.
Redis key format: session/session_id/recordings
Redis value format (same as from dbHandler.getRecordingsInfo return value):
[{"recId": ..., "token": str, "recPath": str}, ..]
Where the recPaths are directly from the MySQL database (relative paths to
server-interface/)
Example:
'session/2/recordings' ->
[{"recId":2, "token":'hello', "recPath":'recordings/session_2/user_2016-03-09T15:42:29.005Z.wav'},
{"recId":2, "token":'hello', "recPath":'recordings/session_2/user_2016-03-09T15:42:29.005Z.wav'}]
"""
recsInfo = self.dbHandler.getRecordingsInfo(session_id)
if len(recsInfo) > 0:
self.redis.set('session/{}/recordings'.format(session_id), recsInfo)
def getReport(self, session_id) -> dict:
"""Return a quality report for the session ``session_id``, if
available otherwise we start a background task to process
currently available recordings.
Keeps a timestamp at 'session/session_id/timestamp' in redis datastore
representing the last time we were queried for said session.
Parameters:
session_id ...
Returned dict if the QC report is not available, but is being
processed:
{"sessionId": ...,
"status": "started",
"modules":{}}
Returned dict definition if no QC module is active:
{"sessionId": ...,
"status": "inactive",
"modules":{}}
Returned dict definition:
{"sessionId": ...,
"status": "processing",
"modules" {
"module1" : {
"totalStats": {"accuracy": [0.0;1.0]"}
[, "perRecordingStats": [
{"recordingId": ...,
"stats": {"accuracy": [0.0;1.0]}
},
...]}
]
},
...
}
}
(see the client-server API; the return value definition should match it)
"""
# check if session exists
if not self.dbHandler.sessionExists(session_id):
return None
# no active QC
if len(self.modules) == 0:
return dict(sessionId=session_id, status='inactive', modules={})
# see if there is a returning user to this session, in which case, start
# counting at the index after the last made recording
# (slistIdx is not used here unless there isn't a report)
# if in qc_offline mode (post-processing), this is not really useful and
# can be harmful, since the recordings list doesn't change now.
recsInfo = self.redis.get('session/{}/recordings'.format(session_id))
if recsInfo and not celery_config.const['qc_offline_mode']:
recsInfo = json.loads(recsInfo.decode('utf-8'))
slistIdx = len(recsInfo)
else:
slistIdx = 0
# always update the sessionlist on getReport call, there might be new recordings
self._updateRecordingsList(session_id)
# set the timestamp, for the most recent query (this one) of this session
self.redis.set('session/{}/timestamp'.format(session_id),
datetime.datetime.now())
# attempt to grab report for each module from redis datastore.
# if report does not exist, add a task for that session to the celery queue
reports = {}
for name, processFn in self.modules.items():
report = self.redis.get('report/{}/{}'.format(name, session_id))
if report:
reports[name] = json.loads(report.decode("utf-8")) # redis.get returns bytes, so we decode into string
else:
# first check if we are already working on this session with this module,
# in which case do nothing here
processing = self.redis.get('session/{}/processing'.format(session_id))
if processing:
continue
# check to see if we have any reports dumped on disk, in which case continue
# where they left off
qcReportPath = '{}/report/{}/{}'.format(celery_config.const['qc_report_dump_path'],
name,
session_id)
try:
with open(qcReportPath, 'r') as f:
# use a local name here so the per-module 'reports' dict above is not clobbered
dumpedReports = f.read().splitlines() # might be more than one, if a timeout occurred and recording was resumed
# sum the recordings of all the dumped reports (usually only one)
totalRecs = 0
for report in dumpedReports:
if report == '':
# robustness to extra newlines
continue
report = json.loads(report)
try:
totalRecs += len(report['perRecordingStats'])
except KeyError as e:
# probably a module which doesn't have perRecordingStats, allow it.
break
if totalRecs != 0:
slistIdx = totalRecs
except FileNotFoundError as e:
pass
# start the async processing
processFn.delay(name, session_id, slistIdx, celery_config.const['batch_size'])
if len(reports) > 0:
return dict(sessionId=session_id, status='processing', modules=reports)
else:
return dict(sessionId=session_id, status='started', modules={})
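# --- Hedged usage sketch (appended for illustration; not part of the module) ---
# Polling getReport() as in the class docstring above: the first call for a
# session typically returns status 'started' while the Celery tasks run, and
# later calls return 'processing' with per-module reports. 'app' and
# 'dbHandler' are assumed to come from the surrounding server application.
def _example_poll_report(app, dbHandler, session_id):
    import time
    qc = QcHandler(app, dbHandler)
    report = qc.getReport(session_id)
    while report is not None and report['status'] == 'started':
        time.sleep(1)  # give the background tasks a chance to write a report
        report = qc.getReport(session_id)
    return report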
|
|
#!/usr/bin/env python
"""Bootstrap setuptools installation
To use setuptools in your package's setup.py, include this
file in the same directory and add this to the top of your setup.py::
from ez_setup import use_setuptools
use_setuptools()
To require a specific version of setuptools, set a download
mirror, or use an alternate download directory, simply supply
the appropriate options to ``use_setuptools()``.
This file can also be run as a script to install or upgrade setuptools.
"""
import os
import shutil
import sys
import tempfile
import tarfile
import optparse
import subprocess
import platform
import textwrap
from distutils import log
try:
from site import USER_SITE
except ImportError:
USER_SITE = None
DEFAULT_VERSION = "2.2"
DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"
def _python_cmd(*args):
"""
Return True if the command succeeded.
"""
args = (sys.executable,) + args
return subprocess.call(args) == 0
def _install(tarball, install_args=()):
# extracting the tarball
tmpdir = tempfile.mkdtemp()
log.warn('Extracting in %s', tmpdir)
old_wd = os.getcwd()
try:
os.chdir(tmpdir)
tar = tarfile.open(tarball)
_extractall(tar)
tar.close()
# going in the directory
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
os.chdir(subdir)
log.warn('Now working in %s', subdir)
# installing
log.warn('Installing Setuptools')
if not _python_cmd('setup.py', 'install', *install_args):
log.warn('Something went wrong during the installation.')
log.warn('See the error message above.')
# exitcode will be 2
return 2
finally:
os.chdir(old_wd)
shutil.rmtree(tmpdir)
def _build_egg(egg, tarball, to_dir):
# extracting the tarball
tmpdir = tempfile.mkdtemp()
log.warn('Extracting in %s', tmpdir)
old_wd = os.getcwd()
try:
os.chdir(tmpdir)
tar = tarfile.open(tarball)
_extractall(tar)
tar.close()
# going in the directory
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
os.chdir(subdir)
log.warn('Now working in %s', subdir)
# building an egg
log.warn('Building a Setuptools egg in %s', to_dir)
_python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
finally:
os.chdir(old_wd)
shutil.rmtree(tmpdir)
# returning the result
log.warn(egg)
if not os.path.exists(egg):
raise IOError('Could not build the egg.')
def _do_download(version, download_base, to_dir, download_delay):
egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg'
% (version, sys.version_info[0], sys.version_info[1]))
if not os.path.exists(egg):
tarball = download_setuptools(version, download_base,
to_dir, download_delay)
_build_egg(egg, tarball, to_dir)
sys.path.insert(0, egg)
# Remove previously-imported pkg_resources if present (see
# https://bitbucket.org/pypa/setuptools/pull-request/7/ for details).
if 'pkg_resources' in sys.modules:
del sys.modules['pkg_resources']
import setuptools
setuptools.bootstrap_install_from = egg
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
to_dir=os.curdir, download_delay=15):
to_dir = os.path.abspath(to_dir)
rep_modules = 'pkg_resources', 'setuptools'
imported = set(sys.modules).intersection(rep_modules)
try:
import pkg_resources
except ImportError:
return _do_download(version, download_base, to_dir, download_delay)
try:
pkg_resources.require("setuptools>=" + version)
return
except pkg_resources.DistributionNotFound:
return _do_download(version, download_base, to_dir, download_delay)
except pkg_resources.VersionConflict as VC_err:
if imported:
msg = textwrap.dedent("""
The required version of setuptools (>={version}) is not available,
and can't be installed while this script is running. Please
install a more recent version first, using
'easy_install -U setuptools'.
(Currently using {VC_err.args[0]!r})
""").format(VC_err=VC_err, version=version)
sys.stderr.write(msg)
sys.exit(2)
# otherwise, reload ok
del pkg_resources, sys.modules['pkg_resources']
return _do_download(version, download_base, to_dir, download_delay)
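# --- Hedged usage sketch (not part of the original bootstrap script) ---
# The recipe from the module docstring: a hypothetical setup.py that keeps
# ez_setup.py alongside it and bootstraps setuptools before importing it.
# Wrapped in a function here so nothing runs on import; the project name and
# version are placeholders.
def _example_setup_py():
    from ez_setup import use_setuptools
    use_setuptools()
    from setuptools import setup
    setup(name='example-project', version='0.1', py_modules=['example'])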
def _clean_check(cmd, target):
"""
Run the command to download target. If the command fails, clean up before
re-raising the error.
"""
try:
subprocess.check_call(cmd)
except subprocess.CalledProcessError:
if os.access(target, os.F_OK):
os.unlink(target)
raise
def download_file_powershell(url, target):
"""
Download the file at url to target using Powershell (which will validate
trust). Raise an exception if the command cannot complete.
"""
target = os.path.abspath(target)
cmd = [
'powershell',
'-Command',
"(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)" % vars(),
]
_clean_check(cmd, target)
def has_powershell():
if platform.system() != 'Windows':
return False
cmd = ['powershell', '-Command', 'echo test']
devnull = open(os.path.devnull, 'wb')
try:
try:
subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
except:
return False
finally:
devnull.close()
return True
download_file_powershell.viable = has_powershell
def download_file_curl(url, target):
cmd = ['curl', url, '--silent', '--output', target]
_clean_check(cmd, target)
def has_curl():
cmd = ['curl', '--version']
devnull = open(os.path.devnull, 'wb')
try:
try:
subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
except:
return False
finally:
devnull.close()
return True
download_file_curl.viable = has_curl
def download_file_wget(url, target):
cmd = ['wget', url, '--quiet', '--output-document', target]
_clean_check(cmd, target)
def has_wget():
cmd = ['wget', '--version']
devnull = open(os.path.devnull, 'wb')
try:
try:
subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
except:
return False
finally:
devnull.close()
return True
download_file_wget.viable = has_wget
def download_file_insecure(url, target):
"""
Use Python to download the file, even though it cannot authenticate the
connection.
"""
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
src = dst = None
try:
src = urlopen(url)
# Read/write all in one block, so we don't create a corrupt file
# if the download is interrupted.
data = src.read()
dst = open(target, "wb")
dst.write(data)
finally:
if src:
src.close()
if dst:
dst.close()
download_file_insecure.viable = lambda: True
def get_best_downloader():
downloaders = [
download_file_powershell,
download_file_curl,
download_file_wget,
download_file_insecure,
]
for dl in downloaders:
if dl.viable():
return dl
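# --- Illustrative sketch (not part of the original script) ---
# download_setuptools() below accepts any zero-argument ``downloader_factory``
# returning a ``download(url, target)`` callable; this example prefers curl
# when available and otherwise falls back to the non-validating stdlib
# downloader, using only helpers defined in this file.
# e.g. download_setuptools(downloader_factory=_curl_first_downloader)
def _curl_first_downloader():
    if has_curl():
        return download_file_curl
    return download_file_insecure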
def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
to_dir=os.curdir, delay=15,
downloader_factory=get_best_downloader):
"""Download setuptools from a specified location and return its filename
`version` should be a valid setuptools version number that is available
as an egg for download under the `download_base` URL (which should end
with a '/'). `to_dir` is the directory where the egg will be downloaded.
`delay` is the number of seconds to pause before an actual download
attempt.
``downloader_factory`` should be a function taking no arguments and
returning a function for downloading a URL to a target.
"""
# making sure we use the absolute path
to_dir = os.path.abspath(to_dir)
tgz_name = "setuptools-%s.tar.gz" % version
url = download_base + tgz_name
saveto = os.path.join(to_dir, tgz_name)
if not os.path.exists(saveto): # Avoid repeated downloads
log.warn("Downloading %s", url)
downloader = downloader_factory()
downloader(url, saveto)
return os.path.realpath(saveto)
def _extractall(self, path=".", members=None):
"""Extract all members from the archive to the current working
directory and set owner, modification time and permissions on
directories afterwards. `path' specifies a different directory
to extract to. `members' is optional and must be a subset of the
list returned by getmembers().
"""
import copy
import operator
from tarfile import ExtractError
directories = []
if members is None:
members = self
for tarinfo in members:
if tarinfo.isdir():
# Extract directories with a safe mode.
directories.append(tarinfo)
tarinfo = copy.copy(tarinfo)
tarinfo.mode = 448 # decimal for oct 0700
self.extract(tarinfo, path)
# Reverse sort directories.
directories.sort(key=operator.attrgetter('name'), reverse=True)
# Set correct owner, mtime and filemode on directories.
for tarinfo in directories:
dirpath = os.path.join(path, tarinfo.name)
try:
self.chown(tarinfo, dirpath)
self.utime(tarinfo, dirpath)
self.chmod(tarinfo, dirpath)
except ExtractError as e:
if self.errorlevel > 1:
raise
else:
self._dbg(1, "tarfile: %s" % e)
def _build_install_args(options):
"""
Build the arguments to 'python setup.py install' on the setuptools package
"""
return ['--user'] if options.user_install else []
def _parse_args():
"""
Parse the command line for options
"""
parser = optparse.OptionParser()
parser.add_option(
'--user', dest='user_install', action='store_true', default=False,
help='install in user site package (requires Python 2.6 or later)')
parser.add_option(
'--download-base', dest='download_base', metavar="URL",
default=DEFAULT_URL,
help='alternative URL from where to download the setuptools package')
parser.add_option(
'--insecure', dest='downloader_factory', action='store_const',
const=lambda: download_file_insecure, default=get_best_downloader,
help='Use internal, non-validating downloader'
)
options, args = parser.parse_args()
# positional arguments are ignored
return options
def main(version=DEFAULT_VERSION):
"""Install or upgrade setuptools and EasyInstall"""
options = _parse_args()
tarball = download_setuptools(download_base=options.download_base,
downloader_factory=options.downloader_factory)
return _install(tarball, _build_install_args(options))
if __name__ == '__main__':
sys.exit(main())
|
|
#pylint: disable=line-too-long,too-many-public-methods,invalid-name
#pylint: disable=missing-docstring,protected-access,too-few-public-methods
#pylint: disable=too-many-arguments,too-many-instance-attributes
from __future__ import print_function, absolute_import, division
from argparse import Namespace
from collections import OrderedDict
import os
import re
from testfixtures import TempDirectory
from jacquard.utils import vcf
import jacquard.utils.logger
import jacquard.translate as translate
import jacquard.utils.utils as utils
import test.utils.mock_logger
import test.utils.test_case as test_case
from test.utils.vcf_test import MockVcfReader, MockTag, MockWriter, MockCaller
class MockVariantCallerFactory(object):
_CALLERS = [MockCaller()]
class VariantCallerFactory(object):
def __init__(self, args=None):
self.args = args
@staticmethod
def claim(file_readers):
claimed = []
unclaimed = []
for reader in file_readers:
if re.search(r"^claimed.*\.vcf", reader.file_name):
claimed.append(MockCallerVcfReader(reader))
else:
unclaimed.append(reader)
return unclaimed, claimed
class MockCallerVcfReader(object):
def __init__(self, file_reader):
self._file_reader = file_reader
@staticmethod
def expected_file_format():
return ["foo", "bar"]
class TranslateTestCase(test_case.JacquardBaseTestCase):
def setUp(self):
super(TranslateTestCase, self).setUp()
self.original_variant_caller_factory = translate.variant_caller_factory
self.original_validate_args = translate.validate_args
translate.logger = test.utils.mock_logger
def tearDown(self):
test.utils.mock_logger.reset()
translate.logger = jacquard.utils.logger
translate.validate_args = self.original_validate_args
translate.variant_caller_factory = self.original_variant_caller_factory
super(TranslateTestCase, self).tearDown()
def test_execute_forceWarnsUnclaimedFiles(self):
with TempDirectory() as temp_dir:
translate.validate_args = lambda x: x
args = Namespace(input=temp_dir.path,
output=temp_dir.path,
force=True,
varscan_hc_filter_filename=None)
temp_dir.write("unclaimed1.vcf", b"foo")
temp_dir.write("unclaimed2.vcf", b"foo")
temp_dir.write("unclaimed3.vcf", b"foo")
temp_dir.write("unclaimed4.vcf", b"foo")
temp_dir.write("unclaimed5.vcf", b"foo")
temp_dir.write("unclaimed6.vcf", b"foo")
translate.variant_caller_factory = MockVariantCallerFactory()
translate.execute(args, execution_context=[])
actual_log_warnings = test.utils.mock_logger.messages["WARNING"]
self.assertEquals(6, len(actual_log_warnings))
self.assertRegexpMatches(actual_log_warnings[0],
r"input file \[unclaimed1.vcf\] will not be translated")
self.assertRegexpMatches(actual_log_warnings[5],
r"input file \[unclaimed6.vcf\] will not be translated")
def test_validate_args_ok(self):
with TempDirectory() as input_dir, TempDirectory() as output_dir:
args = Namespace(input=input_dir.path,
output=output_dir.path,
allow_inconsistent_sample_sets=0,
varscan_hc_filter_filename=None)
input_dir.write("claimed.vcf", b"foo")
translate.variant_caller_factory = MockVariantCallerFactory()
translate.validate_args(args)
self.ok()
def test_validate_args_oneUnclaimed(self):
with TempDirectory() as input_dir:
args = Namespace(input=input_dir.path,
force=False,
allow_inconsistent_sample_sets=0,
varscan_hc_filter_filename=None)
input_dir.write("unclaimed.vcf", b"foo")
input_dir.write("claimed.vcf", b"foo")
translate.variant_caller_factory = MockVariantCallerFactory()
self.assertRaisesRegexp(utils.UsageError,
r"1 input file \[unclaimed.vcf\] cannot be translated",
translate.validate_args,
args)
def test_validate_args_oneUnclaimed_withForceOk(self):
with TempDirectory() as input_dir:
args = Namespace(input=input_dir.path,
force=True,
allow_inconsistent_sample_sets=0,
varscan_hc_filter_filename=None)
input_dir.write("unclaimed.vcf", b"foo")
input_dir.write("claimed.vcf", b"foo")
translate.variant_caller_factory = MockVariantCallerFactory()
translate.validate_args(args)
self.ok()
def test_validate_args_allUnclaimedThrowsException(self):
with TempDirectory() as input_dir:
args = Namespace(input=input_dir.path,
allow_inconsistent_sample_sets=0,
varscan_hc_filter_filename=None)
translate.variant_caller_factory = MockVariantCallerFactory()
self.assertRaisesRegexp(utils.UsageError,
"no vcfs in input dir .* can be translated",
translate.validate_args,
args)
def test_validate_args_fiveUnclaimed(self):
with TempDirectory() as input_dir:
args = Namespace(input=input_dir.path,
force=False,
varscan_hc_filter_filename=None)
input_dir.write("unclaimed1.vcf", b"foo")
input_dir.write("unclaimed2.vcf", b"foo")
input_dir.write("unclaimed3.vcf", b"foo")
input_dir.write("unclaimed4.vcf", b"foo")
input_dir.write("unclaimed5.vcf", b"foo")
input_dir.write("claimed.vcf", b"foo")
translate.variant_caller_factory = MockVariantCallerFactory()
self.assertRaisesRegexp(utils.UsageError,
r"5 input files \[.*\] cannot be translated",
translate.validate_args,
args)
def test_validate_args_sixUnclaimed(self):
with TempDirectory() as input_dir:
args = Namespace(input=input_dir.path,
force=False,
varscan_hc_filter_filename=None)
input_dir.write("unclaimed1.vcf", b"foo")
input_dir.write("unclaimed2.vcf", b"foo")
input_dir.write("unclaimed3.vcf", b"foo")
input_dir.write("unclaimed4.vcf", b"foo")
input_dir.write("unclaimed5.vcf", b"foo")
input_dir.write("unclaimed6.vcf", b"foo")
input_dir.write("claimed.vcf", b"foo")
translate.variant_caller_factory = MockVariantCallerFactory()
self.assertRaisesRegexp(utils.UsageError,
r"6 input files \[.*, ...\(1 file\(s\) omitted\)\] cannot be translated",
translate.validate_args,
args)
def test_validate_args_snpIndelPairingValid(self):
with TempDirectory() as input_dir, TempDirectory() as output_dir:
args = Namespace(input=input_dir.path,
output=output_dir.path,
force=False,
allow_inconsistent_sample_sets=0,
varscan_hc_filter_filename=None)
input_dir.write("claimed.snp.vcf", b"foo")
input_dir.write("claimed.indel.vcf", b"foo")
translate.variant_caller_factory = MockVariantCallerFactory()
translate.validate_args(args)
self.ok()
def test_validate_args_snpIndelPairingInvalid(self):
with TempDirectory() as input_dir, TempDirectory() as output_dir:
args = Namespace(input=input_dir.path,
output=output_dir.path,
force=False,
allow_inconsistent_sample_sets=0,
varscan_hc_filter_filename=None)
input_dir.write("claimed.foo.vcf", b"foo")
input_dir.write("claimed2.foo.vcf", b"foo")
input_dir.write("claimed2.bar.vcf", b"foo")
translate.variant_caller_factory = MockVariantCallerFactory()
self.assertRaisesRegexp(utils.UsageError,
"Not all patients were represented by the same set of caller-VCFs. Review inputs/command options to align file pairings or use the flag --allow_inconsistent_sample_sets.",
translate.validate_args,
args)
def test_validate_args_allSnpsOkay(self):
with TempDirectory() as input_dir, TempDirectory() as output_dir:
args = Namespace(input=input_dir.path,
output=output_dir.path,
force=False,
allow_inconsistent_sample_sets=0,
varscan_hc_filter_filename=None)
input_dir.write("claimed.snp.vcf", b"foo")
input_dir.write("claimed2.snp.vcf", b"foo")
translate.variant_caller_factory = MockVariantCallerFactory()
translate.validate_args(args)
self.ok()
def test_validate_args_allIndelsOkay(self):
with TempDirectory() as input_dir, TempDirectory() as output_dir:
args = Namespace(input=input_dir.path,
output=output_dir.path,
force=False,
allow_inconsistent_sample_sets=0,
varscan_hc_filter_filename=None)
input_dir.write("claimed.indels.vcf", b"foo")
input_dir.write("claimed2.indels.vcf", b"foo")
input_dir.write("claimed3.indels.vcf", b"foo")
translate.variant_caller_factory = MockVariantCallerFactory()
translate.validate_args(args)
self.ok()
def test_validate_args_snpIndelPairingAllowInconsistentSampleSetsOK(self):
with TempDirectory() as input_dir, TempDirectory() as output_dir:
args = Namespace(input=input_dir.path,
output=output_dir.path,
force=False,
allow_inconsistent_sample_sets=1,
varscan_hc_filter_filename=None)
input_dir.write("claimed.foo.vcf", b"foo")
input_dir.write("claimed2.foo.vcf", b"foo")
input_dir.write("claimed2.bar.vcf", b"foo")
translate.variant_caller_factory = MockVariantCallerFactory()
translate.validate_args(args)
self.ok()
def test_get_required_input_output_types(self):
self.assertEquals(("directory", "directory"),
translate.get_required_input_output_types())
def test_report_prediction(self):
with TempDirectory() as input_dir:
input_dir.write("A.vcf", b"##source=strelka\n#colHeader")
input_dir.write("B.vcf", b"##source=strelka\n#colHeader")
input_dir.write("B.hpfilter.pass", b"##source=strelka\n#colHeader")
args = Namespace(input=input_dir.path)
desired_output_files = translate.report_prediction(args)
expected_desired_output_files = set(["A.translatedTags.vcf",
"B.translatedTags.vcf"])
self.assertEquals(expected_desired_output_files, desired_output_files)
def test_translate_files(self):
record = vcf.VcfRecord("chr1", "42", "A", "C",
sample_tag_values=OrderedDict(sorted({"SA":OrderedDict(), "SB":OrderedDict()}.items())))
reader = MockVcfReader(metaheaders=["##metaheader1",
"##metaheader2"],
records=[record],
sample_names=["SA", "SB"])
writer = MockWriter()
execution_context = []
new_tags = [MockTag("TAG1",
OrderedDict(sorted({"SA":42, "SB":43}.items())),
metaheader="##newTag1"),
MockTag("TAG2",
OrderedDict(sorted({"SA":420, "SB":430}.items())),
metaheader="##newTag2")]
translate._translate_files(reader,
new_tags,
execution_context,
writer)
self.assertTrue(reader.opened)
self.assertTrue(writer.opened)
expected = ['##metaheader1',
'##metaheader2',
'##newTag1',
'##newTag2',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\tNORMAL\tTUMOR']
self.assertEquals(expected, writer._content[0:5])
self.assertRegexpMatches(writer._content[5], "TAG1:TAG2")
self.assertRegexpMatches(writer._content[5], "42:420")
self.assertRegexpMatches(writer._content[5], "43:430")
self.assertTrue(reader.closed)
self.assertTrue(writer.closed)
def test_translate_write_metaheaders_addsExecutionMetaheaders(self):
writer = MockWriter()
reader = MockVcfReader(metaheaders=["##mockCallerMetaheader1"],
column_header="#CHROM\tPOS\tREF\tALT\tStuff")
execution_context = ["##foo1=bar",
"##foo2=baz"]
new_tags = [MockTag(metaheader="##newTag1"),
MockTag(metaheader="##newTag2")]
translate._write_headers(reader, new_tags, execution_context, writer)
expected_headers = ["##foo1=bar",
"##foo2=baz",
"##mockCallerMetaheader1",
"##newTag1",
"##newTag2",
"#CHROM\tPOS\tREF\tALT\tStuff"]
self.assertEquals(expected_headers, writer._content)
class ExcludeMalformedRefTestCase(test_case.JacquardBaseTestCase):
def test_metaheader(self):
self.assertEquals('##FILTER=<ID=JQ_EXCLUDE_MALFORMED_REF,Description="The format of the reference value for this variant record does not comply with VCF standard.">',
translate._ExcludeMalformedRef().metaheader)
def test_add_tag_value_validRefNoFilter(self):
record = vcf.VcfRecord("chr1", "42", "A", "C", vcf_filter="PASS")
translate._ExcludeMalformedRef().add_tag_values(record)
self.assertEquals("PASS", record.filter)
def test_add_tag_value_validIndelRefNoFilter(self):
record = vcf.VcfRecord("chr1", "42", "ACGT", "C", vcf_filter="PASS")
translate._ExcludeMalformedRef().add_tag_values(record)
self.assertEquals("PASS", record.filter)
def test_add_tag_value_validIndelRefEdgecaseNoFilter(self):
record = vcf.VcfRecord("chr1", "42", "ACGTNacgtn", "C", vcf_filter="PASS")
translate._ExcludeMalformedRef().add_tag_values(record)
self.assertEquals("PASS", record.filter)
def test_add_tag_value_invalidRefReplacesFilter(self):
record = vcf.VcfRecord("chr1", "42", "X", "C", vcf_filter="PASS")
translate._ExcludeMalformedRef().add_tag_values(record)
self.assertEquals("JQ_EXCLUDE_MALFORMED_REF", record.filter)
def test_add_tag_value_invalidIndelReplacesFilter(self):
record = vcf.VcfRecord("chr1", "42", "XYZ", "C", vcf_filter="PASS")
translate._ExcludeMalformedRef().add_tag_values(record)
self.assertEquals("JQ_EXCLUDE_MALFORMED_REF", record.filter)
class ExcludeMalformedAltTestCase(test_case.JacquardBaseTestCase):
def test_metaheader(self):
self.assertEquals('##FILTER=<ID=JQ_EXCLUDE_MALFORMED_ALT,Description="The the format of the alternate allele value for this variant record does not comply with VCF standard.">',
translate._ExcludeMalformedAlt().metaheader)
def test_add_tag_value_validAltNoFilter(self):
record = vcf.VcfRecord("chr1", "42", "A", "C", vcf_filter="PASS")
translate._ExcludeMalformedAlt().add_tag_values(record)
self.assertEquals("PASS", record.filter)
def test_add_tag_value_validIndelAltNoFilter(self):
record = vcf.VcfRecord("chr1", "42", "A", "AC,GT", vcf_filter="PASS")
translate._ExcludeMalformedAlt().add_tag_values(record)
self.assertEquals("PASS", record.filter)
def test_add_tag_value_validIndelAltEdgecaseNoFilter(self):
record = vcf.VcfRecord("chr1", "42", "A", "ACGTNacgtn,", vcf_filter="PASS")
translate._ExcludeMalformedAlt().add_tag_values(record)
self.assertEquals("PASS", record.filter)
def test_add_tag_value_invalidAltReplacesFilter(self):
record = vcf.VcfRecord("chr1", "42", "A", "X", vcf_filter="PASS")
translate._ExcludeMalformedAlt().add_tag_values(record)
self.assertEquals("JQ_EXCLUDE_MALFORMED_ALT", record.filter)
def test_add_tag_value_invalidIndelReplacesFilter(self):
record = vcf.VcfRecord("chr1", "42", "A", "XYZ", vcf_filter="PASS")
translate._ExcludeMalformedAlt().add_tag_values(record)
self.assertEquals("JQ_EXCLUDE_MALFORMED_ALT", record.filter)
def test_add_tag_value_missingAltBothReplacesFilter(self):
record = vcf.VcfRecord("chr1", "42", "A", ".*", vcf_filter="PASS")
translate._ExcludeMalformedAlt().add_tag_values(record)
self.assertEquals("JQ_EXCLUDE_MALFORMED_ALT", record.filter)
class ExcludeMissingAltTestCase(test_case.JacquardBaseTestCase):
def test_metaheader(self):
self.assertEquals('##FILTER=<ID=JQ_EXCLUDE_MISSING_ALT,Description="The alternate allele is missing for this variant record.">',
translate._ExcludeMissingAlt().metaheader)
def test_add_tag_value_validAltNoFilter(self):
record = vcf.VcfRecord("chr1", "42", "A", "C", vcf_filter="PASS")
translate._ExcludeMissingAlt().add_tag_values(record)
self.assertEquals("PASS", record.filter)
def test_add_tag_value_validIndelAltNoFilter(self):
record = vcf.VcfRecord("chr1", "42", "A", "ACGT", vcf_filter="PASS")
translate._ExcludeMissingAlt().add_tag_values(record)
self.assertEquals("PASS", record.filter)
def test_add_tag_value_validIndelAltEdgecaseNoFilter(self):
record = vcf.VcfRecord("chr1", "42", "A", "ACGTNacgtn,*.", vcf_filter="PASS")
translate._ExcludeMissingAlt().add_tag_values(record)
self.assertEquals("PASS", record.filter)
def test_add_tag_value_missingAltUpstreamDeletionNoFilter(self):
record = vcf.VcfRecord("chr1", "42", "A", "*", vcf_filter="PASS")
translate._ExcludeMissingAlt().add_tag_values(record)
self.assertEquals("PASS", record.filter)
def test_add_tag_value_missingAltNullReplacesFilter(self):
record = vcf.VcfRecord("chr1", "42", "A", ".", vcf_filter="PASS")
translate._ExcludeMissingAlt().add_tag_values(record)
self.assertEquals("JQ_EXCLUDE_MISSING_ALT", record.filter)
def test_add_tag_value_missingAltBothNoFilter(self):
record = vcf.VcfRecord("chr1", "42", "A", ".*", vcf_filter="PASS")
translate._ExcludeMissingAlt().add_tag_values(record)
self.assertEquals("PASS", record.filter)
class TranslateFunctionalTestCase(test_case.JacquardBaseTestCase):
def test_translate(self):
with TempDirectory() as output_dir:
test_dir = os.path.dirname(os.path.realpath(__file__))
module_testdir = os.path.join(test_dir,
"functional_tests",
"01_translate")
input_dir = os.path.join(module_testdir, "input")
command = ["translate", input_dir, output_dir.path, "--force"]
expected_dir = os.path.join(module_testdir, "benchmark")
self.assertCommand(command, expected_dir)
|
|
#!/usr/bin/python2.5
# Copyright (C) 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import codecs
from six import StringIO
import csv
import os
import re
import zipfile
from . import gtfsfactoryuser
from . import problems
from . import util
class Loader:
def __init__(self,
feed_path=None,
schedule=None,
problems=problems.default_problem_reporter,
extra_validation=False,
load_stop_times=True,
memory_db=True,
zip=None,
check_duplicate_trips=False,
gtfs_factory=None):
"""Initialize a new Loader object.
Args:
feed_path: string path to a zip file or directory
schedule: a Schedule object or None to have one created
problems: a ProblemReporter object, the default reporter raises an
exception for each problem
extra_validation: True if you would like extra validation
load_stop_times: load the stop_times table, used to speed load time when
times are not needed. The default is True.
memory_db: if creating a new Schedule object use an in-memory sqlite
database instead of creating one in a temporary file
zip: a zipfile.ZipFile object, optionally used instead of path
"""
if gtfs_factory is None:
gtfs_factory = gtfsfactoryuser.GtfsFactoryUser().GetGtfsFactory()
if not schedule:
schedule = gtfs_factory.Schedule(problem_reporter=problems,
memory_db=memory_db, check_duplicate_trips=check_duplicate_trips)
self._extra_validation = extra_validation
self._schedule = schedule
self._problems = problems
self._path = feed_path
self._zip = zip
self._load_stop_times = load_stop_times
self._gtfs_factory = gtfs_factory
def _DetermineFormat(self):
"""Determines whether the feed is in a form that we understand, and
if so, returns True."""
if self._zip:
# If zip was passed to __init__ then path isn't used
assert not self._path
return True
if not isinstance(self._path, basestring) and hasattr(self._path, 'read'):
# A file-like object, used for testing with a StringIO file
self._zip = zipfile.ZipFile(self._path, mode='r')
return True
if not os.path.exists(self._path):
self._problems.FeedNotFound(self._path)
return False
if self._path.endswith('.zip'):
try:
self._zip = zipfile.ZipFile(self._path, mode='r')
except IOError: # self._path is a directory
pass
except zipfile.BadZipfile:
self._problems.UnknownFormat(self._path)
return False
if not self._zip and not os.path.isdir(self._path):
self._problems.UnknownFormat(self._path)
return False
return True
def _GetFileNames(self):
"""Returns a list of file names in the feed."""
if self._zip:
return self._zip.namelist()
else:
return os.listdir(self._path)
def _CheckFileNames(self):
filenames = self._GetFileNames()
known_filenames = self._gtfs_factory.GetKnownFilenames()
for feed_file in filenames:
if feed_file not in known_filenames:
if not feed_file.startswith('.'):
# Don't worry about .svn files and other hidden files
# as this will break the tests.
self._problems.UnknownFile(feed_file)
def _GetUtf8Contents(self, file_name):
"""Check for errors in file_name and return a string for csv reader."""
contents = self._FileContents(file_name)
if not contents: # Missing file
return
# Check for errors that will prevent csv.reader from working
if len(contents) >= 2 and contents[0:2] in (codecs.BOM_UTF16_BE,
codecs.BOM_UTF16_LE):
self._problems.FileFormat("appears to be encoded in utf-16", (file_name, ))
# Convert and continue, so we can find more errors
contents = codecs.getdecoder('utf-16')(contents)[0].encode('utf-8')
null_index = contents.find('\0')
if null_index != -1:
# It is easier to get some surrounding text than calculate the exact
# row_num
m = re.search(r'.{,20}\0.{,20}', contents, re.DOTALL)
self._problems.FileFormat(
"contains a null in text \"%s\" at byte %d" %
(codecs.getencoder('string_escape')(m.group()), null_index + 1),
(file_name, ))
return
# strip out any UTF-8 Byte Order Marker (otherwise it'll be
# treated as part of the first column name, causing a mis-parse)
contents = contents.lstrip(codecs.BOM_UTF8)
return contents
def _ReadCsvDict(self, file_name, cols, required, deprecated):
"""Reads lines from file_name, yielding a dict of unicode values."""
assert file_name.endswith(".txt")
table_name = file_name[0:-4]
contents = self._GetUtf8Contents(file_name)
if not contents:
return
eol_checker = util.EndOfLineChecker(StringIO(contents),
file_name, self._problems)
# The csv module doesn't provide a way to skip trailing space, but when I
# checked 15/675 feeds had trailing space in a header row and 120 had spaces
# after fields. Space after header fields can cause a serious parsing
# problem, so warn. Space after body fields can cause problems for time,
# integer and id fields; they will be validated at higher levels.
reader = csv.reader(eol_checker, skipinitialspace=True)
raw_header = next(reader)
header_occurrences = util.defaultdict(lambda: 0)
header = []
valid_columns = [] # Index into raw_header and raw_row
for i, h in enumerate(raw_header):
h_stripped = h.strip()
if not h_stripped:
self._problems.CsvSyntax(
description="The header row should not contain any blank values. "
"The corresponding column will be skipped for the "
"entire file.",
context=(file_name, 1, [''] * len(raw_header), raw_header),
type=problems.TYPE_ERROR)
continue
elif h != h_stripped:
self._problems.CsvSyntax(
description="The header row should not contain any "
"space characters.",
context=(file_name, 1, [''] * len(raw_header), raw_header),
type=problems.TYPE_WARNING)
header.append(h_stripped)
valid_columns.append(i)
header_occurrences[h_stripped] += 1
for name, count in header_occurrences.items():
if count > 1:
self._problems.DuplicateColumn(
header=name,
file_name=file_name,
count=count)
self._schedule._table_columns[table_name] = header
# check for unrecognized columns, which are often misspellings
header_context = (file_name, 1, [''] * len(header), header)
valid_cols = cols + [deprecated_name for (deprecated_name, _) in deprecated]
unknown_cols = set(header) - set(valid_cols)
if len(unknown_cols) == len(header):
self._problems.CsvSyntax(
description="The header row did not contain any known column "
"names. The file is most likely missing the header row "
"or not in the expected CSV format.",
context=(file_name, 1, [''] * len(raw_header), raw_header),
type=problems.TYPE_ERROR)
else:
for col in unknown_cols:
# this is provided in order to create a nice colored list of
# columns in the validator output
self._problems.UnrecognizedColumn(file_name, col, header_context)
# check for missing required columns
missing_cols = set(required) - set(header)
for col in missing_cols:
# this is provided in order to create a nice colored list of
# columns in the validator output
self._problems.MissingColumn(file_name, col, header_context)
# check for deprecated columns
for (deprecated_name, new_name) in deprecated:
if deprecated_name in header:
self._problems.DeprecatedColumn(file_name, deprecated_name, new_name,
header_context)
line_num = 1 # First line was read by next(reader) above
for raw_row in reader:
line_num += 1
if len(raw_row) == 0: # skip extra empty lines in file
continue
if len(raw_row) > len(raw_header):
self._problems.OtherProblem('Found too many cells (commas) in line '
'%d of file "%s". Every row in the file '
'should have the same number of cells as '
'the header (first line) does.' %
(line_num, file_name),
(file_name, line_num),
type=problems.TYPE_WARNING)
if len(raw_row) < len(raw_header):
self._problems.OtherProblem('Found missing cells (commas) in line '
'%d of file "%s". Every row in the file '
'should have the same number of cells as '
'the header (first line) does.' %
(line_num, file_name),
(file_name, line_num),
type=problems.TYPE_WARNING)
# raw_row is a list of raw bytes which should be valid utf-8. Convert each
# valid_columns of raw_row into Unicode.
valid_values = []
unicode_error_columns = [] # index of valid_values elements with an error
for i in valid_columns:
try:
valid_values.append(raw_row[i].decode('utf-8'))
except UnicodeDecodeError:
# Replace all invalid characters with REPLACEMENT CHARACTER (U+FFFD)
valid_values.append(codecs.getdecoder("utf8")
(raw_row[i], errors="replace")[0])
unicode_error_columns.append(len(valid_values) - 1)
except IndexError:
break
# The error report may contain a dump of all values in valid_values so
# problems can not be reported until after converting all of raw_row to
# Unicode.
for i in unicode_error_columns:
self._problems.InvalidValue(header[i], valid_values[i],
'Unicode error',
(file_name, line_num,
valid_values, header))
# We strip ALL whitespace from around values. This matches the behavior
# of both the Google and OneBusAway GTFS parser.
valid_values = [value.strip() for value in valid_values]
d = dict(zip(header, valid_values))
yield (d, line_num, header, valid_values)
# TODO: Add testing for this specific function
def _ReadCSV(self, file_name, cols, required, deprecated):
"""Reads lines from file_name, yielding a list of unicode values
corresponding to the column names in cols."""
contents = self._GetUtf8Contents(file_name)
if not contents:
return
eol_checker = util.EndOfLineChecker(StringIO(contents),
file_name, self._problems)
reader = csv.reader(eol_checker) # Use excel dialect
header = next(reader)
header = map(lambda x: x.strip(), header) # trim any whitespace
header_occurrences = util.defaultdict(lambda: 0)
for column_header in header:
header_occurrences[column_header] += 1
for name, count in header_occurrences.items():
if count > 1:
self._problems.DuplicateColumn(
header=name,
file_name=file_name,
count=count)
# check for unrecognized columns, which are often misspellings
header_context = (file_name, 1, [''] * len(header), header)
valid_cols = cols + [deprecated_name for (deprecated_name, _) in deprecated]
unknown_cols = set(header).difference(set(valid_cols))
for col in unknown_cols:
# this is provided in order to create a nice colored list of
# columns in the validator output
self._problems.UnrecognizedColumn(file_name, col, header_context)
# check for missing required columns
col_index = [-1] * len(cols)
for i in range(len(cols)):
if cols[i] in header:
col_index[i] = header.index(cols[i])
elif cols[i] in required:
self._problems.MissingColumn(file_name, cols[i], header_context)
# check for deprecated columns
for (deprecated_name, new_name) in deprecated:
if deprecated_name in header:
self._problems.DeprecatedColumn(file_name, deprecated_name, new_name,
header_context)
row_num = 1
for row in reader:
row_num += 1
if len(row) == 0: # skip extra empty lines in file
continue
if len(row) > len(header):
self._problems.OtherProblem('Found too many cells (commas) in line '
'%d of file "%s". Every row in the file '
'should have the same number of cells as '
'the header (first line) does.' %
(row_num, file_name), (file_name, row_num),
type=problems.TYPE_WARNING)
if len(row) < len(header):
self._problems.OtherProblem('Found missing cells (commas) in line '
'%d of file "%s". Every row in the file '
'should have the same number of cells as '
'the header (first line) does.' %
(row_num, file_name), (file_name, row_num),
type=problems.TYPE_WARNING)
result = [None] * len(cols)
unicode_error_columns = [] # A list of column numbers with an error
for i in range(len(cols)):
ci = col_index[i]
if ci >= 0:
if len(row) <= ci: # handle short CSV rows
result[i] = u''
else:
try:
result[i] = row[ci].decode('utf-8').strip()
except UnicodeDecodeError:
# Replace all invalid characters with
# REPLACEMENT CHARACTER (U+FFFD)
result[i] = codecs.getdecoder("utf8")(row[ci],
errors="replace")[0].strip()
unicode_error_columns.append(i)
for i in unicode_error_columns:
self._problems.InvalidValue(cols[i], result[i],
'Unicode error',
(file_name, row_num, result, cols))
yield (result, row_num, cols)
def _HasFile(self, file_name):
"""Returns True if there's a file in the current feed with the
given file_name in the current feed."""
if self._zip:
return file_name in self._zip.namelist()
else:
file_path = os.path.join(self._path, file_name)
return os.path.exists(file_path) and os.path.isfile(file_path)
def _FileContents(self, file_name):
results = None
if self._zip:
try:
results = self._zip.read(file_name)
except KeyError: # file not found in archive
self._problems.MissingFile(file_name)
return None
else:
try:
data_file = open(os.path.join(self._path, file_name), 'rb')
results = data_file.read()
except IOError: # file not found
self._problems.MissingFile(file_name)
return None
if not results:
self._problems.EmptyFile(file_name)
return results
def _LoadFeed(self):
loading_order = self._gtfs_factory.GetLoadingOrder()
for filename in loading_order:
if not self._gtfs_factory.IsFileRequired(filename) and \
not self._HasFile(filename):
pass # File is not required, and feed does not have it.
else:
object_class = self._gtfs_factory.GetGtfsClassByFileName(filename)
for (d, row_num, header, row) in self._ReadCsvDict(
filename,
object_class._FIELD_NAMES,
object_class._REQUIRED_FIELD_NAMES,
object_class._DEPRECATED_FIELD_NAMES):
self._problems.SetFileContext(filename, row_num, row, header)
instance = object_class(field_dict=d)
instance.SetGtfsFactory(self._gtfs_factory)
if not instance.ValidateBeforeAdd(self._problems):
continue
instance.AddToSchedule(self._schedule, self._problems)
instance.ValidateAfterAdd(self._problems)
self._problems.ClearContext()
def _LoadCalendar(self):
file_name = 'calendar.txt'
file_name_dates = 'calendar_dates.txt'
if not self._HasFile(file_name) and not self._HasFile(file_name_dates):
self._problems.MissingFile(file_name)
return
# map period IDs to (period object, (file_name, row_num, row, cols))
periods = {}
service_period_class = self._gtfs_factory.ServicePeriod
# process calendar.txt
if self._HasFile(file_name):
has_useful_contents = False
for (row, row_num, cols) in \
self._ReadCSV(file_name,
service_period_class._FIELD_NAMES,
service_period_class._REQUIRED_FIELD_NAMES,
service_period_class._DEPRECATED_FIELD_NAMES):
context = (file_name, row_num, row, cols)
self._problems.SetFileContext(*context)
period = service_period_class(field_list=row)
if period.service_id in periods:
self._problems.DuplicateID('service_id', period.service_id)
else:
periods[period.service_id] = (period, context)
self._problems.ClearContext()
# process calendar_dates.txt
if self._HasFile(file_name_dates):
# ['service_id', 'date', 'exception_type']
for (row, row_num, cols) in \
self._ReadCSV(file_name_dates,
service_period_class._FIELD_NAMES_CALENDAR_DATES,
service_period_class._REQUIRED_FIELD_NAMES_CALENDAR_DATES,
service_period_class._DEPRECATED_FIELD_NAMES_CALENDAR_DATES):
context = (file_name_dates, row_num, row, cols)
self._problems.SetFileContext(*context)
service_id = row[0]
period = None
if service_id in periods:
period = periods[service_id][0]
else:
period = service_period_class(service_id)
periods[period.service_id] = (period, context)
exception_type = row[2]
if exception_type == u'1':
period.SetDateHasService(row[1], True, self._problems)
elif exception_type == u'2':
period.SetDateHasService(row[1], False, self._problems)
else:
self._problems.InvalidValue('exception_type', exception_type)
self._problems.ClearContext()
# Now insert the periods into the schedule object, so that they're
# validated with both calendar and calendar_dates info present
for period, context in periods.values():
self._problems.SetFileContext(*context)
self._schedule.AddServicePeriodObject(period, self._problems)
self._problems.ClearContext()
def _LoadShapes(self):
file_name = 'shapes.txt'
if not self._HasFile(file_name):
return
shapes = {} # shape_id to shape object
shape_class = self._gtfs_factory.Shape
for (d, row_num, header, row) in self._ReadCsvDict(
file_name,
shape_class._FIELD_NAMES,
shape_class._REQUIRED_FIELD_NAMES,
shape_class._DEPRECATED_FIELD_NAMES):
file_context = (file_name, row_num, row, header)
self._problems.SetFileContext(*file_context)
shapepoint = self._gtfs_factory.ShapePoint(field_dict=d)
if not shapepoint.ParseAttributes(self._problems):
continue
if shapepoint.shape_id in shapes:
shape = shapes[shapepoint.shape_id]
else:
shape = shape_class(shapepoint.shape_id)
shape.SetGtfsFactory(self._gtfs_factory)
shapes[shapepoint.shape_id] = shape
shape.AddShapePointObjectUnsorted(shapepoint, self._problems)
self._problems.ClearContext()
for shape_id, shape in list(shapes.items()):
self._schedule.AddShapeObject(shape, self._problems)
del shapes[shape_id]
def _LoadStopTimes(self):
stop_time_class = self._gtfs_factory.StopTime
for (row, row_num, cols) in self._ReadCSV('stop_times.txt',
stop_time_class._FIELD_NAMES,
stop_time_class._REQUIRED_FIELD_NAMES,
stop_time_class._DEPRECATED_FIELD_NAMES):
file_context = ('stop_times.txt', row_num, row, cols)
self._problems.SetFileContext(*file_context)
(trip_id, arrival_time, departure_time, stop_id, stop_sequence,
stop_headsign, pickup_type, drop_off_type, shape_dist_traveled,
timepoint) = row
try:
sequence = int(stop_sequence)
except (TypeError, ValueError):
self._problems.InvalidValue('stop_sequence', stop_sequence,
'This should be a number.')
continue
if sequence < 0:
self._problems.InvalidValue('stop_sequence', sequence,
'Sequence numbers should be 0 or higher.')
if stop_id not in self._schedule.stops:
self._problems.InvalidValue('stop_id', stop_id,
'This value wasn\'t defined in stops.txt')
continue
stop = self._schedule.stops[stop_id]
if trip_id not in self._schedule.trips:
self._problems.InvalidValue('trip_id', trip_id,
'This value wasn\'t defined in trips.txt')
continue
trip = self._schedule.trips[trip_id]
# If self._problems.Report returns then StopTime.__init__ will return
# even if the StopTime object has an error. Thus this code may add a
# StopTime that didn't validate to the database.
# Trip.GetStopTimes then tries to make a StopTime from the invalid data
# and calls the problem reporter for errors. An ugly solution is to
# wrap problems and a better solution is to move all validation out of
# __init__. For now make sure Trip.GetStopTimes gets a problem reporter
# when called from Trip.Validate.
stop_time = stop_time_class(self._problems, stop,
arrival_time, departure_time, stop_headsign, pickup_type,
drop_off_type, shape_dist_traveled, stop_sequence=sequence,
timepoint=timepoint)
trip._AddStopTimeObjectUnordered(stop_time, self._schedule)
self._problems.ClearContext()
# stop_times are validated in Trip.ValidateChildren, called by
# Schedule.Validate
def Load(self):
self._problems.ClearContext()
if not self._DetermineFormat():
return self._schedule
self._CheckFileNames()
self._LoadCalendar()
self._LoadShapes()
self._LoadFeed()
if self._load_stop_times:
self._LoadStopTimes()
if self._zip:
self._zip.close()
self._zip = None
if self._extra_validation:
self._schedule.Validate(self._problems, validate_children=False)
return self._schedule
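# --- Hedged usage sketch (appended for illustration; not part of the module) ---
# Minimal end-to-end use of Loader: point it at a GTFS zip or directory and
# call Load(); the path below is a placeholder. With the default problem
# reporter, an exception is raised for each problem found.
def _example_load_feed(feed_path='feed.zip'):
    loader = Loader(feed_path, extra_validation=True)
    return loader.Load()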
|
|
"""
NonPhysicsWalker.py is for avatars.
A walker control such as this one provides:
- creation of the collision nodes
- handling the keyboard and mouse input for avatar movement
- moving the avatar
it does not:
- play sounds
- play animations
although it does send messages that allow a listener to play sounds or
animations based on walker events.
"""
from direct.directnotify import DirectNotifyGlobal
from direct.showbase import DirectObject
from direct.controls.ControlManager import CollisionHandlerRayStart
from direct.showbase.InputStateGlobal import inputState
from direct.task.Task import Task
from pandac.PandaModules import *
class NonPhysicsWalker(DirectObject.DirectObject):
notify = DirectNotifyGlobal.directNotify.newCategory("NonPhysicsWalker")
wantDebugIndicator = base.config.GetBool('want-avatar-physics-indicator', 0)
# Ghost mode overrides this:
slideName = "slide-is-disabled"
# special methods
def __init__(self):
DirectObject.DirectObject.__init__(self)
self.worldVelocity = Vec3.zero()
self.collisionsActive = 0
self.speed=0.0
self.rotationSpeed=0.0
self.slideSpeed=0.0
self.vel=Vec3(0.0, 0.0, 0.0)
self.stopThisFrame = 0
def setWalkSpeed(self, forward, jump, reverse, rotate):
assert self.debugPrint("setWalkSpeed()")
self.avatarControlForwardSpeed=forward
#self.avatarControlJumpForce=jump
self.avatarControlReverseSpeed=reverse
self.avatarControlRotateSpeed=rotate
def getSpeeds(self):
#assert self.debugPrint("getSpeeds()")
return (self.speed, self.rotationSpeed, self.slideSpeed)
def setAvatar(self, avatar):
self.avatar = avatar
if avatar is not None:
pass # setup the avatar
def setAirborneHeightFunc(self, getAirborneHeight):
self.getAirborneHeight = getAirborneHeight
def setWallBitMask(self, bitMask):
self.cSphereBitMask = bitMask
def setFloorBitMask(self, bitMask):
self.cRayBitMask = bitMask
def swapFloorBitMask(self, oldMask, newMask):
self.cRayBitMask = self.cRayBitMask &~ oldMask
self.cRayBitMask |= newMask
if self.cRayNodePath and not self.cRayNodePath.isEmpty():
self.cRayNodePath.node().setFromCollideMask(self.cRayBitMask)
def initializeCollisions(self, collisionTraverser, avatarNodePath,
avatarRadius = 1.4, floorOffset = 1.0, reach = 1.0):
"""
Set up the avatar for collisions
"""
assert not avatarNodePath.isEmpty()
self.cTrav = collisionTraverser
self.avatarNodePath = avatarNodePath
# Set up the collision sphere
# This is a sphere on the ground to detect barrier collisions
self.cSphere = CollisionSphere(0.0, 0.0, 0.0, avatarRadius)
cSphereNode = CollisionNode('NPW.cSphereNode')
cSphereNode.addSolid(self.cSphere)
self.cSphereNodePath = avatarNodePath.attachNewNode(cSphereNode)
cSphereNode.setFromCollideMask(self.cSphereBitMask)
cSphereNode.setIntoCollideMask(BitMask32.allOff())
# Set up the collision ray
# This is a ray cast from your head down to detect floor polygons.
# This ray start is arbitrarily high in the air. Feel free to use
# a higher or lower value depending on whether you want an avatar
# that is outside of the world to step up to the floor when they
# get under valid floor:
self.cRay = CollisionRay(0.0, 0.0, CollisionHandlerRayStart, 0.0, 0.0, -1.0)
cRayNode = CollisionNode('NPW.cRayNode')
cRayNode.addSolid(self.cRay)
self.cRayNodePath = avatarNodePath.attachNewNode(cRayNode)
cRayNode.setFromCollideMask(self.cRayBitMask)
cRayNode.setIntoCollideMask(BitMask32.allOff())
# set up wall collision mechanism
self.pusher = CollisionHandlerPusher()
self.pusher.setInPattern("enter%in")
self.pusher.setOutPattern("exit%in")
# set up floor collision mechanism
self.lifter = CollisionHandlerFloor()
self.lifter.setInPattern("on-floor")
self.lifter.setOutPattern("off-floor")
self.lifter.setOffset(floorOffset)
self.lifter.setReach(reach)
# Limit our rate-of-fall with the lifter.
# If this is too low, we actually "fall" off steep stairs
# and float above them as we go down. I increased this
# from 8.0 to 16.0 to prevent this.
self.lifter.setMaxVelocity(16.0)
self.pusher.addCollider(self.cSphereNodePath, avatarNodePath)
self.lifter.addCollider(self.cRayNodePath, avatarNodePath)
# activate the collider with the traverser and pusher
self.setCollisionsActive(1)
def deleteCollisions(self):
del self.cTrav
del self.cSphere
self.cSphereNodePath.removeNode()
del self.cSphereNodePath
del self.cRay
self.cRayNodePath.removeNode()
del self.cRayNodePath
del self.pusher
del self.lifter
def setTag(self, key, value):
self.cSphereNodePath.setTag(key, value)
def setCollisionsActive(self, active = 1):
assert self.debugPrint("setCollisionsActive(active%s)"%(active,))
if self.collisionsActive != active:
self.collisionsActive = active
if active:
self.cTrav.addCollider(self.cSphereNodePath, self.pusher)
self.cTrav.addCollider(self.cRayNodePath, self.lifter)
else:
self.cTrav.removeCollider(self.cSphereNodePath)
self.cTrav.removeCollider(self.cRayNodePath)
# Now that we have disabled collisions, make one more pass
# right now to ensure we aren't standing in a wall.
self.oneTimeCollide()
def placeOnFloor(self):
"""
Make a reasonable effort to place the avatar on the ground.
For example, this is useful when switching away from the
current walker.
"""
# With these on, getAirborneHeight is not returning the correct value so
# when we open our book while swimming we pop down underneath the ground
# self.oneTimeCollide()
# self.avatarNodePath.setZ(self.avatarNodePath.getZ()-self.getAirborneHeight())
# Since this is the non physics walker - won't they already be on the ground?
return
def oneTimeCollide(self):
"""
Makes one quick collision pass for the avatar, for instance as
a one-time straighten-things-up operation after collisions
have been disabled.
"""
tempCTrav = CollisionTraverser("oneTimeCollide")
tempCTrav.addCollider(self.cSphereNodePath, self.pusher)
tempCTrav.addCollider(self.cRayNodePath, self.lifter)
tempCTrav.traverse(render)
def addBlastForce(self, vector):
pass
def displayDebugInfo(self):
"""
For debug use.
"""
onScreenDebug.add("controls", "NonPhysicsWalker")
def _calcSpeeds(self):
# get the button states:
forward = inputState.isSet("forward")
reverse = inputState.isSet("reverse")
turnLeft = inputState.isSet("turnLeft")
turnRight = inputState.isSet("turnRight")
slide = inputState.isSet(self.slideName) or 0
#jump = inputState.isSet("jump")
# Check for Auto-Run
if base.localAvatar.getAutoRun():
forward = 1
reverse = 0
# Determine what the speeds are based on the buttons:
self.speed=(forward and self.avatarControlForwardSpeed or
reverse and -self.avatarControlReverseSpeed)
# Should fSlide be renamed slideButton?
self.slideSpeed=slide and ((reverse and turnLeft and -self.avatarControlReverseSpeed*(0.75)) or
(reverse and turnRight and self.avatarControlReverseSpeed*(0.75)) or
(turnLeft and -self.avatarControlForwardSpeed*(0.75)) or
(turnRight and self.avatarControlForwardSpeed*(0.75)))
self.rotationSpeed=not slide and (
(turnLeft and self.avatarControlRotateSpeed) or
(turnRight and -self.avatarControlRotateSpeed))
def handleAvatarControls(self, task):
"""
Check on the arrow keys and update the avatar.
"""
if not self.lifter.hasContact():
# hack fix for falling through the floor:
messenger.send("walkerIsOutOfWorld", [self.avatarNodePath])
self._calcSpeeds()
if __debug__:
debugRunning = inputState.isSet("debugRunning")
if debugRunning:
self.speed*=4.0
self.slideSpeed*=4.0
self.rotationSpeed*=1.25
if self.wantDebugIndicator:
self.displayDebugInfo()
# How far did we move based on the amount of time elapsed?
dt=ClockObject.getGlobalClock().getDt()
# Check to see if we're moving at all:
if self.speed or self.slideSpeed or self.rotationSpeed:
if self.stopThisFrame:
distance = 0.0
slideDistance = 0.0
rotation = 0.0
self.stopThisFrame = 0
else:
distance = dt * self.speed
slideDistance = dt * self.slideSpeed
rotation = dt * self.rotationSpeed
# Take a step in the direction of our previous heading.
self.vel=Vec3(Vec3.forward() * distance +
Vec3.right() * slideDistance)
if self.vel != Vec3.zero():
# rotMat is the rotation matrix corresponding to
# our previous heading.
rotMat=Mat3.rotateMatNormaxis(self.avatarNodePath.getH(), Vec3.up())
step=rotMat.xform(self.vel)
self.avatarNodePath.setFluidPos(Point3(self.avatarNodePath.getPos()+step))
self.avatarNodePath.setH(self.avatarNodePath.getH()+rotation)
messenger.send("avatarMoving")
else:
self.vel.set(0.0, 0.0, 0.0)
self.__oldPosDelta = self.avatarNodePath.getPosDelta(render)
self.__oldDt = dt
try:
self.worldVelocity = self.__oldPosDelta*(1/self.__oldDt)
except ZeroDivisionError:
# dt was zero this frame; leave the avatar at rest.
self.worldVelocity = Vec3.zero()
return Task.cont
def doDeltaPos(self):
assert self.debugPrint("doDeltaPos()")
def reset(self):
assert self.debugPrint("reset()")
def getVelocity(self):
return self.vel
def enableAvatarControls(self):
"""
Activate the arrow keys, etc.
"""
assert self.debugPrint("enableAvatarControls")
assert self.collisionsActive
taskName = "AvatarControls-%s"%(id(self),)
# remove any old task with this name
taskMgr.remove(taskName)
# spawn the new task
taskMgr.add(self.handleAvatarControls, taskName)
def disableAvatarControls(self):
"""
Ignore the arrow keys, etc.
"""
assert self.debugPrint("disableAvatarControls")
taskName = "AvatarControls-%s"%(id(self),)
taskMgr.remove(taskName)
def flushEventHandlers(self):
if hasattr(self, 'cTrav'):
self.pusher.flush()
self.lifter.flush() # not currently defined or needed
if __debug__:
def debugPrint(self, message):
"""for debugging"""
return self.notify.debug(
str(id(self))+' '+message)
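# --- Illustrative sketch (editor addition, not part of the original module) ---
# A minimal example of how a NonPhysicsWalker might be wired up, based only on
# the methods defined above. It assumes a running ShowBase application (which
# provides the traverser and an avatar NodePath); the bit masks and speeds are
# placeholder assumptions, not values mandated by this class.
def _exampleSetupWalker(avatarNodePath, collisionTraverser):
    walker = NonPhysicsWalker()
    # Speeds: forward, jump (unused by this walker), reverse, rotate.
    walker.setWalkSpeed(16.0, 0.0, 8.0, 80.0)
    # Choose which collision bits count as walls vs. floors (placeholders).
    walker.setWallBitMask(BitMask32.bit(1))
    walker.setFloorBitMask(BitMask32.bit(2))
    # Build the collision sphere/ray and register them with the traverser.
    walker.initializeCollisions(collisionTraverser, avatarNodePath,
                                avatarRadius=1.4, floorOffset=1.0, reach=1.0)
    walker.setAvatar(avatarNodePath)
    # Start the per-frame control task; call disableAvatarControls() to stop.
    walker.enableAvatarControls()
    return walker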
|
|
from fsdict import FSDict
import feedgenerator
from urllib import quote_plus
import os.path
from feeddirectives import Latest
from feednodes import latest
from sphinx.addnodes import toctree
from docutils import nodes
#global
feed_entries = None
#constant unlikely to occur in a docname and legal as a filename
MAGIC_SEPARATOR = '---###---'
def parse_date(datestring):
try:
parser = parse_date.parser
except AttributeError:
import dateutil.parser
parser = dateutil.parser.parser()
parse_date.parser = parser
return parser.parse(datestring)
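# Illustrative sketch (editor addition): parse_date() lazily builds a single
# dateutil parser and caches it as an attribute on the function object, so
# repeated calls reuse it. Requires python-dateutil; the sample dates are
# arbitrary.
def _example_parse_date_caching():
    first = parse_date('2011-04-01')
    second = parse_date('2011-05-02 10:30')
    # Both calls share the same cached parser instance.
    assert parse_date.parser is not None
    return first, second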
def setup(app):
"""
see: http://sphinx.pocoo.org/ext/appapi.html
this is the primary extension point for Sphinx
"""
from sphinx.application import Sphinx
if not isinstance(app, Sphinx): return
app.add_config_value('feed_title', '', 'html')
app.add_config_value('feed_base_url', '', 'html')
app.add_config_value('feed_description', '', 'html')
app.add_config_value('feed_filename', 'rss.xml', 'html')
app.add_directive('latest', Latest)
app.add_node(latest)
app.connect('build-finished', emit_feed)
app.connect('builder-inited', create_feed_container)
app.connect('env-purge-doc', remove_dead_feed_item)
app.connect('env-purge-doc', purge_dates)
#I would like to parse dates here, but we aren't supplied the document name in the handler, so it's pointless
#app.connect('doctree-read', parse_article_date)
app.connect('html-page-context', create_feed_item)
app.connect('doctree-resolved', process_latest_toc)
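# Illustrative sketch (editor addition): the config values registered above are
# typically set from a project's conf.py roughly like this. The values are
# placeholders, and the extension name depends on how this module is exposed
# on the Python path.
#
#     extensions = ['feed']              # or wherever this module is installed
#     feed_title = 'My Project Blog'
#     feed_base_url = 'http://example.com/blog'
#     feed_description = 'Latest articles'
#     feed_filename = 'rss.xml'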
def purge_dates(app, env, docname):
if not hasattr(env, 'feed_pub_dates'):
return
try:
del(env.feed_pub_dates[docname])
except KeyError:
pass
def process_latest_toc(app, doctree, fromdocname):
"""We traverse the doctree looking for publication dates to build the
date-based ToC here. Since the order in which documents are processed is
ill-defined from our perspective, we parse all of them each time, but
cache them in the environment"""
env = app.builder.env
cache_article_dates(env)
feed_pub_dates = getattr(env, 'feed_pub_dates', {})
for node in doctree.traverse(latest):
entries = node['entries']
includefiles = node['includefiles']
decorated_entries = [
(feed_pub_dates.get(doc), title, doc)
for title, doc in entries
if doc in feed_pub_dates]
decorated_entries.sort(reverse=True)
latest_list = nodes.bullet_list('',
classes=['feed-latest-articles'])
for date, title, docname in decorated_entries:
para = nodes.paragraph()
list_item = nodes.list_item('', para,
classes=['feed-dated-article'])
if title is None:
title = env.titles.get(docname)
if title:
title = title[0] #.astext()
# Create a reference
newnode = nodes.reference('', '')
innernode = title #nodes.emphasis(title, title)
newnode['refdocname'] = docname
newnode['refuri'] = app.builder.get_relative_uri(
fromdocname, docname)
newnode.append(innernode)
para += newnode
para += nodes.Text(' ', ' ')
stringdate = date.strftime('%Y/%m/%d')
date_wrapper = nodes.container(classes=['feed-article-date'])
date_wrapper += nodes.Text(stringdate, stringdate)
para += date_wrapper
# Insert into the latest_list
latest_list.append(list_item)
node.replace_self(latest_list)
def create_feed_container(app):
"""
create lazy filesystem stash for keeping RSS entry fragments, since we
don't want to store the entire site in the environment (in fact, even if
we did, it wasn't persisting for some reason.)
"""
global feed_entries
rss_fragment_path = os.path.realpath(os.path.join(app.outdir, '..', 'rss_entry_fragments'))
feed_entries = FSDict(work_dir=rss_fragment_path)
app.builder.env.feed_url = app.config.feed_base_url + '/' + \
app.config.feed_filename
def cache_article_dates(env):
# This should only be run once, although currently it is run many times,
# wasting CPU cycles.
if not hasattr(env, 'feed_pub_dates'):
env.feed_pub_dates = {}
feed_pub_dates = env.feed_pub_dates
for docname, doc_metadata in env.metadata.iteritems():
if 'date' not in doc_metadata:
continue #don't index dateless articles
try:
pub_date = parse_date(doc_metadata['date'])
feed_pub_dates[docname] = pub_date
except ValueError, exc:
# probably a nonsensical date; `app` is not in scope here, so warn via env
env.warn(docname, 'date parse error: ' + str(exc))
def get_date_for_article(env, docname):
feed_pub_dates = env.feed_pub_dates
if docname in feed_pub_dates:
return feed_pub_dates[docname]
def create_feed_item(app, docname, templatename, ctx, doctree):
"""
Here we have access to nice HTML fragments to use in, say, an RSS feed.
We serialize them to disk so that we get them preserved across builds.
We also inject useful metadata into the context here.
"""
global feed_entries
from absolutify_urls import absolutify
env = app.builder.env
metadata = env.metadata.get(docname, {})
pub_date = get_date_for_article(env, docname)
if not pub_date:
return
# RSS item attributes, w/defaults:
# title, link, description, author_email=None,
# author_name=None, author_link=None, pubdate=None, comments=None,
# unique_id=None, enclosure=None, categories=(), item_copyright=None,
# ttl=None,
link = app.config.feed_base_url + '/' + ctx['current_page_name'] + ctx['file_suffix']
item = {
'title': ctx.get('title'),
'link': link,
'unique_id': link,
'description': absolutify(ctx.get('body'), link),
'pubdate': pub_date
}
if 'author' in metadata:
item['author'] = metadata['author']
feed_entries[dated_name(docname, pub_date)] = item
#Now, useful variables to keep in context
ctx['rss_link'] = app.builder.env.feed_url
ctx['pub_date'] = pub_date
def remove_dead_feed_item(app, env, docname):
"""
TODO:
purge unwanted crap
"""
global feed_entries
munged_name = ''.join([MAGIC_SEPARATOR,quote_plus(docname)])
for name in feed_entries:
if name.endswith(munged_name):
del(feed_entries[name])
def emit_feed(app, exc):
global feed_entries
import os.path
title = app.config.feed_title
if not title:
title = app.config.project
feed_dict = {
'title': title,
'link': app.config.feed_base_url,
'feed_url': app.config.feed_base_url,
'description': app.config.feed_description
}
if app.config.language:
feed_dict['language'] = app.config.language
if app.config.copyright:
feed_dict['feed_copyright'] = app.config.copyright
feed = feedgenerator.Rss201rev2Feed(**feed_dict)
app.builder.env.feed_feed = feed
ordered_keys = feed_entries.keys()
ordered_keys.sort(reverse=True)
for key in ordered_keys:
feed.add_item(**feed_entries[key])
outfilename = os.path.join(app.builder.outdir,
app.config.feed_filename)
fp = open(outfilename, 'w')
feed.write(fp, 'utf-8')
fp.close()
def dated_name(docname, date):
"""
We need convenient filenames that incorporate the date for ease of sorting
and the docname (used as a guid) for uniqueness, and that are legal
filesystem names. NB: at the moment, the hour of publication is ignored.
"""
return quote_plus(MAGIC_SEPARATOR.join([date.isoformat(), docname]))
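# Illustrative sketch (editor addition): because dated_name() keys begin with
# the ISO-formatted date, a plain string sort orders entries chronologically,
# which is what emit_feed() relies on when it sorts keys in reverse for a
# newest-first feed. The docnames and dates below are made up.
def _example_dated_name_ordering():
    from datetime import datetime
    older = dated_name('blog/first-post', datetime(2011, 1, 5))
    newer = dated_name('blog/second-post', datetime(2011, 3, 9))
    assert sorted([older, newer], reverse=True)[0] == newer
    return older, newer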
|
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The TensorBoard metrics plugin."""
import collections
import imghdr
import json
from werkzeug import wrappers
from tensorboard import errors
from tensorboard import plugin_util
from tensorboard.backend import http_util
from tensorboard.data import provider
from tensorboard.plugins import base_plugin
from tensorboard.plugins.histogram import metadata as histogram_metadata
from tensorboard.plugins.image import metadata as image_metadata
from tensorboard.plugins.metrics import metadata
from tensorboard.plugins.scalar import metadata as scalar_metadata
_IMGHDR_TO_MIMETYPE = {
"bmp": "image/bmp",
"gif": "image/gif",
"jpeg": "image/jpeg",
"png": "image/png",
"svg": "image/svg+xml",
}
_DEFAULT_IMAGE_MIMETYPE = "application/octet-stream"
_SINGLE_RUN_PLUGINS = frozenset(
[histogram_metadata.PLUGIN_NAME, image_metadata.PLUGIN_NAME]
)
_SAMPLED_PLUGINS = frozenset([image_metadata.PLUGIN_NAME])
def _get_tag_description_info(mapping):
"""Gets maps from tags to descriptions, and descriptions to runs.
Args:
mapping: a nested map `d` such that `d[run][tag]` is a time series
produced by DataProvider's `list_*` methods.
Returns:
A tuple containing
tag_to_descriptions: A map from tag strings to a set of description
strings.
description_to_runs: A map from description strings to a set of run
strings.
"""
tag_to_descriptions = collections.defaultdict(set)
description_to_runs = collections.defaultdict(set)
for (run, tag_to_content) in mapping.items():
for (tag, metadatum) in tag_to_content.items():
description = metadatum.description
if len(description):
tag_to_descriptions[tag].add(description)
description_to_runs[description].add(run)
return tag_to_descriptions, description_to_runs
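# Illustrative sketch (editor addition): a standalone demonstration of the two
# maps returned above, using a stand-in object for DataProvider's time-series
# metadata (only the `description` attribute is needed here). The run and tag
# names are made up.
def _example_tag_description_info():
    class _FakeTimeSeries(object):
        def __init__(self, description):
            self.description = description

    mapping = {
        "train": {"loss": _FakeTimeSeries("Training loss.")},
        "test": {"loss": _FakeTimeSeries("Evaluation loss.")},
    }
    tag_to_descriptions, description_to_runs = _get_tag_description_info(mapping)
    assert tag_to_descriptions["loss"] == {"Training loss.", "Evaluation loss."}
    assert description_to_runs["Training loss."] == {"train"}
    return tag_to_descriptions, description_to_runs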
def _build_combined_description(descriptions, description_to_runs):
"""Creates a single description from a set of descriptions.
Descriptions may be composites when a single tag has different descriptions
across multiple runs.
Args:
descriptions: A list of description strings.
description_to_runs: A map from description strings to a set of run
strings.
Returns:
The combined description string.
"""
prefixed_descriptions = []
for description in descriptions:
runs = sorted(description_to_runs[description])
run_or_runs = "runs" if len(runs) > 1 else "run"
run_header = "## For " + run_or_runs + ": " + ", ".join(runs)
description_html = run_header + "\n" + description
prefixed_descriptions.append(description_html)
header = "# Multiple descriptions\n"
return header + "\n".join(prefixed_descriptions)
def _get_tag_to_description(mapping):
"""Returns a map of tags to descriptions.
Args:
mapping: a nested map `d` such that `d[run][tag]` is a time series
produced by DataProvider's `list_*` methods.
Returns:
A map from tag strings to description HTML strings. E.g.
{
"loss": "<h1>Multiple descriptions</h1><h2>For runs: test, train
</h2><p>...</p>",
"loss2": "<p>The lossy details</p>",
}
"""
tag_to_descriptions, description_to_runs = _get_tag_description_info(
mapping
)
result = {}
for tag in tag_to_descriptions:
descriptions = sorted(tag_to_descriptions[tag])
if len(descriptions) == 1:
description = descriptions[0]
else:
description = _build_combined_description(
descriptions, description_to_runs
)
result[tag] = plugin_util.markdown_to_safe_html(description)
return result
def _get_run_tag_info(mapping):
"""Returns a map of run names to a list of tag names.
Args:
mapping: a nested map `d` such that `d[run][tag]` is a time series
produced by DataProvider's `list_*` methods.
Returns:
A map from run strings to a list of tag strings. E.g.
{"loss001a": ["actor/loss", "critic/loss"], ...}
"""
return {run: sorted(mapping[run]) for run in mapping}
def _format_basic_mapping(mapping):
"""Prepares a scalar or histogram mapping for client consumption.
Args:
mapping: a nested map `d` such that `d[run][tag]` is a time series
produced by DataProvider's `list_*` methods.
Returns:
A dict with the following fields:
runTagInfo: the return type of `_get_run_tag_info`
tagDescriptions: the return type of `_get_tag_to_description`
"""
return {
"runTagInfo": _get_run_tag_info(mapping),
"tagDescriptions": _get_tag_to_description(mapping),
}
def _format_image_blob_sequence_datum(sorted_datum_list, sample):
"""Formats image metadata from a list of BlobSequenceDatum's for clients.
This expects that frontend clients need to access images based on the
run+tag+sample.
Args:
sorted_datum_list: a list of DataProvider's `BlobSequenceDatum`, sorted by
step. This can be produced via DataProvider's `read_blob_sequences`.
sample: zero-indexed integer for the requested sample.
Returns:
A list of `ImageStepDatum` (see http_api.md).
"""
# For images, ignore the first 2 items of a BlobSequenceDatum's values, which
# correspond to width, height.
index = sample + 2
step_data = []
for datum in sorted_datum_list:
if len(datum.values) <= index:
continue
step_data.append(
{
"step": datum.step,
"wallTime": datum.wall_time,
"imageId": datum.values[index].blob_key,
}
)
return step_data
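# Illustrative sketch (editor addition): shows how the `sample + 2` index skips
# the width/height blobs at the front of each BlobSequenceDatum. The datum and
# blob stand-ins below carry only the attributes used above; values are made up.
def _example_image_step_data():
    class _FakeBlob(object):
        def __init__(self, blob_key):
            self.blob_key = blob_key

    class _FakeDatum(object):
        def __init__(self, step, wall_time, values):
            self.step = step
            self.wall_time = wall_time
            self.values = values

    datum = _FakeDatum(
        step=7,
        wall_time=1234.5,
        # values[0] and values[1] stand for width/height; images start at index 2.
        values=[_FakeBlob("w"), _FakeBlob("h"), _FakeBlob("img-0"), _FakeBlob("img-1")],
    )
    result = _format_image_blob_sequence_datum([datum], sample=1)
    assert result == [{"step": 7, "wallTime": 1234.5, "imageId": "img-1"}]
    return result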
def _get_tag_run_image_info(mapping):
"""Returns a map of tag names to run information.
Args:
mapping: the result of DataProvider's `list_blob_sequences`.
Returns:
A nested map from run strings to tag string to image info, where image
info is an object of form {"maxSamplesPerStep": num}. For example,
{
"reshaped": {
"test": {"maxSamplesPerStep": 1},
"train": {"maxSamplesPerStep": 1}
},
"convolved": {"test": {"maxSamplesPerStep": 50}},
}
"""
tag_run_image_info = collections.defaultdict(dict)
for (run, tag_to_content) in mapping.items():
for (tag, metadatum) in tag_to_content.items():
tag_run_image_info[tag][run] = {
"maxSamplesPerStep": metadatum.max_length - 2 # width, height
}
return dict(tag_run_image_info)
def _format_image_mapping(mapping):
"""Prepares an image mapping for client consumption.
Args:
mapping: the result of DataProvider's `list_blob_sequences`.
Returns:
A dict with the following fields:
tagRunSampledInfo: the return type of `_get_tag_run_image_info`
tagDescriptions: the return type of `_get_tag_description_info`
"""
return {
"tagDescriptions": _get_tag_to_description(mapping),
"tagRunSampledInfo": _get_tag_run_image_info(mapping),
}
class MetricsPlugin(base_plugin.TBPlugin):
"""Metrics Plugin for TensorBoard."""
plugin_name = metadata.PLUGIN_NAME
def __init__(self, context):
"""Instantiates MetricsPlugin.
Args:
context: A base_plugin.TBContext instance. MetricsLoader checks that
it contains a valid `data_provider`.
"""
self._data_provider = context.data_provider
# For histograms, use a round number + 1 since sampling includes both start
# and end steps, so N+1 samples corresponds to dividing the step sequence
# into N intervals.
sampling_hints = context.sampling_hints or {}
self._plugin_downsampling = {
"scalars": sampling_hints.get(scalar_metadata.PLUGIN_NAME, 1000),
"histograms": sampling_hints.get(
histogram_metadata.PLUGIN_NAME, 51
),
"images": sampling_hints.get(image_metadata.PLUGIN_NAME, 10),
}
self._scalar_version_checker = plugin_util._MetadataVersionChecker(
data_kind="scalar time series",
latest_known_version=0,
)
self._histogram_version_checker = plugin_util._MetadataVersionChecker(
data_kind="histogram time series",
latest_known_version=0,
)
self._image_version_checker = plugin_util._MetadataVersionChecker(
data_kind="image time series",
latest_known_version=0,
)
def frontend_metadata(self):
return base_plugin.FrontendMetadata(
is_ng_component=True, tab_name="Time Series"
)
def get_plugin_apps(self):
return {
"/tags": self._serve_tags,
"/timeSeries": self._serve_time_series,
"/imageData": self._serve_image_data,
}
def data_plugin_names(self):
return (scalar_metadata.PLUGIN_NAME, histogram_metadata.PLUGIN_NAME)
def is_active(self):
return False # 'data_plugin_names' suffices.
@wrappers.Request.application
def _serve_tags(self, request):
ctx = plugin_util.context(request.environ)
experiment = plugin_util.experiment_id(request.environ)
index = self._tags_impl(ctx, experiment=experiment)
return http_util.Respond(request, index, "application/json")
def _tags_impl(self, ctx, experiment=None):
"""Returns tag metadata for a given experiment's logged metrics.
Args:
ctx: A `tensorboard.context.RequestContext` value.
experiment: optional string ID of the request's experiment.
Returns:
A nested dict 'd' with keys in ("scalars", "histograms", "images")
and values being the return type of _format_*mapping.
"""
scalar_mapping = self._data_provider.list_scalars(
ctx,
experiment_id=experiment,
plugin_name=scalar_metadata.PLUGIN_NAME,
)
scalar_mapping = self._filter_by_version(
scalar_mapping,
scalar_metadata.parse_plugin_metadata,
self._scalar_version_checker,
)
histogram_mapping = self._data_provider.list_tensors(
ctx,
experiment_id=experiment,
plugin_name=histogram_metadata.PLUGIN_NAME,
)
if histogram_mapping is None:
histogram_mapping = {}
histogram_mapping = self._filter_by_version(
histogram_mapping,
histogram_metadata.parse_plugin_metadata,
self._histogram_version_checker,
)
image_mapping = self._data_provider.list_blob_sequences(
ctx,
experiment_id=experiment,
plugin_name=image_metadata.PLUGIN_NAME,
)
if image_mapping is None:
image_mapping = {}
image_mapping = self._filter_by_version(
image_mapping,
image_metadata.parse_plugin_metadata,
self._image_version_checker,
)
result = {}
result["scalars"] = _format_basic_mapping(scalar_mapping)
result["histograms"] = _format_basic_mapping(histogram_mapping)
result["images"] = _format_image_mapping(image_mapping)
return result
def _filter_by_version(self, mapping, parse_metadata, version_checker):
"""Filter `DataProvider.list_*` output by summary metadata version."""
result = {run: {} for run in mapping}
for (run, tag_to_content) in mapping.items():
for (tag, metadatum) in tag_to_content.items():
md = parse_metadata(metadatum.plugin_content)
if not version_checker.ok(md.version, run, tag):
continue
result[run][tag] = metadatum
return result
@wrappers.Request.application
def _serve_time_series(self, request):
ctx = plugin_util.context(request.environ)
experiment = plugin_util.experiment_id(request.environ)
if request.method == "POST":
series_requests_string = request.form.get("requests")
else:
series_requests_string = request.args.get("requests")
if not series_requests_string:
raise errors.InvalidArgumentError("Missing 'requests' field")
try:
series_requests = json.loads(series_requests_string)
except ValueError:
raise errors.InvalidArgumentError(
"Unable to parse 'requests' as JSON"
)
response = self._time_series_impl(ctx, experiment, series_requests)
return http_util.Respond(request, response, "application/json")
def _time_series_impl(self, ctx, experiment, series_requests):
"""Constructs a list of responses from a list of series requests.
Args:
ctx: A `tensorboard.context.RequestContext` value.
experiment: string ID of the request's experiment.
series_requests: a list of `TimeSeriesRequest` dicts (see http_api.md).
Returns:
A list of `TimeSeriesResponse` dicts (see http_api.md).
"""
responses = [
self._get_time_series(ctx, experiment, request)
for request in series_requests
]
return responses
def _create_base_response(self, series_request):
tag = series_request.get("tag")
run = series_request.get("run")
plugin = series_request.get("plugin")
sample = series_request.get("sample")
response = {"plugin": plugin, "tag": tag}
if isinstance(run, str):
response["run"] = run
if isinstance(sample, int):
response["sample"] = sample
return response
def _get_invalid_request_error(self, series_request):
tag = series_request.get("tag")
plugin = series_request.get("plugin")
run = series_request.get("run")
sample = series_request.get("sample")
if not isinstance(tag, str):
return "Missing tag"
if (
plugin != scalar_metadata.PLUGIN_NAME
and plugin != histogram_metadata.PLUGIN_NAME
and plugin != image_metadata.PLUGIN_NAME
):
return "Invalid plugin"
if plugin in _SINGLE_RUN_PLUGINS and not isinstance(run, str):
return "Missing run"
if plugin in _SAMPLED_PLUGINS and not isinstance(sample, int):
return "Missing sample"
return None
def _get_time_series(self, ctx, experiment, series_request):
"""Returns time series data for a given tag, plugin.
Args:
ctx: A `tensorboard.context.RequestContext` value.
experiment: string ID of the request's experiment.
series_request: a `TimeSeriesRequest` (see http_api.md).
Returns:
A `TimeSeriesResponse` dict (see http_api.md).
"""
tag = series_request.get("tag")
run = series_request.get("run")
plugin = series_request.get("plugin")
sample = series_request.get("sample")
response = self._create_base_response(series_request)
request_error = self._get_invalid_request_error(series_request)
if request_error:
response["error"] = request_error
return response
runs = [run] if run else None
run_to_series = None
if plugin == scalar_metadata.PLUGIN_NAME:
run_to_series = self._get_run_to_scalar_series(
ctx, experiment, tag, runs
)
if plugin == histogram_metadata.PLUGIN_NAME:
run_to_series = self._get_run_to_histogram_series(
ctx, experiment, tag, runs
)
if plugin == image_metadata.PLUGIN_NAME:
run_to_series = self._get_run_to_image_series(
ctx, experiment, tag, sample, runs
)
response["runToSeries"] = run_to_series
return response
def _get_run_to_scalar_series(self, ctx, experiment, tag, runs):
"""Builds a run-to-scalar-series dict for client consumption.
Args:
ctx: A `tensorboard.context.RequestContext` value.
experiment: a string experiment id.
tag: string of the requested tag.
runs: optional list of run names as strings.
Returns:
A map from string run names to `ScalarStepDatum` (see http_api.md).
"""
mapping = self._data_provider.read_scalars(
ctx,
experiment_id=experiment,
plugin_name=scalar_metadata.PLUGIN_NAME,
downsample=self._plugin_downsampling["scalars"],
run_tag_filter=provider.RunTagFilter(runs=runs, tags=[tag]),
)
run_to_series = {}
for (result_run, tag_data) in mapping.items():
if tag not in tag_data:
continue
values = [
{
"wallTime": datum.wall_time,
"step": datum.step,
"value": datum.value,
}
for datum in tag_data[tag]
]
run_to_series[result_run] = values
return run_to_series
def _format_histogram_datum_bins(self, datum):
"""Formats a histogram datum's bins for client consumption.
Args:
datum: a DataProvider's TensorDatum.
Returns:
A list of `HistogramBin`s (see http_api.md).
"""
numpy_list = datum.numpy.tolist()
bins = [{"min": x[0], "max": x[1], "count": x[2]} for x in numpy_list]
return bins
def _get_run_to_histogram_series(self, ctx, experiment, tag, runs):
"""Builds a run-to-histogram-series dict for client consumption.
Args:
ctx: A `tensorboard.context.RequestContext` value.
experiment: a string experiment id.
tag: string of the requested tag.
runs: optional list of run names as strings.
Returns:
A map from string run names to `HistogramStepDatum` (see http_api.md).
"""
mapping = self._data_provider.read_tensors(
ctx,
experiment_id=experiment,
plugin_name=histogram_metadata.PLUGIN_NAME,
downsample=self._plugin_downsampling["histograms"],
run_tag_filter=provider.RunTagFilter(runs=runs, tags=[tag]),
)
run_to_series = {}
for (result_run, tag_data) in mapping.items():
if tag not in tag_data:
continue
values = [
{
"wallTime": datum.wall_time,
"step": datum.step,
"bins": self._format_histogram_datum_bins(datum),
}
for datum in tag_data[tag]
]
run_to_series[result_run] = values
return run_to_series
def _get_run_to_image_series(self, ctx, experiment, tag, sample, runs):
"""Builds a run-to-image-series dict for client consumption.
Args:
ctx: A `tensorboard.context.RequestContext` value.
experiment: a string experiment id.
tag: string of the requested tag.
sample: zero-indexed integer for the requested sample.
runs: optional list of run names as strings.
Returns:
A `RunToSeries` dict (see http_api.md).
"""
mapping = self._data_provider.read_blob_sequences(
ctx,
experiment_id=experiment,
plugin_name=image_metadata.PLUGIN_NAME,
downsample=self._plugin_downsampling["images"],
run_tag_filter=provider.RunTagFilter(runs, tags=[tag]),
)
run_to_series = {}
for (result_run, tag_data) in mapping.items():
if tag not in tag_data:
continue
blob_sequence_datum_list = tag_data[tag]
series = _format_image_blob_sequence_datum(
blob_sequence_datum_list, sample
)
if series:
run_to_series[result_run] = series
return run_to_series
@wrappers.Request.application
def _serve_image_data(self, request):
"""Serves an individual image."""
ctx = plugin_util.context(request.environ)
blob_key = request.args.get("imageId")
if not blob_key:
raise errors.InvalidArgumentError("Missing 'imageId' field")
(data, content_type) = self._image_data_impl(ctx, blob_key)
return http_util.Respond(request, data, content_type)
def _image_data_impl(self, ctx, blob_key):
"""Gets the image data for a blob key.
Args:
ctx: A `tensorboard.context.RequestContext` value.
blob_key: a string identifier for a DataProvider blob.
Returns:
A tuple containing:
data: a raw bytestring of the requested image's contents.
content_type: a string HTTP content type.
"""
data = self._data_provider.read_blob(ctx, blob_key=blob_key)
image_type = imghdr.what(None, data)
content_type = _IMGHDR_TO_MIMETYPE.get(
image_type, _DEFAULT_IMAGE_MIMETYPE
)
return (data, content_type)
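# Illustrative sketch (editor addition): the content-type logic above boils down
# to stdlib imghdr sniffing plus a dict lookup with a generic fallback. The byte
# strings in the usage comment are just magic-number prefixes, not real images.
def _example_guess_image_mimetype(data):
    image_type = imghdr.what(None, data)
    return _IMGHDR_TO_MIMETYPE.get(image_type, _DEFAULT_IMAGE_MIMETYPE)
# _example_guess_image_mimetype(b"\x89PNG\r\n\x1a\n" + b"\x00" * 8) should yield
# "image/png", while unrecognized bytes should fall back to
# "application/octet-stream".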
|
|
import cgi
import random
import shlex
import string
import logging
import pymongo
from pylons import c, g, request
from . import helpers as h
from . import security
log = logging.getLogger(__name__)
_macros = {}
class macro(object):
def __init__(self, context=None):
self._context = context
def __call__(self, func):
_macros[func.__name__] = (func, self._context)
return func
class parse(object):
def __init__(self, context):
self._context = context
def __call__(self, s):
try:
if s.startswith('quote '):
return '[[' + s[len('quote '):] + ']]'
try:
parts = [ unicode(x, 'utf-8') for x in shlex.split(s.encode('utf-8')) ]
if not parts: return '[[' + s + ']]'
macro = self._lookup_macro(parts[0])
if not macro: return '[[' + s + ']]'
for t in parts[1:]:
if '=' not in t:
return '[-%s: missing =-]' % ' '.join(parts)
args = dict(t.split('=', 1) for t in parts[1:])
response = macro(**h.encode_keys(args))
return response
except (ValueError, TypeError), ex:
msg = cgi.escape(u'[[%s]] (%s)' % (s, repr(ex)))
return '\n<div class="error"><pre><code>%s</code></pre></div>' % msg
except Exception, ex:
raise
def _lookup_macro(self, s):
macro, context = _macros.get(s, (None, None))
if context is None or context == self._context:
return macro
else:
return None
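# Illustrative sketch (editor addition): the [[...]] macro body is tokenized
# with shlex, and everything after the macro name must be key=value pairs;
# this standalone helper mirrors just that argument-parsing step from
# parse.__call__ above. The macro text in the usage comment is made up.
def _example_parse_macro_args(macro_body):
    parts = shlex.split(macro_body)
    name, raw_args = parts[0], parts[1:]
    if any('=' not in t for t in raw_args):
        # parse.__call__ would render an error span for this case
        return name, None
    return name, dict(t.split('=', 1) for t in raw_args)
# _example_parse_macro_args('img src=logo.png alt="My logo"') should yield
# ('img', {'src': 'logo.png', 'alt': 'My logo'}).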
template_neighborhood_feeds = string.Template('''
<div class="neighborhood_feed_entry">
<h3><a href="$href">$title</a></h3>
<p>
by <em>$author</em>
<small>$ago</small>
</p>
<p>$description</p>
</div>
''')
@macro('neighborhood-wiki')
def neighborhood_feeds(tool_name, max_number=5, sort='pubdate'):
from allura import model as M
feed = M.Feed.query.find(
dict(
tool_name=tool_name,
neighborhood_id=c.project.neighborhood._id))
feed = feed.sort(sort, pymongo.DESCENDING).limit(int(max_number)).all()
output = '\n'.join(
template_neighborhood_feeds.substitute(dict(
href=item.link,
title=item.title,
author=item.author_name,
ago=h.ago(item.pubdate),
description=item.description))
for item in feed)
return output
template_neighborhood_blog_posts = string.Template('''
<div class="neighborhood_feed_entry">
<h3><a href="$href">$title</a></h3>
<p>
by <em>$author</em>
<small>$ago</small>
</p>
$description
</div>
''')
@macro('neighborhood-wiki')
def neighborhood_blog_posts(max_number=5, sort='timestamp', summary=False):
from forgeblog import model as BM
posts = BM.BlogPost.query.find(dict(
neighborhood_id=c.project.neighborhood._id,
state='published'))
posts = posts.sort(sort, pymongo.DESCENDING).limit(int(max_number)).all()
output = '\n'.join(
template_neighborhood_blog_posts.substitute(dict(
href=post.url(),
title=post.title,
author=post.author().display_name,
ago=h.ago(post.timestamp),
description=summary and ' ' or g.markdown.convert(post.text)))
for post in posts if security.has_access(post, 'read', project=post.app.project)() and
security.has_access(post.app.project, 'read', project=post.app.project)())
return output
@macro()
def project_blog_posts(max_number=5, sort='timestamp', summary=False, mount_point=None):
from forgeblog import model as BM
app_config_ids = []
for conf in c.project.app_configs:
if conf.tool_name.lower() == 'blog' and (mount_point is None or conf.options.mount_point==mount_point):
app_config_ids.append(conf._id)
posts = BM.BlogPost.query.find({'state':'published','app_config_id':{'$in':app_config_ids}})
posts = posts.sort(sort, pymongo.DESCENDING).limit(int(max_number)).all()
output = '\n'.join(
template_neighborhood_blog_posts.substitute(dict(
href=post.url(),
title=post.title,
author=post.author().display_name,
ago=h.ago(post.timestamp),
description=summary and ' ' or g.markdown.convert(post.text)))
for post in posts if security.has_access(post, 'read', project=post.app.project)() and
security.has_access(post.app.project, 'read', project=post.app.project)())
return output
def get_projects_for_macro(category=None, display_mode='grid', sort='last_updated',
show_total=False, limit=100, labels='', award='', private=False,
columns=1, show_proj_icon=True, show_download_button=True, show_awards_banner=True,
grid_view_tools='',
initial_q={}):
from allura.lib.widgets.project_list import ProjectList
from allura.lib import utils
from allura import model as M
# 'trove' is internal substitution for 'category' filter in wiki macro
trove = category
limit = int(limit)
q = dict(
deleted=False,
is_nbhd_project=False)
q.update(initial_q)
if labels:
or_labels = labels.split('|')
q['$or'] = [{'labels': {'$all': l.split(',')}} for l in or_labels]
if trove is not None:
trove = M.TroveCategory.query.get(fullpath=trove)
if award:
aw = M.Award.query.find(dict(
created_by_neighborhood_id=c.project.neighborhood_id,
short=award)).first()
if aw:
ids = [grant.granted_to_project_id for grant in
M.AwardGrant.query.find(dict(
granted_by_neighborhood_id=c.project.neighborhood_id,
award_id=aw._id))]
if '_id' in q:
ids = list(set(q['_id']['$in']).intersection(ids))
q['_id'] = {'$in': ids}
if trove is not None:
q['trove_' + trove.type] = trove._id
sort_key, sort_dir = 'last_updated', pymongo.DESCENDING
if sort == 'alpha':
sort_key, sort_dir = 'name', pymongo.ASCENDING
elif sort == 'random':
sort_key, sort_dir = None, None
elif sort == 'last_registered':
sort_key, sort_dir = '_id', pymongo.DESCENDING
elif sort == '_id':
sort_key, sort_dir = '_id', pymongo.DESCENDING
projects = []
if private:
# Only return private projects.
# Can't filter these with a mongo query directly - have to iterate
# through and check the ACL of each project.
for chunk in utils.chunked_find(M.Project, q, sort_key=sort_key,
sort_dir=sort_dir):
projects.extend([p for p in chunk if p.private])
total = len(projects)
if sort == 'random':
projects = random.sample(projects, min(limit, total))
else:
projects = projects[:limit]
else:
total = None
if sort == 'random':
# MongoDB doesn't have a random sort built in, so...
# 1. Do a direct pymongo query (faster than ORM) to fetch just the
# _ids of objects that match our criteria
# 2. Choose a random sample of those _ids
# 3. Do an ORM query to fetch the objects with those _ids
# 4. Shuffle the results
from ming.orm import mapper
m = mapper(M.Project)
collection = M.main_doc_session.db[m.collection.m.collection_name]
docs = list(collection.find(q, {'_id': 1}))
if docs:
ids = [doc['_id'] for doc in
random.sample(docs, min(limit, len(docs)))]
if '_id' in q:
ids = list(set(q['_id']['$in']).intersection(ids))
q['_id'] = {'$in': ids}
projects = M.Project.query.find(q).all()
random.shuffle(projects)
else:
projects = M.Project.query.find(q).limit(limit).sort(sort_key,
sort_dir).all()
pl = ProjectList()
g.resource_manager.register(pl)
response = pl.display(projects=projects, display_mode=display_mode,
columns=columns, show_proj_icon=show_proj_icon,
show_download_button=show_download_button,
show_awards_banner=show_awards_banner,
grid_view_tools=grid_view_tools)
if show_total:
if total is None:
total = 0
for p in M.Project.query.find(q):
if h.has_access(p, 'read')():
total = total + 1
response = '<p class="macro_projects_total">%s Projects</p>%s' % \
(total, response)
return response
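# Illustrative sketch (editor addition): the `labels` argument above is a '|'
# separated list of OR groups, each group being a ','-separated set of labels
# that must all be present ($all). This standalone helper mirrors just that
# query-building step; the label values in the usage comment are made up.
def _example_labels_to_mongo_query(labels):
    q = {}
    if labels:
        or_labels = labels.split('|')
        q['$or'] = [{'labels': {'$all': l.split(',')}} for l in or_labels]
    return q
# _example_labels_to_mongo_query('python,web|javascript') yields
# {'$or': [{'labels': {'$all': ['python', 'web']}},
#          {'labels': {'$all': ['javascript']}}]}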
@macro('neighborhood-wiki')
def projects(category=None, display_mode='grid', sort='last_updated',
show_total=False, limit=100, labels='', award='', private=False,
columns=1, show_proj_icon=True, show_download_button=True, show_awards_banner=True,
grid_view_tools=''):
initial_q = dict(neighborhood_id=c.project.neighborhood_id)
return get_projects_for_macro(category=category, display_mode=display_mode, sort=sort,
show_total=show_total, limit=limit, labels=labels, award=award, private=private,
columns=columns, show_proj_icon=show_proj_icon, show_download_button=show_download_button,
show_awards_banner=show_awards_banner, grid_view_tools=grid_view_tools,
initial_q=initial_q)
@macro('userproject-wiki')
def my_projects(category=None, display_mode='grid', sort='last_updated',
show_total=False, limit=100, labels='', award='', private=False,
columns=1, show_proj_icon=True, show_download_button=True, show_awards_banner=True,
grid_view_tools=''):
myproj_user = c.project.user_project_of
if myproj_user is None:
myproj_user = c.user.anonymous()
ids = []
for p in myproj_user.my_projects():
ids.append(p._id)
initial_q = dict(_id={'$in': ids})
return get_projects_for_macro(category=category, display_mode=display_mode, sort=sort,
show_total=show_total, limit=limit, labels=labels, award=award, private=private,
columns=columns, show_proj_icon=show_proj_icon, show_download_button=show_download_button,
show_awards_banner=show_awards_banner, grid_view_tools=grid_view_tools,
initial_q=initial_q)
@macro()
def project_screenshots():
from allura.lib.widgets.project_list import ProjectScreenshots
ps = ProjectScreenshots()
g.resource_manager.register(ps)
response = ps.display(project=c.project)
return response
@macro()
def download_button():
from allura import model as M
from allura.lib.widgets.macros import DownloadButton
button = DownloadButton(project=c.project)
g.resource_manager.register(button)
response = button.display(project=c.project)
return response
@macro()
def include(ref=None, **kw):
from allura import model as M
from allura.lib.widgets.macros import Include
if ref is None:
return '[-include-]'
link = M.Shortlink.lookup(ref)
if not link:
return '[[include %s (not found)]]' % ref
artifact = link.ref.artifact
if artifact is None:
return '[[include %s (artifact not found)]]' % ref
included = request.environ.setdefault('allura.macro.included', set())
if artifact in included:
return '[[include %s (already included)]]' % ref
else:
included.add(artifact)
sb = Include()
g.resource_manager.register(sb)
response = sb.display(artifact=artifact, attrs=kw)
return response
@macro()
def img(src=None, **kw):
attrs = ('%s="%s"' % t for t in kw.iteritems())
included = request.environ.setdefault('allura.macro.att_embedded', set())
included.add(src)
if '://' in src:
return '<img src="%s" %s/>' % (src, ' '.join(attrs))
else:
return '<img src="./attachment/%s" %s/>' % (src, ' '.join(attrs))
template_project_admins = string.Template('<a href="$url">$name</a><br/>')
@macro()
def project_admins():
from allura import model as M
output = ''
admin_role = M.ProjectRole.query.get(project_id=c.project._id,name='Admin')
if admin_role:
output = '\n'.join(
template_project_admins.substitute(dict(
url=user_role.user.url(),
name=user_role.user.display_name))
for user_role in admin_role.users_with_role())
return output
|
|
import re
import os
import sys
import types
from copy import copy
from distutils.ccompiler import *
from distutils import ccompiler
from distutils.errors import DistutilsExecError, DistutilsModuleError, \
DistutilsPlatformError
from distutils.sysconfig import customize_compiler
from distutils.version import LooseVersion
from numpy.distutils import log
from numpy.distutils.exec_command import exec_command
from numpy.distutils.misc_util import cyg2win32, is_sequence, mingw32, \
quote_args, msvc_on_amd64
from numpy.distutils.compat import get_exception
def replace_method(klass, method_name, func):
if sys.version_info[0] < 3:
m = types.MethodType(func, None, klass)
else:
# Py3k no longer has unbound methods, so MethodType(func, None, klass) does not work
m = lambda self, *args, **kw: func(self, *args, **kw)
setattr(klass, method_name, m)
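# Illustrative sketch (editor addition): replace_method() is a small Python
# 2/3 compatible monkey-patching helper; this toy class shows the effect
# without touching distutils. The class and function names are made up.
def _example_replace_method():
    class _Greeter(object):
        def greet(self):
            return 'hello'

    def _shout(self):
        return 'HELLO'

    replace_method(_Greeter, 'greet', _shout)
    assert _Greeter().greet() == 'HELLO'
    return _Greeter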
# Using customized CCompiler.spawn.
def CCompiler_spawn(self, cmd, display=None):
"""
Execute a command in a sub-process.
Parameters
----------
cmd : str
The command to execute.
display : str or sequence of str, optional
The text to add to the log file kept by `numpy.distutils`.
If not given, `display` is equal to `cmd`.
Returns
-------
None
Raises
------
DistutilsExecError
If the command failed, i.e. the exit status was not 0.
"""
if display is None:
display = cmd
if is_sequence(display):
display = ' '.join(list(display))
log.info(display)
s,o = exec_command(cmd)
if s:
if is_sequence(cmd):
cmd = ' '.join(list(cmd))
print(o)
if re.search('Too many open files', o):
msg = '\nTry rerunning setup command until build succeeds.'
else:
msg = ''
raise DistutilsExecError('Command "%s" failed with exit status %d%s' % (cmd, s, msg))
replace_method(CCompiler, 'spawn', CCompiler_spawn)
def CCompiler_object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
"""
Return the name of the object files for the given source files.
Parameters
----------
source_filenames : list of str
The list of paths to source files. Paths can be either relative or
absolute, this is handled transparently.
strip_dir : bool, optional
Whether to strip the directory from the returned paths. If True,
the file name prepended by `output_dir` is returned. Default is False.
output_dir : str, optional
If given, this path is prepended to the returned paths to the
object files.
Returns
-------
obj_names : list of str
The list of paths to the object files corresponding to the source
files in `source_filenames`.
"""
if output_dir is None:
output_dir = ''
obj_names = []
for src_name in source_filenames:
base, ext = os.path.splitext(os.path.normpath(src_name))
base = os.path.splitdrive(base)[1] # Chop off the drive
base = base[os.path.isabs(base):] # If abs, chop off leading /
if base.startswith('..'):
# Resolve starting relative path components, middle ones
# (if any) have been handled by os.path.normpath above.
i = base.rfind('..')+2
d = base[:i]
d = os.path.basename(os.path.abspath(d))
base = d + base[i:]
if ext not in self.src_extensions:
raise UnknownFileError("unknown file type '%s' (from '%s')" % (ext, src_name))
if strip_dir:
base = os.path.basename(base)
obj_name = os.path.join(output_dir,base + self.obj_extension)
obj_names.append(obj_name)
return obj_names
replace_method(CCompiler, 'object_filenames', CCompiler_object_filenames)
def CCompiler_compile(self, sources, output_dir=None, macros=None,
include_dirs=None, debug=0, extra_preargs=None,
extra_postargs=None, depends=None):
"""
Compile one or more source files.
Please refer to the Python distutils API reference for more details.
Parameters
----------
sources : list of str
A list of filenames
output_dir : str, optional
Path to the output directory.
macros : list of tuples
A list of macro definitions.
include_dirs : list of str, optional
The directories to add to the default include file search path for
this compilation only.
debug : bool, optional
Whether or not to output debug symbols in or alongside the object
file(s).
extra_preargs, extra_postargs : list of str, optional
Extra command-line arguments to prepend/append to the compiler invocation.
depends : list of str, optional
A list of file names that all targets depend on.
Returns
-------
objects : list of str
A list of object file names, one per source file `sources`.
Raises
------
CompileError
If compilation fails.
"""
# This method is effective only with Python >=2.3 distutils.
# Any changes here should be applied also to fcompiler.compile
# method to support pre Python 2.3 distutils.
if not sources:
return []
# FIXME:RELATIVE_IMPORT
if sys.version_info[0] < 3:
from fcompiler import FCompiler
else:
from numpy.distutils.fcompiler import FCompiler
if isinstance(self, FCompiler):
display = []
for fc in ['f77','f90','fix']:
fcomp = getattr(self,'compiler_'+fc)
if fcomp is None:
continue
display.append("Fortran %s compiler: %s" % (fc, ' '.join(fcomp)))
display = '\n'.join(display)
else:
ccomp = self.compiler_so
display = "C compiler: %s\n" % (' '.join(ccomp),)
log.info(display)
macros, objects, extra_postargs, pp_opts, build = \
self._setup_compile(output_dir, macros, include_dirs, sources,
depends, extra_postargs)
cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)
display = "compile options: '%s'" % (' '.join(cc_args))
if extra_postargs:
display += "\nextra options: '%s'" % (' '.join(extra_postargs))
log.info(display)
# build any sources in same order as they were originally specified
# especially important for fortran .f90 files using modules
if isinstance(self, FCompiler):
objects_to_build = build.keys()
for obj in objects:
if obj in objects_to_build:
src, ext = build[obj]
if self.compiler_type=='absoft':
obj = cyg2win32(obj)
src = cyg2win32(src)
self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
else:
for obj, (src, ext) in build.items():
self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
# Return *all* object filenames, not just the ones we just built.
return objects
replace_method(CCompiler, 'compile', CCompiler_compile)
def CCompiler_customize_cmd(self, cmd, ignore=()):
"""
Customize compiler using distutils command.
Parameters
----------
cmd : class instance
An instance inheriting from `distutils.cmd.Command`.
ignore : sequence of str, optional
List of `CCompiler` commands (without ``'set_'``) that should not be
altered. Strings that are checked for are:
``('include_dirs', 'define', 'undef', 'libraries', 'library_dirs',
'rpath', 'link_objects')``.
Returns
-------
None
"""
log.info('customize %s using %s' % (self.__class__.__name__,
cmd.__class__.__name__))
def allow(attr):
return getattr(cmd, attr, None) is not None and attr not in ignore
if allow('include_dirs'):
self.set_include_dirs(cmd.include_dirs)
if allow('define'):
for (name,value) in cmd.define:
self.define_macro(name, value)
if allow('undef'):
for macro in cmd.undef:
self.undefine_macro(macro)
if allow('libraries'):
self.set_libraries(self.libraries + cmd.libraries)
if allow('library_dirs'):
self.set_library_dirs(self.library_dirs + cmd.library_dirs)
if allow('rpath'):
self.set_runtime_library_dirs(cmd.rpath)
if allow('link_objects'):
self.set_link_objects(cmd.link_objects)
replace_method(CCompiler, 'customize_cmd', CCompiler_customize_cmd)
def _compiler_to_string(compiler):
props = []
mx = 0
keys = compiler.executables.keys()
for key in ['version','libraries','library_dirs',
'object_switch','compile_switch',
'include_dirs','define','undef','rpath','link_objects']:
if key not in keys:
keys.append(key)
for key in keys:
if hasattr(compiler,key):
v = getattr(compiler, key)
mx = max(mx,len(key))
props.append((key,repr(v)))
lines = []
format = '%-' + repr(mx+1) + 's = %s'
for prop in props:
lines.append(format % prop)
return '\n'.join(lines)
def CCompiler_show_customization(self):
"""
Print the compiler customizations to stdout.
Parameters
----------
None
Returns
-------
None
Notes
-----
Printing is only done if the distutils log threshold is < 2.
"""
if 0:
for attrname in ['include_dirs','define','undef',
'libraries','library_dirs',
'rpath','link_objects']:
attr = getattr(self,attrname,None)
if not attr:
continue
log.info("compiler '%s' is set to %s" % (attrname,attr))
try:
self.get_version()
except:
pass
if log._global_log.threshold<2:
print('*'*80)
print(self.__class__)
print(_compiler_to_string(self))
print('*'*80)
replace_method(CCompiler, 'show_customization', CCompiler_show_customization)
def CCompiler_customize(self, dist, need_cxx=0):
"""
Do any platform-specific customization of a compiler instance.
This method calls `distutils.sysconfig.customize_compiler` for
platform-specific customization, as well as optionally remove a flag
to suppress spurious warnings in case C++ code is being compiled.
Parameters
----------
dist : object
This parameter is not used for anything.
need_cxx : bool, optional
Whether or not C++ has to be compiled. If so (True), the
``"-Wstrict-prototypes"`` option is removed to prevent spurious
warnings. Default is False.
Returns
-------
None
Notes
-----
All the default options used by distutils can be extracted with::
from distutils import sysconfig
sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'BASECFLAGS',
'CCSHARED', 'LDSHARED', 'SO')
"""
# See FCompiler.customize for suggested usage.
log.info('customize %s' % (self.__class__.__name__))
customize_compiler(self)
if need_cxx:
# In general, distutils uses -Wstrict-prototypes, but this option is
# not valid for C++ code, only for C. Remove it if it's there to
# avoid a spurious warning on every compilation.
try:
self.compiler_so.remove('-Wstrict-prototypes')
except (AttributeError, ValueError):
pass
if hasattr(self,'compiler') and 'cc' in self.compiler[0]:
if not self.compiler_cxx:
if self.compiler[0].startswith('gcc'):
a, b = 'gcc', 'g++'
else:
a, b = 'cc', 'c++'
self.compiler_cxx = [self.compiler[0].replace(a,b)]\
+ self.compiler[1:]
else:
if hasattr(self,'compiler'):
log.warn("#### %s #######" % (self.compiler,))
log.warn('Missing compiler_cxx fix for '+self.__class__.__name__)
return
replace_method(CCompiler, 'customize', CCompiler_customize)
def simple_version_match(pat=r'[-.\d]+', ignore='', start=''):
"""
Simple matching of version numbers, for use in CCompiler and FCompiler.
Parameters
----------
pat : str, optional
A regular expression matching version numbers.
Default is ``r'[-.\\d]+'``.
ignore : str, optional
A regular expression matching patterns to skip.
Default is ``''``, in which case nothing is skipped.
start : str, optional
A regular expression matching the start of where to start looking
for version numbers.
Default is ``''``, in which case searching is started at the
beginning of the version string given to `matcher`.
Returns
-------
matcher : callable
A function that is appropriate to use as the ``.version_match``
attribute of a `CCompiler` class. `matcher` takes a single parameter,
a version string.
"""
def matcher(self, version_string):
# version string may appear in the second line, so getting rid
# of new lines:
version_string = version_string.replace('\n',' ')
pos = 0
if start:
m = re.match(start, version_string)
if not m:
return None
pos = m.end()
while 1:
m = re.search(pat, version_string[pos:])
if not m:
return None
if ignore and re.match(ignore, m.group(0)):
pos = m.end()
continue
break
return m.group(0)
return matcher
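# Illustrative sketch (editor addition): simple_version_match() returns a
# matcher meant to be bound as a compiler class's `.version_match` attribute;
# here it is called directly, with None standing in for `self`. The banner
# text is a made-up example of compiler output.
def _example_version_match():
    matcher = simple_version_match(start=r'gcc')
    banner = 'gcc (GCC) 4.8.5 20150623 (Red Hat 4.8.5-44)'
    version = matcher(None, banner)
    # With the default pattern this should pick out '4.8.5'.
    return version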
def CCompiler_get_version(self, force=False, ok_status=[0]):
"""
Return compiler version, or None if compiler is not available.
Parameters
----------
force : bool, optional
If True, force a new determination of the version, even if the
compiler already has a version attribute. Default is False.
ok_status : list of int, optional
The list of status values returned by the version look-up process
for which a version string is returned. If the status value is not
in `ok_status`, None is returned. Default is ``[0]``.
Returns
-------
version : str or None
Version string, in the format of `distutils.version.LooseVersion`.
"""
if not force and hasattr(self,'version'):
return self.version
self.find_executables()
try:
version_cmd = self.version_cmd
except AttributeError:
return None
if not version_cmd or not version_cmd[0]:
return None
try:
matcher = self.version_match
except AttributeError:
try:
pat = self.version_pattern
except AttributeError:
return None
def matcher(version_string):
m = re.match(pat, version_string)
if not m:
return None
version = m.group('version')
return version
status, output = exec_command(version_cmd,use_tee=0)
version = None
if status in ok_status:
version = matcher(output)
if version:
version = LooseVersion(version)
self.version = version
return version
replace_method(CCompiler, 'get_version', CCompiler_get_version)
def CCompiler_cxx_compiler(self):
"""
Return the C++ compiler.
Parameters
----------
None
Returns
-------
cxx : class instance
The C++ compiler, as a `CCompiler` instance.
"""
if self.compiler_type=='msvc': return self
cxx = copy(self)
cxx.compiler_so = [cxx.compiler_cxx[0]] + cxx.compiler_so[1:]
if sys.platform.startswith('aix') and 'ld_so_aix' in cxx.linker_so[0]:
# AIX needs the ld_so_aix script included with Python
cxx.linker_so = [cxx.linker_so[0], cxx.compiler_cxx[0]] \
+ cxx.linker_so[2:]
else:
cxx.linker_so = [cxx.compiler_cxx[0]] + cxx.linker_so[1:]
return cxx
replace_method(CCompiler, 'cxx_compiler', CCompiler_cxx_compiler)
compiler_class['intel'] = ('intelccompiler','IntelCCompiler',
"Intel C Compiler for 32-bit applications")
compiler_class['intele'] = ('intelccompiler','IntelItaniumCCompiler',
"Intel C Itanium Compiler for Itanium-based applications")
compiler_class['intelem'] = ('intelccompiler','IntelEM64TCCompiler',
"Intel C Compiler for 64-bit applications")
compiler_class['pathcc'] = ('pathccompiler','PathScaleCCompiler',
"PathScale Compiler for SiCortex-based applications")
ccompiler._default_compilers += (('linux.*','intel'),
('linux.*','intele'),
('linux.*','intelem'),
('linux.*','pathcc'))
if sys.platform == 'win32':
compiler_class['mingw32'] = ('mingw32ccompiler', 'Mingw32CCompiler',
"Mingw32 port of GNU C Compiler for Win32"\
"(for MSC built Python)")
if mingw32():
# On windows platforms, we want to default to mingw32 (gcc)
# because msvc can't build blitz stuff.
log.info('Setting mingw32 as default compiler for nt.')
ccompiler._default_compilers = (('nt', 'mingw32'),) \
+ ccompiler._default_compilers
_distutils_new_compiler = new_compiler
def new_compiler (plat=None,
compiler=None,
verbose=0,
dry_run=0,
force=0):
# Try first C compilers from numpy.distutils.
if plat is None:
plat = os.name
try:
if compiler is None:
compiler = get_default_compiler(plat)
(module_name, class_name, long_description) = compiler_class[compiler]
except KeyError:
msg = "don't know how to compile C/C++ code on platform '%s'" % plat
if compiler is not None:
msg = msg + " with '%s' compiler" % compiler
raise DistutilsPlatformError(msg)
module_name = "numpy.distutils." + module_name
try:
__import__ (module_name)
except ImportError:
msg = str(get_exception())
log.info('%s in numpy.distutils; trying from distutils',
str(msg))
module_name = module_name[6:]
try:
__import__(module_name)
except ImportError:
msg = str(get_exception())
raise DistutilsModuleError("can't compile C/C++ code: unable to load module '%s'" % \
module_name)
try:
module = sys.modules[module_name]
klass = vars(module)[class_name]
except KeyError:
raise DistutilsModuleError(("can't compile C/C++ code: unable to find class '%s' " +
"in module '%s'") % (class_name, module_name))
compiler = klass(None, dry_run, force)
log.debug('new_compiler returns %s' % (klass))
return compiler
ccompiler.new_compiler = new_compiler
_distutils_gen_lib_options = gen_lib_options
def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
library_dirs = quote_args(library_dirs)
runtime_library_dirs = quote_args(runtime_library_dirs)
r = _distutils_gen_lib_options(compiler, library_dirs,
runtime_library_dirs, libraries)
lib_opts = []
for i in r:
if is_sequence(i):
lib_opts.extend(list(i))
else:
lib_opts.append(i)
return lib_opts
ccompiler.gen_lib_options = gen_lib_options
# Also fix up the various compiler modules, which do
# from distutils.ccompiler import gen_lib_options
# Don't bother with mwerks, as we don't support Classic Mac.
for _cc in ['msvc', 'bcpp', 'cygwinc', 'emxc', 'unixc']:
_m = sys.modules.get('distutils.'+_cc+'compiler')
if _m is not None:
setattr(_m, 'gen_lib_options', gen_lib_options)
_distutils_gen_preprocess_options = gen_preprocess_options
def gen_preprocess_options (macros, include_dirs):
include_dirs = quote_args(include_dirs)
return _distutils_gen_preprocess_options(macros, include_dirs)
ccompiler.gen_preprocess_options = gen_preprocess_options
##Fix distutils.util.split_quoted:
# NOTE: I removed this fix in revision 4481 (see ticket #619), but it appears
# that removing this fix causes f2py problems on Windows XP (see ticket #723).
# Specifically, on WinXP when gfortran is installed in a directory path that
# contains spaces, f2py is unable to find it.
import re
import string
_wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
_squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
_dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')
_has_white_re = re.compile(r'\s')
def split_quoted(s):
s = s.strip()
words = []
pos = 0
while s:
m = _wordchars_re.match(s, pos)
end = m.end()
if end == len(s):
words.append(s[:end])
break
if s[end] in string.whitespace: # unescaped, unquoted whitespace: now
words.append(s[:end]) # we definitely have a word delimiter
s = s[end:].lstrip()
pos = 0
elif s[end] == '\\': # preserve whatever is being escaped;
# will become part of the current word
s = s[:end] + s[end+1:]
pos = end+1
else:
if s[end] == "'": # slurp singly-quoted string
m = _squote_re.match(s, end)
elif s[end] == '"': # slurp doubly-quoted string
m = _dquote_re.match(s, end)
else:
raise RuntimeError("this can't happen (bad char '%c')" % s[end])
if m is None:
raise ValueError("bad string (mismatched %s quotes?)" % s[end])
(beg, end) = m.span()
if _has_white_re.search(s[beg+1:end-1]):
s = s[:beg] + s[beg+1:end-1] + s[end:]
pos = m.end() - 2
else:
# Keeping quotes when a quoted word does not contain
# white-space. XXX: send a patch to distutils
pos = m.end()
if pos >= len(s):
words.append(s)
break
return words
ccompiler.split_quoted = split_quoted
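# Example behaviour of the patched split_quoted above (illustrative only; the
# compiler flags and install path shown are hypothetical):
#
#   split_quoted('gcc "C:\\Program Files\\gfortran\\bin" -O2')
#   # -> ['gcc', 'C:\\Program Files\\gfortran\\bin', '-O2']
#   split_quoted('-D"NDEBUG" -O2')
#   # -> ['-D"NDEBUG"', '-O2']   (quotes kept when the quoted word has no whitespace)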
##Fix distutils.util.split_quoted:
# define DISTUTILS_USE_SDK when necessary to workaround distutils/msvccompiler.py bug
msvc_on_amd64()
|
|
# Copyright 2013 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import importlib
import six
import unittest
try:
import unittest.mock as mock
except ImportError:
import mock
from cloudbaseinit.utils import x509constants
class CryptoAPICertManagerTests(unittest.TestCase):
def setUp(self):
self._ctypes = mock.MagicMock()
self._module_patcher = mock.patch.dict(
'sys.modules', {'ctypes': self._ctypes})
self._module_patcher.start()
self.x509 = importlib.import_module("cloudbaseinit.utils.windows.x509")
self._x509_manager = self.x509.CryptoAPICertManager()
def tearDown(self):
self._module_patcher.stop()
@mock.patch('cloudbaseinit.utils.windows.x509.free')
@mock.patch('cloudbaseinit.utils.windows.x509.malloc')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
'CertGetCertificateContextProperty')
def _test_get_cert_thumprint(self, mock_CertGetCertificateContextProperty,
mock_malloc, mock_free, ret_val):
mock_DWORD = self._ctypes.wintypes.DWORD
mock_CSIZET = self._ctypes.c_size_t
mock_cast = self._ctypes.cast
mock_POINTER = self._ctypes.POINTER
mock_byref = self._ctypes.byref
mock_pointer = mock.MagicMock()
fake_cert_context_p = 'fake context'
mock_DWORD.return_value.value = 10
mock_CSIZET.return_value.value = mock_DWORD.return_value.value
mock_CertGetCertificateContextProperty.return_value = ret_val
mock_POINTER.return_value = mock_pointer
mock_cast.return_value.contents = [16]
if not ret_val:
self.assertRaises(self.x509.cryptoapi.CryptoAPIException,
self._x509_manager._get_cert_thumprint,
fake_cert_context_p)
else:
expected = [mock.call(fake_cert_context_p,
self.x509.cryptoapi.CERT_SHA1_HASH_PROP_ID,
None, mock_byref.return_value),
mock.call(fake_cert_context_p,
self.x509.cryptoapi.CERT_SHA1_HASH_PROP_ID,
mock_malloc.return_value,
mock_byref.return_value)]
response = self._x509_manager._get_cert_thumprint(
fake_cert_context_p)
self.assertEqual(
expected,
mock_CertGetCertificateContextProperty.call_args_list)
mock_malloc.assert_called_with(mock_CSIZET.return_value)
mock_cast.assert_called_with(mock_malloc(), mock_pointer)
mock_free.assert_called_with(mock_malloc())
self.assertEqual('10', response)
def test_get_cert_thumprint(self):
self._test_get_cert_thumprint(ret_val=True)
def test_get_cert_thumprint_GetCertificateContextProperty_exception(self):
self._test_get_cert_thumprint(ret_val=False)
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.CryptDestroyKey')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.CryptReleaseContext')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.CryptGenKey')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.CryptAcquireContext')
def _test_generate_key(self, mock_CryptAcquireContext, mock_CryptGenKey,
mock_CryptReleaseContext, mock_CryptDestroyKey,
acquired_context, generate_key_ret_val):
mock_HANDLE = self._ctypes.wintypes.HANDLE
mock_byref = self._ctypes.byref
mock_CryptAcquireContext.return_value = acquired_context
mock_CryptGenKey.return_value = generate_key_ret_val
if not acquired_context:
self.assertRaises(self.x509.cryptoapi.CryptoAPIException,
self._x509_manager._generate_key,
'fake container', True)
else:
if not generate_key_ret_val:
self.assertRaises(self.x509.cryptoapi.CryptoAPIException,
self._x509_manager._generate_key,
'fake container', True)
else:
self._x509_manager._generate_key('fake container', True)
mock_CryptAcquireContext.assert_called_with(
mock_byref(), 'fake container', None,
self.x509.cryptoapi.PROV_RSA_FULL,
self.x509.cryptoapi.CRYPT_MACHINE_KEYSET)
mock_CryptGenKey.assert_called_with(
mock_HANDLE(), self.x509.cryptoapi.AT_SIGNATURE,
0x08000000, mock_HANDLE())
mock_CryptDestroyKey.assert_called_once_with(
mock_HANDLE())
mock_CryptReleaseContext.assert_called_once_with(
mock_HANDLE(), 0)
def test_generate_key(self):
self._test_generate_key(acquired_context=True,
generate_key_ret_val='fake key')
def test_generate_key_GetCertificateContextProperty_exception(self):
self._test_generate_key(acquired_context=False,
generate_key_ret_val='fake key')
def test_generate_key_CryptGenKey_exception(self):
self._test_generate_key(acquired_context=True,
generate_key_ret_val=None)
@mock.patch('cloudbaseinit.utils.windows.x509.free')
@mock.patch('copy.copy')
@mock.patch('cloudbaseinit.utils.windows.x509.malloc')
@mock.patch('cloudbaseinit.utils.windows.x509.CryptoAPICertManager'
'._generate_key')
@mock.patch('cloudbaseinit.utils.windows.x509.CryptoAPICertManager'
'._get_cert_thumprint')
@mock.patch('uuid.uuid4')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
'CertStrToName')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
'CRYPTOAPI_BLOB')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
'CRYPT_KEY_PROV_INFO')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
'CRYPT_ALGORITHM_IDENTIFIER')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
'SYSTEMTIME')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
'GetSystemTime')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
'CertCreateSelfSignCertificate')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
'CertAddEnhancedKeyUsageIdentifier')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
'CertOpenStore')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
'CertAddCertificateContextToStore')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
'CertCloseStore')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
'CertFreeCertificateContext')
def _test_create_self_signed_cert(self, mock_CertFreeCertificateContext,
mock_CertCloseStore,
mock_CertAddCertificateContextToStore,
mock_CertOpenStore,
mock_CertAddEnhancedKeyUsageIdentifier,
mock_CertCreateSelfSignCertificate,
mock_GetSystemTime, mock_SYSTEMTIME,
mock_CRYPT_ALGORITHM_IDENTIFIER,
mock_CRYPT_KEY_PROV_INFO,
mock_CRYPTOAPI_BLOB,
mock_CertStrToName,
mock_uuid4, mock_get_cert_thumprint,
mock_generate_key, mock_malloc,
mock_copy, mock_free, certstr,
certificate, enhanced_key, store_handle,
context_to_store):
mock_POINTER = self._ctypes.POINTER
mock_byref = self._ctypes.byref
mock_cast = self._ctypes.cast
mock_uuid4.return_value = 'fake_name'
mock_CertCreateSelfSignCertificate.return_value = certificate
mock_CertAddEnhancedKeyUsageIdentifier.return_value = enhanced_key
mock_CertStrToName.return_value = certstr
mock_CertOpenStore.return_value = store_handle
mock_CertAddCertificateContextToStore.return_value = context_to_store
if (certstr is None or certificate is None or enhanced_key is None
or store_handle is None or context_to_store is None):
self.assertRaises(self.x509.cryptoapi.CryptoAPIException,
self._x509_manager.create_self_signed_cert,
'fake subject', 10, True,
self.x509.STORE_NAME_MY)
else:
response = self._x509_manager.create_self_signed_cert(
subject='fake subject')
mock_cast.assert_called_with(mock_malloc(), mock_POINTER())
mock_CRYPTOAPI_BLOB.assert_called_once_with()
mock_CRYPT_KEY_PROV_INFO.assert_called_once_with()
mock_CRYPT_ALGORITHM_IDENTIFIER.assert_called_once_with()
mock_SYSTEMTIME.assert_called_once_with()
mock_GetSystemTime.assert_called_once_with(mock_byref())
mock_copy.assert_called_once_with(mock_SYSTEMTIME())
mock_CertCreateSelfSignCertificate.assert_called_once_with(
None, mock_byref(), 0, mock_byref(),
mock_byref(), mock_byref(), mock_byref(), None)
mock_CertAddEnhancedKeyUsageIdentifier.assert_called_with(
mock_CertCreateSelfSignCertificate(),
self.x509.cryptoapi.szOID_PKIX_KP_SERVER_AUTH)
mock_CertOpenStore.assert_called_with(
self.x509.cryptoapi.CERT_STORE_PROV_SYSTEM, 0, 0,
self.x509.cryptoapi.CERT_SYSTEM_STORE_LOCAL_MACHINE,
six.text_type(self.x509.STORE_NAME_MY))
mock_get_cert_thumprint.assert_called_once_with(
mock_CertCreateSelfSignCertificate())
mock_CertCloseStore.assert_called_once_with(store_handle, 0)
mock_CertFreeCertificateContext.assert_called_once_with(
mock_CertCreateSelfSignCertificate())
mock_free.assert_called_once_with(mock_cast())
self.assertEqual(mock_get_cert_thumprint.return_value, response)
mock_generate_key.assert_called_once_with('fake_name', True)
def test_create_self_signed_cert(self):
self._test_create_self_signed_cert(certstr='fake cert name',
certificate='fake certificate',
enhanced_key='fake key',
store_handle='fake handle',
context_to_store='fake context')
def test_create_self_signed_cert_CertStrToName_fail(self):
self._test_create_self_signed_cert(certstr=None,
certificate='fake certificate',
enhanced_key='fake key',
store_handle='fake handle',
context_to_store='fake context')
def test_create_self_signed_cert_CertCreateSelfSignCertificate_fail(self):
self._test_create_self_signed_cert(certstr='fake cert name',
certificate=None,
enhanced_key='fake key',
store_handle='fake handle',
context_to_store='fake context')
def test_create_self_signed_cert_AddEnhancedKeyUsageIdentifier_fail(self):
self._test_create_self_signed_cert(certstr='fake cert name',
certificate='fake certificate',
enhanced_key=None,
store_handle='fake handle',
context_to_store='fake context')
def test_create_self_signed_cert_CertOpenStore_fail(self):
self._test_create_self_signed_cert(certstr='fake cert name',
certificate='fake certificate',
enhanced_key='fake key',
store_handle=None,
context_to_store='fake context')
def test_create_self_signed_cert_AddCertificateContextToStore_fail(self):
self._test_create_self_signed_cert(certstr='fake cert name',
certificate='fake certificate',
enhanced_key='fake key',
store_handle='fake handle',
context_to_store=None)
def test_get_cert_base64(self):
fake_cert_data = ''
fake_cert_data += x509constants.PEM_HEADER + '\n'
fake_cert_data += 'fake cert' + '\n'
fake_cert_data += x509constants.PEM_FOOTER
response = self._x509_manager._get_cert_base64(fake_cert_data)
self.assertEqual('fake cert', response)
@mock.patch('cloudbaseinit.utils.windows.x509.free')
@mock.patch('cloudbaseinit.utils.windows.x509.CryptoAPICertManager'
'._get_cert_thumprint')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
'CertCloseStore')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
'CertFreeCertificateContext')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
'CertGetNameString')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
'CertAddEncodedCertificateToStore')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
'CertOpenStore')
@mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
'CryptStringToBinaryA')
@mock.patch('cloudbaseinit.utils.windows.x509.CryptoAPICertManager'
'._get_cert_base64')
@mock.patch('cloudbaseinit.utils.windows.x509.malloc')
def _test_import_cert(self, mock_malloc, mock_get_cert_base64,
mock_CryptStringToBinaryA, mock_CertOpenStore,
mock_CertAddEncodedCertificateToStore,
mock_CertGetNameString,
mock_CertFreeCertificateContext,
mock_CertCloseStore, mock_get_cert_thumprint,
mock_free, crypttstr, store_handle, add_enc_cert,
upn_len):
mock_POINTER = self._ctypes.POINTER
mock_cast = self._ctypes.cast
mock_byref = self._ctypes.byref
mock_DWORD = self._ctypes.wintypes.DWORD
mock_create_unicode_buffer = self._ctypes.create_unicode_buffer
fake_cert_data = ''
fake_cert_data += x509constants.PEM_HEADER + '\n'
fake_cert_data += 'fake cert' + '\n'
fake_cert_data += x509constants.PEM_FOOTER
mock_get_cert_base64.return_value = 'fake cert'
mock_CryptStringToBinaryA.return_value = crypttstr
mock_CertOpenStore.return_value = store_handle
mock_CertAddEncodedCertificateToStore.return_value = add_enc_cert
mock_CertGetNameString.side_effect = [2, upn_len]
expected = [mock.call('fake cert', len('fake cert'),
self.x509.cryptoapi.CRYPT_STRING_BASE64, None,
mock_byref(), None, None),
mock.call('fake cert', len('fake cert'),
self.x509.cryptoapi.CRYPT_STRING_BASE64,
mock_cast(), mock_byref(), None, None)]
expected2 = [mock.call(mock_POINTER()(),
self.x509.cryptoapi.CERT_NAME_UPN_TYPE,
0, None, None, 0),
mock.call(mock_POINTER()(),
self.x509.cryptoapi.CERT_NAME_UPN_TYPE,
0, None, mock_create_unicode_buffer(), 2)]
if (not crypttstr or store_handle is None or add_enc_cert is None or
upn_len != 2):
self.assertRaises(self.x509.cryptoapi.CryptoAPIException,
self._x509_manager.import_cert, fake_cert_data,
True, self.x509.STORE_NAME_MY)
else:
response = self._x509_manager.import_cert(fake_cert_data)
mock_cast.assert_called_with(mock_malloc(), mock_POINTER())
self.assertEqual(expected,
mock_CryptStringToBinaryA.call_args_list)
mock_CertOpenStore.assert_called_with(
self.x509.cryptoapi.CERT_STORE_PROV_SYSTEM, 0, 0,
self.x509.cryptoapi.CERT_SYSTEM_STORE_LOCAL_MACHINE,
six.text_type(self.x509.STORE_NAME_MY))
mock_CertAddEncodedCertificateToStore.assert_called_with(
mock_CertOpenStore(),
self.x509.cryptoapi.X509_ASN_ENCODING |
self.x509.cryptoapi.PKCS_7_ASN_ENCODING,
mock_cast(), mock_DWORD(),
self.x509.cryptoapi.CERT_STORE_ADD_REPLACE_EXISTING,
mock_byref())
mock_create_unicode_buffer.assert_called_with(2)
self.assertEqual(expected2, mock_CertGetNameString.call_args_list)
mock_get_cert_thumprint.assert_called_once_with(mock_POINTER()())
mock_CertFreeCertificateContext.assert_called_once_with(
mock_POINTER()())
mock_CertCloseStore.assert_called_once_with(
mock_CertOpenStore(), 0)
mock_free.assert_called_once_with(mock_cast())
self.assertEqual(
(mock_get_cert_thumprint(),
mock_create_unicode_buffer().value), response)
mock_get_cert_base64.assert_called_with(fake_cert_data)
def test_import_cert(self):
self._test_import_cert(crypttstr=True, store_handle='fake handle',
add_enc_cert='fake encoded cert', upn_len=2)
def test_import_cert_CryptStringToBinaryA_fail(self):
self._test_import_cert(crypttstr=False, store_handle='fake handle',
add_enc_cert='fake encoded cert', upn_len=2)
def test_import_cert_CertOpenStore_fail(self):
self._test_import_cert(crypttstr=False, store_handle=None,
add_enc_cert='fake encoded cert', upn_len=2)
def test_import_cert_CertAddEncodedCertificateToStore_fail(self):
self._test_import_cert(crypttstr=True, store_handle='fake handle',
add_enc_cert=None, upn_len=2)
def test_import_cert_CertGetNameString_fail(self):
self._test_import_cert(crypttstr=True, store_handle='fake handle',
add_enc_cert='fake encoded cert', upn_len=3)
|
|
import unittest
from meerkat_abacus import model
from meerkat_abacus.codes.variable import Variable
positive = {
"applicable": 1,
"value": 1
}
negative = {
"applicable": 0,
"value": 0
}
class VariableTest(unittest.TestCase):
"""
    Tests to check that the Variable class gives the correct results on test cases
"""
def setUp(self):
pass
def tearDown(self):
pass
def test_not_null(self):
"""
testing the not_null method
"""
agg_variable = model.AggregationVariables(
id=4,
method="not_null",
condition="None",
db_column="index")
variable = Variable(agg_variable)
row = {"index": "hei"}
self.assertEqual(variable.test(row), positive)
row = {"index": ""}
self.assertEqual(variable.test(row), negative)
row = {"index": 0}
self.assertEqual(variable.test(row), negative)
row = {"index": None}
self.assertEqual(variable.test(row), negative)
def test_value(self):
"""
        testing the value method
"""
agg_variable = model.AggregationVariables(
id=4,
method="value",
condition="None",
db_column="index")
variable = Variable(agg_variable)
row = {"index": "hei"}
self.assertEqual(variable.test(row),
{"applicable": True,
"value": "hei"
})
row = {"index": ""}
self.assertEqual(variable.test(row), negative)
row = {"index": 0}
self.assertEqual(variable.test(row), negative)
row = {"index": None}
self.assertEqual(variable.test(row), negative)
def test_between(self):
"""
testing the between method
"""
agg_variable = model.AggregationVariables(
id=4,
method="between",
condition="0,1",
calculation="A**2/(B-4)",
db_column="A,B")
variable = Variable(agg_variable)
row = {"A": "1", "B": "6"}
self.assertEqual(variable.test(row), positive)
row = {"A": "2", "B": "6"}
self.assertEqual(variable.test(row), negative)
row = {"A": "2", "B": "400"}
self.assertEqual(variable.test(row), positive)
row = {"A": "2", "B": "1"}
self.assertEqual(variable.test(row), negative)
row = {"A": "2"} # test if column is missing
self.assertEqual(variable.test(row), negative)
agg_variable = model.AggregationVariables(
id=4,
method="between",
condition="0,1",
calculation="C/(B-4)",
db_column="A,B")
        # Note: we have used C, which is not one of the columns, so the test should raise a NameError
variable = Variable(agg_variable)
row = {"A": "2", "B": "6"}
with self.assertRaises(NameError):
variable.test(row)
# Test with date
agg_variable = model.AggregationVariables(
id=4,
method="between",
condition="1388527200,2019679200", # 2014-2034
calculation="Variable.to_date(A)",
db_column="A")
variable = Variable(agg_variable)
row = {"A": "01-Jan-2016"}
self.assertEqual(variable.test(row), positive)
row = {"A": "01-Jan-2035"}
self.assertEqual(variable.test(row), negative)
row = {"A": "01-Jan-2010"}
self.assertEqual(variable.test(row), negative)
def test_calc(self):
"""
        testing the calc method
"""
agg_variable = model.AggregationVariables(
id=4,
method="calc",
condition="None",
calculation="A+B",
db_column="A,B")
variable = Variable(agg_variable)
row = {"A": "1", "B": "6"}
self.assertEqual(variable.test(row), {
"applicable": True,
"value": 7
}
)
row = {"A": "2", "B": "400"}
self.assertEqual(variable.test(row), {
"applicable": True,
"value": 402
}
)
row = {"A": "2"} # test if column is missing
self.assertEqual(variable.test(row),
{"applicable": False,
"value": 0})
row = {"A": "2", "B": "-2"} # test if result is 0
self.assertEqual(variable.test(row),
{"applicable": True,
"value": 0})
agg_variable = model.AggregationVariables(
id=4,
method="calc",
condition="None",
calculation="A+C",
db_column="A,B")
        # Note: we have used C, which is not one of the columns, so the test should raise a NameError
variable = Variable(agg_variable)
row = {"A": "2", "B": "6"}
with self.assertRaises(NameError):
variable.test(row)
def test_match(self):
agg_variable = model.AggregationVariables(
id=4,
method="match",
db_column="column1",
condition="A")
variable = Variable(agg_variable)
row = {"column1": "A"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "B"}
self.assertEqual(variable.test(row), negative)
row = {"column1": "Aa"}
self.assertEqual(variable.test(row), negative)
agg_variable.condition = "A,C"
variable = Variable(agg_variable)
row = {"column1": "A"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "C"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "B"}
self.assertEqual(variable.test(row), negative)
row = {"column1": "Aa"}
self.assertEqual(variable.test(row), negative)
def test_sub_match(self):
agg_variable = model.AggregationVariables(
id=4,
method="sub_match",
db_column="column1",
condition="A")
variable = Variable(agg_variable)
row = {"column1": "A"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "A3"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "B"}
self.assertEqual(variable.test(row), negative)
agg_variable.condition = "A,C"
variable = Variable(agg_variable)
row = {"column1": "A"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "C"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "A1"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "C3"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "B"}
self.assertEqual(variable.test(row), negative)
def test_and(self):
agg_variable = model.AggregationVariables(
id=4,
method="match and match",
db_column="column1;column2",
condition="A;B")
variable = Variable(agg_variable)
row = {"column1": "A", "column2": "B"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "B", "column2": "A"}
self.assertEqual(variable.test(row), negative)
row = {"column1": "Aa", "column2": "B"}
self.assertEqual(variable.test(row), negative)
agg_variable = model.AggregationVariables(
id=4,
method="match and match",
db_column="column1;column2",
condition="A,C;B")
variable = Variable(agg_variable)
row = {"column1": "A", "column2": "B"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "C", "column2": "B"}
self.assertEqual(variable.test(row), positive)
def test_or(self):
agg_variable = model.AggregationVariables(
id=4,
method="match or match",
db_column="column1;column2",
condition="A;B")
variable = Variable(agg_variable)
row = {"column1": "A", "column2": "B"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "B", "column2": "A"}
self.assertEqual(variable.test(row), negative)
row = {"column1": "Aa", "column2": "B"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "Aa", "column2": "C"}
self.assertEqual(variable.test(row), negative)
agg_variable = model.AggregationVariables(
id=4,
method="match or match",
db_column="column1;column2",
condition="A,C;B")
variable = Variable(agg_variable)
row = {"column1": "A", "column2": "B"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "C", "column2": "D"}
self.assertEqual(variable.test(row), positive)
def test_different_test_types(self):
agg_variable = model.AggregationVariables(
id=4,
method="match and sub_match",
db_column="column1;column2",
condition="A;B")
variable = Variable(agg_variable)
row = {"column1": "A", "column2": "Bb"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "B", "column2": "A"}
self.assertEqual(variable.test(row), negative)
row = {"column1": "Aa", "column2": "B"}
self.assertEqual(variable.test(row), negative)
agg_variable = model.AggregationVariables(
id=4,
method="match and between",
db_column="column1;column2",
calculation="None;column2",
condition="A;4,9")
variable = Variable(agg_variable)
row = {"column1": "A", "column2": "5"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "A", "column2": "3"}
self.assertEqual(variable.test(row), negative)
row = {"column1": "Aa", "column2": "5"}
self.assertEqual(variable.test(row), negative)
agg_variable = model.AggregationVariables(
id=4,
method="sub_match or not_null",
db_column="column1;column2",
condition="A;None")
variable = Variable(agg_variable)
row = {"column1": "A", "column2": "5"}
self.assertEqual(variable.test(row), positive)
row = {"column1": "A", "column2": ""}
self.assertEqual(variable.test(row), positive)
row = {"column1": "B", "column2": ""}
self.assertEqual(variable.test(row), negative)
row = {"column1": "Aa", "column2": "5"}
self.assertEqual(variable.test(row), positive)
def test_no_such_method(self):
agg_variable = model.AggregationVariables(
id=4,
method="no_such_method",
db_column="column1",
condition="A")
with self.assertRaises(NameError):
variable = Variable(agg_variable)
if __name__ == "__main__":
unittest.main()
|
|
from pymutils.process import Process
import pymutils.verifier as verifier
from optparse import OptionParser
import pymutils.http_service as http_service
from pymutils.debug import verbose, debug, log
import collections
import os
import json
import inspect
import signal
import sys
import time
from pymutils.global_storage import Globals
version = "0.2.6.1"
__version__ = version
def parse(filename):
try:
with open(filename, 'r') as f:
config_data = f.read()
except FileNotFoundError:
print("Cannot find file {0}.".format(filename))
exit(1)
except Exception as e:
print("Error while loading file {0}: {1}.".format(filename, e))
exit(2)
try:
jdata = json.JSONDecoder(object_pairs_hook=collections.OrderedDict).decode(config_data)
except ValueError:
print("{0} is not a valid JSON file.".format(filename))
exit(3)
return jdata
def clean_outfile():
if Globals.outfile is not sys.stdout and Globals.outfile is not None:
Globals.outfile.close()
Globals.outfile = None
def graceful_shutdown(signum, frame):
if Globals.in_force_quit:
return
if Globals.shutdown:
if signum == signal.SIGINT:
Globals.in_force_quit = True
Globals.status = "force shutdown"
for proc in Process.processes:
if proc.poll() is None:
proc.kill()
Globals.may_terminate = True
clean_outfile()
return
print("Shutting down gracefully (SIGINT again to terminate immediately)...")
Globals.shutdown = True
Globals.status = "shutdown"
for proc in Process.processes:
if Globals.in_force_quit:
return
try:
if proc.poll() is None:
proc.force_terminate(Globals.terminate_time_allowed)
except Exception:
pass
Globals.may_terminate = True
clean_outfile()
def spawnDaemon(func, conf):
try:
pid = os.fork()
if pid > 0:
return
except OSError as e:
print("fork #1 failed: {0} ({1})".format(e.errno, e.strerror))
sys.exit(6)
os.setsid()
try:
pid = os.fork()
if pid > 0:
sys.exit(0)
except OSError as e:
print("fork #2 failed: {0} ({1})".format(e.errno, e.strerror))
sys.exit(7)
func(conf)
os._exit(os.EX_OK)
def main():
parser = OptionParser()
parser.add_option("-V", "--version", dest="version", default=False, action="store_true", help="Display version and exit.")
parser.add_option("-v", "--verbose", dest="verbose", default=False, action="store_true", help="Display process launch and verification step-by-step.")
parser.add_option("-w", "--debug", dest="debug", default=False, action="store_true", help="Display debug information. Implies verbose.")
parser.add_option("-f", "--file", dest="filename", default="pymanager.json", help="The name of the pymanager file to use, defaults to pymanager.json.", metavar="FILE")
parser.add_option("-d", "--daemon", dest="daemon", default=False, action="store_true", help="Daemonize self after processes are launched.")
parser.add_option("-l", "--logfile", dest="logfile", default=None, help="Send all messages to this logfile instead of output.")
opts, args = parser.parse_args()
if opts.version:
print("pymanager version {0}".format(version))
exit(0)
config = parse(opts.filename)
if opts.debug:
config["verbose"] = 2
elif opts.verbose:
config["verbose"] = 1
else:
config["verbose"] = 0
    if opts.logfile is not None:
config["logfile"] = opts.logfile
if opts.daemon:
spawnDaemon(spawn_and_monitor, config)
return 0
else:
return spawn_and_monitor(config)
def spawn_and_monitor(config):
verifiers = {}
if "verbose" in config:
Globals.verbose = config["verbose"]
if "logfile" in config:
Globals.outfile = open(config["logfile"], "wb", 0)
else:
Globals.outfile = sys.stdout
verbose("Checking HTTP configuration.")
if "http" in config:
hconf = config["http"]
debug("HTTP is present.")
if "enabled" in hconf and hconf["enabled"]:
debug("HTTP is enabled.")
port = 5001
if "port" in hconf:
debug("Port is present in configuration.")
port = hconf["port"]
http_service.fork_http_service(port)
verbose("HTTP service listening on port :{0}".format(port))
else:
debug("HTTP is disabled.")
if "default_shell" in config:
debug("Default shell is present, value: {0}".format(config["default_shell"]))
Globals.default_shell = config["default_shell"]
verbose("Parsing modules list.")
Globals.status = "parsing modules"
if "modules" in config:
for module, definition in config["modules"].items():
debug("Loading module {0}".format(module))
if "verifiers" not in definition:
log("[WARNING] module {0} does not contain a list of verifiers to load.".format(module))
else:
try:
mod = __import__(module)
for v in definition["verifiers"]:
try:
a = getattr(mod, v)
if inspect.isclass(a):
if issubclass(a, verifier.Verifier):
debug("Loading verifier {0}".format(v))
verifiers["{0}.{1}".format(module, v)] = getattr(mod, v)
else:
log("[WARNING] object '{0}' from module {1} is not a subclass of Verifier".format(v, module))
else:
log("[WARNING] object '{0}' from module {1} is not a class".format(v, module))
except AttributeError:
log("[WARNING] missing verifier '{0}' from module {1}".format(v, module))
except ImportError:
log("[WARNING] module {0} not found.".format(module))
verbose("Modules are loaded, parsing processes.")
if not "processes" in config:
log("[ERROR] No processes listed in the configuration file.")
clean_outfile()
return 4
signal.signal(signal.SIGINT, graceful_shutdown)
signal.signal(signal.SIGTERM, graceful_shutdown)
signal.signal(signal.SIGQUIT, graceful_shutdown)
verbose("Processes parsed, launching.")
Globals.status = "launching processes"
if "messages" in config:
Globals.messages = config["messages"]
try:
for key, procdef in config["processes"].items():
verbose("Launching process key '{0}'.".format(key))
if "executable" not in procdef or "arguments" not in procdef:
raise KeyError("Missing executable or arguments in definition for process {0}.".format(key))
cmdargs = [procdef["executable"]]
cmdargs += procdef["arguments"]
vfy = None
if "verifier" in procdef:
if "type" not in procdef["verifier"]:
raise KeyError("Missing verifier type for verifier of process {0}.".format(key))
if procdef["verifier"]["type"] not in verifiers:
raise ValueError("Missing verifier {0} used in process {1}".format(procdef["verifier"]["type"], key))
args = {}
if "arguments" in procdef["verifier"]:
args = procdef["verifier"]["arguments"]
debug("Setting up verifier {0} for process.".format(procdef["verifier"]["type"]))
vfy = verifiers[procdef["verifier"]["type"]](**args)
options = {}
if "options" in procdef:
options = procdef["options"]
verbose("Creating process.")
proc = Process(cmdargs, vfy, **options)
Process.add_process(proc)
verbose("Process creation finished.")
except Exception as e:
etype, _, _ = sys.exc_info()
log("[ERROR] could not set up processes: {0}: {1}".format(etype.__name__, e))
Globals.status = "shutdown"
#traceback.print_exc()
for proc in Process.processes:
try:
proc.kill()
except Exception:
pass
clean_outfile()
return 5
verbose("Finished setting up processes.")
if "keep_alive" in config:
if config["keep_alive"]:
Globals.keep_alive = True
if "graceful_time" in config:
try:
t = int(config["graceful_time"])
if t < 0:
raise ValueError
Globals.terminate_time_allowed = t
except ValueError:
log("[WARNING] invalid graceful_time '{0}', must be a positive number.".format(t))
Globals.status = "running"
runningProcesses = len(Process.processes)
while (runningProcesses or Globals.keep_alive) and not Globals.shutdown:
runningProcesses = 0
for proc in Process.processes:
result = proc.poll()
if result is None:
runningProcesses += 1
time.sleep(5)
if not Globals.keep_alive and not runningProcesses:
Globals.may_terminate = True
verbose("Entering shutdown phase.")
Globals.status = "shutdown"
while not Globals.may_terminate:
time.sleep(5)
clean_outfile()
return 0
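# Illustrative sketch of a pymanager.json exercising the keys read by
# spawn_and_monitor() above; every name, path and value here is hypothetical.
#
# {
#     "logfile": "pymanager.log",
#     "http": {"enabled": true, "port": 5001},
#     "graceful_time": 10,
#     "keep_alive": false,
#     "modules": {
#         "my_verifiers": {"verifiers": ["PortOpenVerifier"]}
#     },
#     "processes": {
#         "web": {
#             "executable": "/usr/bin/python3",
#             "arguments": ["-m", "http.server", "8080"],
#             "verifier": {
#                 "type": "my_verifiers.PortOpenVerifier",
#                 "arguments": {"port": 8080}
#             },
#             "options": {}
#         }
#     }
# }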
if __name__ == "__main__":
exit(main())
|
|
import re
import sys
from numbers import Number
import six
ERRORS = {
'unexp_end_string': u'Unexpected end of string while parsing Lua string.',
'unexp_end_table': u'Unexpected end of table while parsing Lua string.',
'mfnumber_minus': u'Malformed number (no digits after initial minus).',
'mfnumber_dec_point': u'Malformed number (no digits after decimal point).',
'mfnumber_sci': u'Malformed number (bad scientific format).',
}
class ParseError(Exception):
pass
class SLPP(object):
def __init__(self):
self.text = ''
self.ch = ''
self.at = 0
self.len = 0
self.depth = 0
        self.space = re.compile(r'\s', re.M)
        self.alnum = re.compile(r'\w', re.M)
self.newline = '\n'
self.tab = '\t'
def decode(self, text):
if not text or not isinstance(text, six.string_types):
return
# FIXME: only short comments removed
reg = re.compile('--.*$', re.M)
text = reg.sub('', text, 0)
self.text = text
self.at, self.ch, self.depth = 0, '', 0
self.len = len(text)
self.next_chr()
result = self.value()
return result
def encode(self, obj):
self.depth = 0
return self.__encode(obj)
def __encode(self, obj):
s = ''
tab = self.tab
newline = self.newline
if isinstance(obj, str):
s += '"%s"' % obj.replace(r'"', r'\"')
elif six.PY2 and isinstance(obj, unicode):
s += '"%s"' % obj.encode('utf-8').replace(r'"', r'\"')
elif six.PY3 and isinstance(obj, bytes):
s += '"{}"'.format(''.join(r'\x{:02x}'.format(c) for c in obj))
elif isinstance(obj, bool):
s += str(obj).lower()
elif obj is None:
s += 'nil'
elif isinstance(obj, Number):
s += str(obj)
elif isinstance(obj, (list, tuple, dict)):
self.depth += 1
if len(obj) == 0 or (not isinstance(obj, dict) and len([
x for x in obj
if isinstance(x, Number) or (isinstance(x, six.string_types) and len(x) < 10)
]) == len(obj)):
newline = tab = ''
dp = tab * self.depth
s += "%s{%s" % (tab * (self.depth - 2), newline)
if isinstance(obj, dict):
contents = []
all_keys_int = all(isinstance(k, int) for k in obj.keys())
for k, v in obj.items():
if all_keys_int:
contents.append(self.__encode(v))
else:
contents.append(dp + '%s = %s' % (k, self.__encode(v)))
s += (',%s' % newline).join(contents)
else:
s += (',%s' % newline).join(
[dp + self.__encode(el) for el in obj])
self.depth -= 1
s += "%s%s}" % (newline, tab * self.depth)
return s
def white(self):
while self.ch:
if self.space.match(self.ch):
self.next_chr()
else:
break
def next_chr(self):
if self.at >= self.len:
self.ch = None
return None
self.ch = self.text[self.at]
self.at += 1
return True
def value(self):
self.white()
if not self.ch:
return
if self.ch == '{':
return self.object()
if self.ch == "[":
self.next_chr()
if self.ch in ['"', "'", '[']:
return self.string(self.ch)
if self.ch.isdigit() or self.ch == '-':
return self.number()
return self.word()
def string(self, end=None):
s = ''
start = self.ch
if end == '[':
end = ']'
if start in ['"', "'", '[']:
while self.next_chr():
if self.ch == end:
self.next_chr()
if start != "[" or self.ch == ']':
return s
if self.ch == '\\' and start == end:
self.next_chr()
if self.ch != end:
s += '\\'
s += self.ch
raise ParseError(ERRORS['unexp_end_string'])
def object(self):
o = {}
k = None
idx = 0
numeric_keys = False
self.depth += 1
self.next_chr()
self.white()
if self.ch and self.ch == '}':
self.depth -= 1
self.next_chr()
return o # Exit here
else:
while self.ch:
self.white()
if self.ch == '{':
o[idx] = self.object()
idx += 1
continue
elif self.ch == '}':
self.depth -= 1
self.next_chr()
if k is not None:
o[idx] = k
if not numeric_keys and len([key for key in o if isinstance(key, six.string_types + (float, bool, tuple))]) == 0:
ar = []
for key in o:
ar.insert(key, o[key])
o = ar
return o # or here
else:
if self.ch == ',':
self.next_chr()
continue
else:
k = self.value()
if self.ch == ']':
numeric_keys = True
self.next_chr()
self.white()
ch = self.ch
if ch in ('=', ','):
self.next_chr()
self.white()
if ch == '=':
o[k] = self.value()
else:
o[idx] = k
idx += 1
k = None
raise ParseError(ERRORS['unexp_end_table']) # Bad exit here
words = {'true': True, 'false': False, 'nil': None}
def word(self):
s = ''
if self.ch != '\n':
s = self.ch
self.next_chr()
while self.ch is not None and self.alnum.match(self.ch) and s not in self.words:
s += self.ch
self.next_chr()
return self.words.get(s, s)
def number(self):
def next_digit(err):
n = self.ch
self.next_chr()
if not self.ch or not self.ch.isdigit():
raise ParseError(err)
return n
n = ''
try:
if self.ch == '-':
n += next_digit(ERRORS['mfnumber_minus'])
n += self.digit()
if n == '0' and self.ch in ['x', 'X']:
n += self.ch
self.next_chr()
n += self.hex()
else:
if self.ch and self.ch == '.':
n += next_digit(ERRORS['mfnumber_dec_point'])
n += self.digit()
if self.ch and self.ch in ['e', 'E']:
n += self.ch
self.next_chr()
if not self.ch or self.ch not in ('+', '-'):
raise ParseError(ERRORS['mfnumber_sci'])
n += next_digit(ERRORS['mfnumber_sci'])
n += self.digit()
except ParseError:
t, e = sys.exc_info()[:2]
print(e)
return 0
try:
return int(n, 0)
        except ValueError:
pass
return float(n)
def digit(self):
n = ''
while self.ch and self.ch.isdigit():
n += self.ch
self.next_chr()
return n
def hex(self):
n = ''
while self.ch and (self.ch in 'ABCDEFabcdef' or self.ch.isdigit()):
n += self.ch
self.next_chr()
return n
slpp = SLPP()
__all__ = ['slpp']
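# Illustrative usage of the module-level `slpp` instance above (outputs shown
# for a simple round-trip; not part of the original module):
#
#   >>> slpp.decode('{ foo = "bar", baz = 42, nested = { 1, 2, 3 } }')
#   {'foo': 'bar', 'baz': 42, 'nested': [1, 2, 3]}
#   >>> slpp.encode({'answer': 42})
#   '{\n\tanswer = 42\n}'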
|
|
#!/usr/bin/env python3
"""Standalone utility functions for Mininet tests."""
import collections
import os
import socket
import subprocess
import time
# pylint: disable=import-error
from mininet.log import error, output
DEVNULL = open(os.devnull, 'wb', encoding=None) # pylint: disable=consider-using-with
GETPORT = 'GETPORT'
PUTPORTS = 'PUTPORTS'
GETSERIAL = 'GETSERIAL'
LISTPORTS = 'LISTPORTS'
LOCALHOST = '127.0.0.1'
LOCALHOSTV6 = '::1'
FAUCET_DIR = os.getenv('FAUCET_DIR', '../faucet')
RESERVED_FOR_TESTS_PORTS = (179, 5001, 5002, 6633, 6653)
with open('/proc/sys/net/netfilter/nf_conntrack_tcp_timeout_time_wait', encoding='utf-8') as pf:
MIN_PORT_AGE = max(int(pf.read()) / 2, 10)
def flat_test_name(_id):
"""Return short form test name from TestCase ID."""
return '-'.join(_id.split('.')[1:])
def lsof_tcp_listening_cmd(port, ipv, state, terse):
"""Return a command line for lsof for processes with specified TCP state."""
terse_arg = ''
if terse:
terse_arg = '-t'
return 'lsof -b -P -n %s -sTCP:%s -i %u -a -i tcp:%u' % (
terse_arg, state, ipv, port)
def lsof_udp_listening_cmd(port, terse):
"""Return a command line for lsof for processes with specified TCP state."""
terse_arg = ''
if terse:
terse_arg = '-t'
return 'lsof -b -P -n %s -i udp:%u -a' % (
terse_arg, port)
def tcp_listening_cmd(port, ipv=4, state='LISTEN', terse=True):
"""Call lsof_tcp_listening_cmd() with default args."""
return lsof_tcp_listening_cmd(port, ipv, state, terse)
def udp_listening_cmd(port, terse=True):
"""Call lsof_tcp_listening_cmd() with default args."""
return lsof_udp_listening_cmd(port, terse)
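# Illustrative command lines produced by the helpers above for a hypothetical
# port 8080:
#
#   tcp_listening_cmd(8080)
#   # -> 'lsof -b -P -n -t -sTCP:LISTEN -i 4 -a -i tcp:8080'
#   udp_listening_cmd(8080)
#   # -> 'lsof -b -P -n -t -i udp:8080 -a'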
def mininet_dpid(int_dpid):
"""Return stringified hex version, of int DPID for mininet."""
return str('%x' % int(int_dpid))
def str_int_dpid(str_dpid):
"""Return stringified int version, of int or hex DPID from YAML."""
str_dpid = str(str_dpid)
if str_dpid.startswith('0x'):
return str(int(str_dpid, 16))
return str(int(str_dpid))
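# Illustrative DPID conversions (values are arbitrary examples):
#
#   mininet_dpid(123)      # -> '7b'
#   str_int_dpid('0x7b')   # -> '123'
#   str_int_dpid(123)      # -> '123'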
def receive_sock_line(sock):
"""Receive a \n terminated line from a socket."""
buf = ''
while buf.find('\n') <= -1:
buf += sock.recv(2**10).decode()
return buf.strip()
def tcp_listening(port):
"""Return True if any process listening on a port."""
return subprocess.call(
tcp_listening_cmd(port).split(),
stdin=DEVNULL,
stdout=DEVNULL,
stderr=DEVNULL,
close_fds=True) == 0
def udp_listening(port):
"""Return True if any process listening on a port."""
return subprocess.call(
udp_listening_cmd(port).split(),
stdin=DEVNULL,
stdout=DEVNULL,
stderr=DEVNULL,
close_fds=True) == 0
def test_server_request(ports_socket, name, command):
assert name is not None
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.connect(ports_socket)
sock.sendall(('%s,%s\n' % (command, name)).encode())
output('%s %s\n' % (name, command))
buf = receive_sock_line(sock)
responses = [int(i) for i in buf.split('\n')]
sock.close()
if len(responses) == 1:
responses = responses[0]
output('%s %s: %u\n' % (name, command, responses))
return responses
def get_serialno(ports_socket, name):
"""Retrieve serial number from test server."""
return test_server_request(ports_socket, name, GETSERIAL)
def find_free_port(ports_socket, name):
"""Retrieve a free TCP port from test server."""
request_name = '-'.join((name, str(os.getpid())))
while True:
port = test_server_request(ports_socket, request_name, GETPORT)
if not tcp_listening(port):
return port
error('port %u is busy, try another' % port)
def find_free_udp_port(ports_socket, name):
request_name = '-'.join((name, str(os.getpid())))
while True:
port = test_server_request(ports_socket, request_name, GETPORT)
if not udp_listening(port):
return port
error('port %u is busy, try another' % port)
def return_free_ports(ports_socket, name):
"""Notify test server that all ports under name are released."""
return test_server_request(ports_socket, name, PUTPORTS)
def serve_ports(ports_socket, start_free_ports, min_free_ports):
"""Implement a TCP server to dispense free TCP ports."""
ports_q = collections.deque()
free_ports = set()
port_age = {}
serialno = 0
def get_port():
while True:
free_socket = socket.socket()
free_socket.bind(('', 0))
free_port = free_socket.getsockname()[1]
free_socket.close()
if free_port < 1024:
continue
if free_port in RESERVED_FOR_TESTS_PORTS:
continue
if free_port in free_ports:
continue
break
free_ports.add(free_port)
port_age[free_port] = time.time()
return free_port
def queue_free_ports(min_queue_size):
while len(ports_q) < min_queue_size:
port = get_port()
ports_q.append(port)
port_age[port] = time.time()
queue_free_ports(start_free_ports)
ports_by_name = collections.defaultdict(set)
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.bind(ports_socket)
sock.listen(1)
cold_start = True
while True:
connection, _ = sock.accept()
command, name = receive_sock_line(connection).split(',')
response = None
if command == GETSERIAL:
serialno += 1
response = serialno
elif command == PUTPORTS:
ports_returned = 0
for port in ports_by_name[name]:
ports_returned += 1
ports_q.append(port)
port_age[port] = time.time()
del ports_by_name[name]
response = ports_returned
if ports_returned:
cold_start = False
elif command == GETPORT:
while True:
port = ports_q.popleft()
if time.time() - port_age[port] > MIN_PORT_AGE or cold_start:
break
ports_q.append(port)
time.sleep(1)
ports_by_name[name].add(port)
response = port
queue_free_ports(min_free_ports)
elif command == LISTPORTS:
response = list(ports_by_name[name])
if response is not None:
response_str = ''
if isinstance(response, int):
response = [response]
response_str = ''.join(['%u\n' % i for i in response])
connection.sendall(response_str.encode()) # pylint: disable=no-member
connection.close()
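# Illustrative sketch of how the port server above is used: serve_ports()
# blocks, so a harness would run it in a helper process and then call the
# client helpers. The socket path and test name here are hypothetical.
#
#   import multiprocessing
#
#   ports_socket = '/tmp/faucet-test-ports.sock'
#   server = multiprocessing.Process(
#       target=serve_ports, args=(ports_socket, 10, 5))
#   server.start()
#   port = find_free_port(ports_socket, 'example-test')
#   serial = get_serialno(ports_socket, 'example-test')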
def timeout_cmd(cmd, timeout):
"""Return a command line prefaced with a timeout wrappers and stdout/err unbuffered."""
return 'timeout -sKILL %us stdbuf -o0 -e0 %s' % (timeout, cmd)
def timeout_soft_cmd(cmd, timeout):
"""Same as timeout_cmd buf using SIGTERM on timeout."""
return 'timeout %us stdbuf -o0 -e0 %s' % (timeout, cmd)
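# Illustrative output of the timeout wrappers above for a hypothetical command:
#
#   timeout_cmd('ping -c 1 127.0.0.1', 10)
#   # -> 'timeout -sKILL 10s stdbuf -o0 -e0 ping -c 1 127.0.0.1'
#   timeout_soft_cmd('ping -c 1 127.0.0.1', 10)
#   # -> 'timeout 10s stdbuf -o0 -e0 ping -c 1 127.0.0.1'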
|
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import datetime
from frappe import _
import frappe
import frappe.database
import frappe.utils
import frappe.utils.user
from frappe import conf
from frappe.sessions import Session, clear_sessions, delete_session
from frappe.modules.patch_handler import check_session_stopped
from urllib import quote
class HTTPRequest:
def __init__(self):
# Get Environment variables
self.domain = frappe.request.host
if self.domain and self.domain.startswith('www.'):
self.domain = self.domain[4:]
if frappe.get_request_header('X-Forwarded-For'):
frappe.local.request_ip = (frappe.get_request_header('X-Forwarded-For').split(",")[0]).strip()
elif frappe.get_request_header('REMOTE_ADDR'):
frappe.local.request_ip = frappe.get_request_header('REMOTE_ADDR')
else:
frappe.local.request_ip = '127.0.0.1'
# language
self.set_lang()
# load cookies
frappe.local.cookie_manager = CookieManager()
# set db
self.connect()
# login
frappe.local.login_manager = LoginManager()
self.validate_csrf_token()
# write out latest cookies
frappe.local.cookie_manager.init_cookies()
# check status
check_session_stopped()
# run login triggers
if frappe.form_dict.get('cmd')=='login':
frappe.local.login_manager.run_trigger('on_session_creation')
def validate_csrf_token(self):
if frappe.local.request and frappe.local.request.method=="POST":
if not frappe.local.session.data.csrf_token or frappe.local.session.data.device=="mobile":
# not via boot
return
csrf_token = frappe.get_request_header("X-Frappe-CSRF-Token")
if not csrf_token and "csrf_token" in frappe.local.form_dict:
csrf_token = frappe.local.form_dict.csrf_token
del frappe.local.form_dict["csrf_token"]
if frappe.local.session.data.csrf_token != csrf_token:
frappe.local.flags.disable_traceback = True
frappe.throw(_("Invalid Request"), frappe.CSRFTokenError)
def set_lang(self):
from frappe.translate import guess_language
frappe.local.lang = guess_language()
def get_db_name(self):
"""get database name from conf"""
return conf.db_name
def connect(self, ac_name = None):
"""connect to db, from ac_name or db_name"""
frappe.local.db = frappe.database.Database(user = self.get_db_name(), \
password = getattr(conf,'db_password', ''))
class LoginManager:
def __init__(self):
self.user = None
self.info = None
self.full_name = None
self.user_type = None
if frappe.local.form_dict.get('cmd')=='login' or frappe.local.request.path=="/api/method/login":
self.login()
self.resume = False
else:
try:
self.resume = True
self.make_session(resume=True)
self.set_user_info(resume=True)
except AttributeError:
self.user = "Guest"
self.make_session()
self.set_user_info()
def login(self):
# clear cache
frappe.clear_cache(user = frappe.form_dict.get('usr'))
self.authenticate()
self.post_login()
def post_login(self):
self.run_trigger('on_login')
self.validate_ip_address()
self.validate_hour()
self.make_session()
self.set_user_info()
def set_user_info(self, resume=False):
# set sid again
frappe.local.cookie_manager.init_cookies()
self.info = frappe.db.get_value("User", self.user,
["user_type", "first_name", "last_name", "user_image"], as_dict=1)
self.full_name = " ".join(filter(None, [self.info.first_name,
self.info.last_name]))
self.user_type = self.info.user_type
if self.info.user_type=="Website User":
frappe.local.cookie_manager.set_cookie("system_user", "no")
if not resume:
frappe.local.response["message"] = "No App"
else:
frappe.local.cookie_manager.set_cookie("system_user", "yes")
if not resume:
frappe.local.response['message'] = 'Logged In'
if not resume:
frappe.response["full_name"] = self.full_name
frappe.local.cookie_manager.set_cookie("full_name", self.full_name)
frappe.local.cookie_manager.set_cookie("user_id", self.user)
frappe.local.cookie_manager.set_cookie("user_image", self.info.user_image or "")
def make_session(self, resume=False):
# start session
frappe.local.session_obj = Session(user=self.user, resume=resume,
full_name=self.full_name, user_type=self.user_type)
# reset user if changed to Guest
self.user = frappe.local.session_obj.user
frappe.local.session = frappe.local.session_obj.data
self.clear_active_sessions()
def clear_active_sessions(self):
if not frappe.conf.get("deny_multiple_sessions"):
return
if frappe.session.user != "Guest":
clear_sessions(frappe.session.user, keep_current=True)
def authenticate(self, user=None, pwd=None):
if not (user and pwd):
user, pwd = frappe.form_dict.get('usr'), frappe.form_dict.get('pwd')
if not (user and pwd):
self.fail('Incomplete login details')
self.check_if_enabled(user)
self.user = self.check_password(user, pwd)
def check_if_enabled(self, user):
"""raise exception if user not enabled"""
from frappe.utils import cint
if user=='Administrator': return
if not cint(frappe.db.get_value('User', user, 'enabled')):
self.fail('User disabled or missing')
def check_password(self, user, pwd):
"""check password"""
user = frappe.db.sql("""select `user` from __Auth where `user`=%s
and `password`=password(%s)""", (user, pwd))
if not user:
self.fail('Incorrect password')
else:
return user[0][0] # in correct case
def fail(self, message):
frappe.local.response['message'] = message
raise frappe.AuthenticationError
def run_trigger(self, event='on_login'):
for method in frappe.get_hooks().get(event, []):
frappe.call(frappe.get_attr(method), login_manager=self)
def validate_ip_address(self):
"""check if IP Address is valid"""
ip_list = frappe.db.get_value('User', self.user, 'restrict_ip', ignore=True)
if not ip_list:
return
ip_list = ip_list.replace(",", "\n").split('\n')
ip_list = [i.strip() for i in ip_list]
for ip in ip_list:
if frappe.local.request_ip.startswith(ip):
return
frappe.throw(_("Not allowed from this IP Address"), frappe.AuthenticationError)
def validate_hour(self):
"""check if user is logging in during restricted hours"""
login_before = int(frappe.db.get_value('User', self.user, 'login_before', ignore=True) or 0)
login_after = int(frappe.db.get_value('User', self.user, 'login_after', ignore=True) or 0)
if not (login_before or login_after):
return
from frappe.utils import now_datetime
current_hour = int(now_datetime().strftime('%H'))
if login_before and current_hour > login_before:
frappe.throw(_("Login not allowed at this time"), frappe.AuthenticationError)
if login_after and current_hour < login_after:
frappe.throw(_("Login not allowed at this time"), frappe.AuthenticationError)
def login_as_guest(self):
"""login as guest"""
self.login_as("Guest")
def login_as(self, user):
self.user = user
self.post_login()
def logout(self, arg='', user=None):
if not user: user = frappe.session.user
self.run_trigger('on_logout')
if user == frappe.session.user:
delete_session(frappe.session.sid)
self.clear_cookies()
else:
clear_sessions(user)
def clear_cookies(self):
clear_cookies()
class CookieManager:
def __init__(self):
self.cookies = {}
self.to_delete = []
def init_cookies(self):
if not frappe.local.session.get('sid'): return
# sid expires in 3 days
expires = datetime.datetime.now() + datetime.timedelta(days=3)
if frappe.session.sid:
self.cookies["sid"] = {"value": frappe.session.sid, "expires": expires}
if frappe.session.session_country:
self.cookies["country"] = {"value": frappe.session.get("session_country")}
def set_cookie(self, key, value, expires=None):
self.cookies[key] = {"value": value, "expires": expires}
def delete_cookie(self, to_delete):
if not isinstance(to_delete, (list, tuple)):
to_delete = [to_delete]
self.to_delete.extend(to_delete)
def flush_cookies(self, response):
for key, opts in self.cookies.items():
response.set_cookie(key, quote((opts.get("value") or "").encode('utf-8')),
expires=opts.get("expires"))
# expires yesterday!
expires = datetime.datetime.now() + datetime.timedelta(days=-1)
for key in set(self.to_delete):
response.set_cookie(key, "", expires=expires)
def _update_password(user, password):
frappe.db.sql("""insert into __Auth (user, `password`)
values (%s, password(%s))
on duplicate key update `password`=password(%s)""", (user,
password, password))
@frappe.whitelist()
def get_logged_user():
return frappe.session.user
def clear_cookies():
if hasattr(frappe.local, "session"):
frappe.session.sid = ""
frappe.local.cookie_manager.delete_cookie(["full_name", "user_id", "sid", "user_image", "system_user"])
|
|
import datetime
from exchangelib.errors import ErrorInvalidOperation, ErrorItemNotFound, ErrorMissingInformationReferenceItemId
from exchangelib.ewsdatetime import UTC
from exchangelib.fields import MONDAY, NOVEMBER, THIRD, WEDNESDAY, WEEK_DAY, WEEKEND_DAY
from exchangelib.folders import Calendar
from exchangelib.items import BulkCreateResult, CalendarItem
from exchangelib.items.calendar_item import EXCEPTION, OCCURRENCE, RECURRING_MASTER, SINGLE, AcceptItem, MeetingRequest
from exchangelib.recurrence import (
AbsoluteMonthlyPattern,
AbsoluteYearlyPattern,
DailyPattern,
DeletedOccurrence,
FirstOccurrence,
LastOccurrence,
Occurrence,
Recurrence,
RelativeMonthlyPattern,
RelativeYearlyPattern,
WeeklyPattern,
)
from exchangelib.version import EXCHANGE_2007, Version
from ..common import get_random_date, get_random_datetime_range, get_random_string
from .test_basics import CommonItemTest
class CalendarTest(CommonItemTest):
TEST_FOLDER = "calendar"
FOLDER_CLASS = Calendar
ITEM_CLASS = CalendarItem
def match_cat(self, i):
if isinstance(i, Exception):
return False
return set(i.categories or []) == set(self.categories)
def test_cancel(self):
item = self.get_test_item().save()
res = item.cancel() # Returns (id, changekey) of cancelled item
self.assertIsInstance(res, BulkCreateResult)
with self.assertRaises(ErrorItemNotFound):
# Item is already cancelled
item.cancel()
def test_updating_timestamps(self):
# Test that we can update an item without changing anything, and maintain the hidden timezone fields as local
# timezones, and that returned timestamps are in UTC.
item = self.get_test_item()
item.reminder_is_set = True
item.is_all_day = False
item.recurrence = None
item.save()
item.refresh()
self.assertEqual(item.type, SINGLE)
for i in self.account.calendar.filter(categories__contains=self.categories).only("start", "end", "categories"):
self.assertEqual(i.start, item.start)
self.assertEqual(i.start.tzinfo, UTC)
self.assertEqual(i.end, item.end)
self.assertEqual(i.end.tzinfo, UTC)
self.assertEqual(i._start_timezone, self.account.default_timezone)
self.assertEqual(i._end_timezone, self.account.default_timezone)
i.save(update_fields=["start", "end"])
self.assertEqual(i.start, item.start)
self.assertEqual(i.start.tzinfo, UTC)
self.assertEqual(i.end, item.end)
self.assertEqual(i.end.tzinfo, UTC)
self.assertEqual(i._start_timezone, self.account.default_timezone)
self.assertEqual(i._end_timezone, self.account.default_timezone)
for i in self.account.calendar.filter(categories__contains=self.categories).only("start", "end", "categories"):
self.assertEqual(i.start, item.start)
self.assertEqual(i.start.tzinfo, UTC)
self.assertEqual(i.end, item.end)
self.assertEqual(i.end.tzinfo, UTC)
self.assertEqual(i._start_timezone, self.account.default_timezone)
self.assertEqual(i._end_timezone, self.account.default_timezone)
i.delete()
def test_update_to_non_utc_datetime(self):
# Test updating with non-UTC datetime values. This is a separate code path in UpdateItem code
item = self.get_test_item()
item.reminder_is_set = True
item.is_all_day = False
item.save()
# Update start, end and recurrence with timezoned datetimes. For some reason, EWS throws
        # 'ErrorOccurrenceTimeSpanTooBig' if we go back in time.
start = get_random_date(start_date=item.start.date() + datetime.timedelta(days=1))
dt_start, dt_end = [
dt.astimezone(self.account.default_timezone)
for dt in get_random_datetime_range(start_date=start, end_date=start, tz=self.account.default_timezone)
]
item.start, item.end = dt_start, dt_end
item.recurrence.boundary.start = dt_start.date()
item.save()
item.refresh()
self.assertEqual(item.start, dt_start)
self.assertEqual(item.end, dt_end)
def test_all_day_datetimes(self):
# Test that we can use plain dates for start and end values for all-day items
start = get_random_date()
start_dt, end_dt = get_random_datetime_range(
start_date=start, end_date=start + datetime.timedelta(days=365), tz=self.account.default_timezone
)
# Assign datetimes for start and end
item = self.ITEM_CLASS(
folder=self.test_folder, start=start_dt, end=end_dt, is_all_day=True, categories=self.categories
).save()
# Returned item start and end values should be EWSDate instances
item = self.test_folder.all().only("is_all_day", "start", "end").get(id=item.id, changekey=item.changekey)
self.assertEqual(item.is_all_day, True)
self.assertEqual(item.start, start_dt.date())
self.assertEqual(item.end, end_dt.date())
item.save() # Make sure we can update
item.delete()
# We are also allowed to assign plain dates as values for all-day items
item = self.ITEM_CLASS(
folder=self.test_folder,
start=start_dt.date(),
end=end_dt.date(),
is_all_day=True,
categories=self.categories,
).save()
# Returned item start and end values should be EWSDate instances
item = self.test_folder.all().only("is_all_day", "start", "end").get(id=item.id, changekey=item.changekey)
self.assertEqual(item.is_all_day, True)
self.assertEqual(item.start, start_dt.date())
self.assertEqual(item.end, end_dt.date())
item.save() # Make sure we can update
def test_view(self):
item1 = self.ITEM_CLASS(
account=self.account,
folder=self.test_folder,
subject=get_random_string(16),
start=datetime.datetime(2016, 1, 1, 8, tzinfo=self.account.default_timezone),
end=datetime.datetime(2016, 1, 1, 10, tzinfo=self.account.default_timezone),
categories=self.categories,
)
item2 = self.ITEM_CLASS(
account=self.account,
folder=self.test_folder,
subject=get_random_string(16),
start=datetime.datetime(2016, 2, 1, 8, tzinfo=self.account.default_timezone),
end=datetime.datetime(2016, 2, 1, 10, tzinfo=self.account.default_timezone),
categories=self.categories,
)
self.test_folder.bulk_create(items=[item1, item2])
qs = self.test_folder.view(start=item1.start, end=item2.end)
# Test missing args
with self.assertRaises(TypeError):
self.test_folder.view()
# Test bad args
with self.assertRaises(ValueError):
list(self.test_folder.view(start=item1.end, end=item1.start))
with self.assertRaises(TypeError):
list(self.test_folder.view(start="xxx", end=item1.end))
with self.assertRaises(ValueError):
list(self.test_folder.view(start=item1.start, end=item1.end, max_items=0))
# Test dates
self.assertEqual(
len([i for i in self.test_folder.view(start=item1.start, end=item1.end) if self.match_cat(i)]), 1
)
self.assertEqual(
len([i for i in self.test_folder.view(start=item1.start, end=item2.end) if self.match_cat(i)]), 2
)
# Edge cases. Get a view from the end of item1 to the start of item2. This should logically return 0 items,
# but Exchange sees it differently and returns item1 even though there is no overlap.
self.assertEqual(
len([i for i in self.test_folder.view(start=item1.end, end=item2.start) if self.match_cat(i)]), 1
)
self.assertEqual(
len([i for i in self.test_folder.view(start=item1.start, end=item2.start) if self.match_cat(i)]), 1
)
# Test max_items
self.assertEqual(
len(
[
i
for i in self.test_folder.view(start=item1.start, end=item2.end, max_items=9999)
if self.match_cat(i)
]
),
2,
)
self.assertEqual(self.test_folder.view(start=item1.start, end=item2.end, max_items=1).count(), 1)
# Test client-side ordering
self.assertListEqual(
[i.subject for i in qs.order_by("subject") if self.match_cat(i)], sorted([item1.subject, item2.subject])
)
# Test client-side ordering on a field with no default value and no default value_cls value
self.assertListEqual([i.start for i in qs.order_by("-start") if self.match_cat(i)], [item2.start, item1.start])
# Test client-side ordering on multiple fields. Intentionally sort first on a field where values are equal,
# to see that we can sort on the 2nd field.
self.assertListEqual(
[i.start for i in qs.order_by("categories", "-start") if self.match_cat(i)], [item2.start, item1.start]
)
# Test chaining
self.assertTrue(qs.count() >= 2)
with self.assertRaises(ErrorInvalidOperation):
qs.filter(subject=item1.subject).count() # EWS does not allow restrictions
self.assertListEqual(
[i for i in qs.order_by("subject").values("subject") if i["subject"] in (item1.subject, item2.subject)],
[{"subject": s} for s in sorted([item1.subject, item2.subject])],
)
def test_client_side_ordering_on_mixed_all_day_and_normal(self):
# Test that client-side ordering on start and end fields works for items that are a mix of normal and all-day
# items. This requires us to compare datetime.datetime -> EWSDate values which is not allowed by default
# (EWSDate -> datetime.datetime *is* allowed).
start = datetime.datetime(2016, 1, 1, 8, tzinfo=self.account.default_timezone)
end = datetime.datetime(2016, 1, 1, 10, tzinfo=self.account.default_timezone)
all_day_date = (start - datetime.timedelta(days=1)).date()
item1 = self.ITEM_CLASS(
account=self.account,
folder=self.test_folder,
subject=get_random_string(16),
start=all_day_date,
end=all_day_date,
is_all_day=True,
categories=self.categories,
)
item2 = self.ITEM_CLASS(
account=self.account,
folder=self.test_folder,
subject=get_random_string(16),
start=start,
end=end,
categories=self.categories,
)
self.test_folder.bulk_create(items=[item1, item2])
list(self.test_folder.view(start=start - datetime.timedelta(days=1), end=end).order_by("start"))
list(self.test_folder.view(start=start - datetime.timedelta(days=1), end=end).order_by("-start"))
# Test that client-side ordering on non-selected fields works
list(self.test_folder.view(start=start - datetime.timedelta(days=1), end=end).only("end").order_by("start"))
list(self.test_folder.view(start=start - datetime.timedelta(days=1), end=end).only("end").order_by("-start"))
def test_all_recurring_pattern_types(self):
start = datetime.datetime(2016, 1, 1, 8, tzinfo=self.account.default_timezone)
end = datetime.datetime(2016, 1, 1, 10, tzinfo=self.account.default_timezone)
for pattern in (
AbsoluteYearlyPattern(day_of_month=13, month=NOVEMBER),
RelativeYearlyPattern(weekday=1, week_number=THIRD, month=11),
RelativeYearlyPattern(weekday=WEEKEND_DAY, week_number=3, month=11),
AbsoluteMonthlyPattern(interval=3, day_of_month=13),
RelativeMonthlyPattern(interval=3, weekday=2, week_number=3),
RelativeMonthlyPattern(interval=3, weekday=WEEK_DAY, week_number=3),
WeeklyPattern(interval=3, weekdays=[MONDAY, WEDNESDAY], first_day_of_week=1),
DailyPattern(interval=1),
):
master_item = self.ITEM_CLASS(
folder=self.test_folder,
start=start,
end=end,
recurrence=Recurrence(pattern=pattern, start=start.date(), number=4),
categories=self.categories,
).save()
master_item.refresh()
self.assertEqual(pattern, master_item.recurrence.pattern)
master_item.delete()
def test_recurring_item(self):
# Create a recurring calendar item. Test that occurrence fields are correct on the master item
# Create a master item with 4 daily occurrences from 8:00 to 10:00. 'start' and 'end' are values for the first
# occurrence.
start = datetime.datetime(2016, 1, 1, 8, tzinfo=self.account.default_timezone)
end = datetime.datetime(2016, 1, 1, 10, tzinfo=self.account.default_timezone)
master_item = self.ITEM_CLASS(
folder=self.test_folder,
start=start,
end=end,
recurrence=Recurrence(pattern=DailyPattern(interval=1), start=start.date(), number=4),
categories=self.categories,
).save()
master_item.refresh()
self.assertEqual(master_item.is_recurring, False)
self.assertEqual(master_item.type, RECURRING_MASTER)
self.assertIsInstance(master_item.first_occurrence, FirstOccurrence)
self.assertEqual(master_item.first_occurrence.start, start)
self.assertEqual(master_item.first_occurrence.end, end)
self.assertIsInstance(master_item.last_occurrence, LastOccurrence)
self.assertEqual(master_item.last_occurrence.start, start + datetime.timedelta(days=3))
self.assertEqual(master_item.last_occurrence.end, end + datetime.timedelta(days=3))
self.assertEqual(master_item.modified_occurrences, None)
self.assertEqual(master_item.deleted_occurrences, None)
# Test occurrences as full calendar items, unfolded from the master
range_start, range_end = start, end + datetime.timedelta(days=3)
unfolded = [i for i in self.test_folder.view(start=range_start, end=range_end) if self.match_cat(i)]
self.assertEqual(len(unfolded), 4)
for item in unfolded:
self.assertEqual(item.type, OCCURRENCE)
self.assertEqual(item.is_recurring, True)
first_occurrence = unfolded[0]
self.assertEqual(first_occurrence.id, master_item.first_occurrence.id)
self.assertEqual(first_occurrence.start, master_item.first_occurrence.start)
self.assertEqual(first_occurrence.end, master_item.first_occurrence.end)
second_occurrence = unfolded[1]
self.assertEqual(second_occurrence.start, master_item.start + datetime.timedelta(days=1))
self.assertEqual(second_occurrence.end, master_item.end + datetime.timedelta(days=1))
third_occurrence = unfolded[2]
self.assertEqual(third_occurrence.start, master_item.start + datetime.timedelta(days=2))
self.assertEqual(third_occurrence.end, master_item.end + datetime.timedelta(days=2))
last_occurrence = unfolded[3]
self.assertEqual(last_occurrence.id, master_item.last_occurrence.id)
self.assertEqual(last_occurrence.start, master_item.last_occurrence.start)
self.assertEqual(last_occurrence.end, master_item.last_occurrence.end)
def test_change_occurrence(self):
# Test that we can make changes to individual occurrences and see the effect on the master item.
start = datetime.datetime(2016, 1, 1, 8, tzinfo=self.account.default_timezone)
end = datetime.datetime(2016, 1, 1, 10, tzinfo=self.account.default_timezone)
master_item = self.ITEM_CLASS(
folder=self.test_folder,
start=start,
end=end,
recurrence=Recurrence(pattern=DailyPattern(interval=1), start=start.date(), number=4),
categories=self.categories,
).save()
master_item.refresh()
# Test occurrences as full calendar items, unfolded from the master
range_start, range_end = start, end + datetime.timedelta(days=3)
unfolded = [i for i in self.test_folder.view(start=range_start, end=range_end) if self.match_cat(i)]
# Change the start and end of the second occurrence
second_occurrence = unfolded[1]
second_occurrence.start += datetime.timedelta(hours=1)
second_occurrence.end += datetime.timedelta(hours=1)
second_occurrence.save()
# Test change on the master item
master_item.refresh()
self.assertEqual(len(master_item.modified_occurrences), 1)
modified_occurrence = master_item.modified_occurrences[0]
self.assertIsInstance(modified_occurrence, Occurrence)
self.assertEqual(modified_occurrence.id, second_occurrence.id)
self.assertEqual(modified_occurrence.start, second_occurrence.start)
self.assertEqual(modified_occurrence.end, second_occurrence.end)
self.assertEqual(modified_occurrence.original_start, second_occurrence.start - datetime.timedelta(hours=1))
self.assertEqual(master_item.deleted_occurrences, None)
# Test change on the unfolded item
unfolded = [i for i in self.test_folder.view(start=range_start, end=range_end) if self.match_cat(i)]
self.assertEqual(len(unfolded), 4)
self.assertEqual(unfolded[1].type, EXCEPTION)
self.assertEqual(unfolded[1].start, second_occurrence.start)
self.assertEqual(unfolded[1].end, second_occurrence.end)
self.assertEqual(unfolded[1].original_start, second_occurrence.start - datetime.timedelta(hours=1))
def test_delete_occurrence(self):
# Test that we can delete an occurrence and see the change on the master item
start = datetime.datetime(2016, 1, 1, 8, tzinfo=self.account.default_timezone)
end = datetime.datetime(2016, 1, 1, 10, tzinfo=self.account.default_timezone)
master_item = self.ITEM_CLASS(
folder=self.test_folder,
start=start,
end=end,
recurrence=Recurrence(pattern=DailyPattern(interval=1), start=start.date(), number=4),
categories=self.categories,
).save()
master_item.refresh()
# Test occurrences as full calendar items, unfolded from the master
range_start, range_end = start, end + datetime.timedelta(days=3)
unfolded = [i for i in self.test_folder.view(start=range_start, end=range_end) if self.match_cat(i)]
# Delete the third occurrence
third_occurrence = unfolded[2]
third_occurrence.delete()
# Test change on the master item
master_item.refresh()
self.assertEqual(master_item.modified_occurrences, None)
self.assertEqual(len(master_item.deleted_occurrences), 1)
deleted_occurrence = master_item.deleted_occurrences[0]
self.assertIsInstance(deleted_occurrence, DeletedOccurrence)
self.assertEqual(deleted_occurrence.start, third_occurrence.start)
# Test change on the unfolded items
unfolded = [i for i in self.test_folder.view(start=range_start, end=range_end) if self.match_cat(i)]
self.assertEqual(len(unfolded), 3)
def test_change_occurrence_via_index(self):
# Test updating occurrences via occurrence index without knowing the ID of the occurrence.
start = datetime.datetime(2016, 1, 1, 8, tzinfo=self.account.default_timezone)
end = datetime.datetime(2016, 1, 1, 10, tzinfo=self.account.default_timezone)
master_item = self.ITEM_CLASS(
folder=self.test_folder,
start=start,
end=end,
subject=get_random_string(16),
recurrence=Recurrence(pattern=DailyPattern(interval=1), start=start.date(), number=4),
categories=self.categories,
).save()
# Change the start and end of the second occurrence
second_occurrence = master_item.occurrence(index=2)
second_occurrence.start = start + datetime.timedelta(days=1, hours=1)
second_occurrence.end = end + datetime.timedelta(days=1, hours=1)
second_occurrence.save(update_fields=["start", "end"]) # Test that UpdateItem works with only a few fields
second_occurrence = master_item.occurrence(index=2)
second_occurrence.refresh()
self.assertEqual(second_occurrence.subject, master_item.subject)
second_occurrence.start += datetime.timedelta(hours=1)
second_occurrence.end += datetime.timedelta(hours=1)
second_occurrence.save(update_fields=["start", "end"]) # Test that UpdateItem works after refresh
# Test change on the master item
master_item.refresh()
self.assertEqual(len(master_item.modified_occurrences), 1)
modified_occurrence = master_item.modified_occurrences[0]
self.assertIsInstance(modified_occurrence, Occurrence)
self.assertEqual(modified_occurrence.id, second_occurrence.id)
self.assertEqual(modified_occurrence.start, second_occurrence.start)
self.assertEqual(modified_occurrence.end, second_occurrence.end)
self.assertEqual(modified_occurrence.original_start, second_occurrence.start - datetime.timedelta(hours=2))
self.assertEqual(master_item.deleted_occurrences, None)
def test_delete_occurrence_via_index(self):
# Test deleting occurrences via occurrence index without knowing the ID of the occurrence.
start = datetime.datetime(2016, 1, 1, 8, tzinfo=self.account.default_timezone)
end = datetime.datetime(2016, 1, 1, 10, tzinfo=self.account.default_timezone)
master_item = self.ITEM_CLASS(
folder=self.test_folder,
start=start,
end=end,
subject=get_random_string(16),
recurrence=Recurrence(pattern=DailyPattern(interval=1), start=start.date(), number=4),
categories=self.categories,
).save()
# Delete the third occurrence
third_occurrence = master_item.occurrence(index=3)
third_occurrence.refresh() # Test that GetItem works
third_occurrence = master_item.occurrence(index=3)
third_occurrence.delete() # Test that DeleteItem works
# Test change on the master item
master_item.refresh()
self.assertEqual(master_item.modified_occurrences, None)
self.assertEqual(len(master_item.deleted_occurrences), 1)
deleted_occurrence = master_item.deleted_occurrences[0]
self.assertIsInstance(deleted_occurrence, DeletedOccurrence)
self.assertEqual(deleted_occurrence.start, start + datetime.timedelta(days=2))
def test_get_master_recurrence(self):
# Test getting the master recurrence via an occurrence
start = datetime.datetime(2016, 1, 1, 8, tzinfo=self.account.default_timezone)
end = datetime.datetime(2016, 1, 1, 10, tzinfo=self.account.default_timezone)
recurrence = Recurrence(pattern=DailyPattern(interval=1), start=start.date(), number=4)
master_item = self.ITEM_CLASS(
folder=self.test_folder,
start=start,
end=end,
subject=get_random_string(16),
recurrence=recurrence,
categories=self.categories,
).save()
# Get the master from an occurrence
range_start, range_end = start, end + datetime.timedelta(days=3)
unfolded = [i for i in self.test_folder.view(start=range_start, end=range_end) if self.match_cat(i)]
third_occurrence = unfolded[2]
self.assertEqual(third_occurrence.recurrence, None)
master_from_occurrence = third_occurrence.recurring_master()
master_from_occurrence.refresh() # Test that GetItem works
self.assertEqual(master_from_occurrence.recurrence, recurrence)
self.assertEqual(master_from_occurrence.subject, master_item.subject)
master_from_occurrence = third_occurrence.recurring_master()
master_from_occurrence.subject = get_random_string(16)
master_from_occurrence.save(update_fields=["subject"]) # Test that UpdateItem works
master_from_occurrence.delete() # Test that DeleteItem works
with self.assertRaises(ErrorItemNotFound):
master_item.delete() # Item is gone from the server, so this should fail
with self.assertRaises(ErrorItemNotFound):
third_occurrence.delete() # Item is gone from the server, so this should fail
def test_invalid_updateitem_items(self):
# Test here because CalendarItem is the only item that has a required field with no default
item = self.get_test_item().save()
with self.assertRaises(ValueError) as e:
self.account.bulk_update([(item, [])])
self.assertEqual(e.exception.args[0], "'fieldnames' must not be empty")
# Test a field that has is_required=True
start = item.start
item.start = None
with self.assertRaises(ValueError) as e:
self.account.bulk_update([(item, ["start"])])
self.assertEqual(e.exception.args[0], "'start' is a required field with no default")
item.start = start
# Test a field that has is_required_after_safe=True
uid = item.uid
item.uid = None
with self.assertRaises(ValueError) as e:
self.account.bulk_update([(item, ["uid"])])
self.assertEqual(e.exception.args[0], "'uid' is a required field and may not be deleted")
item.uid = uid
item.is_meeting = None
with self.assertRaises(ValueError) as e:
self.account.bulk_update([(item, ["is_meeting"])])
self.assertEqual(e.exception.args[0], "'is_meeting' is a read-only field")
def test_meeting_request(self):
# The test server only has one account so we cannot test meeting invitations
with self.assertRaises(ValueError) as e:
MeetingRequest(account=self.account).accept()
self.assertEqual(e.exception.args[0], "'id' is a required field with no default")
with self.assertRaises(ValueError) as e:
MeetingRequest(account=self.account).decline()
self.assertEqual(e.exception.args[0], "'id' is a required field with no default")
with self.assertRaises(ValueError) as e:
MeetingRequest(account=self.account).tentatively_accept()
self.assertEqual(e.exception.args[0], "'id' is a required field with no default")
with self.assertRaises(ErrorMissingInformationReferenceItemId) as e:
AcceptItem(account=self.account).send()
def test_clean(self):
start = get_random_date()
start_dt, end_dt = get_random_datetime_range(
start_date=start, end_date=start + datetime.timedelta(days=365), tz=self.account.default_timezone
)
with self.assertRaises(ValueError) as e:
CalendarItem(start=end_dt, end=start_dt).clean(version=self.account.version)
self.assertIn("'end' must be greater than 'start'", e.exception.args[0])
item = CalendarItem(start=start_dt, end=end_dt)
item.clean(version=Version(EXCHANGE_2007))
self.assertEqual(item._meeting_timezone, start_dt.tzinfo)
self.assertEqual(item._start_timezone, None)
self.assertEqual(item._end_timezone, None)
def test_tz_field_for_field_name(self):
self.assertEqual(
CalendarItem(account=self.account).tz_field_for_field_name("start").name,
"_start_timezone",
)
self.assertEqual(
CalendarItem(account=self.account).tz_field_for_field_name("end").name,
"_end_timezone",
)
account = self.get_account()
account.version = Version(EXCHANGE_2007)
self.assertEqual(
CalendarItem(account=account).tz_field_for_field_name("start").name,
"_meeting_timezone",
)
self.assertEqual(
CalendarItem(account=account).tz_field_for_field_name("end").name,
"_meeting_timezone",
)
|
|
'''Created by Yuriy Sivalnev. It's free!!! Totally... I mean it.'''
'''
----------------------------------------------------------------------------------------
INSTALLATION:
Put this file in your scripts folder (C:\Users\__SOME_NAME__\Documents\maya\2015-x64\scripts).
----------------------------------------------------------------------------------------
To paint or tweak curves, type in the Script Editor (in a PYTHON tab):
import ysvCurveTweakerCtx
ysvCurveTweakerCtx.paintCtx('ysvCurveTweaker').run()
and press CTRL+ENTER.
OR, for a UI that sets the step between curve CVs:
import ysvCurveTweakerCtx
ysvCurveTweakerCtx.UI().create()
----------------------------------------------------------------------------------------
USAGE:
Ctrl+Shift+Alt LMB: deselect everything (you need this to work with another curve).
You don't need to select a curve with the select tool; the tool works on any curve that is
on screen (and projects onto any poly that is on screen).
LMB click-drag:
paints a curve on a virtual plane that passes through the current camera's "point of
interest" and is parallel to the view plane.
If the start and/or end of the curve lies on top of some poly, the CVs will snap to that poly object.
Ctrl+LMB click-drag:
paints on a poly surface (no need to select it or make it live!!!!).
Shift+LMB click-drag:
smooths the curve from the start click to where the mouse button is released (you just mark the
CV where the smooth operation starts and the CV where it ends - it is NOT a brush).
Ctrl+Shift+LMB: same effect, but much stronger.
MMB: tweak the curve CV closest to the cursor.
Ctrl+MMB: bigger radius (like a soft-select effect).
Shift+MMB: even bigger radius.
Ctrl+Shift+MMB: radius equal to crvLen/2.
Ctrl+Shift+Alt+MMB: snap the curve ends to the closest polys (as if they were live).
I guess that's all, happy tweaking!!!
'''
import maya.cmds as mc
import maya.OpenMaya as om
import maya.OpenMayaUI as omui
import math
from pymel.core import *
import pymel.core.datatypes as dt
import pymel.core.nodetypes as nt
def getMods():
mods = getModifiers()
Ctrl, Alt, Shift, Wnd = 0, 0, 0, 0
if (mods & 1) > 0: Shift = 1
if (mods & 4) > 0: Ctrl = 1
if (mods & 8) > 0: Alt = 1
if (mods & 16): Wnd = 1
return Ctrl, Alt, Shift
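# Illustrative sketch (my own helper name, not used by the tool): getModifiers() returns a
# bitmask where, per the checks above, bit 1 = Shift, bit 4 = Ctrl, bit 8 = Alt and bit 16
# is the Windows/Command key. Calling this just prints the decoded state.
def _printHeldModifiers():
    ctrl, alt, shift = getMods()
    print('Ctrl: %s  Alt: %s  Shift: %s' % (ctrl, alt, shift))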
class UI():
def __init__(self):
self.winName = "ysvPaintCurveOnPoly"
self.winTitle = "Paint curve on poly UI"
def create(self):
if not optionVar(ex='ysvPaintCurveStep'):
optionVar(fv=('ysvPaintCurveStep', 1.0))
if window(self.winName, exists=True):
deleteUI(self.winName)
with window(self.winName, title=self.winTitle):
with columnLayout():
stepVal = optionVar(q='ysvPaintCurveStep')
self.stepSlider = floatSliderGrp(label='Step : ', field=1, columnWidth=(1, 40), min=0, max=1000, fmx=20, value=stepVal, pre=2, cc=Callback(self.setStep))
showWindow(self.winName)
def setStep(self):
value = floatSliderGrp(self.stepSlider, q=1, v=1)
optionVar(fv=('ysvPaintCurveStep', value))
class baseDraggerCtx():
def __init__(self, ctxName):
self.initSel = ls(sl=1)
self.initInViewObjs = getInVewObjs()
liveMeshes = ls(lv=1, dag=1, et=nt.Mesh, ni=1)
if liveMeshes: self.liveMesh = liveMeshes[0]
else: self.liveMesh = None
self.ctxName = ctxName
if draggerContext(self.ctxName, ex=1):
deleteUI(self.ctxName)
draggerContext(ctxName,
ppc = self.prePress, pc=self.onPress,
dragCommand=self.onDrag,
releaseCommand=self.onRelease,
finalize=self.finalize,
name = ctxName,
cursor='crossHair', undoMode='step')
#print 'context with name {0} created'.format(self.ctxName)
def prePress(self):
try:
self.currCam = PyNode(modelPanel(getPanel(wf=1), q=1, cam=1))
self.viewDir = self.currCam.viewDirection(space='world')
self.eyePnt = self.currCam.getEyePoint(space='world')
self.centerOfInterest = self.currCam.getWorldCenterOfInterest()
except:
inViewMessage(msg='error in prePress: Set focus in 3d viewPort', fade=1, fst=300)
def setCursorData(self, xScreen, yScreen):
self.cursorScreenCoords = (xScreen, yScreen)
self.cursorWPos, self.cursorWDir = viewToWorld(xScreen, yScreen)
def onPress(self):
xScreen, yScreen, dummy = draggerContext(self.ctxName, q=1, ap=1)
self.setCursorData(xScreen, yScreen)
self.btn = draggerContext(self.ctxName, q=1, bu=1)
self.mods = getMods()
def onHold(self):
pass
def onDrag(self):
xScreen, yScreen, dummy = draggerContext(self.ctxName, q=1, dp=1)
self.setCursorData(xScreen, yScreen)
def onRelease(self):
pass
def finalize(self):
pass
def run(self):
if draggerContext(self.ctxName, ex=1):
setToolTo(self.ctxName)
def optionsPopupMenu(self):
pass
class paintCtx(baseDraggerCtx):
def __init__(self, ctxName):
baseDraggerCtx.__init__(self, ctxName)
#modelEditor(getPanel(wf=1), e=1, xray=1)
modelEditor(getPanel(wf=1), e=1, nurbsCurves=1)
self.inMeshes = ls(sl=1, dag=1, et=nt.Mesh, ni=1) + ls(lv=1, dag=1, et=nt.Mesh, ni=1)
if not self.inMeshes:
self.inMeshes = ls(self.initInViewObjs, dag=1, et=nt.Mesh, ni=1)
self.meshFns = [mesh.__apimfn__() for mesh in self.inMeshes]
self.step = optionVar(q='ysvPaintCurveStep')
print 'in view objs: ', self.initInViewObjs
def planeIsect(self, planePnt, planeNormal):
rayLen = 10000
startL = dt.Point(self.cursorWPos)
endL = startL+dt.Vector(self.cursorWDir)*rayLen
return linePlaneIntersect(startL, endL, planePnt, planeNormal)
def paintOnPress(self):
self.startScreenWSPos = self.cursorWPos
pnt = self.planeIsect(self.centerOfInterest, self.viewDir)
self.crv = curve(p=[pnt])
self.crv.dispCV.set(1)
meshes = ls(getInVewObjs(), dag=1, et=nt.Mesh, ni=1)
self.meshFns = [mesh.__apimfn__() for mesh in meshes]
self.prevPnt = pnt
def paintOnDrag(self):
pnt = self.planeIsect(self.centerOfInterest, self.viewDir)
if pnt:
if (pnt - self.prevPnt).length() > self.step:
curve(self.crv, a=1, p=pnt)
self.prevPnt = pnt
def paintOnRelease(self):
sPnt = pointPosition(self.crv.cv[0])
xform(self.crv, sp=sPnt, rp=sPnt, a=1, ws=1)
self.endScreenWSPos = self.cursorWPos
crvLen = self.crv.length()
sCVPos = pointPosition(self.crv.cv[0])
eCVPos = pointPosition(self.crv.cv[-1])
sMeshHit = closestHitToMeshes(self.meshFns, self.startScreenWSPos, sCVPos - self.startScreenWSPos)
eMeshHit = closestHitToMeshes(self.meshFns, self.endScreenWSPos, eCVPos - self.endScreenWSPos)
if sMeshHit:
move(self.crv, sMeshHit - sCVPos, r=1, ws=1)
if eMeshHit:
setToolTo('moveSuperContext')
softSelect(e=1, sse=1, ssf=1, ssc = '1,0,2,0,1,2', ssd = crvLen/1.3)
select(self.crv.cv[-1])
move(eMeshHit, a=1, ws=1)
select(self.crv)
self.crv.dispCV.set(0)
softSelect(e=1, sse=0)
setToolTo(self.ctxName)
def paintOnPolyOnPress(self):
meshHit = closestHitToMeshes(self.meshFns, self.cursorWPos, self.cursorWDir)
if meshHit:
self.paintCrv = curve(p=(meshHit))
select(self.paintCrv)
self.paintCrv.dispCV.set(1)
self.prevHit = meshHit
def paintOnPolyOnDrag(self):
meshHit = closestHitToMeshes(self.meshFns, self.cursorWPos, self.cursorWDir)
try:
if meshHit and self.paintCrv:
if (meshHit - self.prevHit).length() > self.step:
curve(self.paintCrv, append=1, p=(meshHit))
self.prevHit = meshHit
except: pass
def getCVNearCursor(self, singleCurve = None):
xScr, yScr = self.cursorScreenCoords
scrPnt = dt.Point(xScr, yScr, 0)
currView = omui.M3dView.active3dView()
distances = []
if singleCurve:
curves = [singleCurve]
else:
curves = self.curves
for crv in curves:
for i in range(crv.numCVs()):
cvPos = pointPosition(crv.cv[i])
xu, yu = om.MScriptUtil(), om.MScriptUtil()
xPtr, yPtr = xu.asShortPtr(), yu.asShortPtr()
mPnt = om.MPoint(cvPos[0], cvPos[1], cvPos[2])
notClipped = currView.worldToView(mPnt, xPtr, yPtr)
if notClipped:
x = xu.getShort(xPtr)
y = yu.getShort(yPtr)
crvScrPnt = dt.Point(x, y, 0)
dist = (scrPnt - crvScrPnt).length()
distances.append([dist, crv, i])
if distances:
crv, cvId = min(distances, key = lambda x:x[0])[1:]
return crv.cv[cvId]
else:
return []
def smoothOpOnPress(self):
curves = ls(sl=1, dag=1, et=nt.NurbsCurve, ni=1)
if not curves:
self.curves = ls(getInVewObjs(), dag=1, et=nt.NurbsCurve, ni=1)
else:
self.curves = curves
self.startClickCv = self.getCVNearCursor()
select(self.startClickCv.node().getParent())
def smoothOpOnRelease(self, iterations):
crv = self.startClickCv.node()
self.endClickCv = self.getCVNearCursor(crv)
#select(self.endClickCv, add=1)
sId, eId = self.startClickCv.index(), self.endClickCv.index()
s = min(sId, eId)
e = max(sId, eId)
cvs = [cv for cv in crv.cv[s:e]]
pnts = [pointPosition(cv) for cv in cvs]
for i in range(iterations):
smoothCrvPoints(pnts)
for cv, pnt in zip(cvs, pnts):
move(cv, pnt, ws=1, a=1)
def getCVsWeights(self, crv, sId, midId, eId, radiusCount, lockEnds = True):
maxId = crv.numCVs()
weights = [0.0]*maxId
weights[midId] = 1.0
if radiusCount == 0:
if lockEnds:
weights[0] = 0
weights[-1] = 0
return weights
for i in range(1, radiusCount+1):
leftId, rightId = midId-i, midId+i
w = 1.0 - float(i)/(radiusCount+1)
if leftId > 0:
weights[leftId] = w
if rightId < maxId:
weights[rightId] = w
#print weights
if lockEnds:
weights[0] = 0
weights[-1] = 0
return weights
def moveOpOnPress(self, radius):
curves = ls(sl=1, dag=1, et=nt.NurbsCurve, ni=1)
if not curves:
self.curves = ls(getInVewObjs(), dag=1, et=nt.NurbsCurve, ni=1)
else:
self.curves = curves
self.startClickCv = self.getCVNearCursor()
self.crv = self.startClickCv.node()
select(self.crv.getParent())
midId = self.startClickCv.index()
numCVs = self.crv.numCVs()
if radius == "none":
radiusCount = 0
if radius == "short":
radiusCount = int(numCVs/6)
if radius == "mid":
radiusCount = int(numCVs/4)
if radius == "large":
radiusCount = int(numCVs/2)
sId = max(0, midId-radiusCount)
eId = min(numCVs-1, midId+radiusCount)
self.cvsToMove = [self.crv.cv[cvId] for cvId in range(sId, eId+1)] #self.crv.cv[sId:eId]
self.cvsPositions = [pointPosition(cv) for cv in self.cvsToMove]
self.cvWeights = self.getCVsWeights(self.crv, sId, midId, eId, radiusCount)
self.midCvWPos = pointPosition(self.startClickCv)
l0 = self.cursorWPos
l1 = l0 + self.cursorWDir * (self.midCvWPos - self.cursorWPos).length()*10
self.startPlaneProjPnt = linePlaneIntersect(l0, l1, self.midCvWPos, self.cursorWDir)
def moveOpOnDrag(self): #radius in range (0..numCVs/2)
l0 = self.cursorWPos
l1 = l0 + self.cursorWDir * (self.midCvWPos - self.cursorWPos).length()*10
dragPlaneProjPnt = linePlaneIntersect(l0, l1, self.midCvWPos, self.cursorWDir)
offsetVec = dragPlaneProjPnt - self.startPlaneProjPnt
for cv, pos in zip(self.cvsToMove, self.cvsPositions):
w = self.cvWeights[cv.index()]
move(cv, pos+offsetVec*w, ws=1, a=1)
#x, y = self.cursorScreenCoords
#print (x, y)
def moveEndsOpPress(self):
curves = ls(sl=1, dag=1, et=nt.NurbsCurve, ni=1)
if not curves:
self.curves = ls(getInVewObjs(), dag=1, et=nt.NurbsCurve, ni=1)
else:
self.curves = curves
self.startClickCv = self.getCVNearCursor()
self.moveEndsCurve = self.startClickCv.node()
select(self.moveEndsCurve.getParent())
clickCVId = self.startClickCv.index()
idChooserList = [ [self.moveEndsCurve.cv[0], clickCVId], [self.moveEndsCurve.cv[-1], self.moveEndsCurve.numCVs()-clickCVId] ]
self.closestCrvEnd = min(idChooserList, key = lambda x:x[1])[0]
#select(self.closestCrvEnd)
numCVs = self.moveEndsCurve.numCVs()
radiusCount = int(numCVs/1.3)
midId = self.closestCrvEnd.index()
sId = max(0, midId-radiusCount)
eId = min(numCVs-1, midId+radiusCount)
self.cvsToMove = [self.moveEndsCurve.cv[cvId] for cvId in range(sId, eId+1)] #self.crv.cv[sId:eId]
self.cvsPositions = [pointPosition(cv) for cv in self.cvsToMove]
self.cvWeights = self.getCVsWeights(self.moveEndsCurve, sId, midId, eId, radiusCount, False)
self.midCvWPos = pointPosition(self.startClickCv)
l0 = self.cursorWPos
l1 = l0 + self.cursorWDir * (self.midCvWPos - self.cursorWPos).length()*10
self.startPlaneProjPnt = linePlaneIntersect(l0, l1, self.midCvWPos, self.cursorWDir)
def moveEndsOpDrag(self):
meshHit = closestHitToMeshes(self.meshFns, self.cursorWPos, self.cursorWDir)
if meshHit:
offsetVec = meshHit - self.midCvWPos
else:
l0 = self.cursorWPos
l1 = l0 + self.cursorWDir * (self.midCvWPos - self.cursorWPos).length()*10
dragPlaneProjPnt = linePlaneIntersect(l0, l1, self.midCvWPos, self.cursorWDir)
offsetVec = dragPlaneProjPnt - self.startPlaneProjPnt
for cv, pos in zip(self.cvsToMove, self.cvsPositions):
w = self.cvWeights[cv.index()]
move(cv, pos+offsetVec*w, ws=1, a=1)
def moveEndsOpRelease(self):
select(self.moveEndsCurve)
def onPress(self):
baseDraggerCtx.onPress(self)
cntrl, alt, shift = self.mods
if self.btn ==1:
if not cntrl and not alt and not shift:
self.paintOnPress()
elif shift and not alt:
self.smoothOpOnPress()
elif cntrl and not shift and not alt:
self.paintOnPolyOnPress()
elif cntrl and shift and alt:
select(cl=1)
elif self.btn==2:
if not cntrl and not shift and not alt:
self.moveOpOnPress('none')
elif cntrl and not shift and not alt:
self.moveOpOnPress('short')
elif not cntrl and shift and not alt:
self.moveOpOnPress('mid')
elif cntrl and shift and not alt:
self.moveOpOnPress('large')
elif cntrl and shift and alt:
self.moveEndsOpPress()
def onDrag(self):
baseDraggerCtx.onDrag(self)
cntrl, alt, shift = self.mods
if self.btn ==1:
if not cntrl and not alt and not shift:
self.paintOnDrag()
elif cntrl and not shift and not alt:
self.paintOnPolyOnDrag()
#curve(self.crv, append=1, p=[pnt])
elif self.btn==2:
if not cntrl and not shift and not alt:
self.moveOpOnDrag()
elif cntrl and not shift and not alt:
self.moveOpOnDrag()
elif cntrl and shift and not alt:
self.moveOpOnDrag()
elif not cntrl and shift and not alt:
self.moveOpOnDrag()
elif cntrl and shift and alt:
self.moveEndsOpDrag()
mc.refresh(cv=True)
def onRelease(self):
#baseDraggerCtx.onRelease(self)
cntrl, alt, shift = self.mods
if self.btn == 1:
if not cntrl and not shift and not alt:
self.paintOnRelease()
elif shift and not cntrl and not alt:
self.smoothOpOnRelease(1)
elif shift and cntrl and not alt:
self.smoothOpOnRelease(7)
elif self.btn ==2:
if cntrl and shift and alt:
self.moveEndsOpRelease()
#elif not shift and cntrl:
#self.smoothOpOnRelease(7)
try: self.paintCrv.dispCV.set(0)
except: pass
def finalize(self):
#baseDraggerCtx.finalize(self)
#modelEditor(getPanel(wf=1), e=1, xray=0)
pass
def run(self):
baseDraggerCtx.run(self)
def getMods():
mods = getModifiers()
Ctrl, Alt, Shift, Wnd = 0, 0, 0, 0
if (mods & 1) > 0: Shift = 1
if (mods & 4) > 0: Ctrl = 1
if (mods & 8) > 0: Alt = 1
if (mods & 16): Wnd = 1
return Ctrl, Alt, Shift
def selectFromScreen():
select(cl=1)
try:
activeView = omui.M3dView.active3dView()
om.MGlobal.selectFromScreen(0,0,activeView.portWidth(),activeView.portHeight(),om.MGlobal.kReplaceList)
except:
inViewMessage(msg='Failed to select from screen(in ysvUtils.py)', fade=1, fst=500, pos='midCenter')
def getInVewObjs():
sel = ls(sl=1)
select(cl=1)
selectMode(o=1)
hilite(ls(hl=1), u=1)
try:
activeView = omui.M3dView.active3dView()
om.MGlobal.selectFromScreen(0,0,activeView.portWidth(),activeView.portHeight(),om.MGlobal.kReplaceList)
except:
inViewMessage(msg='Failed to select from screen', fade=1, fst=500, pos='midCenter')
result = ls(sl=1)
select(sel)
return result
def viewToWorld(xScreen, yScreen):
pnt, vec = om.MPoint(), om.MVector()
try: omui.M3dView().active3dView().viewToWorld(
int(xScreen), int(yScreen), pnt, vec)
except: pass
return dt.Point(pnt), dt.Vector(vec)
def getEulerRotationQuaternion(normal, upvector):
'''
returns the x,y,z degree angle rotation corresponding to a direction vector
input: upvector (MVector) & normal (MVector)
'''
upvector = om.MVector (upvector[0], upvector[1], upvector[2])
normalvector = om.MVector(normal[0], normal[1], normal[2])
quat = om.MQuaternion(upvector, normalvector)
quatAsEuler = quat.asEulerRotation()
return math.degrees(quatAsEuler.x), math.degrees(quatAsEuler.y), math.degrees(quatAsEuler.z)
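# Rough usage sketch (example values are mine, not part of the tool): rotating the Y-up
# vector onto a +Z normal should come out as roughly a 90 degree rotation around X.
def _exampleNormalToEuler():
    rx, ry, rz = getEulerRotationQuaternion((0, 0, 1), (0, 1, 0))
    print('%.1f %.1f %.1f' % (rx, ry, rz))  # expected to be close to 90.0 0.0 0.0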
def getCurrCam():
try:return PyNode(modelPanel(getPanel(wf=1), q=1, cam=1))
except:return None
def meshIntersect(meshFn, inPos, inDir):
# inMesh object
pos = om.MFloatPoint(inPos[0], inPos[1], inPos[2])
rayDir = om.MFloatVector(inDir[0], inDir[1], inDir[2])
hitPnt = om.MFloatPoint() # intersection
hitFace = om.MScriptUtil()
hitTri = om.MScriptUtil()
hitFace.createFromInt(0)
hitTri.createFromInt(0)
hFacePtr = hitFace.asIntPtr()
hTriPtr = hitTri.asIntPtr()
farclip = getCurrCam().getFarClipPlane()
# print 'getting intersection ',
try:
hit = meshFn.closestIntersection(pos, # RaySource,
rayDir, # rayDirection
None, # faceIds
None, # triIds
True, # idsSorted
om.MSpace.kWorld, # space
farclip, # maxParam
True, # testBothDirections
None, # accelParams
hitPnt, # hitPoint
None, # hitRayParam
hFacePtr, # hitFace
hTriPtr, # hitTriangle
None, # hitBary1
None) # hitBary2
except:
print 'ERROR: hit failed'
# raise
return hit, hitPnt # , hitFace.getInt(hFacePtr), hitTri.getInt(hTriPtr)
def closestHitToMeshes(meshFns, inPos, inDir):
meshHits = []
for meshFn in meshFns:
state, hit = meshIntersect(meshFn, inPos, inDir)
if state:
dist = (dt.Point(hit) - inPos).length()
meshHits.append([dist, dt.Point(hit)])
if meshHits:
return min(meshHits, key = lambda x: x[0])[1]
else:
return False
def linePlaneIntersect(linePnt0, linePnt1, planePnt, planeNormal, epsilon=0.00001):
lineNormal = linePnt1 - linePnt0
w = linePnt0 - planePnt
dot = planeNormal.dot(lineNormal)
if abs(dot) > epsilon:
factor = -planeNormal.dot(w)/dot
return linePnt0 + (linePnt1-linePnt0)*factor
else:
# The segment is parallel to the plane
return None
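# Worked example (illustrative only): a segment from (0, 0, -5) to (0, 0, 5) against the XY
# plane (point at the origin, normal +Z) gives factor = -(-5)/10 = 0.5, i.e. a hit at the origin.
def _examplePlaneHit():
    hit = linePlaneIntersect(dt.Point(0, 0, -5), dt.Point(0, 0, 5),
                             dt.Point(0, 0, 0), dt.Vector(0, 0, 1))
    print(hit)  # expected to be at (or numerically very close to) the origin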
def smoothCrvPoints(points):
for i in range(1, len(points)-1):
points[i] = points[i] * 0.4 + (points[i+1] + points[i-1]) * 0.3
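# Quick illustration with made-up values: each interior point becomes 0.4*itself plus 0.6 times
# the average of its neighbours, the endpoints stay fixed, and the sweep runs in place, so later
# points already see their smoothed left neighbour.
def _exampleSmoothPass():
    values = [0.0, 1.0, 0.0, 1.0, 0.0]  # plain floats work since only + and * are used
    smoothCrvPoints(values)
    print(values)  # roughly [0.0, 0.4, 0.42, 0.53, 0.0]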
|
|
from __future__ import absolute_import, division, print_function
import pytest
pytest.importorskip('numpy')
import dask.array as da
from dask.core import get_deps
from dask.context import set_options
import numpy as np
# temporary until numpy functions migrated
try:
from numpy import nanprod
except ImportError: # pragma: no cover
import dask.array.numpy_compat as npcompat
nanprod = npcompat.nanprod
def eq(a, b):
if isinstance(a, da.Array):
a = a.compute()
if isinstance(b, da.Array):
b = b.compute()
if isinstance(a, (np.generic, np.ndarray)):
return np.all(np.isclose(a, b, equal_nan=True))
else:
return a == b
def same_keys(a, b):
def key(k):
if isinstance(k, str):
return (k, -1, -1, -1)
else:
return k
return sorted(a.dask, key=key) == sorted(b.dask, key=key)
def reduction_1d_test(da_func, darr, np_func, narr, use_dtype=True, split_every=True):
assert eq(da_func(darr), np_func(narr))
assert eq(da_func(darr, keepdims=True), np_func(narr, keepdims=True))
assert same_keys(da_func(darr), da_func(darr))
assert same_keys(da_func(darr, keepdims=True), da_func(darr, keepdims=True))
if use_dtype:
assert eq(da_func(darr, dtype='f8'), np_func(narr, dtype='f8'))
assert eq(da_func(darr, dtype='i8'), np_func(narr, dtype='i8'))
assert same_keys(da_func(darr, dtype='i8'), da_func(darr, dtype='i8'))
if split_every:
a1 = da_func(darr, split_every=2)
a2 = da_func(darr, split_every={0: 2})
assert same_keys(a1, a2)
assert eq(a1, np_func(narr))
assert eq(a2, np_func(narr))
assert eq(da_func(darr, keepdims=True, split_every=2),
np_func(narr, keepdims=True))
@pytest.mark.parametrize('dtype', ['f4', 'i4'])
def test_reductions_1D(dtype):
x = np.arange(5).astype(dtype)
a = da.from_array(x, chunks=(2,))
reduction_1d_test(da.sum, a, np.sum, x)
reduction_1d_test(da.prod, a, np.prod, x)
reduction_1d_test(da.mean, a, np.mean, x)
reduction_1d_test(da.var, a, np.var, x)
reduction_1d_test(da.std, a, np.std, x)
reduction_1d_test(da.min, a, np.min, x, False)
reduction_1d_test(da.max, a, np.max, x, False)
reduction_1d_test(da.any, a, np.any, x, False)
reduction_1d_test(da.all, a, np.all, x, False)
reduction_1d_test(da.nansum, a, np.nansum, x)
reduction_1d_test(da.nanprod, a, nanprod, x)
reduction_1d_test(da.nanmean, a, np.mean, x)
reduction_1d_test(da.nanvar, a, np.var, x)
reduction_1d_test(da.nanstd, a, np.std, x)
reduction_1d_test(da.nanmin, a, np.nanmin, x, False)
reduction_1d_test(da.nanmax, a, np.nanmax, x, False)
def reduction_2d_test(da_func, darr, np_func, narr, use_dtype=True,
split_every=True):
assert eq(da_func(darr), np_func(narr))
assert eq(da_func(darr, keepdims=True), np_func(narr, keepdims=True))
assert eq(da_func(darr, axis=0), np_func(narr, axis=0))
assert eq(da_func(darr, axis=1), np_func(narr, axis=1))
assert eq(da_func(darr, axis=1, keepdims=True),
np_func(narr, axis=1, keepdims=True))
assert eq(da_func(darr, axis=(1, 0)), np_func(narr, axis=(1, 0)))
assert same_keys(da_func(darr, axis=1), da_func(darr, axis=1))
assert same_keys(da_func(darr, axis=(1, 0)), da_func(darr, axis=(1, 0)))
if use_dtype:
assert eq(da_func(darr, dtype='f8'), np_func(narr, dtype='f8'))
assert eq(da_func(darr, dtype='i8'), np_func(narr, dtype='i8'))
if split_every:
a1 = da_func(darr, split_every=4)
a2 = da_func(darr, split_every={0: 2, 1: 2})
assert same_keys(a1, a2)
assert eq(a1, np_func(narr))
assert eq(a2, np_func(narr))
assert eq(da_func(darr, keepdims=True, split_every=4),
np_func(narr, keepdims=True))
assert eq(da_func(darr, axis=0, split_every=2), np_func(narr, axis=0))
assert eq(da_func(darr, axis=0, keepdims=True, split_every=2),
np_func(narr, axis=0, keepdims=True))
assert eq(da_func(darr, axis=1, split_every=2), np_func(narr, axis=1))
assert eq(da_func(darr, axis=1, keepdims=True, split_every=2),
np_func(narr, axis=1, keepdims=True))
@pytest.mark.parametrize('dtype', ['f4', 'i4'])
def test_reductions_2D(dtype):
x = np.arange(1, 122).reshape((11, 11)).astype(dtype)
a = da.from_array(x, chunks=(4, 4))
b = a.sum(keepdims=True)
assert b._keys() == [[(b.name, 0, 0)]]
reduction_2d_test(da.sum, a, np.sum, x)
reduction_2d_test(da.prod, a, np.prod, x)
reduction_2d_test(da.mean, a, np.mean, x)
reduction_2d_test(da.var, a, np.var, x, False) # Difference in dtype algo
reduction_2d_test(da.std, a, np.std, x, False) # Difference in dtype algo
reduction_2d_test(da.min, a, np.min, x, False)
reduction_2d_test(da.max, a, np.max, x, False)
reduction_2d_test(da.any, a, np.any, x, False)
reduction_2d_test(da.all, a, np.all, x, False)
reduction_2d_test(da.nansum, a, np.nansum, x)
reduction_2d_test(da.nanprod, a, nanprod, x)
reduction_2d_test(da.nanmean, a, np.mean, x)
reduction_2d_test(da.nanvar, a, np.nanvar, x, False) # Difference in dtype algo
reduction_2d_test(da.nanstd, a, np.nanstd, x, False) # Difference in dtype algo
reduction_2d_test(da.nanmin, a, np.nanmin, x, False)
reduction_2d_test(da.nanmax, a, np.nanmax, x, False)
@pytest.mark.parametrize(['dfunc', 'func'],
[(da.argmin, np.argmin), (da.argmax, np.argmax),
(da.nanargmin, np.nanargmin), (da.nanargmax, np.nanargmax)])
def test_arg_reductions(dfunc, func):
x = np.random.random((10, 10, 10))
a = da.from_array(x, chunks=(3, 4, 5))
assert eq(dfunc(a), func(x))
assert eq(dfunc(a, 0), func(x, 0))
assert eq(dfunc(a, 1), func(x, 1))
assert eq(dfunc(a, 2), func(x, 2))
with set_options(split_every=2):
assert eq(dfunc(a), func(x))
assert eq(dfunc(a, 0), func(x, 0))
assert eq(dfunc(a, 1), func(x, 1))
assert eq(dfunc(a, 2), func(x, 2))
pytest.raises(ValueError, lambda: dfunc(a, 3))
pytest.raises(TypeError, lambda: dfunc(a, (0, 1)))
x2 = np.arange(10)
a2 = da.from_array(x2, chunks=3)
assert eq(dfunc(a2), func(x2))
assert eq(dfunc(a2, 0), func(x2, 0))
assert eq(dfunc(a2, 0, split_every=2), func(x2, 0))
@pytest.mark.parametrize(['dfunc', 'func'],
[(da.nanargmin, np.nanargmin), (da.nanargmax, np.nanargmax)])
def test_nanarg_reductions(dfunc, func):
x = np.random.random((10, 10, 10))
x[5] = np.nan
a = da.from_array(x, chunks=(3, 4, 5))
assert eq(dfunc(a), func(x))
assert eq(dfunc(a, 0), func(x, 0))
with pytest.raises(ValueError):
dfunc(a, 1).compute()
with pytest.raises(ValueError):
dfunc(a, 2).compute()
x[:] = np.nan
a = da.from_array(x, chunks=(3, 4, 5))
with pytest.raises(ValueError):
dfunc(a).compute()
def test_reductions_2D_nans():
# chunks are a mix of some/all/no NaNs
x = np.full((4, 4), np.nan)
x[:2, :2] = np.array([[1, 2], [3, 4]])
x[2, 2] = 5
x[3, 3] = 6
a = da.from_array(x, chunks=(2, 2))
reduction_2d_test(da.sum, a, np.sum, x, False, False)
reduction_2d_test(da.prod, a, np.prod, x, False, False)
reduction_2d_test(da.mean, a, np.mean, x, False, False)
reduction_2d_test(da.var, a, np.var, x, False, False)
reduction_2d_test(da.std, a, np.std, x, False, False)
reduction_2d_test(da.min, a, np.min, x, False, False)
reduction_2d_test(da.max, a, np.max, x, False, False)
reduction_2d_test(da.any, a, np.any, x, False, False)
reduction_2d_test(da.all, a, np.all, x, False, False)
reduction_2d_test(da.nansum, a, np.nansum, x, False, False)
reduction_2d_test(da.nanprod, a, nanprod, x, False, False)
reduction_2d_test(da.nanmean, a, np.nanmean, x, False, False)
reduction_2d_test(da.nanvar, a, np.nanvar, x, False, False)
reduction_2d_test(da.nanstd, a, np.nanstd, x, False, False)
reduction_2d_test(da.nanmin, a, np.nanmin, x, False, False)
reduction_2d_test(da.nanmax, a, np.nanmax, x, False, False)
assert eq(da.argmax(a), np.argmax(x))
assert eq(da.argmin(a), np.argmin(x))
assert eq(da.nanargmax(a), np.nanargmax(x))
assert eq(da.nanargmin(a), np.nanargmin(x))
assert eq(da.argmax(a, axis=0), np.argmax(x, axis=0))
assert eq(da.argmin(a, axis=0), np.argmin(x, axis=0))
assert eq(da.nanargmax(a, axis=0), np.nanargmax(x, axis=0))
assert eq(da.nanargmin(a, axis=0), np.nanargmin(x, axis=0))
assert eq(da.argmax(a, axis=1), np.argmax(x, axis=1))
assert eq(da.argmin(a, axis=1), np.argmin(x, axis=1))
assert eq(da.nanargmax(a, axis=1), np.nanargmax(x, axis=1))
assert eq(da.nanargmin(a, axis=1), np.nanargmin(x, axis=1))
def test_moment():
def moment(x, n, axis=None):
return ((x - x.mean(axis=axis, keepdims=True))**n).sum(
axis=axis)/np.ones_like(x).sum(axis=axis)
# Poorly conditioned
x = np.array([1., 2., 3.]*10).reshape((3, 10)) + 1e8
a = da.from_array(x, chunks=5)
assert eq(a.moment(2), moment(x, 2))
assert eq(a.moment(3), moment(x, 3))
assert eq(a.moment(4), moment(x, 4))
x = np.arange(1, 122).reshape((11, 11)).astype('f8')
a = da.from_array(x, chunks=(4, 4))
assert eq(a.moment(4, axis=1), moment(x, 4, axis=1))
assert eq(a.moment(4, axis=(1, 0)), moment(x, 4, axis=(1, 0)))
# Tree reduction
assert eq(a.moment(order=4, split_every=4), moment(x, 4))
assert eq(a.moment(order=4, axis=0, split_every=4), moment(x, 4, axis=0))
assert eq(a.moment(order=4, axis=1, split_every=4), moment(x, 4, axis=1))
def test_reductions_with_negative_axes():
x = np.random.random((4, 4, 4))
a = da.from_array(x, chunks=2)
assert eq(a.argmin(axis=-1), x.argmin(axis=-1))
assert eq(a.argmin(axis=-1, split_every=2), x.argmin(axis=-1))
assert eq(a.sum(axis=-1), x.sum(axis=-1))
assert eq(a.sum(axis=(0, -1)), x.sum(axis=(0, -1)))
def test_nan():
x = np.array([[1, np.nan, 3, 4],
[5, 6, 7, np.nan],
[9, 10, 11, 12]])
d = da.from_array(x, chunks=(2, 2))
assert eq(np.nansum(x), da.nansum(d))
assert eq(np.nansum(x, axis=0), da.nansum(d, axis=0))
assert eq(np.nanmean(x, axis=1), da.nanmean(d, axis=1))
assert eq(np.nanmin(x, axis=1), da.nanmin(d, axis=1))
assert eq(np.nanmax(x, axis=(0, 1)), da.nanmax(d, axis=(0, 1)))
assert eq(np.nanvar(x), da.nanvar(d))
assert eq(np.nanstd(x, axis=0), da.nanstd(d, axis=0))
assert eq(np.nanargmin(x, axis=0), da.nanargmin(d, axis=0))
assert eq(np.nanargmax(x, axis=0), da.nanargmax(d, axis=0))
assert eq(nanprod(x), da.nanprod(d))
def test_0d_array():
x = da.mean(da.ones(4, chunks=4), axis=0).compute()
y = np.mean(np.ones(4))
assert type(x) == type(y)
x = da.sum(da.zeros(4, chunks=1)).compute()
y = np.sum(np.zeros(4))
assert type(x) == type(y)
def test_reduction_on_scalar():
x = da.from_array(np.array(1.0), chunks=())
assert (x == x).all()
def assert_max_deps(x, n, eq=True):
dependencies, dependents = get_deps(x.dask)
if eq:
assert max(map(len, dependencies.values())) == n
else:
assert max(map(len, dependencies.values())) <= n
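# Note on the checks below: split_every bounds the fan-in of each tree-reduction step. With
# split_every={0: 2, 1: 3}, a single aggregation task combines at most 2 chunks along axis 0
# and 3 along axis 1, so no task should have more than 2 * 3 dependencies.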
def test_tree_reduce_depth():
# 2D
x = da.from_array(np.arange(242).reshape((11, 22)), chunks=(3, 4))
thresh = {0: 2, 1: 3}
assert_max_deps(x.sum(split_every=thresh), 2 * 3)
assert_max_deps(x.sum(axis=0, split_every=thresh), 2)
assert_max_deps(x.sum(axis=1, split_every=thresh), 3)
assert_max_deps(x.sum(split_every=20), 20, False)
assert_max_deps(x.sum(axis=0, split_every=20), 4)
assert_max_deps(x.sum(axis=1, split_every=20), 6)
# 3D
x = da.from_array(np.arange(11 * 22 * 29).reshape((11, 22, 29)), chunks=(3, 4, 5))
thresh = {0: 2, 1: 3, 2: 4}
assert_max_deps(x.sum(split_every=thresh), 2 * 3 * 4)
assert_max_deps(x.sum(axis=0, split_every=thresh), 2)
assert_max_deps(x.sum(axis=1, split_every=thresh), 3)
assert_max_deps(x.sum(axis=2, split_every=thresh), 4)
assert_max_deps(x.sum(axis=(0, 1), split_every=thresh), 2 * 3)
assert_max_deps(x.sum(axis=(0, 2), split_every=thresh), 2 * 4)
assert_max_deps(x.sum(axis=(1, 2), split_every=thresh), 3 * 4)
assert_max_deps(x.sum(split_every=20), 20, False)
assert_max_deps(x.sum(axis=0, split_every=20), 4)
assert_max_deps(x.sum(axis=1, split_every=20), 6)
assert_max_deps(x.sum(axis=2, split_every=20), 6)
assert_max_deps(x.sum(axis=(0, 1), split_every=20), 20, False)
assert_max_deps(x.sum(axis=(0, 2), split_every=20), 20, False)
assert_max_deps(x.sum(axis=(1, 2), split_every=20), 20, False)
assert_max_deps(x.sum(axis=(0, 1), split_every=40), 4 * 6)
assert_max_deps(x.sum(axis=(0, 2), split_every=40), 4 * 6)
assert_max_deps(x.sum(axis=(1, 2), split_every=40), 6 * 6)
def test_tree_reduce_set_options():
x = da.from_array(np.arange(242).reshape((11, 22)), chunks=(3, 4))
with set_options(split_every={0: 2, 1: 3}):
assert_max_deps(x.sum(), 2 * 3)
assert_max_deps(x.sum(axis=0), 2)
def test_reduction_names():
x = da.ones(5, chunks=(2,))
assert x.sum().name.startswith('sum')
assert 'max' in x.max().name.split('-')[0]
assert x.var().name.startswith('var')
assert x.all().name.startswith('all')
assert any(k[0].startswith('nansum') for k in da.nansum(x).dask)
assert x.mean().name.startswith('mean')
|
|
"""
Field classes.
"""
from __future__ import absolute_import, unicode_literals
import copy
import datetime
import os
import re
import urlparse
from decimal import Decimal, DecimalException
from io import BytesIO
from django.core import validators
from django.core.exceptions import ValidationError
from django.forms.util import ErrorList, from_current_timezone, to_current_timezone
from django.forms.widgets import (TextInput, PasswordInput, HiddenInput,
MultipleHiddenInput, ClearableFileInput, CheckboxInput, Select,
NullBooleanSelect, SelectMultiple, DateInput, DateTimeInput, TimeInput,
SplitDateTimeWidget, SplitHiddenDateTimeWidget, FILE_INPUT_CONTRADICTION)
from django.utils import formats
from django.utils.encoding import smart_unicode, force_unicode
from django.utils.ipv6 import clean_ipv6_address
from django.utils.translation import ugettext_lazy as _
# Provide this import for backwards compatibility.
from django.core.validators import EMPTY_VALUES
__all__ = (
'Field', 'CharField', 'IntegerField',
'DateField', 'TimeField', 'DateTimeField',
'RegexField', 'EmailField', 'FileField', 'ImageField', 'URLField',
'BooleanField', 'NullBooleanField', 'ChoiceField', 'MultipleChoiceField',
'ComboField', 'MultiValueField', 'FloatField', 'DecimalField',
'SplitDateTimeField', 'IPAddressField', 'GenericIPAddressField', 'FilePathField',
'SlugField', 'TypedChoiceField', 'TypedMultipleChoiceField'
)
class Field(object):
widget = TextInput # Default widget to use when rendering this type of Field.
hidden_widget = HiddenInput # Default widget to use when rendering this as "hidden".
default_validators = [] # Default set of validators
default_error_messages = {
'required': _('This field is required.'),
'invalid': _('Enter a valid value.'),
}
# Tracks each time a Field instance is created. Used to retain order.
creation_counter = 0
def __init__(self, required=True, widget=None, label=None, initial=None,
help_text=None, error_messages=None, show_hidden_initial=False,
validators=[], localize=False):
# required -- Boolean that specifies whether the field is required.
# True by default.
# widget -- A Widget class, or instance of a Widget class, that should
# be used for this Field when displaying it. Each Field has a
# default Widget that it'll use if you don't specify this. In
# most cases, the default widget is TextInput.
# label -- A verbose name for this field, for use in displaying this
# field in a form. By default, Django will use a "pretty"
# version of the form field name, if the Field is part of a
# Form.
# initial -- A value to use in this Field's initial display. This value
# is *not* used as a fallback if data isn't given.
# help_text -- An optional string to use as "help text" for this Field.
# error_messages -- An optional dictionary to override the default
# messages that the field will raise.
# show_hidden_initial -- Boolean that specifies if it is needed to render a
# hidden widget with initial value after widget.
# validators -- List of additional validators to use
# localize -- Boolean that specifies if the field should be localized.
if label is not None:
label = smart_unicode(label)
self.required, self.label, self.initial = required, label, initial
self.show_hidden_initial = show_hidden_initial
if help_text is None:
self.help_text = ''
else:
self.help_text = smart_unicode(help_text)
widget = widget or self.widget
if isinstance(widget, type):
widget = widget()
# Trigger the localization machinery if needed.
self.localize = localize
if self.localize:
widget.is_localized = True
# Let the widget know whether it should display as required.
widget.is_required = self.required
# Hook into self.widget_attrs() for any Field-specific HTML attributes.
extra_attrs = self.widget_attrs(widget)
if extra_attrs:
widget.attrs.update(extra_attrs)
self.widget = widget
# Increase the creation counter, and save our local copy.
self.creation_counter = Field.creation_counter
Field.creation_counter += 1
messages = {}
for c in reversed(self.__class__.__mro__):
messages.update(getattr(c, 'default_error_messages', {}))
messages.update(error_messages or {})
self.error_messages = messages
self.validators = self.default_validators + validators
def prepare_value(self, value):
return value
def to_python(self, value):
return value
def validate(self, value):
if value in validators.EMPTY_VALUES and self.required:
raise ValidationError(self.error_messages['required'])
def run_validators(self, value):
if value in validators.EMPTY_VALUES:
return
errors = []
for v in self.validators:
try:
v(value)
except ValidationError as e:
if hasattr(e, 'code') and e.code in self.error_messages:
message = self.error_messages[e.code]
if e.params:
message = message % e.params
errors.append(message)
else:
errors.extend(e.messages)
if errors:
raise ValidationError(errors)
def clean(self, value):
"""
Validates the given value and returns its "cleaned" value as an
appropriate Python object.
Raises ValidationError for any errors.
"""
value = self.to_python(value)
self.validate(value)
self.run_validators(value)
return value
def bound_data(self, data, initial):
"""
Return the value that should be shown for this field on render of a
bound form, given the submitted POST data for the field and the initial
data, if any.
For most fields, this will simply be data; FileFields need to handle it
a bit differently.
"""
return data
def widget_attrs(self, widget):
"""
Given a Widget instance (*not* a Widget class), returns a dictionary of
any HTML attributes that should be added to the Widget, based on this
Field.
"""
return {}
def __deepcopy__(self, memo):
result = copy.copy(self)
memo[id(self)] = result
result.widget = copy.deepcopy(self.widget, memo)
result.validators = self.validators[:]
return result
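# Illustrative sketch only (not part of Django's fields): a custom field wired into the hooks
# described above -- clean() calls to_python(), then validate(), then run_validators().
class _ExampleUppercaseField(Field):
    default_error_messages = {
        'lowercase': _('Enter the value in upper case.'),
    }
    def to_python(self, value):
        # Normalize to a stripped unicode string, treating empty values as ''.
        if value in validators.EMPTY_VALUES:
            return ''
        return smart_unicode(value).strip()
    def validate(self, value):
        super(_ExampleUppercaseField, self).validate(value)
        if value and value != value.upper():
            raise ValidationError(self.error_messages['lowercase'])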
class CharField(Field):
def __init__(self, max_length=None, min_length=None, *args, **kwargs):
self.max_length, self.min_length = max_length, min_length
super(CharField, self).__init__(*args, **kwargs)
if min_length is not None:
self.validators.append(validators.MinLengthValidator(min_length))
if max_length is not None:
self.validators.append(validators.MaxLengthValidator(max_length))
def to_python(self, value):
"Returns a Unicode object."
if value in validators.EMPTY_VALUES:
return ''
return smart_unicode(value)
def widget_attrs(self, widget):
attrs = super(CharField, self).widget_attrs(widget)
if self.max_length is not None and isinstance(widget, (TextInput, PasswordInput)):
# The HTML attribute is maxlength, not max_length.
attrs.update({'maxlength': str(self.max_length)})
return attrs
class IntegerField(Field):
default_error_messages = {
'invalid': _('Enter a whole number.'),
'max_value': _('Ensure this value is less than or equal to %(limit_value)s.'),
'min_value': _('Ensure this value is greater than or equal to %(limit_value)s.'),
}
def __init__(self, max_value=None, min_value=None, *args, **kwargs):
self.max_value, self.min_value = max_value, min_value
super(IntegerField, self).__init__(*args, **kwargs)
if max_value is not None:
self.validators.append(validators.MaxValueValidator(max_value))
if min_value is not None:
self.validators.append(validators.MinValueValidator(min_value))
def to_python(self, value):
"""
Validates that int() can be called on the input. Returns the result
of int(). Returns None for empty values.
"""
value = super(IntegerField, self).to_python(value)
if value in validators.EMPTY_VALUES:
return None
if self.localize:
value = formats.sanitize_separators(value)
try:
value = int(str(value))
except (ValueError, TypeError):
raise ValidationError(self.error_messages['invalid'])
return value
class FloatField(IntegerField):
default_error_messages = {
'invalid': _('Enter a number.'),
}
def to_python(self, value):
"""
Validates that float() can be called on the input. Returns the result
of float(). Returns None for empty values.
"""
value = super(IntegerField, self).to_python(value)
if value in validators.EMPTY_VALUES:
return None
if self.localize:
value = formats.sanitize_separators(value)
try:
value = float(value)
except (ValueError, TypeError):
raise ValidationError(self.error_messages['invalid'])
return value
class DecimalField(Field):
default_error_messages = {
'invalid': _('Enter a number.'),
'max_value': _('Ensure this value is less than or equal to %(limit_value)s.'),
'min_value': _('Ensure this value is greater than or equal to %(limit_value)s.'),
'max_digits': _('Ensure that there are no more than %s digits in total.'),
'max_decimal_places': _('Ensure that there are no more than %s decimal places.'),
'max_whole_digits': _('Ensure that there are no more than %s digits before the decimal point.')
}
def __init__(self, max_value=None, min_value=None, max_digits=None, decimal_places=None, *args, **kwargs):
self.max_value, self.min_value = max_value, min_value
self.max_digits, self.decimal_places = max_digits, decimal_places
Field.__init__(self, *args, **kwargs)
if max_value is not None:
self.validators.append(validators.MaxValueValidator(max_value))
if min_value is not None:
self.validators.append(validators.MinValueValidator(min_value))
def to_python(self, value):
"""
Validates that the input is a decimal number. Returns a Decimal
instance. Returns None for empty values. Ensures that there are no more
than max_digits in the number, and no more than decimal_places digits
after the decimal point.
"""
if value in validators.EMPTY_VALUES:
return None
if self.localize:
value = formats.sanitize_separators(value)
value = smart_unicode(value).strip()
try:
value = Decimal(value)
except DecimalException:
raise ValidationError(self.error_messages['invalid'])
return value
def validate(self, value):
super(DecimalField, self).validate(value)
if value in validators.EMPTY_VALUES:
return
# Check for NaN, Inf and -Inf values. We can't compare directly for NaN,
# since it is never equal to itself. However, NaN is the only value that
# isn't equal to itself, so we can use this to identify NaN
if value != value or value == Decimal("Inf") or value == Decimal("-Inf"):
raise ValidationError(self.error_messages['invalid'])
sign, digittuple, exponent = value.as_tuple()
decimals = abs(exponent)
# digittuple doesn't include any leading zeros.
digits = len(digittuple)
if decimals > digits:
# We have leading zeros up to or past the decimal point. Count
# everything past the decimal point as a digit. We do not count
# 0 before the decimal point as a digit since that would mean
# we would not allow max_digits = decimal_places.
digits = decimals
whole_digits = digits - decimals
if self.max_digits is not None and digits > self.max_digits:
raise ValidationError(self.error_messages['max_digits'] % self.max_digits)
if self.decimal_places is not None and decimals > self.decimal_places:
raise ValidationError(self.error_messages['max_decimal_places'] % self.decimal_places)
if self.max_digits is not None and self.decimal_places is not None and whole_digits > (self.max_digits - self.decimal_places):
raise ValidationError(self.error_messages['max_whole_digits'] % (self.max_digits - self.decimal_places))
return value
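# Editor's sketch (hedged, not part of Django): walks through the digit
# accounting in DecimalField.validate() -- for Decimal('12.345'), as_tuple()
# gives digits (1, 2, 3, 4, 5) and exponent -3, so digits=5, decimals=3 and
# whole_digits=2. Assumes a configured Django settings module; the helper name
# is illustrative only.
def _decimalfield_digits_demo():
    from decimal import Decimal
    from django.core.exceptions import ValidationError
    field = DecimalField(max_digits=4, decimal_places=2)
    assert field.clean('12.34') == Decimal('12.34')   # 4 digits, 2 decimals: accepted
    try:
        field.clean('12.345')                         # 3 decimal places > decimal_places=2
    except ValidationError as e:
        print(e.messages)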
class BaseTemporalField(Field):
def __init__(self, input_formats=None, *args, **kwargs):
super(BaseTemporalField, self).__init__(*args, **kwargs)
if input_formats is not None:
self.input_formats = input_formats
def to_python(self, value):
# Try to coerce the value to unicode.
unicode_value = force_unicode(value, strings_only=True)
if isinstance(unicode_value, unicode):
value = unicode_value.strip()
# If unicode, try to strptime against each input format.
if isinstance(value, unicode):
for format in self.input_formats:
try:
return self.strptime(value, format)
except ValueError:
continue
raise ValidationError(self.error_messages['invalid'])
def strptime(self, value, format):
raise NotImplementedError('Subclasses must define this method.')
class DateField(BaseTemporalField):
widget = DateInput
input_formats = formats.get_format_lazy('DATE_INPUT_FORMATS')
default_error_messages = {
'invalid': _('Enter a valid date.'),
}
def to_python(self, value):
"""
Validates that the input can be converted to a date. Returns a Python
datetime.date object.
"""
if value in validators.EMPTY_VALUES:
return None
if isinstance(value, datetime.datetime):
return value.date()
if isinstance(value, datetime.date):
return value
return super(DateField, self).to_python(value)
def strptime(self, value, format):
return datetime.datetime.strptime(value, format).date()
class TimeField(BaseTemporalField):
widget = TimeInput
input_formats = formats.get_format_lazy('TIME_INPUT_FORMATS')
default_error_messages = {
'invalid': _('Enter a valid time.')
}
def to_python(self, value):
"""
Validates that the input can be converted to a time. Returns a Python
datetime.time object.
"""
if value in validators.EMPTY_VALUES:
return None
if isinstance(value, datetime.time):
return value
return super(TimeField, self).to_python(value)
def strptime(self, value, format):
return datetime.datetime.strptime(value, format).time()
class DateTimeField(BaseTemporalField):
widget = DateTimeInput
input_formats = formats.get_format_lazy('DATETIME_INPUT_FORMATS')
default_error_messages = {
'invalid': _('Enter a valid date/time.'),
}
def prepare_value(self, value):
if isinstance(value, datetime.datetime):
value = to_current_timezone(value)
return value
def to_python(self, value):
"""
Validates that the input can be converted to a datetime. Returns a
Python datetime.datetime object.
"""
if value in validators.EMPTY_VALUES:
return None
if isinstance(value, datetime.datetime):
return from_current_timezone(value)
if isinstance(value, datetime.date):
result = datetime.datetime(value.year, value.month, value.day)
return from_current_timezone(result)
if isinstance(value, list):
# Input comes from a SplitDateTimeWidget, for example. So, it's two
# components: date and time.
if len(value) != 2:
raise ValidationError(self.error_messages['invalid'])
if value[0] in validators.EMPTY_VALUES and value[1] in validators.EMPTY_VALUES:
return None
value = '%s %s' % tuple(value)
result = super(DateTimeField, self).to_python(value)
return from_current_timezone(result)
def strptime(self, value, format):
return datetime.datetime.strptime(value, format)
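# Editor's sketch (hedged, not part of Django): DateTimeField.to_python()
# joins the two-element list produced by a SplitDateTimeWidget into a single
# "date time" string before trying the input_formats. Assumes a configured
# Django settings module with USE_TZ disabled, so from_current_timezone()
# leaves the naive datetime unchanged; the helper name is illustrative only.
def _datetimefield_split_demo():
    field = DateTimeField(input_formats=['%Y-%m-%d %H:%M'])
    result = field.to_python(['2012-05-01', '13:30'])
    assert result == datetime.datetime(2012, 5, 1, 13, 30)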
class RegexField(CharField):
def __init__(self, regex, max_length=None, min_length=None, error_message=None, *args, **kwargs):
"""
regex can be either a string or a compiled regular expression object.
error_message is an optional error message to use, if
'Enter a valid value' is too generic for you.
"""
# error_message is just kept for backwards compatibility:
if error_message:
error_messages = kwargs.get('error_messages') or {}
error_messages['invalid'] = error_message
kwargs['error_messages'] = error_messages
super(RegexField, self).__init__(max_length, min_length, *args, **kwargs)
self._set_regex(regex)
def _get_regex(self):
return self._regex
def _set_regex(self, regex):
if isinstance(regex, basestring):
regex = re.compile(regex, re.UNICODE)
self._regex = regex
if hasattr(self, '_regex_validator') and self._regex_validator in self.validators:
self.validators.remove(self._regex_validator)
self._regex_validator = validators.RegexValidator(regex=regex)
self.validators.append(self._regex_validator)
regex = property(_get_regex, _set_regex)
class EmailField(CharField):
default_error_messages = {
'invalid': _('Enter a valid e-mail address.'),
}
default_validators = [validators.validate_email]
def clean(self, value):
value = self.to_python(value).strip()
return super(EmailField, self).clean(value)
class FileField(Field):
widget = ClearableFileInput
default_error_messages = {
'invalid': _("No file was submitted. Check the encoding type on the form."),
'missing': _("No file was submitted."),
'empty': _("The submitted file is empty."),
'max_length': _('Ensure this filename has at most %(max)d characters (it has %(length)d).'),
'contradiction': _('Please either submit a file or check the clear checkbox, not both.')
}
def __init__(self, *args, **kwargs):
self.max_length = kwargs.pop('max_length', None)
self.allow_empty_file = kwargs.pop('allow_empty_file', False)
super(FileField, self).__init__(*args, **kwargs)
def to_python(self, data):
if data in validators.EMPTY_VALUES:
return None
# UploadedFile objects should have name and size attributes.
try:
file_name = data.name
file_size = data.size
except AttributeError:
raise ValidationError(self.error_messages['invalid'])
if self.max_length is not None and len(file_name) > self.max_length:
error_values = {'max': self.max_length, 'length': len(file_name)}
raise ValidationError(self.error_messages['max_length'] % error_values)
if not file_name:
raise ValidationError(self.error_messages['invalid'])
if not self.allow_empty_file and not file_size:
raise ValidationError(self.error_messages['empty'])
return data
def clean(self, data, initial=None):
# If the widget got contradictory inputs, we raise a validation error
if data is FILE_INPUT_CONTRADICTION:
raise ValidationError(self.error_messages['contradiction'])
# False means the field value should be cleared; further validation is
# not needed.
if data is False:
if not self.required:
return False
# If the field is required, clearing is not possible (the widget
# shouldn't return False data in that case anyway). False is not
# in validators.EMPTY_VALUES; if a False value makes it this far
# it should be validated from here on out as None (so it will be
# caught by the required check).
data = None
if not data and initial:
return initial
return super(FileField, self).clean(data)
def bound_data(self, data, initial):
if data in (None, FILE_INPUT_CONTRADICTION):
return initial
return data
class ImageField(FileField):
default_error_messages = {
'invalid_image': _("Upload a valid image. The file you uploaded was either not an image or a corrupted image."),
}
def to_python(self, data):
"""
Checks that the file-upload field data contains a valid image (GIF, JPG,
PNG, possibly others -- whatever the Python Imaging Library supports).
"""
f = super(ImageField, self).to_python(data)
if f is None:
return None
# Try to import PIL in either of the two ways it can end up installed.
try:
from PIL import Image
except ImportError:
import Image
# We need to get a file object for PIL. We might have a path or we might
# have to read the data into memory.
if hasattr(data, 'temporary_file_path'):
file = data.temporary_file_path()
else:
if hasattr(data, 'read'):
file = BytesIO(data.read())
else:
file = BytesIO(data['content'])
try:
# load() is the only method that can spot a truncated JPEG,
# but it cannot be called sanely after verify()
trial_image = Image.open(file)
trial_image.load()
# Since we're about to use the file again we have to reset the
# file object if possible.
if hasattr(file, 'seek') and callable(file.seek):
file.seek(0)
# verify() is the only method that can spot a corrupt PNG,
# but it must be called immediately after the constructor
trial_image = Image.open(file)
trial_image.verify()
except ImportError:
# Under PyPy, it is possible to import PIL. However, the underlying
# _imaging C module isn't available, so an ImportError will be
# raised. Catch and re-raise.
raise
except Exception: # Python Imaging Library doesn't recognize it as an image
raise ValidationError(self.error_messages['invalid_image'])
if hasattr(f, 'seek') and callable(f.seek):
f.seek(0)
return f
class URLField(CharField):
default_error_messages = {
'invalid': _('Enter a valid URL.'),
}
def __init__(self, max_length=None, min_length=None, *args, **kwargs):
super(URLField, self).__init__(max_length, min_length, *args, **kwargs)
self.validators.append(validators.URLValidator())
def to_python(self, value):
def split_url(url):
"""
Returns a list of url parts via ``urlparse.urlsplit`` (or raises a
``ValidationError`` exception for certain malformed URLs).
"""
try:
return list(urlparse.urlsplit(url))
except ValueError:
# urlparse.urlsplit can raise a ValueError with some
# misformatted URLs.
raise ValidationError(self.error_messages['invalid'])
value = super(URLField, self).to_python(value)
if value:
url_fields = split_url(value)
if not url_fields[0]:
# If no URL scheme given, assume http://
url_fields[0] = 'http'
if not url_fields[1]:
# Assume that if no domain is provided, that the path segment
# contains the domain.
url_fields[1] = url_fields[2]
url_fields[2] = ''
# Rebuild the url_fields list, since the domain segment may now
# contain the path too.
url_fields = split_url(urlparse.urlunsplit(url_fields))
if not url_fields[2]:
# the path portion may need to be added before query params
url_fields[2] = '/'
value = urlparse.urlunsplit(url_fields)
return value
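# Editor's sketch (hedged, not part of Django): URLField.to_python() normalises
# its input -- a missing scheme becomes http, a bare domain is moved from the
# path into the netloc slot, and an empty path gains a trailing "/". Assumes a
# configured Django settings module; the helper name is illustrative only.
def _urlfield_normalise_demo():
    field = URLField()
    assert field.to_python('example.com') == 'http://example.com/'
    assert field.to_python('http://example.com/path') == 'http://example.com/path'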
class BooleanField(Field):
widget = CheckboxInput
def to_python(self, value):
"""Returns a Python boolean object."""
# Explicitly check for the string 'False', which is what a hidden field
# will submit for False. Also check for '0', since this is what
# RadioSelect will provide. Because bool("True") == bool('1') == True,
# we don't need to handle that explicitly.
if isinstance(value, basestring) and value.lower() in ('false', '0'):
value = False
else:
value = bool(value)
value = super(BooleanField, self).to_python(value)
if not value and self.required:
raise ValidationError(self.error_messages['required'])
return value
class NullBooleanField(BooleanField):
"""
A field whose valid values are None, True and False. Invalid values are
cleaned to None.
"""
widget = NullBooleanSelect
def to_python(self, value):
"""
Explicitly checks for the strings 'True' and 'False', which is what a
hidden field will submit for True and False, and for '1' and '0', which
is what a RadioField will submit. Unlike BooleanField, we need to check
explicitly for True and False, because we are not using the bool() function.
"""
if value in (True, 'True', '1'):
return True
elif value in (False, 'False', '0'):
return False
else:
return None
def validate(self, value):
pass
class ChoiceField(Field):
widget = Select
default_error_messages = {
'invalid_choice': _('Select a valid choice. %(value)s is not one of the available choices.'),
}
def __init__(self, choices=(), required=True, widget=None, label=None,
initial=None, help_text=None, *args, **kwargs):
super(ChoiceField, self).__init__(required=required, widget=widget, label=label,
initial=initial, help_text=help_text, *args, **kwargs)
self.choices = choices
def __deepcopy__(self, memo):
result = super(ChoiceField, self).__deepcopy__(memo)
result._choices = copy.deepcopy(self._choices, memo)
return result
def _get_choices(self):
return self._choices
def _set_choices(self, value):
# Setting choices also sets the choices on the widget.
# choices can be any iterable, but we call list() on it because
# it will be consumed more than once.
self._choices = self.widget.choices = list(value)
choices = property(_get_choices, _set_choices)
def to_python(self, value):
"Returns a Unicode object."
if value in validators.EMPTY_VALUES:
return ''
return smart_unicode(value)
def validate(self, value):
"""
Validates that the input is in self.choices.
"""
super(ChoiceField, self).validate(value)
if value and not self.valid_value(value):
raise ValidationError(self.error_messages['invalid_choice'] % {'value': value})
def valid_value(self, value):
"Check to see if the provided value is a valid choice"
for k, v in self.choices:
if isinstance(v, (list, tuple)):
# This is an optgroup, so look inside the group for options
for k2, v2 in v:
if value == smart_unicode(k2):
return True
else:
if value == smart_unicode(k):
return True
return False
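# Editor's sketch (hedged, not part of Django): valid_value() also descends
# into optgroups -- when a choice's second element is itself a list of
# (key, label) pairs, the nested keys are what count as valid values, not the
# group label. The helper name is illustrative only.
def _choicefield_optgroup_demo():
    field = ChoiceField(choices=[
        ('plain', 'Plain'),
        ('Group', [('nested1', 'Nested 1'), ('nested2', 'Nested 2')]),
    ])
    assert field.valid_value(u'plain')
    assert field.valid_value(u'nested2')
    assert not field.valid_value(u'Group')   # optgroup labels are not selectable values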
class TypedChoiceField(ChoiceField):
def __init__(self, *args, **kwargs):
self.coerce = kwargs.pop('coerce', lambda val: val)
self.empty_value = kwargs.pop('empty_value', '')
super(TypedChoiceField, self).__init__(*args, **kwargs)
def to_python(self, value):
"""
Validates that the value is in self.choices and can be coerced to the
right type.
"""
value = super(TypedChoiceField, self).to_python(value)
super(TypedChoiceField, self).validate(value)
if value == self.empty_value or value in validators.EMPTY_VALUES:
return self.empty_value
try:
value = self.coerce(value)
except (ValueError, TypeError, ValidationError):
raise ValidationError(self.error_messages['invalid_choice'] % {'value': value})
return value
def validate(self, value):
pass
class MultipleChoiceField(ChoiceField):
hidden_widget = MultipleHiddenInput
widget = SelectMultiple
default_error_messages = {
'invalid_choice': _('Select a valid choice. %(value)s is not one of the available choices.'),
'invalid_list': _('Enter a list of values.'),
}
def to_python(self, value):
if not value:
return []
elif not isinstance(value, (list, tuple)):
raise ValidationError(self.error_messages['invalid_list'])
return [smart_unicode(val) for val in value]
def validate(self, value):
"""
Validates that the input is a list or tuple.
"""
if self.required and not value:
raise ValidationError(self.error_messages['required'])
# Validate that each value in the value list is in self.choices.
for val in value:
if not self.valid_value(val):
raise ValidationError(self.error_messages['invalid_choice'] % {'value': val})
class TypedMultipleChoiceField(MultipleChoiceField):
def __init__(self, *args, **kwargs):
self.coerce = kwargs.pop('coerce', lambda val: val)
self.empty_value = kwargs.pop('empty_value', [])
super(TypedMultipleChoiceField, self).__init__(*args, **kwargs)
def to_python(self, value):
"""
Validates that the values are in self.choices and can be coerced to the
right type.
"""
value = super(TypedMultipleChoiceField, self).to_python(value)
super(TypedMultipleChoiceField, self).validate(value)
if value == self.empty_value or value in validators.EMPTY_VALUES:
return self.empty_value
new_value = []
for choice in value:
try:
new_value.append(self.coerce(choice))
except (ValueError, TypeError, ValidationError):
raise ValidationError(self.error_messages['invalid_choice'] % {'value': choice})
return new_value
def validate(self, value):
pass
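# Editor's sketch (hedged, not part of Django): coerce runs after the choice
# check, converting the submitted strings; a failed coercion is reported as
# 'invalid_choice'. Assumes a configured Django settings module; the helper
# name is illustrative only.
def _typed_choice_coerce_demo():
    single = TypedChoiceField(choices=[('1', 'one'), ('2', 'two')], coerce=int)
    assert single.clean('2') == 2
    multi = TypedMultipleChoiceField(choices=[('1', 'one'), ('2', 'two')], coerce=int)
    assert multi.clean(['1', '2']) == [1, 2]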
class ComboField(Field):
"""
A Field whose clean() method calls multiple Field clean() methods.
"""
def __init__(self, fields=(), *args, **kwargs):
super(ComboField, self).__init__(*args, **kwargs)
# Set 'required' to False on the individual fields, because the
# required validation will be handled by ComboField, not by those
# individual fields.
for f in fields:
f.required = False
self.fields = fields
def clean(self, value):
"""
Validates the given value against all of self.fields, which is a
list of Field instances.
"""
super(ComboField, self).clean(value)
for field in self.fields:
value = field.clean(value)
return value
class MultiValueField(Field):
"""
A Field that aggregates the logic of multiple Fields.
Its clean() method takes a "decompressed" list of values, which are then
cleaned into a single value according to self.fields. Each value in
this list is cleaned by the corresponding field -- the first value is
cleaned by the first field, the second value is cleaned by the second
field, etc. Once all fields are cleaned, the list of clean values is
"compressed" into a single value.
Subclasses should not have to implement clean(). Instead, they must
implement compress(), which takes a list of valid values and returns a
"compressed" version of those values -- a single value.
You'll probably want to use this with MultiWidget.
"""
default_error_messages = {
'invalid': _('Enter a list of values.'),
}
def __init__(self, fields=(), *args, **kwargs):
super(MultiValueField, self).__init__(*args, **kwargs)
# Set 'required' to False on the individual fields, because the
# required validation will be handled by MultiValueField, not by those
# individual fields.
for f in fields:
f.required = False
self.fields = fields
def validate(self, value):
pass
def clean(self, value):
"""
Validates every value in the given list. A value is validated against
the corresponding Field in self.fields.
For example, if this MultiValueField was instantiated with
fields=(DateField(), TimeField()), clean() would call
DateField.clean(value[0]) and TimeField.clean(value[1]).
"""
clean_data = []
errors = ErrorList()
if not value or isinstance(value, (list, tuple)):
if not value or not [v for v in value if v not in validators.EMPTY_VALUES]:
if self.required:
raise ValidationError(self.error_messages['required'])
else:
return self.compress([])
else:
raise ValidationError(self.error_messages['invalid'])
for i, field in enumerate(self.fields):
try:
field_value = value[i]
except IndexError:
field_value = None
if self.required and field_value in validators.EMPTY_VALUES:
raise ValidationError(self.error_messages['required'])
try:
clean_data.append(field.clean(field_value))
except ValidationError as e:
# Collect all validation errors in a single list, which we'll
# raise at the end of clean(), rather than raising a single
# exception for the first error we encounter.
errors.extend(e.messages)
if errors:
raise ValidationError(errors)
out = self.compress(clean_data)
self.validate(out)
self.run_validators(out)
return out
def compress(self, data_list):
"""
Returns a single value for the given list of values. The values can be
assumed to be valid.
For example, if this MultiValueField was instantiated with
fields=(DateField(), TimeField()), this might return a datetime
object created by combining the date and time in data_list.
"""
raise NotImplementedError('Subclasses must implement this method.')
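# Editor's sketch (hedged, not part of Django): a minimal MultiValueField
# subclass showing the clean()/compress() contract described above -- each
# sub-field cleans its slot of the value list, then compress() folds the
# cleaned list into one value. SplitDateTimeField below is the in-tree
# example; the _PhoneField name is illustrative only and assumes a configured
# Django settings module.
class _PhoneField(MultiValueField):
    def __init__(self, *args, **kwargs):
        fields = (CharField(max_length=3), CharField(max_length=7))
        super(_PhoneField, self).__init__(fields, *args, **kwargs)
    def compress(self, data_list):
        # data_list holds the already-cleaned sub-values, e.g. ['555', '1234567']
        return '-'.join(data_list) if data_list else ''
# Usage: _PhoneField().clean(['555', '1234567']) returns '555-1234567'.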
class FilePathField(ChoiceField):
def __init__(self, path, match=None, recursive=False, allow_files=True,
allow_folders=False, required=True, widget=None, label=None,
initial=None, help_text=None, *args, **kwargs):
self.path, self.match, self.recursive = path, match, recursive
self.allow_files, self.allow_folders = allow_files, allow_folders
super(FilePathField, self).__init__(choices=(), required=required,
widget=widget, label=label, initial=initial, help_text=help_text,
*args, **kwargs)
if self.required:
self.choices = []
else:
self.choices = [("", "---------")]
if self.match is not None:
self.match_re = re.compile(self.match)
if recursive:
for root, dirs, files in sorted(os.walk(self.path)):
if self.allow_files:
for f in files:
if self.match is None or self.match_re.search(f):
f = os.path.join(root, f)
self.choices.append((f, f.replace(path, "", 1)))
if self.allow_folders:
for f in dirs:
if f == '__pycache__':
continue
if self.match is None or self.match_re.search(f):
f = os.path.join(root, f)
self.choices.append((f, f.replace(path, "", 1)))
else:
try:
for f in sorted(os.listdir(self.path)):
if f == '__pycache__':
continue
full_file = os.path.join(self.path, f)
if (((self.allow_files and os.path.isfile(full_file)) or
(self.allow_folders and os.path.isdir(full_file))) and
(self.match is None or self.match_re.search(f))):
self.choices.append((full_file, f))
except OSError:
pass
self.widget.choices = self.choices
class SplitDateTimeField(MultiValueField):
widget = SplitDateTimeWidget
hidden_widget = SplitHiddenDateTimeWidget
default_error_messages = {
'invalid_date': _('Enter a valid date.'),
'invalid_time': _('Enter a valid time.'),
}
def __init__(self, input_date_formats=None, input_time_formats=None, *args, **kwargs):
errors = self.default_error_messages.copy()
if 'error_messages' in kwargs:
errors.update(kwargs['error_messages'])
localize = kwargs.get('localize', False)
fields = (
DateField(input_formats=input_date_formats,
error_messages={'invalid': errors['invalid_date']},
localize=localize),
TimeField(input_formats=input_time_formats,
error_messages={'invalid': errors['invalid_time']},
localize=localize),
)
super(SplitDateTimeField, self).__init__(fields, *args, **kwargs)
def compress(self, data_list):
if data_list:
# Raise a validation error if time or date is empty
# (possible if SplitDateTimeField has required=False).
if data_list[0] in validators.EMPTY_VALUES:
raise ValidationError(self.error_messages['invalid_date'])
if data_list[1] in validators.EMPTY_VALUES:
raise ValidationError(self.error_messages['invalid_time'])
result = datetime.datetime.combine(*data_list)
return from_current_timezone(result)
return None
class IPAddressField(CharField):
default_error_messages = {
'invalid': _('Enter a valid IPv4 address.'),
}
default_validators = [validators.validate_ipv4_address]
class GenericIPAddressField(CharField):
default_error_messages = {}
def __init__(self, protocol='both', unpack_ipv4=False, *args, **kwargs):
self.unpack_ipv4 = unpack_ipv4
self.default_validators, invalid_error_message = \
validators.ip_address_validators(protocol, unpack_ipv4)
self.default_error_messages['invalid'] = invalid_error_message
super(GenericIPAddressField, self).__init__(*args, **kwargs)
def to_python(self, value):
if value in validators.EMPTY_VALUES:
return ''
if value and ':' in value:
return clean_ipv6_address(value,
self.unpack_ipv4, self.error_messages['invalid'])
return value
class SlugField(CharField):
default_error_messages = {
'invalid': _("Enter a valid 'slug' consisting of letters, numbers,"
" underscores or hyphens."),
}
default_validators = [validators.validate_slug]
|
|
# -*- test-case-name: twisted.internet.test.test_gtk2reactor -*-
# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
This module provides support for Twisted to interact with the glib/gtk2
mainloop.
In order to use this support, simply do the following::
| from twisted.internet import gtk2reactor
| gtk2reactor.install()
Then use twisted.internet APIs as usual. The other methods here are not
intended to be called directly.
When installing the reactor, you can choose whether to use the glib
event loop or the GTK+ event loop which is based on it but adds GUI
integration.
"""
# System Imports
import sys
from zope.interface import implements
try:
if not hasattr(sys, 'frozen'):
# Don't want to check this for py2exe
import pygtk
pygtk.require('2.0')
except (ImportError, AttributeError):
pass # maybe we're using pygtk before this hack existed.
import gobject
if hasattr(gobject, "threads_init"):
# recent versions of python-gtk expose this. python-gtk=2.4.1
# (wrapping glib-2.4.7) does. python-gtk=2.0.0 (wrapping
# glib-2.2.3) does not.
gobject.threads_init()
# Twisted Imports
from twisted.python import log, runtime, failure
from twisted.python.compat import set
from twisted.internet.interfaces import IReactorFDSet
from twisted.internet import main, posixbase, error, selectreactor
POLL_DISCONNECTED = gobject.IO_HUP | gobject.IO_ERR | gobject.IO_NVAL
# glib's iochannel sources won't tell us about any events that we haven't
# asked for, even if those events aren't sensible inputs to the poll()
# call.
INFLAGS = gobject.IO_IN | POLL_DISCONNECTED
OUTFLAGS = gobject.IO_OUT | POLL_DISCONNECTED
def _our_mainquit():
# XXX: gtk.main_quit() (which is used for crash()) raises an exception if
# gtk.main_level() == 0; however, all the tests freeze if we use this
# function to stop the reactor. what gives? (I believe this may have been
# a stupid mistake where I forgot to import gtk here... I will remove this
# comment if the tests pass)
import gtk
if gtk.main_level():
gtk.main_quit()
class Gtk2Reactor(posixbase.PosixReactorBase):
"""
GTK+-2 event loop reactor.
@ivar _sources: A dictionary mapping L{FileDescriptor} instances to gtk
watch handles.
@ivar _reads: A set of L{FileDescriptor} instances currently monitored for
reading.
@ivar _writes: A set of L{FileDescriptor} instances currently monitored for
writing.
@ivar _simtag: A gtk timeout handle for the next L{simulate} call.
"""
implements(IReactorFDSet)
def __init__(self, useGtk=True):
self._simtag = None
self._reads = set()
self._writes = set()
self._sources = {}
posixbase.PosixReactorBase.__init__(self)
# Prior to pygtk 2.3.91, the glib iteration and mainloop functions didn't
# release the global interpreter lock, thus breaking thread and signal support.
if getattr(gobject, "pygtk_version", ()) >= (2, 3, 91) and not useGtk:
self.context = gobject.main_context_default()
self.__pending = self.context.pending
self.__iteration = self.context.iteration
self.loop = gobject.MainLoop()
self.__crash = self.loop.quit
self.__run = self.loop.run
else:
import gtk
self.__pending = gtk.events_pending
self.__iteration = gtk.main_iteration
self.__crash = _our_mainquit
self.__run = gtk.main
# The input_add function in pygtk1 checks for objects with a
# 'fileno' method and, if present, uses the result of that method
# as the input source. The pygtk2 input_add does not do this. The
# function below replicates the pygtk1 functionality.
# In addition, pygtk maps gtk.input_add to _gobject.io_add_watch, and
# g_io_add_watch() takes different condition bitfields than
# gtk_input_add(). We use g_io_add_watch() here in case pygtk fixes this
# bug.
def input_add(self, source, condition, callback):
if hasattr(source, 'fileno'):
# handle python objects
def wrapper(source, condition, real_s=source, real_cb=callback):
return real_cb(real_s, condition)
return gobject.io_add_watch(source.fileno(), condition, wrapper)
else:
return gobject.io_add_watch(source, condition, callback)
def _add(self, source, primary, other, primaryFlag, otherFlag):
"""
Add the given L{FileDescriptor} for monitoring either for reading or
writing. If the file is already monitored for the other operation, we
delete the previous registration and re-register it for both reading
and writing.
"""
if source in primary:
return
flags = primaryFlag
if source in other:
gobject.source_remove(self._sources[source])
flags |= otherFlag
self._sources[source] = self.input_add(source, flags, self.callback)
primary.add(source)
def addReader(self, reader):
"""
Add a L{FileDescriptor} for monitoring of data available to read.
"""
self._add(reader, self._reads, self._writes, INFLAGS, OUTFLAGS)
def addWriter(self, writer):
"""
Add a L{FileDescriptor} for monitoring ability to write data.
"""
self._add(writer, self._writes, self._reads, OUTFLAGS, INFLAGS)
def getReaders(self):
"""
Retrieve the list of current L{FileDescriptor} monitored for reading.
"""
return list(self._reads)
def getWriters(self):
"""
Retrieve the list of current L{FileDescriptor} monitored for writing.
"""
return list(self._writes)
def removeAll(self):
"""
Remove monitoring for all registered L{FileDescriptor}s.
"""
return self._removeAll(self._reads, self._writes)
def _remove(self, source, primary, other, flags):
"""
Remove monitoring the given L{FileDescriptor} for either reading or
writing. If it's still monitored for the other operation, we
re-register the L{FileDescriptor} for only that operation.
"""
if source not in primary:
return
gobject.source_remove(self._sources[source])
primary.remove(source)
if source in other:
self._sources[source] = self.input_add(
source, flags, self.callback)
else:
self._sources.pop(source)
def removeReader(self, reader):
"""
Stop monitoring the given L{FileDescriptor} for reading.
"""
self._remove(reader, self._reads, self._writes, OUTFLAGS)
def removeWriter(self, writer):
"""
Stop monitoring the given L{FileDescriptor} for writing.
"""
self._remove(writer, self._writes, self._reads, INFLAGS)
doIterationTimer = None
def doIterationTimeout(self, *args):
self.doIterationTimer = None
return 0 # auto-remove
def doIteration(self, delay):
# flush some pending events, return if there was something to do
# don't use the usual "while self.context.pending(): self.context.iteration()"
# idiom because lots of IO (in particular test_tcp's
# ProperlyCloseFilesTestCase) can keep us from ever exiting.
log.msg(channel='system', event='iteration', reactor=self)
if self.__pending():
self.__iteration(0)
return
# nothing to do, must delay
if delay == 0:
return # shouldn't delay, so just return
self.doIterationTimer = gobject.timeout_add(int(delay * 1000),
self.doIterationTimeout)
# This will either wake up from IO or from a timeout.
self.__iteration(1) # block
# note: with the .simulate timer below, delays > 0.1 will always be
# woken up by the .simulate timer
if self.doIterationTimer:
# if woken by IO, need to cancel the timer
gobject.source_remove(self.doIterationTimer)
self.doIterationTimer = None
def crash(self):
posixbase.PosixReactorBase.crash(self)
self.__crash()
def run(self, installSignalHandlers=1):
self.startRunning(installSignalHandlers=installSignalHandlers)
gobject.timeout_add(0, self.simulate)
if self._started:
self.__run()
def _doReadOrWrite(self, source, condition, faildict={
error.ConnectionDone: failure.Failure(error.ConnectionDone()),
error.ConnectionLost: failure.Failure(error.ConnectionLost()),
}):
why = None
didRead = None
if condition & POLL_DISCONNECTED and \
not (condition & gobject.IO_IN):
why = main.CONNECTION_LOST
else:
try:
if condition & gobject.IO_IN:
why = source.doRead()
didRead = source.doRead
if not why and condition & gobject.IO_OUT:
# if doRead caused connectionLost, don't call doWrite
# if doRead is doWrite, don't call it again.
if not source.disconnected and source.doWrite != didRead:
why = source.doWrite()
didRead = source.doWrite # if failed it was in write
except:
why = sys.exc_info()[1]
log.msg('Error In %s' % source)
log.deferr()
if why:
self._disconnectSelectable(source, why, didRead == source.doRead)
def callback(self, source, condition):
log.callWithLogger(source, self._doReadOrWrite, source, condition)
self.simulate() # fire Twisted timers
return 1 # 1=don't auto-remove the source
def simulate(self):
"""
Run simulation loops and reschedule callbacks.
"""
if self._simtag is not None:
gobject.source_remove(self._simtag)
self.runUntilCurrent()
timeout = min(self.timeout(), 0.1)
if timeout is None:
timeout = 0.1
# grumble
self._simtag = gobject.timeout_add(int(timeout * 1010), self.simulate)
class PortableGtkReactor(selectreactor.SelectReactor):
"""
Reactor that works on Windows.
Sockets aren't supported by GTK+'s input_add on Win32.
"""
_simtag = None
def crash(self):
selectreactor.SelectReactor.crash(self)
import gtk
# mainquit is deprecated in newer versions
if gtk.main_level():
if hasattr(gtk, 'main_quit'):
gtk.main_quit()
else:
gtk.mainquit()
def run(self, installSignalHandlers=1):
import gtk
self.startRunning(installSignalHandlers=installSignalHandlers)
gobject.timeout_add(0, self.simulate)
# mainloop is deprecated in newer versions
if hasattr(gtk, 'main'):
gtk.main()
else:
gtk.mainloop()
def simulate(self):
"""
Run simulation loops and reschedule callbacks.
"""
if self._simtag is not None:
gobject.source_remove(self._simtag)
self.iterate()
timeout = min(self.timeout(), 0.1)
if timeout is None:
timeout = 0.1
# grumble
self._simtag = gobject.timeout_add(int(timeout * 1010), self.simulate)
def install(useGtk=True):
"""
Configure the twisted mainloop to be run inside the gtk mainloop.
@param useGtk: should the GTK+ event loop be used rather than the plain
glib loop (the glib loop is slightly faster but does not support GUI
integration).
"""
reactor = Gtk2Reactor(useGtk)
from twisted.internet.main import installReactor
installReactor(reactor)
return reactor
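# Editor's sketch (hedged, not part of Twisted): the reactor has to be
# installed before anything imports twisted.internet.reactor, otherwise the
# default reactor is already in place. The helper name is illustrative only
# and assumes pygtk/gtk are importable.
def _example_install_then_run():
    from twisted.internet import gtk2reactor
    gtk2reactor.install()        # or gtk2reactor.install(useGtk=False) for the bare glib loop
    from twisted.internet import reactor
    reactor.run()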
def portableInstall(useGtk=True):
"""
Configure the twisted mainloop to be run inside the gtk mainloop.
"""
reactor = PortableGtkReactor()
from twisted.internet.main import installReactor
installReactor(reactor)
return reactor
if runtime.platform.getType() != 'posix':
install = portableInstall
__all__ = ['install']
|
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module provides a base class for tensor-like objects and methods for
basic tensor manipulation. It also provides a class, SquareTensor,
that provides basic methods for creating and manipulating rank 2 tensors.
"""
import collections
import itertools
import os
import string
import warnings
import numpy as np
from monty.json import MSONable
from monty.serialization import loadfn
from scipy.linalg import polar
from pymatgen.analysis.structure_matcher import StructureMatcher
from pymatgen.core.lattice import Lattice
from pymatgen.core.operations import SymmOp
from pymatgen.symmetry.analyzer import SpacegroupAnalyzer
__author__ = "Joseph Montoya"
__copyright__ = "Copyright 2017, The Materials Project"
__credits__ = "Maarten de Jong, Shyam Dwaraknath, Wei Chen, " "Mark Asta, Anubhav Jain, Terence Lew"
__version__ = "1.0"
__maintainer__ = "Joseph Montoya"
__email__ = "[email protected]"
__status__ = "Production"
__date__ = "July 24, 2018"
voigt_map = [(0, 0), (1, 1), (2, 2), (1, 2), (0, 2), (0, 1)]
reverse_voigt_map = np.array([[0, 5, 4], [5, 1, 3], [4, 3, 2]])
DEFAULT_QUAD = loadfn(os.path.join(os.path.dirname(__file__), "quad_data.json"))
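# Editor's sketch (hedged, not part of pymatgen): voigt_map lists the six
# independent index pairs of a symmetric 3x3 tensor in Voigt order, and
# reverse_voigt_map is its inverse lookup; the helper below just confirms the
# two tables agree. The function name is illustrative only.
def _check_voigt_tables():
    for voigt_index, (i, j) in enumerate(voigt_map):
        assert reverse_voigt_map[i, j] == voigt_index
        assert reverse_voigt_map[j, i] == voigt_index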
class Tensor(np.ndarray, MSONable):
"""
Base class for doing useful general operations on Nth order tensors,
without restrictions on the type (stress, elastic, strain, piezo, etc.)
"""
symbol = "T"
def __new__(cls, input_array, vscale=None, check_rank=None):
"""
Create a Tensor object. Note that the constructor uses __new__
rather than __init__ according to the standard method of
subclassing numpy ndarrays.
Args:
input_array: (array-like with shape 3^N): array-like representing
a tensor quantity in standard (i. e. non-voigt) notation
vscale: (N x M array-like): a matrix corresponding
to the coefficients of the voigt-notation tensor
"""
obj = np.asarray(input_array).view(cls)
obj.rank = len(obj.shape)
if check_rank and check_rank != obj.rank:
raise ValueError("{} input must be rank {}".format(obj.__class__.__name__, check_rank))
vshape = tuple([3] * (obj.rank % 2) + [6] * (obj.rank // 2))
obj._vscale = np.ones(vshape)
if vscale is not None:
obj._vscale = vscale
if obj._vscale.shape != vshape:
raise ValueError("Voigt scaling matrix must be the shape of the " "voigt notation matrix or vector.")
if not all([i == 3 for i in obj.shape]):
raise ValueError(
"Pymatgen only supports 3-dimensional tensors, "
"and default tensor constructor uses standard "
"notation. To construct from voigt notation, use"
" {}.from_voigt".format(obj.__class__.__name__)
)
return obj
def __array_finalize__(self, obj):
if obj is None:
return
self.rank = getattr(obj, "rank", None)
self._vscale = getattr(obj, "_vscale", None)
self._vdict = getattr(obj, "_vdict", None)
def __array_wrap__(self, obj):
"""
Overrides __array_wrap__ methods in ndarray superclass to avoid errors
associated with functions that return scalar values
"""
if len(obj.shape) == 0:
return obj[()]
return np.ndarray.__array_wrap__(self, obj)
def __hash__(self):
"""
define a hash function, since numpy arrays
have their own __eq__ method
"""
return hash(self.tostring())
def __repr__(self):
return "{}({})".format(self.__class__.__name__, self.__str__())
def zeroed(self, tol=1e-3):
"""
returns the matrix with all entries below a certain threshold
(i.e. tol) set to zero
"""
new_tensor = self.copy()
new_tensor[abs(new_tensor) < tol] = 0
return new_tensor
def transform(self, symm_op):
"""
Applies a transformation (via a symmetry operation) to a tensor.
Args:
symm_op (SymmOp): a symmetry operation to apply to the tensor
"""
return self.__class__(symm_op.transform_tensor(self))
def rotate(self, matrix, tol=1e-3):
"""
Applies a rotation directly, and tests input matrix to ensure a valid
rotation.
Args:
matrix (3x3 array-like): rotation matrix to be applied to tensor
tol (float): tolerance for testing rotation matrix validity
"""
matrix = SquareTensor(matrix)
if not matrix.is_rotation(tol):
raise ValueError("Rotation matrix is not valid.")
sop = SymmOp.from_rotation_and_translation(matrix, [0.0, 0.0, 0.0])
return self.transform(sop)
def einsum_sequence(self, other_arrays, einsum_string=None):
"""
Calculates the result of an einstein summation expression
"""
if not isinstance(other_arrays, list):
raise ValueError("other tensors must be list of " "tensors or tensor input")
other_arrays = [np.array(a) for a in other_arrays]
if not einsum_string:
lc = string.ascii_lowercase
einsum_string = lc[: self.rank]
other_ranks = [len(a.shape) for a in other_arrays]
idx = self.rank - sum(other_ranks)
for length in other_ranks:
einsum_string += "," + lc[idx : idx + length]
idx += length
einsum_args = [self] + list(other_arrays)
return np.einsum(einsum_string, *einsum_args)
def project(self, n):
"""
Convenience method for projection of a tensor into a
vector. Returns the tensor dotted into a unit vector
along the input n.
Args:
n (3x1 array-like): direction to project onto
Returns (float):
scalar value corresponding to the projection of
the tensor into the vector
"""
n = get_uvec(n)
return self.einsum_sequence([n] * self.rank)
def average_over_unit_sphere(self, quad=None):
"""
Method for averaging the tensor projection over the unit sphere,
with an option for custom quadrature.
Args:
quad (dict): quadrature for integration, should be
dictionary with "points" and "weights" keys defaults
to quadpy.sphere.Lebedev(19) as read from file
Returns:
Average of tensor projected into vectors on the unit sphere
"""
quad = quad or DEFAULT_QUAD
weights, points = quad["weights"], quad["points"]
return sum([w * self.project(n) for w, n in zip(weights, points)])
def get_grouped_indices(self, voigt=False, **kwargs):
"""
Gets index sets for equivalent tensor values
Args:
voigt (bool): whether to get grouped indices
of voigt or full notation tensor, defaults
to false
**kwargs: keyword args for np.isclose. Can take atol
and rtol for absolute and relative tolerance, e.g.
>>> tensor.get_grouped_indices(atol=1e-8)
or
>>> tensor.get_grouped_indices(rtol=1e-5)
Returns:
list of index groups where tensor values are equivalent to
within tolerances
"""
if voigt:
array = self.voigt
else:
array = self
indices = list(itertools.product(*[range(n) for n in array.shape]))
remaining = indices.copy()
# Start with everything near zero
grouped = [list(zip(*np.where(np.isclose(array, 0, **kwargs))))]
remaining = [i for i in remaining if i not in grouped[0]]
# Iteratively run through remaining indices
while remaining:
new = list(zip(*np.where(np.isclose(array, array[remaining[0]], **kwargs))))
grouped.append(new)
remaining = [i for i in remaining if i not in new]
# Don't return any empty lists
return [g for g in grouped if g]
def get_symbol_dict(self, voigt=True, zero_index=False, **kwargs):
"""
Creates a summary dict for tensor with associated symbol
Args:
voigt (bool): whether to get symbol dict for voigt
notation tensor, as opposed to full notation,
defaults to true
zero_index (bool): whether to set initial index to zero,
defaults to false, since tensor notations tend to use
one-indexing, rather than zero indexing like python
**kwargs: keyword args for np.isclose. Can take atol
and rtol for absolute and relative tolerance, e.g.
>>> tensor.get_symbol_dict(atol=1e-8)
or
>>> tensor.get_symbol_dict(rtol=1e-5)
Returns:
dict mapping symbol strings (e.g. "T_11") to the value shared by each
group of equivalent tensor entries
"""
d = {}
if voigt:
array = self.voigt
else:
array = self
grouped = self.get_grouped_indices(voigt=voigt, **kwargs)
if zero_index:
p = 0
else:
p = 1
for indices in grouped:
sym_string = self.symbol + "_"
sym_string += "".join([str(i + p) for i in indices[0]])
value = array[indices[0]]
if not np.isclose(value, 0):
d[sym_string] = array[indices[0]]
return d
def round(self, decimals=0):
"""
Wrapper around numpy.round to ensure object
of same type is returned
Args:
decimals :Number of decimal places to round to (default: 0).
If decimals is negative, it specifies the number of
positions to the left of the decimal point.
Returns (Tensor):
rounded tensor of same type
"""
return self.__class__(np.round(self, decimals=decimals))
@property
def symmetrized(self):
"""
Returns a generally symmetrized tensor, calculated by taking
the sum of the tensor and its transpose with respect to all
possible permutations of indices
"""
perms = list(itertools.permutations(range(self.rank)))
return sum([np.transpose(self, ind) for ind in perms]) / len(perms)
@property
def voigt_symmetrized(self):
"""
Returns a "voigt"-symmetrized tensor, i. e. a voigt-notation
tensor such that it is invariant wrt permutation of indices
"""
if not (self.rank % 2 == 0 and self.rank >= 2):
raise ValueError("V-symmetrization requires rank even and >= 2")
v = self.voigt
perms = list(itertools.permutations(range(len(v.shape))))
new_v = sum([np.transpose(v, ind) for ind in perms]) / len(perms)
return self.__class__.from_voigt(new_v)
def is_symmetric(self, tol=1e-5):
"""
Tests whether a tensor is symmetric or not based on the residual
with its symmetric part, from self.symmetrized
Args:
tol (float): tolerance to test for symmetry
"""
return (self - self.symmetrized < tol).all()
def fit_to_structure(self, structure, symprec=0.1):
"""
Returns a tensor that is invariant with respect to symmetry
operations corresponding to a structure
Args:
structure (Structure): structure from which to generate
symmetry operations
symprec (float): symmetry tolerance for the Spacegroup Analyzer
used to generate the symmetry operations
"""
sga = SpacegroupAnalyzer(structure, symprec)
symm_ops = sga.get_symmetry_operations(cartesian=True)
return sum([self.transform(symm_op) for symm_op in symm_ops]) / len(symm_ops)
def is_fit_to_structure(self, structure, tol=1e-2):
"""
Tests whether a tensor is invariant with respect to the
symmetry operations of a particular structure by testing
whether the residual of the symmetric portion is below a
tolerance
Args:
structure (Structure): structure to be fit to
tol (float): tolerance for symmetry testing
"""
return (self - self.fit_to_structure(structure) < tol).all()
@property
def voigt(self):
"""
Returns the tensor in Voigt notation
"""
v_matrix = np.zeros(self._vscale.shape, dtype=self.dtype)
this_voigt_map = self.get_voigt_dict(self.rank)
for ind in this_voigt_map:
v_matrix[this_voigt_map[ind]] = self[ind]
if not self.is_voigt_symmetric():
warnings.warn("Tensor is not symmetric, information may " "be lost in voigt conversion.")
return v_matrix * self._vscale
def is_voigt_symmetric(self, tol=1e-6):
"""
Tests symmetry of tensor to that necessary for voigt-conversion
by grouping indices into pairs and constructing a sequence of
possible permutations to be used in a tensor transpose
"""
transpose_pieces = [[[0 for i in range(self.rank % 2)]]]
transpose_pieces += [[range(j, j + 2)] for j in range(self.rank % 2, self.rank, 2)]
for n in range(self.rank % 2, len(transpose_pieces)):
if len(transpose_pieces[n][0]) == 2:
transpose_pieces[n] += [transpose_pieces[n][0][::-1]]
for trans_seq in itertools.product(*transpose_pieces):
trans_seq = list(itertools.chain(*trans_seq))
if (self - self.transpose(trans_seq) > tol).any():
return False
return True
@staticmethod
def get_voigt_dict(rank):
"""
Returns a dictionary that maps indices in the tensor to those
in a voigt representation based on input rank
Args:
rank (int): Tensor rank to generate the voigt map
"""
vdict = {}
for ind in itertools.product(*[range(3)] * rank):
v_ind = ind[: rank % 2]
for j in range(rank // 2):
pos = rank % 2 + 2 * j
v_ind += (reverse_voigt_map[ind[pos : pos + 2]],)
vdict[ind] = v_ind
return vdict
@classmethod
def from_voigt(cls, voigt_input):
"""
Constructor based on the voigt notation vector or matrix.
Args:
voigt_input (array-like): voigt input for a given tensor
"""
voigt_input = np.array(voigt_input)
rank = sum(voigt_input.shape) // 3
t = cls(np.zeros([3] * rank))
if voigt_input.shape != t._vscale.shape:
raise ValueError("Invalid shape for voigt matrix")
voigt_input = voigt_input / t._vscale
this_voigt_map = t.get_voigt_dict(rank)
for ind in this_voigt_map:
t[ind] = voigt_input[this_voigt_map[ind]]
return cls(t)
@staticmethod
def get_ieee_rotation(structure, refine_rotation=True):
"""
Given a structure associated with a tensor, determines
the rotation matrix for IEEE conversion according to
the 1987 IEEE standards.
Args:
structure (Structure): a structure associated with the
tensor to be converted to the IEEE standard
refine_rotation (bool): whether to refine the rotation
using SquareTensor.refine_rotation
"""
# Check conventional setting:
sga = SpacegroupAnalyzer(structure)
dataset = sga.get_symmetry_dataset()
trans_mat = dataset["transformation_matrix"]
conv_latt = Lattice(np.transpose(np.dot(np.transpose(structure.lattice.matrix), np.linalg.inv(trans_mat))))
xtal_sys = sga.get_crystal_system()
vecs = conv_latt.matrix
lengths = np.array(conv_latt.abc)
angles = np.array(conv_latt.angles)
rotation = np.zeros((3, 3))
# IEEE rules: a,b,c || x1,x2,x3
if xtal_sys == "cubic":
rotation = [vecs[i] / lengths[i] for i in range(3)]
# IEEE rules: a=b in length; c,a || x3, x1
elif xtal_sys == "tetragonal":
rotation = np.array([vec / mag for (mag, vec) in sorted(zip(lengths, vecs), key=lambda x: x[0])])
if abs(lengths[2] - lengths[1]) < abs(lengths[1] - lengths[0]):
rotation[0], rotation[2] = rotation[2], rotation[0].copy()
rotation[1] = get_uvec(np.cross(rotation[2], rotation[0]))
# IEEE rules: c<a<b; c,a || x3,x1
elif xtal_sys == "orthorhombic":
rotation = [vec / mag for (mag, vec) in sorted(zip(lengths, vecs))]
rotation = np.roll(rotation, 2, axis=0)
# IEEE rules: c,a || x3,x1, c is threefold axis
# Note this also includes rhombohedral crystal systems
elif xtal_sys in ("trigonal", "hexagonal"):
# find threefold axis:
tf_index = np.argmin(abs(angles - 120.0))
non_tf_mask = np.logical_not(angles == angles[tf_index])
rotation[2] = get_uvec(vecs[tf_index])
rotation[0] = get_uvec(vecs[non_tf_mask][0])
rotation[1] = get_uvec(np.cross(rotation[2], rotation[0]))
# IEEE rules: b,c || x2,x3; alpha=beta=90, c<a
elif xtal_sys == "monoclinic":
# Find unique axis
u_index = np.argmax(abs(angles - 90.0))
n_umask = np.logical_not(angles == angles[u_index])
rotation[1] = get_uvec(vecs[u_index])
# Shorter of remaining lattice vectors for c axis
c = [vec / mag for (mag, vec) in sorted(zip(lengths[n_umask], vecs[n_umask]))][0]
rotation[2] = np.array(c)
rotation[0] = np.cross(rotation[1], rotation[2])
# IEEE rules: c || x3, x2 normal to ac plane
elif xtal_sys == "triclinic":
rotation = [vec / mag for (mag, vec) in sorted(zip(lengths, vecs))]
rotation[1] = get_uvec(np.cross(rotation[2], rotation[0]))
rotation[0] = np.cross(rotation[1], rotation[2])
rotation = SquareTensor(rotation)
if refine_rotation:
rotation = rotation.refine_rotation()
return rotation
def convert_to_ieee(self, structure, initial_fit=True, refine_rotation=True):
"""
Given a structure associated with a tensor, attempts a
calculation of the tensor in IEEE format according to
the 1987 IEEE standards.
Args:
structure (Structure): a structure associated with the
tensor to be converted to the IEEE standard
initial_fit (bool): flag to indicate whether initial
tensor is fit to the symmetry of the structure.
Defaults to true. Note that if false, inconsistent
results may be obtained due to symmetrically
equivalent, but distinct transformations
being used in different versions of spglib.
refine_rotation (bool): whether to refine the rotation
produced by the ieee transform generator, default True
"""
rotation = self.get_ieee_rotation(structure, refine_rotation)
result = self.copy()
if initial_fit:
# pylint: disable=E1101
result = result.fit_to_structure(structure)
return result.rotate(rotation, tol=1e-2)
def structure_transform(self, original_structure, new_structure, refine_rotation=True):
"""
Transforms a tensor from one basis for an original structure
into a new basis defined by a new structure.
Args:
original_structure (Structure): structure corresponding
to the basis of the current tensor
new_structure (Structure): structure corresponding to the
desired basis
refine_rotation (bool): whether to refine the rotations
generated in get_ieee_rotation
Returns:
Tensor that has been transformed such that its basis
corresponds to the new_structure's basis
"""
sm = StructureMatcher()
if not sm.fit(original_structure, new_structure):
warnings.warn("original and new structures do not match!")
trans_1 = self.get_ieee_rotation(original_structure, refine_rotation)
trans_2 = self.get_ieee_rotation(new_structure, refine_rotation)
# Get the ieee format tensor
new = self.rotate(trans_1)
# Reverse the ieee format rotation for the second structure
new = new.rotate(np.transpose(trans_2))
return new
@classmethod
def from_values_indices(
cls,
values,
indices,
populate=False,
structure=None,
voigt_rank=None,
vsym=True,
verbose=False,
):
"""
Creates a tensor from values and indices, with options
for populating the remainder of the tensor.
Args:
values (floats): numbers to place at indices
indices (array-likes): indices to place values at
populate (bool): whether to populate the tensor
structure (Structure): structure to base population
or fit_to_structure on
voigt_rank (int): full tensor rank to indicate the
shape of the resulting tensor. This is necessary
if one provides a set of indices more minimal than
the shape of the tensor they want, e.g.
Tensor.from_values_indices((0, 0), 100)
vsym (bool): whether to voigt symmetrize during the
optimization procedure
verbose (bool): whether to populate verbosely
"""
# auto-detect voigt notation
# TODO: refactor rank inheritance to make this easier
indices = np.array(indices)
if voigt_rank:
shape = [3] * (voigt_rank % 2) + [6] * (voigt_rank // 2)
else:
shape = np.ceil(np.max(indices + 1, axis=0) / 3.0) * 3
base = np.zeros(shape.astype(int))
for v, idx in zip(values, indices):
base[tuple(idx)] = v
if 6 in shape:
obj = cls.from_voigt(base)
else:
obj = cls(base)
if populate:
assert structure, "Populate option must include structure input"
obj = obj.populate(structure, vsym=vsym, verbose=verbose)
elif structure:
obj = obj.fit_to_structure(structure)
return obj
def populate(self, structure, prec=1e-5, maxiter=200, verbose=False, precond=True, vsym=True):
"""
Takes a partially populated tensor, and populates the non-zero
entries according to the following procedure, iterated until
the desired convergence (specified via prec) is achieved.
1. Find non-zero entries
2. Symmetrize the tensor with respect to crystal symmetry and
(optionally) voigt symmetry
3. Reset the non-zero entries of the original tensor
Args:
structure (structure object)
prec (float): precision for determining a non-zero value
maxiter (int): maximum iterations for populating the tensor
verbose (bool): whether to populate verbosely
precond (bool): whether to precondition by cycling through
all symmops and storing new nonzero values, default True
vsym (bool): whether to enforce voigt symmetry, defaults
to True
"""
if precond:
# Generate the initial guess from the already-populated entries
sops = SpacegroupAnalyzer(structure).get_symmetry_operations()
guess = Tensor(np.zeros(self.shape))
mask = abs(self) > prec
guess[mask] = self[mask]
def merge(old, new):
gmask = np.abs(old) > prec
nmask = np.abs(new) > prec
new_mask = np.logical_not(gmask) * nmask
avg_mask = gmask * nmask
old[avg_mask] = (old[avg_mask] + new[avg_mask]) / 2.0
old[new_mask] = new[new_mask]
if verbose:
print("Preconditioning for {} symmops".format(len(sops)))
for sop in sops:
rot = guess.transform(sop)
# Merge non-zero entries of the rotated tensor that weren't previously
# present into the guess
merge(guess, rot)
if verbose:
print("Preconditioning for voigt symmetry")
if vsym:
v = guess.voigt
perms = list(itertools.permutations(range(len(v.shape))))
for perm in perms:
vtrans = np.transpose(v, perm)
merge(v, vtrans)
guess = Tensor.from_voigt(v)
else:
guess = np.zeros(self.shape)
assert guess.shape == self.shape, "Guess must have same shape"
converged = False
test_new, test_old = [guess.copy()] * 2
for i in range(maxiter):
test_new = test_old.fit_to_structure(structure)
if vsym:
test_new = test_new.voigt_symmetrized
diff = np.abs(test_old - test_new)
converged = (diff < prec).all()
if converged:
break
test_new[mask] = self[mask]
test_old = test_new
if verbose:
print("Iteration {}: {}".format(i, np.max(diff)))
if not converged:
max_diff = np.max(np.abs(self - test_new))
warnings.warn("Warning, populated tensor is not converged " "with max diff of {}".format(max_diff))
return self.__class__(test_new)
def as_dict(self, voigt: bool = False) -> dict:
"""
Serializes the tensor object
Args:
voigt (bool): flag for whether to store entries in
voigt-notation. Defaults to false, as information
may be lost in conversion.
Returns (Dict):
serialized format tensor object
"""
input_array = self.voigt if voigt else self
d = {
"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"input_array": input_array.tolist(),
}
if voigt:
d.update({"voigt": voigt})
return d
@classmethod
def from_dict(cls, d):
"""MSONAble from_dict implementation."""
voigt = d.get("voigt")
if voigt:
return cls.from_voigt(d["input_array"])
return cls(d["input_array"])
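# Editor's sketch (hedged, not part of pymatgen): a symmetric rank-2 tensor
# round-trips through Voigt notation -- the 3x3 matrix collapses to a length-6
# vector ordered (11, 22, 33, 23, 13, 12) and from_voigt() rebuilds it. The
# numbers and helper name are illustrative only.
def _tensor_voigt_roundtrip_demo():
    t = Tensor([[1.0, 6.0, 5.0],
                [6.0, 2.0, 4.0],
                [5.0, 4.0, 3.0]])
    v = t.voigt
    assert np.allclose(v, [1, 2, 3, 4, 5, 6])
    assert np.allclose(Tensor.from_voigt(v), t)   # lossless for symmetric input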
class TensorCollection(collections.abc.Sequence, MSONable):
"""
A sequence of tensors that can be used for fitting data
or for having a tensor expansion
"""
def __init__(self, tensor_list, base_class=Tensor):
"""
:param tensor_list: List of tensors.
:param base_class: Class to be used.
"""
self.tensors = [base_class(t) if not isinstance(t, base_class) else t for t in tensor_list]
def __len__(self):
return len(self.tensors)
def __getitem__(self, ind):
return self.tensors[ind]
def __iter__(self):
return self.tensors.__iter__()
def zeroed(self, tol=1e-3):
"""
:param tol: Tolerance
:return: TensorCollection where small values are set to 0.
"""
return self.__class__([t.zeroed(tol) for t in self])
def transform(self, symm_op):
"""
Transforms TensorCollection with a symmetry operation.
:param symm_op: SymmetryOperation.
:return: TensorCollection.
"""
return self.__class__([t.transform(symm_op) for t in self])
def rotate(self, matrix, tol=1e-3):
"""
Rotates TensorCollection.
:param matrix: Rotation matrix.
:param tol: tolerance.
:return: TensorCollection.
"""
return self.__class__([t.rotate(matrix, tol) for t in self])
@property
def symmetrized(self):
"""
:return: TensorCollection where all tensors are symmetrized.
"""
return self.__class__([t.symmetrized for t in self])
def is_symmetric(self, tol=1e-5):
"""
:param tol: tolerance
:return: Whether all tensors are symmetric.
"""
return all([t.is_symmetric(tol) for t in self])
def fit_to_structure(self, structure, symprec=0.1):
"""
Fits all tensors to a Structure.
:param structure: Structure
:param symprec: symmetry precision.
:return: TensorCollection.
"""
return self.__class__([t.fit_to_structure(structure, symprec) for t in self])
def is_fit_to_structure(self, structure, tol=1e-2):
"""
:param structure: Structure
:param tol: tolerance
:return: Whether all tensors are fitted to Structure.
"""
return all([t.is_fit_to_structure(structure, tol) for t in self])
@property
def voigt(self):
"""
:return: list of all tensors in voigt form.
"""
return [t.voigt for t in self]
@property
def ranks(self):
"""
:return: Ranks for all tensors.
"""
return [t.rank for t in self]
def is_voigt_symmetric(self, tol=1e-6):
"""
:param tol: tolerance
:return: Whether all tensors are voigt symmetric.
"""
return all([t.is_voigt_symmetric(tol) for t in self])
@classmethod
def from_voigt(cls, voigt_input_list, base_class=Tensor):
"""
Creates TensorCollection from voigt form.
:param voigt_input_list: List of voigt tensors
:param base_class: Class for tensor.
:return: TensorCollection.
"""
return cls([base_class.from_voigt(v) for v in voigt_input_list])
def convert_to_ieee(self, structure, initial_fit=True, refine_rotation=True):
"""
Convert all tensors to IEEE.
:param structure: Structure
:param initial_fit: Whether to perform an initial fit.
:param refine_rotation: Whether to refine the rotation.
:return: TensorCollection.
"""
return self.__class__([t.convert_to_ieee(structure, initial_fit, refine_rotation) for t in self])
def round(self, *args, **kwargs):
"""
Round all tensors.
:param args: Passthrough to Tensor.round
:param kwargs: Passthrough to Tensor.round
:return: TensorCollection.
"""
return self.__class__([t.round(*args, **kwargs) for t in self])
@property
def voigt_symmetrized(self):
"""
:return: TensorCollection where all tensors are voigt symmetrized.
"""
return self.__class__([t.voigt_symmetrized for t in self])
def as_dict(self, voigt=False):
"""
:param voigt: Whether to use voigt form.
:return: Dict representation of TensorCollection.
"""
tensor_list = self.voigt if voigt else self
d = {
"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"tensor_list": [t.tolist() for t in tensor_list],
}
if voigt:
d.update({"voigt": voigt})
return d
@classmethod
def from_dict(cls, d):
"""
Creates TensorCollection from dict.
:param d: dict
:return: TensorCollection
"""
voigt = d.get("voigt")
if voigt:
return cls.from_voigt(d["tensor_list"])
return cls(d["tensor_list"])
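# Example (a minimal sketch, not from the original source): TensorCollection wraps a
# list of array-likes in the base tensor class and broadcasts operations over them.
# Tensor.zeroed and Tensor.rank are assumed to be defined earlier in this module.
import numpy as np
_example_collection = TensorCollection([np.eye(3), 2 * np.eye(3)])
assert len(_example_collection) == 2
assert _example_collection.ranks == [2, 2]
assert np.allclose(_example_collection.zeroed(tol=1e-3)[0], np.eye(3))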
class SquareTensor(Tensor):
"""
Base class for doing useful general operations on second rank tensors
(stress, strain etc.).
"""
def __new__(cls, input_array, vscale=None):
"""
Create a SquareTensor object. Note that the constructor uses __new__
rather than __init__ according to the standard method of
subclassing numpy ndarrays. An error is raised when the class is
initialized with a non-square matrix.
Args:
input_array (3x3 array-like): the 3x3 array-like
representing the content of the tensor
vscale (6x1 array-like): 6x1 array-like scaling the
voigt-notation vector with the tensor entries
"""
obj = super().__new__(cls, input_array, vscale, check_rank=2)
return obj.view(cls)
@property
def trans(self):
"""
shorthand for transpose on SquareTensor
"""
return SquareTensor(np.transpose(self))
@property
def inv(self):
"""
shorthand for matrix inverse on SquareTensor
"""
if self.det == 0:
raise ValueError("SquareTensor is non-invertible")
return SquareTensor(np.linalg.inv(self))
@property
def det(self):
"""
shorthand for the determinant of the SquareTensor
"""
return np.linalg.det(self)
def is_rotation(self, tol=1e-3, include_improper=True):
"""
Test to see if tensor is a valid rotation matrix by checking
whether the inverse is equal to the transpose and whether the
determinant is equal to one, within the specified tolerance
Args:
tol (float): tolerance for both tests, i.e. whether the
determinant is one and whether the inverse is equal
to the transpose
include_improper (bool): whether to include improper
rotations in the determination of validity
"""
det = np.linalg.det(self)
if include_improper:
det = np.abs(det)
return (np.abs(self.inv - self.trans) < tol).all() and (np.abs(det - 1.0) < tol)
def refine_rotation(self):
"""
Helper method for refining a rotation matrix by ensuring
that the second and third rows are perpendicular to the first.
Gets the new y vector by removing the projection of y onto the
new x, and the new z vector from the cross product of the new x and y
Returns:
new rotation matrix
"""
new_x, y = get_uvec(self[0]), get_uvec(self[1])
# Subtract the projection of y onto the new x to make them orthogonal
new_y = y - np.dot(new_x, y) * new_x
new_z = np.cross(new_x, new_y)
return SquareTensor([new_x, new_y, new_z])
def get_scaled(self, scale_factor):
"""
Scales the tensor by a certain multiplicative scale factor
Args:
scale_factor (float): scalar multiplier to be applied to the
SquareTensor object
"""
return SquareTensor(self * scale_factor)
@property
def principal_invariants(self):
"""
Returns a list of principal invariants for the tensor,
which are the values of the coefficients of the characteristic
polynomial for the matrix
"""
return np.poly(self)[1:] * np.array([-1, 1, -1])
def polar_decomposition(self, side="right"):
"""
calculates matrices for polar decomposition
"""
return polar(self, side=side)
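# Example (a minimal sketch, not part of the original module): the SquareTensor
# convenience properties above, checked on a simple rotation about the z axis.
import numpy as np
_theta = np.pi / 3
_rot = SquareTensor([[np.cos(_theta), -np.sin(_theta), 0],
                     [np.sin(_theta), np.cos(_theta), 0],
                     [0, 0, 1]])
assert _rot.is_rotation()                      # inverse equals transpose, det == 1
assert np.allclose(_rot.inv, _rot.trans)
assert abs(_rot.det - 1.0) < 1e-10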
def get_uvec(vec):
""" Gets a unit vector parallel to input vector"""
l = np.linalg.norm(vec)
if l < 1e-8:
return vec
return vec / l
def symmetry_reduce(tensors, structure, tol=1e-8, **kwargs):
"""
Function that takes a list of tensors corresponding to a structure
and returns a dictionary of unique tensor keys with symmop
values corresponding to transformations that will reconstruct the
remaining (derivative) tensors from the original list
Args:
tensors (list of tensors): list of Tensor objects to test for
symmetrically-equivalent duplicates
structure (Structure): structure from which to get symmetry
tol (float): tolerance for tensor equivalence
kwargs: keyword arguments for the SpacegroupAnalyzer
Returns:
dictionary consisting of unique tensors with symmetry operations
corresponding to those which will reconstruct the remaining
tensors as values
"""
sga = SpacegroupAnalyzer(structure, **kwargs)
symmops = sga.get_symmetry_operations(cartesian=True)
unique_mapping = TensorMapping([tensors[0]], [[]], tol=tol)
for tensor in tensors[1:]:
is_unique = True
for unique_tensor, symmop in itertools.product(unique_mapping, symmops):
if np.allclose(unique_tensor.transform(symmop), tensor, atol=tol):
unique_mapping[unique_tensor].append(symmop)
is_unique = False
break
if is_unique:
unique_mapping[tensor] = []
return unique_mapping
class TensorMapping(collections.abc.MutableMapping):
"""
Base class for tensor mappings, which function much like
a dictionary, but use numpy routines to determine approximate
equality to keys for getting and setting items.
This is intended primarily for convenience with things like
stress-strain pairs and fitting data manipulation. In general,
it is significantly less robust than a typical hashed dictionary
and should be used with care.
"""
def __init__(self, tensors=None, values=None, tol=1e-5):
"""
Initialize a TensorMapping
Args:
tensors ([Tensor]): list of tensors
values ([]): list of values to be associated with tensors
tol (float): an absolute tolerance for getting and setting
items in the mapping
"""
self._tensor_list = tensors or []
self._value_list = values or []
if not len(self._tensor_list) == len(self._value_list):
raise ValueError("TensorMapping must be initialized with tensors" "and values of equivalent length")
self.tol = tol
def __getitem__(self, item):
index = self._get_item_index(item)
if index is None:
raise KeyError("{} not found in mapping.".format(item))
return self._value_list[index]
def __setitem__(self, key, value):
index = self._get_item_index(key)
if index is None:
self._tensor_list.append(key)
self._value_list.append(value)
else:
self._value_list[index] = value
def __delitem__(self, key):
index = self._get_item_index(key)
self._tensor_list.pop(index)
self._value_list.pop(index)
def __len__(self):
return len(self._tensor_list)
def __iter__(self):
for item in self._tensor_list:
yield item
def values(self):
"""
:return: Values in mapping.
"""
return self._value_list
def items(self):
"""
:return: Items in mapping.
"""
return zip(self._tensor_list, self._value_list)
def __contains__(self, item):
return self._get_item_index(item) is not None
def _get_item_index(self, item):
if len(self._tensor_list) == 0:
return None
item = np.array(item)
axis = tuple(range(1, len(item.shape) + 1))
mask = np.all(np.abs(np.array(self._tensor_list) - item) < self.tol, axis=axis)
indices = np.where(mask)[0]
if len(indices) > 1:
raise ValueError("Tensor key collision.")
if len(indices) == 0:
return None
return indices[0]
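# Example (a minimal sketch, not from the original source): TensorMapping compares keys
# by numerical closeness rather than by hash, so arrays within `tol` of an existing key
# resolve to the same entry.
import numpy as np
_mapping = TensorMapping(tol=1e-5)
_mapping[np.eye(3)] = "identity"
assert np.eye(3) + 1e-7 in _mapping            # within tolerance of the stored key
assert _mapping[np.eye(3) + 1e-7] == "identity"
_mapping[2 * np.eye(3)] = "doubled"            # a genuinely new key is appended
assert len(_mapping) == 2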
|
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
r"""
This script runs commands on each entry in the API caches.
Syntax: cache.py [-password] [-delete] [-c '...'] [dir ...]
If no directories are specified, it will detect the API caches.
If no command is specified, it will print the filename of all entries.
If only -delete is specified, it will delete all entries.
The option '-c' must be followed by a command in python syntax.
Example commands:
Print the filename of any entry with 'wikidata' in the key:
entry if "wikidata" in entry._uniquedescriptionstr() else None
Customised output if the site code is 'ar':
entry.site.code == "ar" and print("%s" % entry._uniquedescriptionstr())
Or the state of the login
entry.site._loginstatus == LoginStatus.NOT_ATTEMPTED and \
print("%s" % entry._uniquedescriptionstr())
These functions can be used as a command:
has_password(entry)
is_logout(entry)
empty_response(entry)
not_accessed(entry)
incorrect_hash(entry)
older_than_one_day(entry)
recent(entry)
There are helper functions which can be part of a command:
older_than(entry, interval)
newer_than(entry, interval)
"""
#
# (C) Pywikibot team, 2014
#
# Distributed under the terms of the MIT license.
#
from __future__ import print_function, unicode_literals
__version__ = '$Id: f05e9fd4096e2dc06a111a323b8afd4aaea5c540 $'
#
import os
import datetime
import pickle
import hashlib
import pywikibot
from pywikibot.data import api
from pywikibot.site import APISite, DataSite, LoginStatus # noqa
from pywikibot.page import User # noqa
class ParseError(Exception):
"""Error parsing."""
class CacheEntry(api.CachedRequest):
"""A Request cache entry."""
def __init__(self, directory, filename):
"""Constructor."""
self.directory = directory
self.filename = filename
def __str__(self):
return self.filename
def __repr__(self):
return self._cachefile_path()
def _create_file_name(self):
"""Filename of the cached entry."""
return self.filename
def _get_cache_dir(self):
"""Directory of the cached entry."""
return self.directory
def _cachefile_path(self):
return os.path.join(self._get_cache_dir(),
self._create_file_name())
def _load_cache(self):
"""Load the cache entry."""
with open(self._cachefile_path(), 'rb') as f:
self.key, self._data, self._cachetime = pickle.load(f)
return True
def parse_key(self):
"""Parse the key loaded from the cache entry."""
# find the start of the first parameter
start = self.key.index('(')
# find the end of the first object
end = self.key.index(')')
if not end:
raise ParseError('End of Site() keyword not found: %s' % self.key)
if 'Site' not in self.key[0:start]:
raise ParseError('Site() keyword not found at start of key: %s'
% self.key)
site = self.key[0:end + 1]
if site[0:5] == 'Site(':
site = 'APISite(' + site[5:]
username = None
login_status = None
start = end + 1
if self.key[start:start + 5] == 'User(':
# The addition of user to the cache key used:
# repr(User)
# which includes namespaces resulting in:
# User(User:<username>)
# This also accepts User(<username>)
if self.key[start:start + 10] == 'User(User:':
start += 10
else:
start += 5
end = self.key.index(')', start + 5)
if not end:
raise ParseError('End of User() keyword not found: %s'
% self.key)
username = self.key[start:end]
elif self.key[start:start + 12] == 'LoginStatus(':
end = self.key.index(')', start + 12)
if not end:
raise ParseError('End of LoginStatus() keyword not found: %s'
% self.key)
login_status = self.key[start:end + 1]
# If the key does not contain User(..) or LoginStatus(..),
# it must be the old key format which only contains Site and params
elif self.key[start:start + 3] != "[('":
raise ParseError('Keyword after Site not recognised: %s...'
% self.key)
start = end + 1
params = self.key[start:]
self._parsed_key = (site, username, login_status, params)
return self._parsed_key
def _rebuild(self):
"""Reconstruct the original Request from the key."""
if hasattr(self, '_parsed_key'):
(site, username, login_status, params) = self._parsed_key
else:
(site, username, login_status, params) = self.parse_key()
if not site:
raise ParseError('No Site')
self.site = eval(site)
if login_status:
self.site._loginstatus = eval('LoginStatus.%s'
% login_status[12:-1])
if username:
self.site._username = [username, username]
if not params:
raise ParseError('No request params')
self._params = dict(eval(params))
def _delete(self):
"""Delete the cache entry."""
os.remove(self._cachefile_path())
def process_entries(cache_path, func, use_accesstime=None):
"""
Check the contents of the cache.
This program tries to use file access times to determine
whether cache files are being used.
However file access times are not always usable.
On many modern filesystems, they have been disabled.
On unix, check the filesystem mount options. You may
need to remount with 'strictatime'.
@param use_accesstime: Whether access times should be used.
@type use_accesstime: bool tristate:
- None = detect
- False = don't use
- True = always use
"""
if not cache_path:
cache_path = os.path.join(pywikibot.config2.base_dir, 'apicache')
if not os.path.exists(cache_path):
pywikibot.error('%s: no such file or directory' % cache_path)
return
if os.path.isdir(cache_path):
filenames = [os.path.join(cache_path, filename)
for filename in os.listdir(cache_path)]
else:
filenames = [cache_path]
for filepath in filenames:
filename = os.path.basename(filepath)
cache_dir = os.path.dirname(filepath)
if use_accesstime is not False:
stinfo = os.stat(filepath)
entry = CacheEntry(cache_dir, filename)
try:
entry._load_cache()
except ValueError as e:
print('Failed loading %s' % entry._cachefile_path())
pywikibot.exception(e, tb=True)
continue
if use_accesstime is None:
stinfo2 = os.stat(filepath)
use_accesstime = stinfo.st_atime != stinfo2.st_atime
if use_accesstime:
# Reset access times to values before loading cache entry.
os.utime(filepath, (stinfo.st_atime, stinfo.st_mtime))
entry.stinfo = stinfo
try:
entry.parse_key()
except ParseError:
pywikibot.error(u'Problems parsing %s with key %s'
% (entry.filename, entry.key))
pywikibot.exception()
continue
try:
entry._rebuild()
except Exception as e:
pywikibot.error(u'Problems loading %s with key %s, %r'
% (entry.filename, entry.key, entry._parsed_key))
pywikibot.exception(e, tb=True)
continue
func(entry)
def has_password(entry):
"""Entry has a password in the entry."""
if 'lgpassword' in entry._uniquedescriptionstr():
return entry
def is_logout(entry):
"""Entry is a logout entry."""
if not entry._data and 'logout' in entry.key:
return entry
def empty_response(entry):
"""Entry has no data."""
if not entry._data and 'logout' not in entry.key:
return entry
def not_accessed(entry):
"""Entry has never been accessed."""
if not hasattr(entry, 'stinfo'):
return
if entry.stinfo.st_atime <= entry.stinfo.st_mtime:
return entry
def incorrect_hash(entry):
if hashlib.sha256(entry.key.encode('utf-8')).hexdigest() != entry.filename:
return entry
def older_than(entry, interval):
if entry._cachetime + interval < datetime.datetime.now():
return entry
def newer_than(entry, interval):
if entry._cachetime + interval >= datetime.datetime.now():
return entry
def older_than_one_day(entry):
if older_than(entry, datetime.timedelta(days=1)):
return entry
def recent(entry):
if newer_than(entry, datetime.timedelta(hours=1)):
return entry
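# Example (a sketch, not part of the original script): the predicate helpers above can
# be combined into a single callable and passed to process_entries(), which is what
# main() does internally when composing a '-c' command with the default action.
def stale_password_entry(entry):
    """Print password-bearing cache entries older than one day."""
    if has_password(entry) and older_than_one_day(entry):
        pywikibot.output(entry._cachefile_path())
# Equivalent command line (see the module docstring for the '-c' syntax):
#   python cache.py -c 'has_password(entry) and older_than_one_day(entry)'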
def main():
local_args = pywikibot.handleArgs()
cache_paths = None
delete = False
command = None
for arg in local_args:
if command == '':
command = arg
elif arg == '-delete':
delete = True
elif arg == '-password':
command = 'has_password(entry)'
elif arg == '-c':
if command:
pywikibot.error('Only one command may be executed.')
exit(1)
command = ''
else:
if not cache_paths:
cache_paths = [arg]
else:
cache_paths.append(arg)
func = None
if not cache_paths:
cache_paths = ['apicache', 'tests/apicache']
# Also process the base directory, if it isn't the current directory
if os.path.abspath(os.getcwd()) != pywikibot.config2.base_dir:
cache_paths += [
os.path.join(pywikibot.config2.base_dir, 'apicache')]
# Also process the user home cache, if it isn't the config directory
if os.path.expanduser('~/.pywikibot') != pywikibot.config2.base_dir:
cache_paths += [
os.path.join(os.path.expanduser('~/.pywikibot'), 'apicache')]
if delete:
action_func = lambda entry: entry._delete()
else:
action_func = lambda entry: pywikibot.output(entry)
if command:
try:
command_func = eval('lambda entry: ' + command)
except:
pywikibot.exception()
pywikibot.error(u'Cannot compile command: %s' % command)
exit(1)
func = lambda entry: command_func(entry) and action_func(entry)
else:
func = action_func
for cache_path in cache_paths:
if len(cache_paths) > 1:
pywikibot.output(u'Processing %s' % cache_path)
process_entries(cache_path, func)
if __name__ == '__main__':
main()
|
|
# -*- coding: utf-8 -*-
# Copyright: 2011, Grigoriy Petukhov
# Author: Grigoriy Petukhov (http://lorien.name)
# License: BSD
"""
The core of grab package: the Grab class.
"""
import logging
import os
from random import randint
from copy import copy, deepcopy
import threading
import itertools
import collections
try:
from urlparse import urljoin
except ImportError:
from urllib.parse import urljoin
import email
from datetime import datetime
import weakref
from grab.tools.html import find_refresh_url, find_base_url
from grab.document import Document
from grab import error
from grab.tools.http import normalize_http_values
from grab.cookie import CookieManager
from grab.proxy import ProxyList, parse_proxy_line
from grab.deprecated import DeprecatedThings
from grab.kit_interface import GrabKitInterface
from grab.ext.form import FormExtension
from grab.util.py2old_support import *
from grab.util.py3k_support import *
__all__ = ('Grab',)
# This counter will be used in enumerating network queries.
# Its value will be displayed in logging messages and also used
# in names of dumps
# A mutable module-level variable is used to allow different
# instances of Grab to maintain a single shared counter
# This is helpful in debugging when your script
# creates multiple Grab instances - with a shared counter
# the instances do not overwrite each other's dump logs
REQUEST_COUNTER = itertools.count(1)
GLOBAL_STATE = {
'dom_build_time': 0,
'selector_time': 0,
}
MUTABLE_CONFIG_KEYS = ['post', 'multipart_post', 'headers', 'cookies']
TRANSPORT_CACHE = {}
logger = logging.getLogger('grab.base')
# Logger to handle network activity
# It is done as separate logger to allow you easily
# control network logging separately from other grab logs
logger_network = logging.getLogger('grab.network')
def reset_request_counter():
global REQUEST_COUNTER
REQUEST_COUNTER = itertools.count(1)
def copy_config(config, mutable_config_keys=MUTABLE_CONFIG_KEYS):
"""
Copy grab config with correct handling of mutable config values.
"""
cloned_config = copy(config)
# Apply ``copy`` function to mutable config values
for key in mutable_config_keys:
cloned_config[key] = copy(config[key])
return cloned_config
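# Example (a minimal sketch with a trimmed-down config dict, not from the original
# source): copy_config() shallow-copies the dict but re-copies the values listed in
# MUTABLE_CONFIG_KEYS, so a clone can change its headers without touching the original.
_example_config = {'post': None, 'multipart_post': None,
                   'headers': {'X-Example': '1'}, 'cookies': {}}
_example_clone = copy_config(_example_config)
_example_clone['headers']['X-Example'] = '2'
assert _example_config['headers']['X-Example'] == '1'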
def default_config():
# TODO: Maybe config should be split into two entities:
# 1) config which is not changed during a request
# 2) changeable settings
return dict(
# Common
url = None,
# Debugging
log_file = None,
log_dir = False,
debug_post = False,
debug_post_limit = 150,
# Only for curl transport
debug = False,
verbose_logging = False,
# Only for selenium transport
webdriver = 'firefox',
selenium_wait = 1, # in seconds
# Proxy
proxy = None,
proxy_type = None,
proxy_userpwd = None,
proxy_auto_change = True,
# Method, Post
method = None,
post = None,
multipart_post = None,
# Headers, User-Agent, Referer
headers = {},
common_headers = {},
user_agent = None,
user_agent_file = None,
referer = None,
reuse_referer = True,
# Cookies
cookies = {},
reuse_cookies = True,
cookiefile = None,
# Timeouts
timeout = 15,
connect_timeout = 3,
# Connection
connection_reuse = True,
# Response processing
nobody = False,
body_maxsize = None,
body_inmemory = True,
body_storage_dir = None,
body_storage_filename = None,
reject_file_size = None,
# Content compression
encoding = 'gzip',
# Network interface
interface = None,
# Redirects
follow_refresh = False,
follow_location = True,
refresh_redirect_count = 0,
redirect_limit = 10,
# Authentication
userpwd = None,
# Character set to which any unicode data should be encoded
# before being placed in the request
# This setting is overwritten after each request with the
# charset of the retrieved document
charset = 'utf-8',
# Charset to use for converting content of response
# into unicode, by default it is detected automatically
document_charset = None,
# Content type controls how the DOM is built
# For html type the HTML DOM builder is used
# For xml type the XML DOM builder is used
content_type = 'html',
# Fix &#X; entities, where X is between 128 and 160
# Such entities are parsed by modern browsers as
# windows-1251 entities independently of the real charset of
# the document. If this option is True then such entities
# will be replaced with correct unicode entities e.g.:
# — -> —
fix_special_entities = True,
# Convert document body to lower case before building LXML tree
# It does not affect `self.doc.body`
lowercased_tree = False,
# Strip null bytes from document body before building LXML tree
# It does not affect `self.doc.body`
strip_null_bytes = True,
# Internal object to store
state = {},
)
class Grab(FormExtension, DeprecatedThings):
__slots__ = ('request_head', 'request_log', 'request_body',
'proxylist', 'config', '_request_prepared',
'clone_counter', 'transport',
'transport_param', 'request_method', 'request_counter',
'__weakref__', 'cookies',
# Dirty hack to make it possible to inherit Grab from
# multiple base classes with __slots__
'_lxml_form', '_file_fields',
'_pyquery', '_doc', '_kit',
)
# Attributes which should be processed when a clone
# of a Grab instance is being created
clonable_attributes = ('request_head', 'request_log', 'request_body',
'proxylist')
# Complex config items which point to mutable objects
mutable_config_keys = copy(MUTABLE_CONFIG_KEYS)
"""
Public methods
"""
def __init__(self, document_body=None, transport='grab.transport.curl.CurlTransport',
**kwargs):
"""
Create Grab instance
"""
self._doc = None
self.config = default_config()
self.config['common_headers'] = self.common_headers()
self._request_prepared = False
self.cookies = CookieManager()
self.proxylist = ProxyList()
self.setup_transport(transport)
self.reset()
if kwargs:
self.setup(**kwargs)
self.clone_counter = 0
if document_body is not None:
self.setup_document(document_body)
def _get_doc(self):
if self._doc is None:
self._doc = Document(self)
return self._doc
def _set_doc(self, obj):
self._doc = obj
doc = property(_get_doc, _set_doc)
def setup_transport(self, transport_param):
self.transport_param = transport_param
if isinstance(transport_param, basestring):
mod_path, cls_name = transport_param.rsplit('.', 1)
try:
cls = TRANSPORT_CACHE[(mod_path, cls_name)]
except KeyError:
mod = __import__(mod_path, globals(), locals(), ['foo'])
cls = getattr(mod, cls_name)
TRANSPORT_CACHE[(mod_path, cls_name)] = cls
self.transport = cls()
elif isinstance(transport_param, collections.Callable):
self.transport = transport_param()
else:
raise error.GrabMisuseError('Option `transport` should be string or callable. '
'Got %s' % type(transport_param))
def reset(self):
"""
Reset all attributes which could have been modified during a previous
request or which are not initialized yet if this is a new Grab instance.
This method is automatically called before each network request.
"""
self.request_head = None
self.request_log = None
self.request_body = None
self.request_method = None
self.transport.reset()
# KIT
self._kit = None
# Form extension
self._lxml_form = None
self._file_fields = {}
def clone(self, **kwargs):
"""
Create clone of Grab instance.
Cloned instance will have the same state: cookies, referer, response document data
:param **kwargs: overrides settings of cloned grab instance
"""
g = Grab(transport=self.transport_param)
g.config = self.dump_config()
g.doc = self.doc.copy()
g.doc.grab = weakref.proxy(g)
for key in self.clonable_attributes:
setattr(g, key, getattr(self, key))
g.cookies = deepcopy(self.cookies)
g.clone_counter = self.clone_counter + 1
if kwargs:
g.setup(**kwargs)
return g
def adopt(self, g):
"""
Copy the state of another `Grab` instance.
Use case: create backup of current state to the cloned instance and
then restore the state from it.
"""
self.load_config(g.config)
self.doc = g.doc.copy(new_grab=self)
for key in self.clonable_attributes:
setattr(self, key, getattr(g, key))
self.cookies = deepcopy(g.cookies)
self.clone_counter = g.clone_counter + 1
def dump_config(self):
"""
Make clone of current config.
"""
conf = copy_config(self.config, self.mutable_config_keys)
conf['state'] = {
'cookiejar_cookies': list(self.cookies.cookiejar),
}
return conf
def load_config(self, config):
"""
Configure grab instance with external config object.
"""
self.config = copy_config(config, self.mutable_config_keys)
if 'cookiejar_cookies' in config['state']:
self.cookies = CookieManager.from_cookie_list(config['state']['cookiejar_cookies'])
def setup(self, **kwargs):
"""
Setting up Grab instance configuration.
"""
if 'hammer_mode' in kwargs:
logging.error('Option hammer_mode is deprecated. Grab does not support hammer mode anymore.')
del kwargs['hammer_mode']
if 'hammer_timeouts' in kwargs:
logging.error('Option hammer_timeouts is deprecated. Grab does not support hammer mode anymore.')
del kwargs['hammer_timeouts']
for key in kwargs:
if key not in self.config:
raise error.GrabMisuseError('Unknown option: %s' % key)
if 'url' in kwargs:
if self.config.get('url'):
kwargs['url'] = self.make_url_absolute(kwargs['url'])
self.config.update(kwargs)
def go(self, url, **kwargs):
"""
Go to ``url``
Args:
:url: could be absolute or relative. If relative then it will be resolved against the
absolute URL of the previous request.
"""
return self.request(url=url, **kwargs)
def download(self, url, location, **kwargs):
"""
Fetch document located at ``url`` and save it to ``location``.
"""
doc = self.go(url, **kwargs)
with open(location, 'wb') as out:
out.write(doc.body)
return len(doc.body)
def prepare_request(self, **kwargs):
"""
Configure all things needed to make a real network request.
This method is called before doing the real request via the
transport extension.
"""
# Reset the state set by previous request
if not self._request_prepared:
self.reset()
self.request_counter = next(REQUEST_COUNTER)
if kwargs:
self.setup(**kwargs)
if not self.proxylist.is_empty() and self.config['proxy_auto_change']:
self.change_proxy()
self.request_method = self.detect_request_method()
self.transport.process_config(self)
self._request_prepared = True
def log_request(self, extra=''):
"""
Send request details to logging system.
"""
tname = threading.currentThread().getName().lower()
if tname == 'mainthread':
tname = ''
else:
tname = '-%s' % tname
if self.config['proxy']:
if self.config['proxy_userpwd']:
auth = ' with authorization'
else:
auth = ''
proxy_info = ' via %s proxy of type %s%s' % (
self.config['proxy'], self.config['proxy_type'], auth)
else:
proxy_info = ''
if extra:
extra = '[%s] ' % extra
logger_network.debug('[%02d%s] %s%s %s%s',
self.request_counter, tname,
extra, self.request_method or 'GET',
self.config['url'], proxy_info)
def request(self, **kwargs):
"""
Perform network request.
You can specify grab settings in ``**kwargs``.
Any keyword argument will be passed to ``self.config``.
Returns: a ``Document`` object.
"""
self.prepare_request(**kwargs)
self.log_request()
try:
self.transport.request()
except error.GrabError:
self._request_prepared = False
self.save_failed_dump()
raise
else:
# That builds `self.doc`
self.process_request_result()
return self.doc
def process_request_result(self, prepare_response_func=None):
"""
Process result of real request performed via transport extension.
"""
now = datetime.now()
# TODO: move into separate method
if self.config['debug_post']:
post = self.config['post'] or self.config['multipart_post']
if isinstance(post, dict):
post = list(post.items())
if post:
if isinstance(post, basestring):
post = post[:self.config['debug_post_limit']] + '...'
else:
items = normalize_http_values(post, charset='utf-8')
new_items = []
for key, value in items:
if len(value) > self.config['debug_post_limit']:
value = value[:self.config['debug_post_limit']] + '...'
else:
value = value
new_items.append((key, value))
post = '\n'.join('%-25s: %s' % x for x in new_items)
if post:
logger_network.debug('[%02d] POST request:\n%s\n' % (self.request_counter, post))
# It's important to delete old POST data after the request is performed.
# If POST data is not cleared then the next request will try to use it again!
old_refresh_count = self.config['refresh_redirect_count']
self.reset_temporary_options()
if prepare_response_func:
self.doc = prepare_response_func(self.transport, self)
else:
self.doc = self.transport.prepare_response(self)
# Workaround
if self.doc.grab is None:
self.doc.grab = weakref.proxy(self)
if self.config['reuse_cookies']:
self.cookies.update(self.doc.cookies)
self.doc.timestamp = now
self.config['charset'] = self.doc.charset
if self.config['log_file']:
with open(self.config['log_file'], 'wb') as out:
out.write(self.doc.body)
if self.config['cookiefile']:
self.cookies.save_to_file(self.config['cookiefile'])
if self.config['reuse_referer']:
self.config['referer'] = self.doc.url
self.copy_request_data()
# Should be called after `copy_request_data`
self.save_dumps()
self._request_prepared = False
# TODO: check max redirect count
if self.config['follow_refresh']:
url = find_refresh_url(self.doc.unicode_body())
print('URL', url)
if url is not None:
inc_count = old_refresh_count + 1
if inc_count > self.config['redirect_limit']:
raise error.GrabTooManyRedirectsError()
else:
print(inc_count)
return self.request(url=url, refresh_redirect_count=inc_count)
return None
def reset_temporary_options(self):
self.config['post'] = None
self.config['multipart_post'] = None
self.config['method'] = None
self.config['body_storage_filename'] = None
self.config['refresh_redirect_count'] = 0
def save_failed_dump(self):
"""
Save dump of failed request for debugging.
This method is called when a fatal network exception is raised.
The saved dump could be used for debugging the reason of the failure.
"""
# This is a largely untested feature, so
# I put it inside try/except to not break
# live spiders
try:
self.doc = self.transport.prepare_response(self)
self.copy_request_data()
self.save_dumps()
except Exception as ex:
logging.error(unicode(ex))
def copy_request_data(self):
# TODO: Maybe request object?
self.request_head = self.transport.request_head
self.request_body = self.transport.request_body
self.request_log = self.transport.request_log
def setup_document(self, content, **kwargs):
"""
Set up the `response` object without real network requests.
Useful for testing and debugging.
All ``**kwargs`` will be passed to `Document` constructor.
"""
self.reset()
# Configure Document instance
doc = Document(grab=self)
doc.body = content
doc.status = ''
doc.head = ''
doc.parse(charset=kwargs.get('document_charset'))
doc.code = 200
doc.total_time = 0
doc.connect_time = 0
doc.name_lookup_time = 0
doc.url = ''
for key, value in kwargs.items():
setattr(doc, key, value)
self.doc = doc
def change_proxy(self):
"""
Set random proxy from proxylist.
"""
if not self.proxylist.is_empty():
proxy = self.proxylist.get_random_proxy()
self.setup(proxy=proxy.address, proxy_userpwd=proxy.userpwd,
proxy_type=proxy.proxy_type)
else:
logging.debug('Proxy list is empty')
"""
Private methods
"""
def common_headers(self):
"""
Build the headers which a typical browser sends.
"""
return {
'Accept': 'text/xml,application/xml,application/xhtml+xml'
',text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.%d' % randint(2, 5),
'Accept-Language': 'en-us,en;q=0.%d' % (randint(5, 9)),
'Accept-Charset': 'utf-8,windows-1251;q=0.7,*;q=0.%d' % randint(5, 7),
'Keep-Alive': '300',
'Expect': '',
}
def save_dumps(self):
if self.config['log_dir']:
tname = threading.currentThread().getName().lower()
if tname == 'mainthread':
tname = ''
else:
tname = '-%s' % tname
fname = os.path.join(self.config['log_dir'], '%02d%s.log' % (
self.request_counter, tname))
with open(fname, 'w') as out:
out.write('Request headers:\n')
out.write(self.request_head)
out.write('\n')
out.write('Request body:\n')
out.write(self.request_body)
out.write('\n\n')
out.write('Response headers:\n')
out.write(self.doc.head)
fext = 'html'
fname = os.path.join(self.config['log_dir'], '%02d%s.%s' % (
self.request_counter, tname, fext))
self.doc.save(fname)
def make_url_absolute(self, url, resolve_base=False):
"""
Make url absolute using previous request url as base url.
"""
if self.config['url']:
if resolve_base:
ubody = self.doc.unicode_body()
base_url = find_base_url(ubody)
if base_url:
return urljoin(base_url, url)
return urljoin(self.config['url'], url)
else:
return url
def detect_request_method(self):
"""
Analyze the request config and find which
request method will be used.
Returns the request method in upper case.
This method is needed sometimes when the `process_config` method
has not been called yet.
"""
method = self.config['method']
if method:
method = method.upper()
else:
if self.config['post'] or self.config['multipart_post']:
method = 'POST'
else:
method = 'GET'
return method
def clear_cookies(self):
"""
Clear all remembered cookies.
"""
self.config['cookies'] = {}
self.cookies.clear()
def setup_with_proxyline(self, line, proxy_type='http'):
# TODO: remove from base class
# maybe to proxylist?
host, port, user, pwd = parse_proxy_line(line)
server_port = '%s:%s' % (host, port)
self.setup(proxy=server_port, proxy_type=proxy_type)
if user:
userpwd = '%s:%s' % (user, pwd)
self.setup(proxy_userpwd=userpwd)
def __getstate__(self):
"""
Reset cached lxml objects which could not be pickled.
"""
state = {}
for cls in type(self).mro():
cls_slots = getattr(cls, '__slots__', ())
for slot in cls_slots:
if slot != '__weakref__':
if hasattr(self, slot):
state[slot] = getattr(self, slot)
state['_lxml_form'] = None
if state['_doc']:
state['_doc'].grab = weakref.proxy(self)
return state
def __setstate__(self, state):
for slot, value in state.items():
setattr(self, slot, value)
@property
def request_headers(self):
"""
Temporary hack until I decide
where to store request details.
"""
try:
first_head = self.request_head.split('\r\n\r\n')[0]
lines = first_head.split('\r\n')
lines = [x for x in lines if ':' in x]
headers = email.message_from_string('\n'.join(lines))
return headers
except Exception as ex:
logging.error('Could not parse request headers', exc_info=ex)
return {}
@property
def kit(self):
"""
Return KitInterface object that provides some
methods to communicate with Kit transport.
"""
if not self._kit:
self._kit = GrabKitInterface(self)
return self._kit
# For backward compatibility
# WTF???
BaseGrab = Grab
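# Example (a minimal usage sketch, not part of the original module; the URL and option
# values are purely illustrative). Grab.go() performs the request and returns the
# Document built by process_request_result(); download() saves a response body to disk.
if __name__ == '__main__':
    g = Grab(timeout=10, user_agent='example-bot')
    doc = g.go('http://example.com/')
    print(doc.code, len(doc.body))
    g.download('http://example.com/', '/tmp/example.html')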
|
|
##
# Copyright (c) 2011-2015 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
Object model of CALDAV:filter element used in a calendar-query.
"""
__all__ = [
"Filter",
]
from twext.python.log import Logger
from twistedcaldav.caldavxml import caldav_namespace, CalDAVTimeZoneElement
from twistedcaldav.dateops import timeRangesOverlap
from twistedcaldav.ical import Component, Property
from pycalendar.datetime import DateTime
from pycalendar.timezone import Timezone
log = Logger()
class FilterBase(object):
"""
Determines which matching components are returned.
"""
serialized_name = None
deserialize_names = {}
@classmethod
def serialize_register(cls, register):
cls.deserialize_names[register.serialized_name] = register
def __init__(self, xml_element):
pass
@classmethod
def deserialize(cls, data):
"""
Convert a JSON compatible serialization of this object into the actual object.
"""
obj = cls.deserialize_names[data["type"]](None)
obj._deserialize(data)
return obj
def _deserialize(self, data):
"""
Convert a JSON compatible serialization of this object into the actual object.
"""
pass
def serialize(self):
"""
Create a JSON compatible serialization of this object - will be used in a cross-pod request.
"""
return {
"type": self.serialized_name,
}
def match(self, item, access=None):
raise NotImplementedError
def valid(self, level=0):
raise NotImplementedError
class Filter(FilterBase):
"""
Determines which matching components are returned.
"""
serialized_name = "Filter"
def __init__(self, xml_element):
super(Filter, self).__init__(xml_element)
if xml_element is None:
return
# One comp-filter element must be present
if len(xml_element.children) != 1 or xml_element.children[0].qname() != (caldav_namespace, "comp-filter"):
raise ValueError("Invalid CALDAV:filter element: %s" % (xml_element,))
self.child = ComponentFilter(xml_element.children[0])
def _deserialize(self, data):
"""
Convert a JSON compatible serialization of this object into the actual object.
"""
self.child = FilterBase.deserialize(data["child"])
def serialize(self):
"""
Create a JSON compatible serialization of this object - will be used in a cross-pod request.
"""
result = super(Filter, self).serialize()
result.update({
"child": self.child.serialize(),
})
return result
def match(self, component, access=None):
"""
Returns True if the given calendar component matches this filter, False
otherwise.
"""
# We only care about certain access restrictions.
if access not in (Component.ACCESS_CONFIDENTIAL, Component.ACCESS_RESTRICTED):
access = None
# We need to prepare ourselves for a time-range query by pre-calculating
# the set of instances up to the latest time-range limit. That way we can
# avoid having to do some form of recurrence expansion for each query sub-part.
maxend, isStartTime = self.getmaxtimerange()
if maxend:
if isStartTime:
if component.isRecurringUnbounded():
# Unbounded recurrence is always within a start-only time-range
instances = None
else:
# Expand the instances up to infinity
instances = component.expandTimeRanges(DateTime(2100, 1, 1, 0, 0, 0, tzid=Timezone.UTCTimezone), ignoreInvalidInstances=True)
else:
instances = component.expandTimeRanges(maxend, ignoreInvalidInstances=True)
else:
instances = None
self.child.setInstances(instances)
# <filter> contains exactly one <comp-filter>
return self.child.match(component, access)
def valid(self):
"""
Indicate whether this filter element's structure is valid wrt iCalendar
data object model.
@return: True if valid, False otherwise
"""
# Must have one child element for VCALENDAR
return self.child.valid(0)
def settimezone(self, tzelement):
"""
Set the default timezone to use with this query.
@param tzelement: a L{CalDAVTimeZoneElement} or L{Component} for the
VCALENDAR containing the one VTIMEZONE that we want
@return: the L{Timezone} derived from the VTIMEZONE, or UTC.
"""
if tzelement is None:
tz = None
elif isinstance(tzelement, CalDAVTimeZoneElement):
tz = tzelement.gettimezone()
elif isinstance(tzelement, Component):
tz = tzelement.gettimezone()
if tz is None:
tz = Timezone.UTCTimezone
self.child.settzinfo(tz)
return tz
def getmaxtimerange(self):
"""
Get the date farthest into the future in any time-range elements
"""
return self.child.getmaxtimerange(None, False)
def getmintimerange(self):
"""
Get the date farthest into the past in any time-range elements. That is either
the start date, or if start is not present, the end date.
"""
return self.child.getmintimerange(None, False)
FilterBase.serialize_register(Filter)
class FilterChildBase(FilterBase):
"""
CalDAV filter element.
"""
def __init__(self, xml_element):
super(FilterChildBase, self).__init__(xml_element)
if xml_element is None:
return
qualifier = None
filters = []
for child in xml_element.children:
qname = child.qname()
if qname in (
(caldav_namespace, "is-not-defined"),
(caldav_namespace, "time-range"),
(caldav_namespace, "text-match"),
):
if qualifier is not None:
raise ValueError("Only one of CalDAV:time-range, CalDAV:text-match allowed")
if qname == (caldav_namespace, "is-not-defined"):
qualifier = IsNotDefined(child)
elif qname == (caldav_namespace, "time-range"):
qualifier = TimeRange(child)
elif qname == (caldav_namespace, "text-match"):
qualifier = TextMatch(child)
elif qname == (caldav_namespace, "comp-filter"):
filters.append(ComponentFilter(child))
elif qname == (caldav_namespace, "prop-filter"):
filters.append(PropertyFilter(child))
elif qname == (caldav_namespace, "param-filter"):
filters.append(ParameterFilter(child))
else:
raise ValueError("Unknown child element: %s" % (qname,))
if qualifier and isinstance(qualifier, IsNotDefined) and (len(filters) != 0):
raise ValueError("No other tests allowed when CalDAV:is-not-defined is present")
self.qualifier = qualifier
self.filters = filters
self.filter_name = xml_element.attributes["name"]
if isinstance(self.filter_name, unicode):
self.filter_name = self.filter_name.encode("utf-8")
self.defined = not self.qualifier or not isinstance(qualifier, IsNotDefined)
filter_test = xml_element.attributes.get("test", "allof")
if filter_test not in ("anyof", "allof"):
raise ValueError("Test must be only one of anyof, allof")
self.filter_test = filter_test
def _deserialize(self, data):
"""
Convert a JSON compatible serialization of this object into the actual object.
"""
self.qualifier = FilterBase.deserialize(data["qualifier"]) if data["qualifier"] else None
self.filters = [FilterBase.deserialize(filter) for filter in data["filters"]]
self.filter_name = data["filter_name"]
self.defined = data["defined"]
self.filter_test = data["filter_test"]
def serialize(self):
"""
Create a JSON compatible serialization of this object - will be used in a cross-pod request.
"""
result = super(FilterChildBase, self).serialize()
result.update({
"qualifier": self.qualifier.serialize() if self.qualifier else None,
"filters": [filter.serialize() for filter in self.filters],
"filter_name": self.filter_name,
"defined": self.defined,
"filter_test": self.filter_test,
})
return result
def match(self, item, access=None):
"""
Returns True if the given calendar item (either a component, property or parameter value)
matches this filter, False otherwise.
"""
# Always return True for the is-not-defined case as the result of this will
# be negated by the caller
if not self.defined:
return True
if self.qualifier and not self.qualifier.match(item, access):
return False
if len(self.filters) > 0:
allof = self.filter_test == "allof"
for filter in self.filters:
if allof != filter._match(item, access):
return not allof
return allof
else:
return True
class ComponentFilter (FilterChildBase):
"""
Limits a search to only the chosen component types.
"""
serialized_name = "ComponentFilter"
def match(self, item, access):
"""
Returns True if the given calendar item (which is a component)
matches this filter, False otherwise.
This specialization uses the instance matching option of the time-range filter
to minimize instance expansion.
"""
# Always return True for the is-not-defined case as the result of this will
# be negated by the caller
if not self.defined:
return True
if self.qualifier and not self.qualifier.matchinstance(item, self.instances):
return False
if len(self.filters) > 0:
allof = self.filter_test == "allof"
for filter in self.filters:
if allof != filter._match(item, access):
return not allof
return allof
else:
return True
def _match(self, component, access):
# At least one subcomponent must match (or is-not-defined is set)
for subcomponent in component.subcomponents():
# If access restrictions are in force, restrict matching to specific components only.
# In particular do not match VALARM.
if access and subcomponent.name() not in ("VEVENT", "VTODO", "VJOURNAL", "VFREEBUSY", "VTIMEZONE",):
continue
# Try to match the component name
if isinstance(self.filter_name, str):
if subcomponent.name() != self.filter_name:
continue
else:
if subcomponent.name() not in self.filter_name:
continue
if self.match(subcomponent, access):
break
else:
return not self.defined
return self.defined
def setInstances(self, instances):
"""
Give the list of instances to each comp-filter element.
@param instances: the list of instances.
"""
self.instances = instances
for compfilter in [x for x in self.filters if isinstance(x, ComponentFilter)]:
compfilter.setInstances(instances)
def valid(self, level):
"""
Indicate whether this filter element's structure is valid wrt iCalendar
data object model.
@param level: the nesting level of this filter element, 0 being the top comp-filter.
@return: True if valid, False otherwise
"""
# Check for time-range
timerange = self.qualifier and isinstance(self.qualifier, TimeRange)
if level == 0:
# Must have VCALENDAR at the top
if (self.filter_name != "VCALENDAR") or timerange:
log.info("Top-level comp-filter must be VCALENDAR, instead: %s" % (self.filter_name,))
return False
elif level == 1:
# Disallow VCALENDAR, VALARM, STANDARD, DAYLIGHT, AVAILABLE at the top, everything else is OK
if self.filter_name in ("VCALENDAR", "VALARM", "STANDARD", "DAYLIGHT", "AVAILABLE"):
log.info("comp-filter wrong component type: %s" % (self.filter_name,))
return False
# time-range only on VEVENT, VTODO, VJOURNAL, VFREEBUSY, VAVAILABILITY
if timerange and self.filter_name not in ("VEVENT", "VTODO", "VJOURNAL", "VFREEBUSY", "VAVAILABILITY"):
log.info("time-range cannot be used with component %s" % (self.filter_name,))
return False
elif level == 2:
# Disallow VCALENDAR, VTIMEZONE, VEVENT, VTODO, VJOURNAL, VFREEBUSY, VAVAILABILITY at the top, everything else is OK
if (self.filter_name in ("VCALENDAR", "VTIMEZONE", "VEVENT", "VTODO", "VJOURNAL", "VFREEBUSY", "VAVAILABILITY")):
log.info("comp-filter wrong sub-component type: %s" % (self.filter_name,))
return False
# time-range only on VALARM, AVAILABLE
if timerange and self.filter_name not in ("VALARM", "AVAILABLE",):
log.info("time-range cannot be used with sub-component %s" % (self.filter_name,))
return False
else:
# Disallow all standard iCal components anywhere else
if (self.filter_name in ("VCALENDAR", "VTIMEZONE", "VEVENT", "VTODO", "VJOURNAL", "VFREEBUSY", "VALARM", "STANDARD", "DAYLIGHT", "AVAILABLE")) or timerange:
log.info("comp-filter wrong standard component type: %s" % (self.filter_name,))
return False
# Test each property
for propfilter in [x for x in self.filters if isinstance(x, PropertyFilter)]:
if not propfilter.valid():
return False
# Test each component
for compfilter in [x for x in self.filters if isinstance(x, ComponentFilter)]:
if not compfilter.valid(level + 1):
return False
# Test the time-range
if timerange:
if not self.qualifier.valid():
return False
return True
def settzinfo(self, tzinfo):
"""
Set the default timezone to use with this query.
@param tzinfo: a L{Timezone} to use.
"""
# Give tzinfo to any TimeRange we have
if isinstance(self.qualifier, TimeRange):
self.qualifier.settzinfo(tzinfo)
# Pass down to sub components/properties
for x in self.filters:
x.settzinfo(tzinfo)
def getmaxtimerange(self, currentMaximum, currentIsStartTime):
"""
Get the date farthest into the future in any time-range elements
@param currentMaximum: current future value to compare with
@type currentMaximum: L{DateTime}
"""
# Give tzinfo to any TimeRange we have
isStartTime = False
if isinstance(self.qualifier, TimeRange):
isStartTime = self.qualifier.end is None
compareWith = self.qualifier.start if isStartTime else self.qualifier.end
if currentMaximum is None or currentMaximum < compareWith:
currentMaximum = compareWith
currentIsStartTime = isStartTime
# Pass down to sub components/properties
for x in self.filters:
currentMaximum, currentIsStartTime = x.getmaxtimerange(currentMaximum, currentIsStartTime)
return currentMaximum, currentIsStartTime
def getmintimerange(self, currentMinimum, currentIsEndTime):
"""
Get the date farthest into the past in any time-range elements. That is either
the start date, or if start is not present, the end date.
"""
# Give tzinfo to any TimeRange we have
isEndTime = False
if isinstance(self.qualifier, TimeRange):
isEndTime = self.qualifier.start is None
compareWith = self.qualifier.end if isEndTime else self.qualifier.start
if currentMinimum is None or currentMinimum > compareWith:
currentMinimum = compareWith
currentIsEndTime = isEndTime
# Pass down to sub components/properties
for x in self.filters:
currentMinimum, currentIsEndTime = x.getmintimerange(currentMinimum, currentIsEndTime)
return currentMinimum, currentIsEndTime
FilterBase.serialize_register(ComponentFilter)
class PropertyFilter (FilterChildBase):
"""
Limits a search to specific properties.
"""
serialized_name = "PropertyFilter"
def _match(self, component, access):
# When access restriction is in force, we need to only allow matches against the properties
# allowed by the access restriction level.
if access:
allowedProperties = Component.confidentialPropertiesMap.get(component.name(), None)
if allowedProperties and access == Component.ACCESS_RESTRICTED:
allowedProperties += Component.extraRestrictedProperties
else:
allowedProperties = None
# At least one property must match (or is-not-defined is set)
for property in component.properties():
# Apply access restrictions, if any.
if allowedProperties is not None and property.name().upper() not in allowedProperties:
continue
if property.name().upper() == self.filter_name.upper() and self.match(property, access):
break
else:
return not self.defined
return self.defined
def valid(self):
"""
Indicate whether this filter element's structure is valid wrt iCalendar
data object model.
@return: True if valid, False otherwise
"""
# Check for time-range
timerange = self.qualifier and isinstance(self.qualifier, TimeRange)
# time-range only on COMPLETED, CREATED, DTSTAMP, LAST-MODIFIED
if timerange and self.filter_name.upper() not in ("COMPLETED", "CREATED", "DTSTAMP", "LAST-MODIFIED"):
log.info("time-range cannot be used with property %s" % (self.filter_name,))
return False
# Test the time-range
if timerange:
if not self.qualifier.valid():
return False
# No other tests
return True
def settzinfo(self, tzinfo):
"""
Set the default timezone to use with this query.
@param tzinfo: a L{Timezone} to use.
"""
# Give tzinfo to any TimeRange we have
if isinstance(self.qualifier, TimeRange):
self.qualifier.settzinfo(tzinfo)
def getmaxtimerange(self, currentMaximum, currentIsStartTime):
"""
Get the date farthest into the future in any time-range elements
@param currentMaximum: current future value to compare with
@type currentMaximum: L{DateTime}
"""
# Give tzinfo to any TimeRange we have
isStartTime = False
if isinstance(self.qualifier, TimeRange):
isStartTime = self.qualifier.end is None
compareWith = self.qualifier.start if isStartTime else self.qualifier.end
if currentMaximum is None or currentMaximum < compareWith:
currentMaximum = compareWith
currentIsStartTime = isStartTime
return currentMaximum, currentIsStartTime
def getmintimerange(self, currentMinimum, currentIsEndTime):
"""
Get the date farthest into the past in any time-range elements. That is either
the start date, or if start is not present, the end date.
"""
# Give tzinfo to any TimeRange we have
isEndTime = False
if isinstance(self.qualifier, TimeRange):
isEndTime = self.qualifier.start is None
compareWith = self.qualifier.end if isEndTime else self.qualifier.start
if currentMinimum is None or currentMinimum > compareWith:
currentMinimum = compareWith
currentIsEndTime = isEndTime
return currentMinimum, currentIsEndTime
FilterBase.serialize_register(PropertyFilter)
class ParameterFilter (FilterChildBase):
"""
Limits a search to specific parameters.
"""
serialized_name = "ParameterFilter"
def _match(self, property, access):
# At least one parameter must match (or is-not-defined is set)
result = not self.defined
for parameterName in property.parameterNames():
if parameterName.upper() == self.filter_name.upper() and self.match([property.parameterValue(parameterName)], access):
result = self.defined
break
return result
FilterBase.serialize_register(ParameterFilter)
class IsNotDefined (FilterBase):
"""
Specifies that the named iCalendar item does not exist.
"""
serialized_name = "IsNotDefined"
def match(self, component, access=None):
# Oddly, this always needs to return True so that it appears there is
# a match - but we then "negate" the result if is-not-defined is set.
# Actually this method should never be called as we special case the
# is-not-defined option.
return True
FilterBase.serialize_register(IsNotDefined)
class TextMatch (FilterBase):
"""
Specifies a substring match on a property or parameter value.
(CalDAV-access-09, section 9.6.4)
"""
serialized_name = "TextMatch"
def __init__(self, xml_element):
super(TextMatch, self).__init__(xml_element)
if xml_element is None:
return
self.text = str(xml_element)
if "caseless" in xml_element.attributes:
caseless = xml_element.attributes["caseless"]
if caseless == "yes":
self.caseless = True
elif caseless == "no":
self.caseless = False
else:
self.caseless = True
if "negate-condition" in xml_element.attributes:
negate = xml_element.attributes["negate-condition"]
if negate == "yes":
self.negate = True
elif negate == "no":
self.negate = False
else:
self.negate = False
if "match-type" in xml_element.attributes:
self.match_type = xml_element.attributes["match-type"]
if self.match_type not in (
"equals",
"contains",
"starts-with",
"ends-with",
):
self.match_type = "contains"
else:
self.match_type = "contains"
def _deserialize(self, data):
"""
Convert a JSON compatible serialization of this object into the actual object.
"""
self.text = data["text"]
self.caseless = data["caseless"]
self.negate = data["negate"]
self.match_type = data["match_type"]
def serialize(self):
"""
Create a JSON compatible serialization of this object - will be used in a cross-pod request.
"""
result = super(TextMatch, self).serialize()
result.update({
"text": self.text,
"caseless": self.caseless,
"negate": self.negate,
"match_type": self.match_type,
})
return result
def match(self, item, access):
"""
Match the text for the item.
If the item is a property, then match the property value,
otherwise it may be a list of parameter values - try to match any one of those
"""
if item is None:
return False
if isinstance(item, Property):
values = [item.strvalue()]
else:
values = item
test = unicode(self.text, "utf-8")
if self.caseless:
test = test.lower()
def _textCompare(s):
if self.caseless:
s = s.lower()
if self.match_type == "equals":
return s == test
elif self.match_type == "contains":
return s.find(test) != -1
elif self.match_type == "starts-with":
return s.startswith(test)
elif self.match_type == "ends-with":
return s.endswith(test)
else:
return False
for value in values:
# NB It's possible that we have a text list value which appears as a Python list,
# so we need to check for that and iterate over the list.
if isinstance(value, list):
for subvalue in value:
if _textCompare(unicode(subvalue, "utf-8")):
return not self.negate
else:
if _textCompare(unicode(value, "utf-8")):
return not self.negate
return self.negate
FilterBase.serialize_register(TextMatch)
class TimeRange (FilterBase):
"""
Specifies a time for testing components against.
"""
serialized_name = "TimeRange"
def __init__(self, xml_element):
super(TimeRange, self).__init__(xml_element)
if xml_element is None:
return
# One of start or end must be present
if "start" not in xml_element.attributes and "end" not in xml_element.attributes:
raise ValueError("One of 'start' or 'end' must be present in CALDAV:time-range")
self.start = DateTime.parseText(xml_element.attributes["start"]) if "start" in xml_element.attributes else None
self.end = DateTime.parseText(xml_element.attributes["end"]) if "end" in xml_element.attributes else None
self.tzinfo = None
def _deserialize(self, data):
"""
Convert a JSON compatible serialization of this object into the actual object.
"""
self.start = DateTime.parseText(data["start"]) if data["start"] else None
self.end = DateTime.parseText(data["end"]) if data["end"] else None
self.tzinfo = Timezone(tzid=data["tzinfo"]) if data["tzinfo"] else None
def serialize(self):
"""
Create a JSON compatible serialization of this object - will be used in a cross-pod request.
"""
result = super(TimeRange, self).serialize()
result.update({
"start": self.start.getText() if self.start else None,
"end": self.end.getText() if self.end else None,
"tzinfo": self.tzinfo.getTimezoneID() if self.tzinfo else None,
})
return result
def settzinfo(self, tzinfo):
"""
Set the default timezone to use with this query.
@param tzinfo: a L{Timezone} to use.
"""
# Give tzinfo to any TimeRange we have
self.tzinfo = tzinfo
def valid(self, level=0):
"""
Indicate whether the time-range is valid (must be date-time in UTC).
@return: True if valid, False otherwise
"""
if self.start is not None and self.start.isDateOnly():
log.info("start attribute in <time-range> is not a date-time: %s" % (self.start,))
return False
if self.end is not None and self.end.isDateOnly():
log.info("end attribute in <time-range> is not a date-time: %s" % (self.end,))
return False
if self.start is not None and not self.start.utc():
log.info("start attribute in <time-range> is not UTC: %s" % (self.start,))
return False
if self.end is not None and not self.end.utc():
log.info("end attribute in <time-range> is not UTC: %s" % (self.end,))
return False
# No other tests
return True
def match(self, property, access=None):
"""
NB This is only called when doing a time-range match on a property.
"""
if property is None:
return False
else:
return property.containsTimeRange(self.start, self.end, self.tzinfo)
def matchinstance(self, component, instances):
"""
Test whether this time-range element causes a match to the specified component
using the specified set of instances to determine the expanded time ranges.
@param component: the L{Component} to test.
@param instances: the list of expanded instances.
@return: True if the time-range query matches, False otherwise.
"""
if component is None:
return False
assert instances is not None or self.end is None, "Failure to expand instance for time-range filter: %r" % (self,)
# Special case open-ended unbounded
if instances is None:
if component.getRecurrenceIDUTC() is None:
return True
else:
# See if the overridden component's start is past the start
start, _ignore_end = component.getEffectiveStartEnd()
if start is None:
return True
else:
return start >= self.start
# Handle alarms as a special case
alarms = (component.name() == "VALARM")
if alarms:
testcomponent = component._parent
else:
testcomponent = component
for key in instances:
instance = instances[key]
# First make sure components match
if not testcomponent.same(instance.component):
continue
if alarms:
# Get all the alarm triggers for this instance and test each one
triggers = instance.getAlarmTriggers()
for trigger in triggers:
if timeRangesOverlap(trigger, None, self.start, self.end, self.tzinfo):
return True
else:
# Regular instance overlap test
if timeRangesOverlap(instance.start, instance.end, self.start, self.end, self.tzinfo):
return True
return False
FilterBase.serialize_register(TimeRange)
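# Sketch of the JSON-compatible dict TimeRange.serialize() produces (keys taken
# from the method above; the date-time strings and tzid are hypothetical example
# values, and FilterBase.serialize() contributes whatever base fields it defines):
#
#   {
#       ...,                            # base fields from FilterBase.serialize()
#       "start": "20050101T000000Z",    # or None when no start was given
#       "end": "20050102T000000Z",      # or None when no end was given
#       "tzinfo": "America/New_York",   # or None when no default timezone is set
#   }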
|
|
# -*- coding: utf-8 -*-
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from copy import deepcopy
from mock import patch
import string
from oslo.serialization import jsonutils
from nailgun.errors import errors
from nailgun.extensions.volume_manager.extension import VolumeManagerExtension
from nailgun.extensions.volume_manager.manager import Disk
from nailgun.extensions.volume_manager.manager import DisksFormatConvertor
from nailgun.extensions.volume_manager.manager import only_disks
from nailgun.extensions.volume_manager.manager import only_vg
from nailgun.test.base import BaseIntegrationTest
from nailgun.test.base import fake_tasks
from nailgun.utils import reverse
class TestNodeDisksHandlers(BaseIntegrationTest):
def create_node(self, roles=None, pending_roles=None):
if roles is None:
roles = ['controller']
if pending_roles is None:
pending_roles = []
self.env.create(
nodes_kwargs=[{
'roles': roles,
'pending_roles': pending_roles,
'pending_addition': True,
'api': True}])
return self.env.nodes[0]
def get(self, node_id):
resp = self.app.get(
reverse('NodeDisksHandler', kwargs={'node_id': node_id}),
headers=self.default_headers)
self.assertEqual(200, resp.status_code)
return resp.json_body
def put(self, node_id, data, expect_errors=False):
resp = self.app.put(
reverse('NodeDisksHandler', kwargs={'node_id': node_id}),
jsonutils.dumps(data),
headers=self.default_headers,
expect_errors=expect_errors)
if not expect_errors:
self.assertEqual(200, resp.status_code)
return resp.json_body
else:
return resp
@staticmethod
def get_vgs(resp):
disks_vgs = [d['volumes'] for d in resp]
vgs = [vg['name'] for disk_vgs in disks_vgs for vg in disk_vgs]
return set(vgs)
@fake_tasks()
def test_clean_volumes_after_reset(self):
disks = [
{
"model": "TOSHIBA MK1002TS",
"name": "sda",
"disk": "sda",
"size": 1000204886016
},
{
"model": "TOSHIBA MK1002TS",
"name": "sdb",
"disk": "disk/by-path/pci-0000:00:0d.0-scsi-0:0:0:0",
"size": 1000204886016
},
]
self.env.create(
nodes_kwargs=[{
"roles": [],
"pending_roles": ['controller'],
"meta": {"disks": disks}
}]
)
self.env.wait_ready(
self.env.launch_deployment())
self.env.wait_ready(
self.env.reset_environment())
node_db = self.env.nodes[0]
# simulate disk change
new_meta = deepcopy(node_db.meta)
new_meta['disks'][0]['disk'] = new_meta['disks'][1]['disk']
new_meta['disks'][1]['disk'] = 'sdb'
self.app.put(
reverse('NodeAgentHandler'),
jsonutils.dumps({
'mac': node_db.mac,
'meta': new_meta}),
headers=self.default_headers)
# check that we can config disks after reset
disks = self.get(node_db.id)
disks[0]['volumes'][0]['size'] -= 100
updated_disks = self.put(node_db.id, disks)
self.assertEqual(disks, updated_disks)
def test_default_attrs_after_creation(self):
self.env.create_node(api=True)
node_db = self.env.nodes[0]
disks = self.get(node_db.id)
self.assertGreater(len(disks), 0)
for disk in disks:
self.assertTrue(type(disk['size']) == int)
self.assertGreaterEqual(disk['size'], 0)
self.assertEqual(len(disk['volumes']), 0)
def test_volumes_regeneration_after_roles_update(self):
self.env.create(
nodes_kwargs=[
{"roles": [], "pending_roles": ['compute']}
]
)
node_db = self.env.nodes[0]
original_roles_response = self.get(node_db.id)
def update_node_roles(roles):
resp = self.app.put(
reverse('NodeCollectionHandler'),
jsonutils.dumps([{'id': node_db.id, 'pending_roles': roles}]),
headers=self.default_headers)
self.assertEqual(200, resp.status_code)
# adding role
update_node_roles(['compute', 'cinder'])
modified_roles_response = self.get(node_db.id)
self.assertNotEqual(self.get_vgs(original_roles_response),
self.get_vgs(modified_roles_response))
original_roles_response = modified_roles_response
# replacing role
update_node_roles(['compute', 'ceph-osd'])
modified_roles_response = self.get(node_db.id)
self.assertNotEqual(self.get_vgs(original_roles_response),
self.get_vgs(modified_roles_response))
original_roles_response = modified_roles_response
# removing role
update_node_roles(['compute'])
modified_roles_response = self.get(node_db.id)
self.assertNotEqual(self.get_vgs(original_roles_response),
self.get_vgs(modified_roles_response))
original_roles_response = modified_roles_response
# replacing role to itself
update_node_roles(['controller'])
update_node_roles(['compute'])
modified_roles_response = self.get(node_db.id)
self.assertEqual(self.get_vgs(original_roles_response),
self.get_vgs(modified_roles_response))
def test_volumes_update_after_roles_assignment(self):
self.env.create(
nodes_kwargs=[
{"cluster_id": None}
]
)
node_db = self.env.nodes[0]
original_roles_response = self.get(node_db.id)
# adding role
assignment_data = [
{
"id": node_db.id,
"roles": ['compute', 'cinder']
}
]
self.app.post(
reverse(
'NodeAssignmentHandler',
kwargs={'cluster_id': self.env.clusters[0].id}
),
jsonutils.dumps(assignment_data),
headers=self.default_headers
)
modified_roles_response = self.get(node_db.id)
self.assertNotEqual(self.get_vgs(original_roles_response),
self.get_vgs(modified_roles_response))
def test_disks_volumes_size_update(self):
node_db = self.create_node()
disks = self.get(node_db.id)
for disk in disks:
if disk['size'] > 0:
for volume in disk['volumes']:
volume['size'] = 4200
expect_disks = deepcopy(disks)
response = self.put(node_db.id, disks)
self.assertEqual(response, expect_disks)
response = self.get(node_db.id)
self.assertEqual(response, expect_disks)
def test_os_vg_one_disk_ubuntu(self):
self.env.create(
release_kwargs={
"operating_system": "Ubuntu"
},
nodes_kwargs=[
{"roles": [], "pending_roles": ['controller']}
]
)
node_db = self.env.nodes[0]
disks = self.get(node_db.id)
for disk in disks:
for vol in disk["volumes"]:
if disk["size"] > 100:
vol["size"] = 100 if vol["name"] == "os" else 0
resp = self.put(node_db.id, disks, expect_errors=True)
self.assertEqual(
resp.json_body["message"],
"Base system should be allocated on one disk only"
)
def test_recalculates_vg_sizes_when_disks_volumes_size_update(self):
node_db = self.create_node()
disks = self.get(node_db.id)
vgs_before_update = filter(
lambda volume: volume.get('type') == 'vg',
VolumeManagerExtension.get_volumes(node_db))
new_volume_size = 4200
updated_disks_count = 0
for disk in disks:
if disk['size'] > 0:
for volume in disk['volumes']:
volume['size'] = new_volume_size
updated_disks_count += 1
self.put(node_db.id, disks)
vgs_after_update = filter(
lambda volume: volume.get('type') == 'vg',
VolumeManagerExtension.get_volumes(node_db))
for vg_before, vg_after in zip(vgs_before_update, vgs_after_update):
size_volumes_before = sum([
volume.get('size', 0) for volume in vg_before['volumes']])
size_volumes_after = sum([
volume.get('size', 0) for volume in vg_after['volumes']])
self.assertNotEqual(size_volumes_before, size_volumes_after)
volume_group_size = new_volume_size * updated_disks_count
self.assertEqual(size_volumes_after, volume_group_size)
def test_update_ceph_partition(self):
node = self.create_node(roles=['ceph-osd'])
disks = self.get(node.id)
new_volume_size = 4321
for disk in disks:
if disk['size'] > 0:
for volume in disk['volumes']:
volume['size'] = new_volume_size
self.put(node.id, disks)
partitions_after_update = filter(
lambda volume: volume.get('type') == 'partition',
VolumeManagerExtension.get_volumes(node))
for partition_after in partitions_after_update:
self.assertEqual(partition_after['size'], new_volume_size)
def test_validator_at_least_one_disk_exists(self):
node = self.create_node()
response = self.put(node.id, [], True)
self.assertEqual(response.status_code, 400)
self.assertRegexpMatches(response.json_body["message"],
'^Node seems not to have disks')
def test_validator_not_enough_size_for_volumes(self):
node = self.create_node()
disks = self.get(node.id)
for disk in disks:
if disk['size'] > 0:
for volume in disk['volumes']:
volume['size'] = disk['size'] + 1
response = self.put(node.id, disks, True)
self.assertEqual(response.status_code, 400)
self.assertRegexpMatches(response.json_body["message"],
'^Not enough free space on disk: .+')
def test_validator_invalid_data(self):
node = self.create_node()
disks = self.get(node.id)
for disk in disks:
for volume in disk['volumes']:
del volume['size']
response = self.put(node.id, disks, True)
self.assertEqual(response.status_code, 400)
self.assertRegexpMatches(response.json_body["message"],
"'size' is a required property")
class TestNodeDefaultsDisksHandler(BaseIntegrationTest):
def get(self, node_id):
resp = self.app.get(
reverse('NodeDefaultsDisksHandler', kwargs={'node_id': node_id}),
headers=self.default_headers)
self.assertEqual(200, resp.status_code)
return resp.json_body
def test_node_disk_amount_regenerates_volumes_info_if_new_disk_added(self):
cluster = self.env.create_cluster(api=True)
self.env.create_node(
api=True,
roles=['compute'], # vgs: os, vm
cluster_id=cluster['id'])
node_db = self.env.nodes[0]
response = self.get(node_db.id)
self.assertEqual(len(response), 6)
new_meta = node_db.meta.copy()
new_meta['disks'].append({
'size': 1000022933376,
'model': 'SAMSUNG B00B135',
'name': 'sda',
'disk': 'disk/id/b00b135'})
self.app.put(
reverse('NodeAgentHandler'),
jsonutils.dumps({
"mac": node_db.mac,
"meta": new_meta}),
headers=self.default_headers)
self.env.refresh_nodes()
response = self.get(node_db.id)
self.assertEqual(len(response), 7)
# check all groups on all disks
vgs = ['os', 'vm']
for disk in response:
self.assertEqual(len(disk['volumes']), len(vgs))
def test_get_default_attrs(self):
self.env.create_node(api=False)
node_db = self.env.nodes[0]
volumes_from_api = self.get(node_db.id)
default_volumes = node_db.volume_manager.gen_volumes_info()
disks = only_disks(default_volumes)
self.assertEqual(len(disks), len(volumes_from_api))
class TestNodeVolumesInformationHandler(BaseIntegrationTest):
def get(self, node_id):
resp = self.app.get(
reverse('NodeVolumesInformationHandler',
kwargs={'node_id': node_id}),
headers=self.default_headers)
self.assertEqual(200, resp.status_code)
return resp.json_body
def create_node(self, role):
self.env.create(
nodes_kwargs=[{'roles': [role], 'pending_addition': True}])
return self.env.nodes[0]
def check_volumes(self, volumes, volumes_ids):
self.assertEqual(len(volumes), len(volumes_ids))
for volume_id in volumes_ids:
# Volume has name
volume = filter(
lambda volume: volume['name'] == volume_id, volumes)[0]
# min_size
self.assertTrue(type(volume['min_size']) == int)
self.assertGreaterEqual(volume['min_size'], 0)
# and label
self.assertTrue(type(volume['label']) in (str, unicode))
            self.assertGreater(len(volume['label']), 0)
def test_volumes_information_for_cinder_role(self):
node_db = self.create_node('cinder')
response = self.get(node_db.id)
self.check_volumes(response, ['os', 'cinder'])
def test_volumes_information_for_compute_role(self):
node_db = self.create_node('compute')
response = self.get(node_db.id)
self.check_volumes(response, ['os', 'vm'])
def test_volumes_information_for_controller_role(self):
node_db = self.create_node('controller')
response = self.get(node_db.id)
self.check_volumes(response, ['os', 'image'])
def test_volumes_information_for_ceph_role(self):
node_db = self.create_node('ceph-osd')
response = self.get(node_db.id)
self.check_volumes(response, ['os', 'ceph', 'cephjournal'])
class TestVolumeManager(BaseIntegrationTest):
def create_node(self, *roles):
self.env.create(
nodes_kwargs=[{
'roles': [],
'pending_roles': roles,
'pending_addition': True,
'api': True}])
return self.env.nodes[-1]
def non_zero_size(self, size):
self.assertTrue(type(size) == int)
self.assertGreater(size, 0)
def os_size(self, disks, with_lvm_meta=True):
os_sum_size = 0
for disk in only_disks(disks):
os_volume = filter(
lambda volume: volume.get('vg') == 'os', disk['volumes'])[0]
os_sum_size += os_volume['size']
if not with_lvm_meta:
os_sum_size -= os_volume['lvm_meta_size']
self.non_zero_size(os_sum_size)
return os_sum_size
def glance_size(self, disks):
glance_sum_size = 0
for disk in only_disks(disks):
glance_volume = filter(
lambda volume: volume.get('vg') == 'image', disk['volumes']
)[0]
glance_sum_size += glance_volume['size']
self.non_zero_size(glance_sum_size)
return glance_sum_size
def reserved_size(self, spaces):
reserved_size = 0
for disk in only_disks(spaces):
reserved_size += DisksFormatConvertor.\
calculate_service_partitions_size(disk['volumes'])
return reserved_size
def should_contain_os_with_minimal_size(self, volume_manager):
self.assertEqual(
self.os_size(volume_manager.volumes, with_lvm_meta=False),
volume_manager.call_generator('calc_min_os_size'))
def all_free_space_except_os_for_volume(self, spaces, volume_name):
os_size = self.os_size(spaces)
reserved_size = self.reserved_size(spaces)
disk_sum_size = sum([disk['size'] for disk in only_disks(spaces)])
vg_size = 0
sum_lvm_meta = 0
for disk in only_disks(spaces):
for volume in disk['volumes']:
if volume.get('vg') == volume_name or \
volume.get('name') == volume_name:
vg_size += volume['size']
vg_size -= volume.get('lvm_meta_size', 0)
sum_lvm_meta += volume.get('lvm_meta_size', 0)
self.assertEqual(
vg_size, disk_sum_size - os_size - reserved_size - sum_lvm_meta)
def all_free_space_except_os_disks_for_volume(self, volume_manager,
volume_name):
spaces = volume_manager.volumes
reserved_size = self.reserved_size(spaces)
disk_sum_size = sum([disk['size'] for disk in only_disks(spaces)])
boot_data_size = volume_manager.call_generator('calc_boot_size') + \
volume_manager.call_generator('calc_boot_records_size')
vg_size = 0
sum_lvm_meta = 0
for disk in only_disks(spaces):
for volume in disk['volumes']:
# Exclude disks with OS vg as Ceph won't be there
if volume.get('vg') == 'os' and volume.get('size', 0) > 0:
disk_sum_size -= (disk['size'] - boot_data_size)
if volume.get('vg') == volume_name or \
volume.get('name') == volume_name:
vg_size += volume['size']
vg_size -= volume.get('lvm_meta_size', 0)
sum_lvm_meta += volume.get('lvm_meta_size', 0)
self.assertEqual(
vg_size, disk_sum_size - reserved_size - sum_lvm_meta)
def logical_volume_sizes_should_equal_all_phisical_volumes(self, spaces):
vg_sizes = {}
for vg in only_vg(spaces):
for volume in vg['volumes']:
vg_name = vg['id']
if not vg_sizes.get(vg_name):
vg_sizes[vg_name] = 0
vg_sizes[vg_name] += volume['size']
pv_sizes = {}
for disk in only_disks(spaces):
for volume in disk['volumes']:
# Skip cinder because it does not have
# logical volumes
if volume.get('vg') == 'cinder':
continue
if volume['type'] == 'pv':
vg_name = volume['vg']
if not pv_sizes.get(vg_name):
pv_sizes[vg_name] = 0
pv_sizes[vg_name] += volume['size']
pv_sizes[vg_name] -= volume['lvm_meta_size']
self.assertEqual(vg_sizes, pv_sizes)
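    # The check above, in other words: for every volume group, the sum of its
    # logical volume sizes must equal the sum of its physical volume sizes minus
    # the LVM metadata reserved on each PV (cinder is skipped as it has no LVs).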
def check_disk_size_equal_sum_of_all_volumes(self, spaces):
for disk in only_disks(spaces):
volumes_size = sum(
[volume.get('size', 0) for volume in disk['volumes']])
self.assertEqual(volumes_size, disk['size'])
def test_volume_request_without_cluster(self):
self.env.create_node(api=True)
node = self.env.nodes[-1]
resp = self.app.get(
reverse('NodeVolumesInformationHandler',
kwargs={'node_id': node.id}),
headers=self.default_headers,
expect_errors=True
)
self.assertEqual(404, resp.status_code)
def test_allocates_all_free_space_for_os_for_controller_role(self):
node = self.create_node('controller')
disks = only_disks(node.volume_manager.volumes)
disks_size_sum = sum([disk['size'] for disk in disks])
os_sum_size = self.os_size(disks)
glance_sum_size = self.glance_size(disks)
reserved_size = self.reserved_size(disks)
self.assertEqual(disks_size_sum - reserved_size,
os_sum_size + glance_sum_size)
self.logical_volume_sizes_should_equal_all_phisical_volumes(
VolumeManagerExtension.get_volumes(node))
self.check_disk_size_equal_sum_of_all_volumes(
VolumeManagerExtension.get_volumes(node))
def test_allocates_all_free_space_for_vm_for_compute_role(self):
node = self.create_node('compute')
self.should_contain_os_with_minimal_size(node.volume_manager)
self.all_free_space_except_os_for_volume(
node.volume_manager.volumes, 'vm')
self.logical_volume_sizes_should_equal_all_phisical_volumes(
VolumeManagerExtension.get_volumes(node))
self.check_disk_size_equal_sum_of_all_volumes(
VolumeManagerExtension.get_volumes(node))
def test_allocates_all_free_space_for_vm_for_cinder_role(self):
node = self.create_node('cinder')
self.should_contain_os_with_minimal_size(node.volume_manager)
self.all_free_space_except_os_for_volume(
node.volume_manager.volumes, 'cinder')
self.check_disk_size_equal_sum_of_all_volumes(
VolumeManagerExtension.get_volumes(node))
def test_allocates_space_single_disk_for_ceph_for_ceph_role(self):
node = self.create_node('ceph-osd')
self.update_node_with_single_disk(node, 30000)
self.should_contain_os_with_minimal_size(node.volume_manager)
self.all_free_space_except_os_for_volume(
node.volume_manager.volumes, 'ceph')
self.check_disk_size_equal_sum_of_all_volumes(
VolumeManagerExtension.get_volumes(node))
def test_allocates_full_disks_for_ceph_for_ceph_role(self):
node = self.create_node('ceph-osd')
self.should_contain_os_with_minimal_size(node.volume_manager)
self.all_free_space_except_os_disks_for_volume(
node.volume_manager, 'ceph')
def should_allocates_same_size(self, volumes, same_size_volume_names):
disks = only_disks(volumes)
actual_volumes_size = {}
for disk in disks:
for volume in disk['volumes']:
name = volume.get('vg') or volume.get('name')
if not name:
continue
actual_volumes_size.setdefault(name, {})
actual_volumes_size[name].setdefault('size', 0)
actual_volumes_size[name].setdefault(
'type', volume.get('type'))
actual_volumes_size[name]['size'] += volume.get('size')
actual_volumes = [v for k, v in actual_volumes_size.iteritems()
if k in same_size_volume_names]
# All pv should have equal size
actual_pv_volumes = filter(
lambda volume: volume['type'] == 'pv', actual_volumes)
sum_pv_size = sum([volume['size'] for volume in actual_pv_volumes])
average_size = sum_pv_size / len(actual_pv_volumes)
for pv in actual_pv_volumes:
# In cases where all volumes are created on one disk and
# that disk has an odd-numbered size the volume sizes will
# differ by 1.
self.assertAlmostEqual(pv['size'], average_size, delta=1)
def test_multirole_controller_ceph(self):
node = self.create_node('controller', 'ceph-osd')
self.should_contain_os_with_minimal_size(node.volume_manager)
self.should_allocates_same_size(
node.volume_manager.volumes, ['image', 'ceph'])
self.logical_volume_sizes_should_equal_all_phisical_volumes(
VolumeManagerExtension.get_volumes(node))
self.check_disk_size_equal_sum_of_all_volumes(
VolumeManagerExtension.get_volumes(node))
def test_multirole_controller_cinder_ceph(self):
node = self.create_node('controller', 'cinder', 'ceph-osd')
self.should_contain_os_with_minimal_size(node.volume_manager)
self.should_allocates_same_size(
node.volume_manager.volumes, ['image', 'cinder', 'ceph'])
self.logical_volume_sizes_should_equal_all_phisical_volumes(
VolumeManagerExtension.get_volumes(node))
self.check_disk_size_equal_sum_of_all_volumes(
VolumeManagerExtension.get_volumes(node))
def create_node_and_calculate_min_size(
self, role, space_info, volumes_metadata):
node = self.create_node(role)
volume_manager = node.volume_manager
min_installation_size = self.__calc_minimal_installation_size(
volume_manager
)
return node, min_installation_size
def update_node_with_single_disk(self, node, size):
new_meta = node.meta.copy()
new_meta['disks'] = [{
# convert mbytes to bytes
'size': size * (1024 ** 2),
'model': 'SAMSUNG B00B135',
'name': 'sda',
'disk': 'disk/id/a00b135'}]
self.app.put(
reverse('NodeAgentHandler'),
jsonutils.dumps({
'mac': node.mac,
'meta': new_meta}),
headers=self.default_headers)
def add_disk_to_node(self, node, size):
new_meta = node.meta.copy()
last_disk = [d['name'][-1] for d in new_meta['disks']][-1]
new_disk = string.letters.index(last_disk) + 1
new_meta['disks'].append({
# convert mbytes to bytes
'size': size * (1024 ** 2),
'model': 'SAMSUNG B00B135',
'name': 'sd%s' % string.letters[new_disk],
'disk': 'disk/id/%s00b135' % string.letters[new_disk]})
self.app.put(
reverse('NodeAgentHandler'),
jsonutils.dumps({
'mac': node.mac,
'meta': new_meta}),
headers=self.default_headers)
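    # (In add_disk_to_node above: if the node's last disk is 'sda', the appended
    # disk becomes 'sdb' with a matching 'disk/id/b00b135'-style identifier.)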
def test_check_disk_space_for_deployment(self):
min_size = 100000
volumes_metadata = self.env.get_default_volumes_metadata()
volumes_roles_mapping = volumes_metadata['volumes_roles_mapping']
for role, space_info in volumes_roles_mapping.iteritems():
node, min_installation_size = self.\
create_node_and_calculate_min_size(
role, space_info, volumes_metadata)
self.update_node_with_single_disk(node, min_size)
vm = node.volume_manager
with patch.object(vm,
'_VolumeManager'
'__calc_minimal_installation_size',
return_value=min_size):
vm.check_disk_space_for_deployment()
self.update_node_with_single_disk(node, min_size - 1)
vm = node.volume_manager
with patch.object(vm,
'_VolumeManager'
'__calc_minimal_installation_size',
return_value=min_size):
self.assertRaises(
errors.NotEnoughFreeSpace,
vm.check_disk_space_for_deployment
)
def test_calc_minimal_installation_size(self):
volumes_metadata = self.env.get_default_volumes_metadata()
volumes_roles_mapping = volumes_metadata['volumes_roles_mapping']
for role, space_info in volumes_roles_mapping.iteritems():
node = self.create_node(role)
vm = node.volume_manager
self.assertEqual(
vm._VolumeManager__calc_minimal_installation_size(),
self.__calc_minimal_installation_size(vm)
)
def __calc_minimal_installation_size(self, volume_manager):
disks_count = len(filter(lambda disk: disk.size > 0,
volume_manager.disks))
boot_size = volume_manager.call_generator('calc_boot_size') + \
volume_manager.call_generator('calc_boot_records_size')
min_installation_size = disks_count * boot_size
for volume in volume_manager.allowed_volumes:
min_size = volume_manager.expand_generators(volume)['min_size']
min_installation_size += min_size
return min_installation_size
def test_check_volume_size_for_deployment(self):
node = self.create_node('controller', 'ceph-osd')
# First disk contains more than minimum size of all VGs
self.update_node_with_single_disk(node, 116384)
# Second is taken entirely by ceph
self.add_disk_to_node(node, 65536)
node.volume_manager.check_volume_sizes_for_deployment()
# First disk contains less than minimum size of all VGs
self.update_node_with_single_disk(node, 16384)
# Second is taken entirely by ceph
self.add_disk_to_node(node, 65536)
self.assertRaises(
errors.NotEnoughFreeSpace,
node.volume_manager.check_volume_sizes_for_deployment)
def update_ram_and_assert_swap_size(self, node, size, swap_size):
new_meta = deepcopy(node.meta)
new_meta['memory']['total'] = (1024 ** 2) * size
node.meta = new_meta
self.env.db.commit()
self.assertEqual(node.volume_manager._calc_swap_size(), swap_size)
    def test_swap_size_calculation(self):
node = self.create_node('controller')
self.update_ram_and_assert_swap_size(node, 2, 4)
self.update_ram_and_assert_swap_size(node, 2048, 4096)
self.update_ram_and_assert_swap_size(node, 2049, 2049)
self.update_ram_and_assert_swap_size(node, 8192, 8192)
self.update_ram_and_assert_swap_size(node, 8193, 4096)
self.update_ram_and_assert_swap_size(node, 65536, 32768)
self.update_ram_and_assert_swap_size(node, 65537, 4096)
self.update_ram_and_assert_swap_size(node, 81920, 4096)
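    # The expected values asserted above encode the swap sizing policy (sizes in
    # MiB, as converted by update_ram_and_assert_swap_size):
    #   RAM <= 2048            -> swap = 2 x RAM
    #   2048 < RAM <= 8192     -> swap = RAM
    #   8192 < RAM <= 65536    -> swap = RAM / 2 (rounded down)
    #   RAM > 65536            -> swap = 4096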
class TestDisks(BaseIntegrationTest):
def get_boot(self, volumes):
return filter(
lambda volume: volume.get('mount') == '/boot',
volumes)[0]
def create_disk(self, boot_is_raid=False, possible_pvs_count=0):
return Disk(
[], lambda name: 100, 'sda', 'sda', 10000,
boot_is_raid=boot_is_raid, possible_pvs_count=possible_pvs_count)
def test_create_mbr_as_raid_if_disks_count_greater_than_zero(self):
disk = self.create_disk(boot_is_raid=True)
boot_partition = self.get_boot(disk.volumes)
self.assertEqual(boot_partition['type'], 'raid')
def test_create_mbr_as_partition_if_disks_count_less_than_zero(self):
disk = self.create_disk()
boot_partition = self.get_boot(disk.volumes)
self.assertEqual(boot_partition['type'], 'partition')
def test_remove_pv(self):
disk = self.create_disk(possible_pvs_count=1)
disk_without_pv = deepcopy(disk)
disk.create_pv({'id': 'pv_name'}, 100)
disk.remove_pv('pv_name')
self.assertEqual(disk_without_pv.render(), disk.render())
def test_boot_partition_has_file_system(self):
disk = self.create_disk(possible_pvs_count=1)
boot_record = filter(
lambda volume: volume.get('mount') == '/boot', disk.volumes)[0]
self.assertEqual(boot_record['file_system'], 'ext2')
class TestFixtures(BaseIntegrationTest):
@property
def get_vgs_for_releases(self):
openstack = self.env.read_fixtures(
('openstack',))[0]['fields']['volumes_metadata']['volumes']
return [only_vg(openstack)]
def test_each_logical_volume_has_file_system(self):
for release_vgs in self.get_vgs_for_releases:
for vg in release_vgs:
for volume in vg['volumes']:
self.assertIn(
volume['file_system'],
('ext2', 'ext4', 'swap', 'xfs', None))
|
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
WSGI middleware for OpenStack API controllers.
"""
from oslo_config import cfg
from oslo_log import log as logging
import routes
import six
import stevedore
import webob.dec
import webob.exc
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import exception
from nova.i18n import _
from nova.i18n import _LC
from nova.i18n import _LE
from nova.i18n import _LI
from nova.i18n import _LW
from nova.i18n import translate
from nova import notifications
from nova import utils
from nova import wsgi as base_wsgi
api_opts = [
cfg.ListOpt('extensions_blacklist',
default=[],
help='DEPRECATED: A list of v2.1 API extensions to never '
'load. Specify the extension aliases here. '
'This option will be removed in the near future. '
'After that point you have to run all of the API.',
deprecated_for_removal=True, deprecated_group='osapi_v21'),
cfg.ListOpt('extensions_whitelist',
default=[],
help='DEPRECATED: If the list is not empty then a v2.1 '
'API extension will only be loaded if it exists in this '
'list. Specify the extension aliases here. '
'This option will be removed in the near future. '
'After that point you have to run all of the API.',
deprecated_for_removal=True, deprecated_group='osapi_v21'),
cfg.StrOpt('project_id_regex',
help='DEPRECATED: The validation regex for project_ids '
'used in urls. This defaults to [0-9a-f\-]+ if not set, '
'which matches normal uuids created by keystone.',
deprecated_for_removal=True, deprecated_group='osapi_v21')
]
api_opts_group = cfg.OptGroup(name='osapi_v21', title='API v2.1 Options')
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
CONF.register_group(api_opts_group)
CONF.register_opts(api_opts, api_opts_group)
# List of v21 API extensions which are considered to form
# the core API and so must be present
# TODO(cyeoh): Expand this list as the core APIs are ported to v21
API_V21_CORE_EXTENSIONS = set(['os-consoles',
'extensions',
'os-flavor-extra-specs',
'os-flavor-manage',
'flavors',
'ips',
'os-keypairs',
'os-flavor-access',
'server-metadata',
'servers',
'versions'])
class FaultWrapper(base_wsgi.Middleware):
"""Calls down the middleware stack, making exceptions into faults."""
_status_to_type = {}
@staticmethod
def status_to_type(status):
if not FaultWrapper._status_to_type:
for clazz in utils.walk_class_hierarchy(webob.exc.HTTPError):
FaultWrapper._status_to_type[clazz.code] = clazz
return FaultWrapper._status_to_type.get(
status, webob.exc.HTTPInternalServerError)()
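    # e.g. FaultWrapper.status_to_type(404) would yield webob.exc.HTTPNotFound();
    # unrecognised status codes fall back to HTTPInternalServerError.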
def _error(self, inner, req):
LOG.exception(_LE("Caught error: %s"), six.text_type(inner))
safe = getattr(inner, 'safe', False)
headers = getattr(inner, 'headers', None)
status = getattr(inner, 'code', 500)
if status is None:
status = 500
msg_dict = dict(url=req.url, status=status)
LOG.info(_LI("%(url)s returned with HTTP %(status)d"), msg_dict)
outer = self.status_to_type(status)
if headers:
outer.headers = headers
# NOTE(johannes): We leave the explanation empty here on
# purpose. It could possibly have sensitive information
# that should not be returned back to the user. See
# bugs 868360 and 874472
# NOTE(eglynn): However, it would be over-conservative and
# inconsistent with the EC2 API to hide every exception,
# including those that are safe to expose, see bug 1021373
if safe:
user_locale = req.best_match_language()
inner_msg = translate(inner.message, user_locale)
outer.explanation = '%s: %s' % (inner.__class__.__name__,
inner_msg)
notifications.send_api_fault(req.url, status, inner)
return wsgi.Fault(outer)
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
try:
return req.get_response(self.application)
except Exception as ex:
return self._error(ex, req)
class LegacyV2CompatibleWrapper(base_wsgi.Middleware):
def _filter_request_headers(self, req):
"""For keeping same behavior with v2 API, ignores microversions
HTTP header X-OpenStack-Nova-API-Version in the request.
"""
if wsgi.API_VERSION_REQUEST_HEADER in req.headers:
del req.headers[wsgi.API_VERSION_REQUEST_HEADER]
return req
def _filter_response_headers(self, response):
"""For keeping same behavior with v2 API, filter out microversions
HTTP header and microversions field in header 'Vary'.
"""
if wsgi.API_VERSION_REQUEST_HEADER in response.headers:
del response.headers[wsgi.API_VERSION_REQUEST_HEADER]
if 'Vary' in response.headers:
vary_headers = response.headers['Vary'].split(',')
filtered_vary = []
for vary in vary_headers:
vary = vary.strip()
if vary == wsgi.API_VERSION_REQUEST_HEADER:
continue
filtered_vary.append(vary)
if filtered_vary:
response.headers['Vary'] = ','.join(filtered_vary)
else:
del response.headers['Vary']
return response
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
req.set_legacy_v2()
req = self._filter_request_headers(req)
response = req.get_response(self.application)
return self._filter_response_headers(response)
class APIMapper(routes.Mapper):
def routematch(self, url=None, environ=None):
if url == "":
result = self._match("", environ)
return result[0], result[1]
return routes.Mapper.routematch(self, url, environ)
def connect(self, *args, **kargs):
# NOTE(vish): Default the format part of a route to only accept json
# and xml so it doesn't eat all characters after a '.'
# in the url.
kargs.setdefault('requirements', {})
if not kargs['requirements'].get('format'):
kargs['requirements']['format'] = 'json|xml'
return routes.Mapper.connect(self, *args, **kargs)
class ProjectMapper(APIMapper):
def resource(self, member_name, collection_name, **kwargs):
        # NOTE(sdague): the project_id parameter is only valid if it is hex
        # or hex + dashes (note, integers are a subset of this). This is
        # required to handle our overlapping routes issues.
project_id_regex = '[0-9a-f\-]+'
if CONF.osapi_v21.project_id_regex:
project_id_regex = CONF.osapi_v21.project_id_regex
project_id_token = '{project_id:%s}' % project_id_regex
if 'parent_resource' not in kwargs:
kwargs['path_prefix'] = '%s/' % project_id_token
else:
parent_resource = kwargs['parent_resource']
p_collection = parent_resource['collection_name']
p_member = parent_resource['member_name']
kwargs['path_prefix'] = '%s/%s/:%s_id' % (
project_id_token,
p_collection,
p_member)
routes.Mapper.resource(
self,
member_name,
collection_name,
**kwargs)
# while we are in transition mode, create additional routes
# for the resource that do not include project_id.
if 'parent_resource' not in kwargs:
del kwargs['path_prefix']
else:
parent_resource = kwargs['parent_resource']
p_collection = parent_resource['collection_name']
p_member = parent_resource['member_name']
kwargs['path_prefix'] = '%s/:%s_id' % (p_collection,
p_member)
routes.Mapper.resource(self, member_name,
collection_name,
**kwargs)
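    # Rough illustration of the effect of resource() above (hypothetical names):
    # mapper.resource("server", "servers") on a ProjectMapper registers both the
    # project-scoped /{project_id}/servers/... routes and the transitional
    # /servers/... routes without a project id.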
class PlainMapper(APIMapper):
def resource(self, member_name, collection_name, **kwargs):
if 'parent_resource' in kwargs:
parent_resource = kwargs['parent_resource']
p_collection = parent_resource['collection_name']
p_member = parent_resource['member_name']
kwargs['path_prefix'] = '%s/:%s_id' % (p_collection, p_member)
routes.Mapper.resource(self, member_name,
collection_name,
**kwargs)
class APIRouter(base_wsgi.Router):
"""Routes requests on the OpenStack API to the appropriate controller
and method.
"""
ExtensionManager = None # override in subclasses
@classmethod
def factory(cls, global_config, **local_config):
"""Simple paste factory, :class:`nova.wsgi.Router` doesn't have one."""
return cls()
def __init__(self, ext_mgr=None, init_only=None):
if ext_mgr is None:
if self.ExtensionManager:
ext_mgr = self.ExtensionManager()
else:
raise Exception(_("Must specify an ExtensionManager class"))
mapper = ProjectMapper()
self.resources = {}
self._setup_routes(mapper, ext_mgr, init_only)
self._setup_ext_routes(mapper, ext_mgr, init_only)
self._setup_extensions(ext_mgr)
super(APIRouter, self).__init__(mapper)
def _setup_ext_routes(self, mapper, ext_mgr, init_only):
for resource in ext_mgr.get_resources():
LOG.debug('Extending resource: %s',
resource.collection)
if init_only is not None and resource.collection not in init_only:
continue
inherits = None
if resource.inherits:
inherits = self.resources.get(resource.inherits)
if not resource.controller:
resource.controller = inherits.controller
wsgi_resource = wsgi.Resource(resource.controller,
inherits=inherits)
self.resources[resource.collection] = wsgi_resource
kargs = dict(
controller=wsgi_resource,
collection=resource.collection_actions,
member=resource.member_actions)
if resource.parent:
kargs['parent_resource'] = resource.parent
mapper.resource(resource.collection, resource.collection, **kargs)
if resource.custom_routes_fn:
resource.custom_routes_fn(mapper, wsgi_resource)
def _setup_extensions(self, ext_mgr):
for extension in ext_mgr.get_controller_extensions():
collection = extension.collection
controller = extension.controller
msg_format_dict = {'collection': collection,
'ext_name': extension.extension.name}
if collection not in self.resources:
LOG.warning(_LW('Extension %(ext_name)s: Cannot extend '
'resource %(collection)s: No such resource'),
msg_format_dict)
continue
LOG.debug('Extension %(ext_name)s extended resource: '
'%(collection)s',
msg_format_dict)
resource = self.resources[collection]
resource.register_actions(controller)
resource.register_extensions(controller)
def _setup_routes(self, mapper, ext_mgr, init_only):
raise NotImplementedError()
class APIRouterV21(base_wsgi.Router):
"""Routes requests on the OpenStack v2.1 API to the appropriate controller
and method.
"""
@classmethod
def factory(cls, global_config, **local_config):
"""Simple paste factory, :class:`nova.wsgi.Router` doesn't have one."""
return cls()
@staticmethod
def api_extension_namespace():
return 'nova.api.v21.extensions'
def __init__(self, init_only=None):
# TODO(cyeoh): bp v3-api-extension-framework. Currently load
# all extensions but eventually should be able to exclude
# based on a config file
def _check_load_extension(ext):
if (self.init_only is None or ext.obj.alias in
self.init_only) and isinstance(ext.obj,
extensions.V21APIExtensionBase):
# Check whitelist is either empty or if not then the extension
# is in the whitelist
if (not CONF.osapi_v21.extensions_whitelist or
ext.obj.alias in CONF.osapi_v21.extensions_whitelist):
# Check the extension is not in the blacklist
blacklist = CONF.osapi_v21.extensions_blacklist
if ext.obj.alias not in blacklist:
return self._register_extension(ext)
return False
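        # In short, an extension is registered only when all of the checks above
        # hold: it is a V21APIExtensionBase, init_only is None or contains its
        # alias, the whitelist is empty or lists its alias, and its alias is not
        # blacklisted.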
if (CONF.osapi_v21.extensions_blacklist or
CONF.osapi_v21.extensions_whitelist):
LOG.warning(
_LW('In the M release you must run all of the API. '
'The concept of API extensions will be removed from '
'the codebase to ensure there is a single Compute API.'))
self.init_only = init_only
LOG.debug("v21 API Extension Blacklist: %s",
CONF.osapi_v21.extensions_blacklist)
LOG.debug("v21 API Extension Whitelist: %s",
CONF.osapi_v21.extensions_whitelist)
in_blacklist_and_whitelist = set(
CONF.osapi_v21.extensions_whitelist).intersection(
CONF.osapi_v21.extensions_blacklist)
if len(in_blacklist_and_whitelist) != 0:
LOG.warning(_LW("Extensions in both blacklist and whitelist: %s"),
list(in_blacklist_and_whitelist))
self.api_extension_manager = stevedore.enabled.EnabledExtensionManager(
namespace=self.api_extension_namespace(),
check_func=_check_load_extension,
invoke_on_load=True,
invoke_kwds={"extension_info": self.loaded_extension_info})
mapper = ProjectMapper()
self.resources = {}
        # NOTE(cyeoh): Core API support is rewritten as extensions,
        # but conceptually we still have a core set of APIs.
if list(self.api_extension_manager):
# NOTE(cyeoh): Stevedore raises an exception if there are
# no plugins detected. I wonder if this is a bug.
self._register_resources_check_inherits(mapper)
self.api_extension_manager.map(self._register_controllers)
missing_core_extensions = self.get_missing_core_extensions(
self.loaded_extension_info.get_extensions().keys())
if not self.init_only and missing_core_extensions:
LOG.critical(_LC("Missing core API extensions: %s"),
missing_core_extensions)
raise exception.CoreAPIMissing(
missing_apis=missing_core_extensions)
LOG.info(_LI("Loaded extensions: %s"),
sorted(self.loaded_extension_info.get_extensions().keys()))
super(APIRouterV21, self).__init__(mapper)
def _register_resources_list(self, ext_list, mapper):
for ext in ext_list:
self._register_resources(ext, mapper)
def _register_resources_check_inherits(self, mapper):
ext_has_inherits = []
ext_no_inherits = []
for ext in self.api_extension_manager:
for resource in ext.obj.get_resources():
if resource.inherits:
ext_has_inherits.append(ext)
break
else:
ext_no_inherits.append(ext)
self._register_resources_list(ext_no_inherits, mapper)
self._register_resources_list(ext_has_inherits, mapper)
@staticmethod
def get_missing_core_extensions(extensions_loaded):
extensions_loaded = set(extensions_loaded)
missing_extensions = API_V21_CORE_EXTENSIONS - extensions_loaded
return list(missing_extensions)
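    # e.g. (hypothetical input) get_missing_core_extensions(['servers', 'flavors'])
    # would return the remaining API_V21_CORE_EXTENSIONS members, such as
    # 'os-keypairs', 'ips' and 'versions', in arbitrary set order.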
@property
def loaded_extension_info(self):
raise NotImplementedError()
def _register_extension(self, ext):
raise NotImplementedError()
def _register_resources(self, ext, mapper):
"""Register resources defined by the extensions
Extensions define what resources they want to add through a
get_resources function
"""
handler = ext.obj
LOG.debug("Running _register_resources on %s", ext.obj)
for resource in handler.get_resources():
LOG.debug('Extended resource: %s', resource.collection)
inherits = None
if resource.inherits:
inherits = self.resources.get(resource.inherits)
if not resource.controller:
resource.controller = inherits.controller
wsgi_resource = wsgi.ResourceV21(resource.controller,
inherits=inherits)
self.resources[resource.collection] = wsgi_resource
kargs = dict(
controller=wsgi_resource,
collection=resource.collection_actions,
member=resource.member_actions)
if resource.parent:
kargs['parent_resource'] = resource.parent
# non core-API plugins use the collection name as the
# member name, but the core-API plugins use the
# singular/plural convention for member/collection names
if resource.member_name:
member_name = resource.member_name
else:
member_name = resource.collection
mapper.resource(member_name, resource.collection,
**kargs)
if resource.custom_routes_fn:
resource.custom_routes_fn(mapper, wsgi_resource)
def _register_controllers(self, ext):
"""Register controllers defined by the extensions
Extensions define what resources they want to add through
a get_controller_extensions function
"""
handler = ext.obj
LOG.debug("Running _register_controllers on %s", ext.obj)
for extension in handler.get_controller_extensions():
ext_name = extension.extension.name
collection = extension.collection
controller = extension.controller
if collection not in self.resources:
LOG.warning(_LW('Extension %(ext_name)s: Cannot extend '
'resource %(collection)s: No such resource'),
{'ext_name': ext_name, 'collection': collection})
continue
LOG.debug('Extension %(ext_name)s extending resource: '
'%(collection)s',
{'ext_name': ext_name, 'collection': collection})
resource = self.resources[collection]
resource.register_actions(controller)
resource.register_extensions(controller)
|
|
import pandas as pd
import numpy as np
import pytest # noqa: F401
import calliope
from calliope import exceptions
from calliope.time import funcs, masks
from calliope.test.common.util import (
build_test_model,
check_error_or_warning,
python36_or_higher,
)
class TestClustering:
    @pytest.fixture(scope="module")
    def model_national(self):
return calliope.examples.national_scale(
override_dict={
"model.random_seed": 23,
"model.subset_time": ["2005-01-01", "2005-03-31"],
}
)
def test_kmeans_mean(self, model_national):
data = model_national._model_data
data_clustered = funcs.apply_clustering(
data,
timesteps=None,
clustering_func="kmeans",
how="mean",
normalize=True,
k=5,
)
assert len(data_clustered.clusters.to_pandas().unique()) == 5
def test_kmeans_closest(self, model_national):
data = model_national._model_data
data_clustered = funcs.apply_clustering(
data,
timesteps=None,
clustering_func="kmeans",
how="closest",
normalize=True,
k=5,
)
def test_hierarchical_mean(self, model_national):
data = model_national._model_data
data_clustered = funcs.apply_clustering(
data,
timesteps=None,
clustering_func="hierarchical",
how="mean",
normalize=True,
k=5,
)
assert len(data_clustered.clusters.to_pandas().unique()) == 5
def test_hierarchical_closest(self, model_national):
data = model_national._model_data
data_clustered = funcs.apply_clustering(
data,
timesteps=None,
clustering_func="hierarchical",
how="closest",
normalize=True,
k=5,
)
# FIXME
def test_hartigans_rule(self, model_national):
data = model_national._model_data
with pytest.warns(exceptions.ModelWarning) as excinfo:
funcs.apply_clustering(
data,
timesteps=None,
clustering_func="kmeans",
how="mean",
normalize=True,
)
assert check_error_or_warning(excinfo, "a good number of clusters is 5")
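    # Hartigan's rule, as commonly stated (a sketch, not a quote of the
    # implementation): keep increasing k while
    #     (W(k) / W(k + 1) - 1) * (n - k - 1) > 10,
    # where W(k) is the within-cluster sum of squares for k clusters and n is
    # the number of observations; the warning asserted above reports that k.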
def test_hierarchical_no_hartigans_rule(self, model_national):
data = model_national._model_data
with pytest.raises(exceptions.ModelError) as excinfo:
funcs.apply_clustering(
data,
timesteps=None,
clustering_func="hierarchical",
how="mean",
normalize=True,
)
assert check_error_or_warning(
excinfo, "Cannot undertake hierarchical clustering"
)
def test_15min_clustering(self):
# The data is identical for '2005-01-01' and '2005-01-03' timesteps,
# it is only different for '2005-01-02'
override = {
"techs.test_demand_elec.constraints.resource": "file=demand_elec_15mins.csv",
"model.subset_time": None,
}
model = build_test_model(override, scenario="simple_supply,one_day")
data = model._model_data
data_clustered_kmeans = funcs.apply_clustering(
data,
timesteps=None,
clustering_func="kmeans",
how="mean",
normalize=True,
k=2,
)
data_clustered_hierarchical = funcs.apply_clustering(
data,
timesteps=None,
clustering_func="hierarchical",
how="mean",
normalize=True,
k=2,
)
assert len(data_clustered_kmeans.clusters.to_pandas().unique()) == 2
assert len(data_clustered_hierarchical.clusters.to_pandas().unique()) == 2
days = np.unique(
data_clustered_kmeans.timesteps.to_index().strftime("%Y-%m-%d")
)
# not sure which of '2005-01-01' and '2005-01-03' it will choose to
# label the cluster of those two days
assert "2005-01-02" in days and ("2005-01-01" in days or "2005-01-03" in days)
assert np.array_equal(
data_clustered_kmeans.timestep_resolution.values,
[0.25 for i in range(24 * 4 * 2)],
)
def test_15min_to_2h_clustering(self):
# The data is identical for '2005-01-01' and '2005-01-03' timesteps,
# it is only different for '2005-01-02'
override = {
"techs.test_demand_elec.constraints.resource": "file=demand_elec_15T_to_2h.csv",
"model.subset_time": None,
}
model = build_test_model(override, scenario="simple_supply,one_day")
data = model._model_data
data_clustered_kmeans = funcs.apply_clustering(
data,
timesteps=None,
clustering_func="kmeans",
how="mean",
normalize=True,
k=2,
)
data_clustered_hierarchical = funcs.apply_clustering(
data,
timesteps=None,
clustering_func="hierarchical",
how="mean",
normalize=True,
k=2,
)
assert len(data_clustered_kmeans.clusters.to_pandas().unique()) == 2
assert len(data_clustered_hierarchical.clusters.to_pandas().unique()) == 2
days = np.unique(
data_clustered_kmeans.timesteps.to_index().strftime("%Y-%m-%d")
)
# not sure which of '2005-01-01' and '2005-01-03' it will choose to
# label the cluster of those two days
assert "2005-01-02" in days and ("2005-01-01" in days or "2005-01-03" in days)
assert np.array_equal(
data_clustered_kmeans.timestep_resolution.values,
[
0.25,
0.25,
0.25,
0.25,
0.5,
0.5,
0.5,
0.5,
1,
1,
1,
2,
2,
2,
2,
2,
1,
1,
2,
2,
2,
0.25,
0.25,
0.25,
0.25,
0.5,
0.5,
0.5,
0.5,
1,
1,
1,
2,
2,
2,
2,
2,
1,
1,
2,
2,
2,
],
)
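        # timestep_resolution is expressed in hours (0.25 = 15 min, 0.5 = 30 min,
        # 1 = 1 h, 2 = 2 h), so the mixed-resolution input is preserved within
        # each of the two clusters above.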
@python36_or_higher
def test_predefined_clusters(self):
override = {
"model.subset_time": ["2005-01-01", "2005-01-04"],
"model.time": {
"function": "apply_clustering",
"function_options": {
"clustering_func": "file=clusters.csv:a",
"how": "mean",
},
},
}
model = build_test_model(override, scenario="simple_supply")
assert np.array_equal(
model._model_data.clusters.to_pandas().unique(), [0, 1, 2]
)
override2 = {
**override,
**{
"model.time.function_options.clustering_func": "file=cluster_days.csv:b"
},
}
model = build_test_model(override2, scenario="simple_supply")
assert np.array_equal(
model._model_data.clusters.to_pandas().unique(), [0, 1, 2]
)
override3 = {
**override,
**{
"model.time.function_options.clustering_func": "file=cluster_days.csv:b",
"model.time.function_options.how": "closest",
},
}
model = build_test_model(override3, scenario="simple_supply")
assert np.array_equal(model._model_data.clusters.to_pandas().unique(), [0])
@python36_or_higher
def test_predefined_clusters_fail(self):
override = {
"model.subset_time": ["2005-01-01", "2005-01-04"],
"model.time": {
"function": "apply_clustering",
"function_options": {
"clustering_func": "file=clusters.csv:a",
"how": "mean",
},
},
}
# should fail - no CSV data column defined
override1 = {
**override,
**{"model.time.function_options.clustering_func": "file=clusters.csv"},
}
with pytest.raises(exceptions.ModelError) as error:
build_test_model(override1, scenario="simple_supply")
assert check_error_or_warning(error, "No time clustering column given")
# should fail - unknown CSV data column defined
override2 = {
**override,
**{"model.time.function_options.clustering_func": "file=clusters.csv:1"},
}
with pytest.raises(KeyError) as error:
build_test_model(override2, scenario="simple_supply")
assert check_error_or_warning(error, "time clustering column 1 not found")
# should fail - more than one cluster given to any one day
override3 = {
**override,
**{"model.time.function_options.clustering_func": "file=clusters.csv:b"},
}
with pytest.raises(exceptions.ModelError) as error:
build_test_model(override3, scenario="simple_supply")
assert check_error_or_warning(
error, "More than one cluster value assigned to a day in `clusters.csv:b`"
)
# should fail - not enough data in clusters.csv to cover subset_time
override4 = {
**override,
**{
"model.subset_time": ["2005-01-01", "2005-01-06"],
"model.time.function_options.clustering_func": "file=cluster_days.csv:b",
},
}
with pytest.raises(exceptions.ModelError) as error:
build_test_model(override4, scenario="simple_supply")
assert check_error_or_warning(
error,
"Missing data for the timeseries array(s) [('cluster_days.csv', 'a') ('cluster_days.csv', 'b')]",
)
class TestMasks:
    @pytest.fixture(scope="module")
    def model_national(self):
return calliope.examples.national_scale(
override_dict={"model.subset_time": ["2005-01-01", "2005-01-31"]}
)
    @pytest.fixture(scope="module")
    def model_urban(self):
return calliope.examples.urban_scale(
override_dict={"model.subset_time": ["2005-01-01", "2005-01-31"]}
)
def test_zero(self, model_national):
data = model_national._model_data_pre_clustering.copy()
mask = masks.zero(data, "csp", var="resource")
dtindex = pd.DatetimeIndex(
[
"2005-01-01 00:00:00",
"2005-01-01 01:00:00",
"2005-01-01 02:00:00",
"2005-01-01 03:00:00",
"2005-01-01 04:00:00",
"2005-01-01 05:00:00",
"2005-01-01 06:00:00",
"2005-01-01 16:00:00",
"2005-01-01 17:00:00",
"2005-01-01 18:00:00",
"2005-01-01 19:00:00",
"2005-01-01 20:00:00",
"2005-01-01 21:00:00",
"2005-01-01 22:00:00",
"2005-01-01 23:00:00",
"2005-01-02 00:00:00",
"2005-01-02 01:00:00",
"2005-01-02 02:00:00",
"2005-01-02 03:00:00",
"2005-01-02 04:00:00",
"2005-01-02 05:00:00",
"2005-01-02 06:00:00",
"2005-01-02 16:00:00",
"2005-01-02 17:00:00",
"2005-01-02 18:00:00",
"2005-01-02 19:00:00",
"2005-01-02 20:00:00",
"2005-01-02 21:00:00",
"2005-01-02 22:00:00",
"2005-01-02 23:00:00",
"2005-01-03 00:00:00",
"2005-01-03 01:00:00",
"2005-01-03 02:00:00",
"2005-01-03 03:00:00",
"2005-01-03 04:00:00",
"2005-01-03 05:00:00",
"2005-01-03 06:00:00",
"2005-01-03 16:00:00",
"2005-01-03 17:00:00",
"2005-01-03 18:00:00",
"2005-01-03 19:00:00",
"2005-01-03 20:00:00",
"2005-01-03 21:00:00",
"2005-01-03 22:00:00",
"2005-01-03 23:00:00",
"2005-01-04 00:00:00",
"2005-01-04 01:00:00",
"2005-01-04 02:00:00",
"2005-01-04 03:00:00",
"2005-01-04 04:00:00",
"2005-01-04 05:00:00",
"2005-01-04 06:00:00",
"2005-01-04 16:00:00",
"2005-01-04 17:00:00",
"2005-01-04 18:00:00",
"2005-01-04 19:00:00",
"2005-01-04 20:00:00",
"2005-01-04 21:00:00",
"2005-01-04 22:00:00",
"2005-01-04 23:00:00",
"2005-01-05 00:00:00",
"2005-01-05 01:00:00",
"2005-01-05 02:00:00",
"2005-01-05 03:00:00",
"2005-01-05 04:00:00",
"2005-01-05 05:00:00",
"2005-01-05 06:00:00",
"2005-01-05 16:00:00",
"2005-01-05 17:00:00",
"2005-01-05 18:00:00",
"2005-01-05 19:00:00",
"2005-01-05 20:00:00",
"2005-01-05 21:00:00",
"2005-01-05 22:00:00",
"2005-01-05 23:00:00",
]
)
assert dtindex.equals(mask[0:75])
def test_extreme(self, model_national):
data = model_national._model_data_pre_clustering.copy()
mask = masks.extreme(
data, "csp", var="resource", how="max", length="2D", n=1, padding="2H"
)
dtindex = pd.DatetimeIndex(
[
"2005-01-18 22:00:00",
"2005-01-18 23:00:00",
"2005-01-19 00:00:00",
"2005-01-19 01:00:00",
"2005-01-19 02:00:00",
"2005-01-19 03:00:00",
"2005-01-19 04:00:00",
"2005-01-19 05:00:00",
"2005-01-19 06:00:00",
"2005-01-19 07:00:00",
"2005-01-19 08:00:00",
"2005-01-19 09:00:00",
"2005-01-19 10:00:00",
"2005-01-19 11:00:00",
"2005-01-19 12:00:00",
"2005-01-19 13:00:00",
"2005-01-19 14:00:00",
"2005-01-19 15:00:00",
"2005-01-19 16:00:00",
"2005-01-19 17:00:00",
"2005-01-19 18:00:00",
"2005-01-19 19:00:00",
"2005-01-19 20:00:00",
"2005-01-19 21:00:00",
"2005-01-19 22:00:00",
"2005-01-19 23:00:00",
"2005-01-20 00:00:00",
"2005-01-20 01:00:00",
"2005-01-20 02:00:00",
"2005-01-20 03:00:00",
"2005-01-20 04:00:00",
"2005-01-20 05:00:00",
"2005-01-20 06:00:00",
"2005-01-20 07:00:00",
"2005-01-20 08:00:00",
"2005-01-20 09:00:00",
"2005-01-20 10:00:00",
"2005-01-20 11:00:00",
"2005-01-20 12:00:00",
"2005-01-20 13:00:00",
"2005-01-20 14:00:00",
"2005-01-20 15:00:00",
"2005-01-20 16:00:00",
"2005-01-20 17:00:00",
"2005-01-20 18:00:00",
"2005-01-20 19:00:00",
"2005-01-20 20:00:00",
"2005-01-20 21:00:00",
"2005-01-20 22:00:00",
"2005-01-20 23:00:00",
"2005-01-21 00:00:00",
"2005-01-21 01:00:00",
]
)
assert dtindex.equals(mask)
def test_extreme_diff_and_normalize(self, model_urban):
data = model_urban._model_data_pre_clustering.copy()
mask = masks.extreme_diff(
data,
"demand_heat",
"demand_electricity",
var="resource",
how="min",
length="1D",
n=2,
normalize=True,
)
dtindex = pd.DatetimeIndex(
[
"2005-01-13 00:00:00",
"2005-01-13 01:00:00",
"2005-01-13 02:00:00",
"2005-01-13 03:00:00",
"2005-01-13 04:00:00",
"2005-01-13 05:00:00",
"2005-01-13 06:00:00",
"2005-01-13 07:00:00",
"2005-01-13 08:00:00",
"2005-01-13 09:00:00",
"2005-01-13 10:00:00",
"2005-01-13 11:00:00",
"2005-01-13 12:00:00",
"2005-01-13 13:00:00",
"2005-01-13 14:00:00",
"2005-01-13 15:00:00",
"2005-01-13 16:00:00",
"2005-01-13 17:00:00",
"2005-01-13 18:00:00",
"2005-01-13 19:00:00",
"2005-01-13 20:00:00",
"2005-01-13 21:00:00",
"2005-01-13 22:00:00",
"2005-01-13 23:00:00",
"2005-01-19 00:00:00",
"2005-01-19 01:00:00",
"2005-01-19 02:00:00",
"2005-01-19 03:00:00",
"2005-01-19 04:00:00",
"2005-01-19 05:00:00",
"2005-01-19 06:00:00",
"2005-01-19 07:00:00",
"2005-01-19 08:00:00",
"2005-01-19 09:00:00",
"2005-01-19 10:00:00",
"2005-01-19 11:00:00",
"2005-01-19 12:00:00",
"2005-01-19 13:00:00",
"2005-01-19 14:00:00",
"2005-01-19 15:00:00",
"2005-01-19 16:00:00",
"2005-01-19 17:00:00",
"2005-01-19 18:00:00",
"2005-01-19 19:00:00",
"2005-01-19 20:00:00",
"2005-01-19 21:00:00",
"2005-01-19 22:00:00",
"2005-01-19 23:00:00",
]
)
assert dtindex.equals(mask)
def test_extreme_week_1d(self, model_national):
data = model_national._model_data_pre_clustering.copy()
mask = masks.extreme(
data,
"csp",
var="resource",
how="max",
length="1D",
n=1,
padding="calendar_week",
)
found_days = list(mask.dayofyear.unique())
days = [18, 19, 20, 21, 22, 23, 24]
assert days == found_days
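
# Combining a two-day window with `calendar_week` padding is expected to raise a ValueError.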
def test_extreme_week_2d(self, model_national):
data = model_national._model_data_pre_clustering.copy()
with pytest.raises(ValueError):
mask = masks.extreme(
data,
"csp",
var="resource",
how="max",
length="2D",
n=1,
padding="calendar_week",
)
def test_15min_masking_1D(self):
# The data is identical for the '2005-01-01' and '2005-01-03' timesteps;
# only '2005-01-02' differs.
override = {
"techs.test_demand_elec.constraints.resource": "file=demand_elec_15mins.csv",
"model.subset_time": None,
}
model = build_test_model(override, scenario="simple_supply,one_day")
data = model._model_data
mask = masks.extreme(
data, "test_demand_elec", var="resource", how="max", length="1D"
)
dtindex = pd.DatetimeIndex(
[
"2005-01-02 00:00:00",
"2005-01-02 00:15:00",
"2005-01-02 00:30:00",
"2005-01-02 00:45:00",
"2005-01-02 01:00:00",
"2005-01-02 01:15:00",
"2005-01-02 01:30:00",
"2005-01-02 01:45:00",
"2005-01-02 02:00:00",
"2005-01-02 02:15:00",
"2005-01-02 02:30:00",
"2005-01-02 02:45:00",
"2005-01-02 03:00:00",
"2005-01-02 03:15:00",
"2005-01-02 03:30:00",
"2005-01-02 03:45:00",
"2005-01-02 04:00:00",
"2005-01-02 04:15:00",
"2005-01-02 04:30:00",
"2005-01-02 04:45:00",
"2005-01-02 05:00:00",
"2005-01-02 05:15:00",
"2005-01-02 05:30:00",
"2005-01-02 05:45:00",
"2005-01-02 06:00:00",
"2005-01-02 06:15:00",
"2005-01-02 06:30:00",
"2005-01-02 06:45:00",
"2005-01-02 07:00:00",
"2005-01-02 07:15:00",
"2005-01-02 07:30:00",
"2005-01-02 07:45:00",
"2005-01-02 08:00:00",
"2005-01-02 08:15:00",
"2005-01-02 08:30:00",
"2005-01-02 08:45:00",
"2005-01-02 09:00:00",
"2005-01-02 09:15:00",
"2005-01-02 09:30:00",
"2005-01-02 09:45:00",
"2005-01-02 10:00:00",
"2005-01-02 10:15:00",
"2005-01-02 10:30:00",
"2005-01-02 10:45:00",
"2005-01-02 11:00:00",
"2005-01-02 11:15:00",
"2005-01-02 11:30:00",
"2005-01-02 11:45:00",
"2005-01-02 12:00:00",
"2005-01-02 12:15:00",
"2005-01-02 12:30:00",
"2005-01-02 12:45:00",
"2005-01-02 13:00:00",
"2005-01-02 13:15:00",
"2005-01-02 13:30:00",
"2005-01-02 13:45:00",
"2005-01-02 14:00:00",
"2005-01-02 14:15:00",
"2005-01-02 14:30:00",
"2005-01-02 14:45:00",
"2005-01-02 15:00:00",
"2005-01-02 15:15:00",
"2005-01-02 15:30:00",
"2005-01-02 15:45:00",
"2005-01-02 16:00:00",
"2005-01-02 16:15:00",
"2005-01-02 16:30:00",
"2005-01-02 16:45:00",
"2005-01-02 17:00:00",
"2005-01-02 17:15:00",
"2005-01-02 17:30:00",
"2005-01-02 17:45:00",
"2005-01-02 18:00:00",
"2005-01-02 18:15:00",
"2005-01-02 18:30:00",
"2005-01-02 18:45:00",
"2005-01-02 19:00:00",
"2005-01-02 19:15:00",
"2005-01-02 19:30:00",
"2005-01-02 19:45:00",
"2005-01-02 20:00:00",
"2005-01-02 20:15:00",
"2005-01-02 20:30:00",
"2005-01-02 20:45:00",
"2005-01-02 21:00:00",
"2005-01-02 21:15:00",
"2005-01-02 21:30:00",
"2005-01-02 21:45:00",
"2005-01-02 22:00:00",
"2005-01-02 22:15:00",
"2005-01-02 22:30:00",
"2005-01-02 22:45:00",
"2005-01-02 23:00:00",
"2005-01-02 23:15:00",
"2005-01-02 23:30:00",
"2005-01-02 23:45:00",
]
)
assert dtindex.equals(mask)
def test_15min_to_2h_masking_1D(self):
# The data is identical for the '2005-01-01' and '2005-01-03' timesteps;
# only '2005-01-02' differs.
override = {
"techs.test_demand_elec.constraints.resource": "file=demand_elec_15T_to_2h.csv",
"model.subset_time": None,
}
model = build_test_model(override, scenario="simple_supply,one_day")
data = model._model_data
mask = masks.extreme(
data, "test_demand_elec", var="resource", how="max", length="1D"
)
dtindex = pd.DatetimeIndex(
[
"2005-01-02 00:00:00",
"2005-01-02 00:15:00",
"2005-01-02 00:30:00",
"2005-01-02 00:45:00",
"2005-01-02 01:00:00",
"2005-01-02 01:30:00",
"2005-01-02 02:00:00",
"2005-01-02 02:30:00",
"2005-01-02 03:00:00",
"2005-01-02 04:00:00",
"2005-01-02 05:00:00",
"2005-01-02 06:00:00",
"2005-01-02 08:00:00",
"2005-01-02 10:00:00",
"2005-01-02 12:00:00",
"2005-01-02 14:00:00",
"2005-01-02 16:00:00",
"2005-01-02 17:00:00",
"2005-01-02 18:00:00",
"2005-01-02 20:00:00",
"2005-01-02 22:00:00",
]
)
assert dtindex.equals(mask)
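
# Tests for resampling the model time dimension, on its own and in combination
# with extreme-event masking.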
class TestResampling:
def test_15min_masking_1D_resampling_to_2h(self):
# The data is identical for the '2005-01-01' and '2005-01-03' timesteps;
# only '2005-01-02' differs.
override = {
"techs.test_demand_elec.constraints.resource": "file=demand_elec_15mins.csv",
"model.subset_time": None,
"model.time": {
"masks": [
{
"function": "extreme",
"options": {"tech": "test_demand_elec", "how": "max"},
}
],
"function": "resample",
"function_options": {"resolution": "2H"},
},
}
model = build_test_model(override, scenario="simple_supply,one_day")
data = model._model_data
dtindex = pd.DatetimeIndex(
[
"2005-01-01 00:00:00",
"2005-01-01 02:00:00",
"2005-01-01 04:00:00",
"2005-01-01 06:00:00",
"2005-01-01 08:00:00",
"2005-01-01 10:00:00",
"2005-01-01 12:00:00",
"2005-01-01 14:00:00",
"2005-01-01 16:00:00",
"2005-01-01 18:00:00",
"2005-01-01 20:00:00",
"2005-01-01 22:00:00",
"2005-01-02 00:00:00",
"2005-01-02 00:15:00",
"2005-01-02 00:30:00",
"2005-01-02 00:45:00",
"2005-01-02 01:00:00",
"2005-01-02 01:15:00",
"2005-01-02 01:30:00",
"2005-01-02 01:45:00",
"2005-01-02 02:00:00",
"2005-01-02 02:15:00",
"2005-01-02 02:30:00",
"2005-01-02 02:45:00",
"2005-01-02 03:00:00",
"2005-01-02 03:15:00",
"2005-01-02 03:30:00",
"2005-01-02 03:45:00",
"2005-01-02 04:00:00",
"2005-01-02 04:15:00",
"2005-01-02 04:30:00",
"2005-01-02 04:45:00",
"2005-01-02 05:00:00",
"2005-01-02 05:15:00",
"2005-01-02 05:30:00",
"2005-01-02 05:45:00",
"2005-01-02 06:00:00",
"2005-01-02 06:15:00",
"2005-01-02 06:30:00",
"2005-01-02 06:45:00",
"2005-01-02 07:00:00",
"2005-01-02 07:15:00",
"2005-01-02 07:30:00",
"2005-01-02 07:45:00",
"2005-01-02 08:00:00",
"2005-01-02 08:15:00",
"2005-01-02 08:30:00",
"2005-01-02 08:45:00",
"2005-01-02 09:00:00",
"2005-01-02 09:15:00",
"2005-01-02 09:30:00",
"2005-01-02 09:45:00",
"2005-01-02 10:00:00",
"2005-01-02 10:15:00",
"2005-01-02 10:30:00",
"2005-01-02 10:45:00",
"2005-01-02 11:00:00",
"2005-01-02 11:15:00",
"2005-01-02 11:30:00",
"2005-01-02 11:45:00",
"2005-01-02 12:00:00",
"2005-01-02 12:15:00",
"2005-01-02 12:30:00",
"2005-01-02 12:45:00",
"2005-01-02 13:00:00",
"2005-01-02 13:15:00",
"2005-01-02 13:30:00",
"2005-01-02 13:45:00",
"2005-01-02 14:00:00",
"2005-01-02 14:15:00",
"2005-01-02 14:30:00",
"2005-01-02 14:45:00",
"2005-01-02 15:00:00",
"2005-01-02 15:15:00",
"2005-01-02 15:30:00",
"2005-01-02 15:45:00",
"2005-01-02 16:00:00",
"2005-01-02 16:15:00",
"2005-01-02 16:30:00",
"2005-01-02 16:45:00",
"2005-01-02 17:00:00",
"2005-01-02 17:15:00",
"2005-01-02 17:30:00",
"2005-01-02 17:45:00",
"2005-01-02 18:00:00",
"2005-01-02 18:15:00",
"2005-01-02 18:30:00",
"2005-01-02 18:45:00",
"2005-01-02 19:00:00",
"2005-01-02 19:15:00",
"2005-01-02 19:30:00",
"2005-01-02 19:45:00",
"2005-01-02 20:00:00",
"2005-01-02 20:15:00",
"2005-01-02 20:30:00",
"2005-01-02 20:45:00",
"2005-01-02 21:00:00",
"2005-01-02 21:15:00",
"2005-01-02 21:30:00",
"2005-01-02 21:45:00",
"2005-01-02 22:00:00",
"2005-01-02 22:15:00",
"2005-01-02 22:30:00",
"2005-01-02 22:45:00",
"2005-01-02 23:00:00",
"2005-01-02 23:15:00",
"2005-01-02 23:30:00",
"2005-01-02 23:45:00",
"2005-01-03 00:00:00",
"2005-01-03 02:00:00",
"2005-01-03 04:00:00",
"2005-01-03 06:00:00",
"2005-01-03 08:00:00",
"2005-01-03 10:00:00",
"2005-01-03 12:00:00",
"2005-01-03 14:00:00",
"2005-01-03 16:00:00",
"2005-01-03 18:00:00",
"2005-01-03 20:00:00",
"2005-01-03 22:00:00",
]
)
assert dtindex.equals(data.timesteps.to_index())
def test_15min_resampling_to_6h(self):
# The data is identical for the '2005-01-01' and '2005-01-03' timesteps;
# only '2005-01-02' differs.
override = {
"techs.test_demand_elec.constraints.resource": "file=demand_elec_15mins.csv",
"model.subset_time": None,
"model.time": {
"function": "resample",
"function_options": {"resolution": "6H"},
},
}
model = build_test_model(override, scenario="simple_supply,one_day")
data = model._model_data
dtindex = pd.DatetimeIndex(
[
"2005-01-01 00:00:00",
"2005-01-01 06:00:00",
"2005-01-01 12:00:00",
"2005-01-01 18:00:00",
"2005-01-02 00:00:00",
"2005-01-02 06:00:00",
"2005-01-02 12:00:00",
"2005-01-02 18:00:00",
"2005-01-03 00:00:00",
"2005-01-03 06:00:00",
"2005-01-03 12:00:00",
"2005-01-03 18:00:00",
]
)
assert dtindex.equals(data.timesteps.to_index())
def test_15min_to_2h_resampling_to_2h(self):
"""
CSV has daily timeseries varying from 15min to 2h resolution, resample all to 2h
"""
override = {
"techs.test_demand_elec.constraints.resource": "file=demand_elec_15T_to_2h.csv",
"model.subset_time": None,
"model.time": {
"function": "resample",
"function_options": {"resolution": "2H"},
},
}
model = build_test_model(override, scenario="simple_supply,one_day")
data = model._model_data
dtindex = pd.DatetimeIndex(
[
"2005-01-01 00:00:00",
"2005-01-01 02:00:00",
"2005-01-01 04:00:00",
"2005-01-01 06:00:00",
"2005-01-01 08:00:00",
"2005-01-01 10:00:00",
"2005-01-01 12:00:00",
"2005-01-01 14:00:00",
"2005-01-01 16:00:00",
"2005-01-01 18:00:00",
"2005-01-01 20:00:00",
"2005-01-01 22:00:00",
"2005-01-02 00:00:00",
"2005-01-02 02:00:00",
"2005-01-02 04:00:00",
"2005-01-02 06:00:00",
"2005-01-02 08:00:00",
"2005-01-02 10:00:00",
"2005-01-02 12:00:00",
"2005-01-02 14:00:00",
"2005-01-02 16:00:00",
"2005-01-02 18:00:00",
"2005-01-02 20:00:00",
"2005-01-02 22:00:00",
"2005-01-03 00:00:00",
"2005-01-03 02:00:00",
"2005-01-03 04:00:00",
"2005-01-03 06:00:00",
"2005-01-03 08:00:00",
"2005-01-03 10:00:00",
"2005-01-03 12:00:00",
"2005-01-03 14:00:00",
"2005-01-03 16:00:00",
"2005-01-03 18:00:00",
"2005-01-03 20:00:00",
"2005-01-03 22:00:00",
]
)
assert dtindex.equals(data.timesteps.to_index())
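
# Tests for the lower-level time-series manipulation helpers in `funcs`.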
class TestFuncs:
@pytest.fixture(scope="module")
def model_national(self):
return calliope.examples.national_scale(
override_dict={"model.subset_time": ["2005-01", "2005-01"]}
)
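
# Dropping a timestep label that does not exist in the index should raise a ModelError.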
def test_drop_invalid_timesteps(self, model_national):
data = model_national._model_data_pre_clustering.copy()
timesteps = ["XXX2005-01-01 23:00"]
with pytest.raises(exceptions.ModelError):
funcs.drop(data, timesteps)
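
# Dropping two hourly timesteps from the one-month (744-step) subset should
# leave 742 steps, with the dropped labels removed from the coordinate.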
def test_drop(self, model_national):
data = model_national._model_data_pre_clustering.copy()
timesteps = ["2005-01-01 23:00", "2005-01-01 22:00"]
data_dropped = funcs.drop(data, timesteps)
assert len(data_dropped.timesteps) == 742
result_timesteps = list(data_dropped.coords["timesteps"].values)
assert "2005-01-01 21:00" not in result_timesteps
assert "2005-01-01 22:00" not in result_timesteps
class TestLoadTimeseries:
def test_invalid_csv_columns(self):
override = {
"nodes": {
"c.techs": {"test_supply_elec": None, "test_demand_elec": None},
"d.techs": {"test_supply_elec": None, "test_demand_elec": None},
},
"links": {
"a,b": {"exists": False},
"c,d.techs": {"test_transmission_elec": None},
},
}
with pytest.raises(exceptions.ModelError) as excinfo:
build_test_model(override_dict=override, scenario="one_day")
assert check_error_or_warning(
excinfo,
[
"file:column combinations `[('demand_elec.csv', 'c') ('demand_elec.csv', 'd')]` not found, but are requested by parameter `resource`."
],
)
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding M2M table for field region on 'Location'
db.create_table(u'main_location_region', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('location', models.ForeignKey(orm[u'main.location'], null=False)),
('region', models.ForeignKey(orm[u'main.region'], null=False))
))
db.create_unique(u'main_location_region', ['location_id', 'region_id'])
def backwards(self, orm):
# Removing M2M table for field region on 'Location'
db.delete_table('main_location_region')
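
# Frozen model definitions that South uses to reconstruct the ORM state at the
# time of this migration.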
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'main.approval': {
'Meta': {'object_name': 'Approval'},
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']"}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'processed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'processed_time': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
u'main.channel': {
'Meta': {'ordering': "['name']", 'object_name': 'Channel'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 21, 0, 0)'}),
'description': ('django.db.models.fields.TextField', [], {}),
'exclude_from_trending': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'image_is_banner': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Channel']", 'null': 'True'}),
'reverse_order': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'main.curatedgroup': {
'Meta': {'object_name': 'CuratedGroup'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 21, 0, 0)'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
},
u'main.event': {
'Meta': {'object_name': 'Event'},
'additional_links': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'archive_time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'call_info': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'channels': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Channel']", 'symmetrical': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'creator'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'duration': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Location']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'modified_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'modified_user'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'mozillian': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'participants': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Participant']", 'symmetrical': 'False'}),
'picture': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'event_picture'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['main.Picture']"}),
'pin': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'placeholder_img': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'popcorn_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'privacy': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '40', 'db_index': 'True'}),
'recruitmentmessage': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.RecruitmentMessage']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'remote_presenters': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'short_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '215', 'blank': 'True'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'initiated'", 'max_length': '20', 'db_index': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Tag']", 'symmetrical': 'False', 'blank': 'True'}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Template']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'template_environment': ('airmozilla.main.fields.EnvironmentField', [], {'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'transcript': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'upload': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'event_upload'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['uploads.Upload']"})
},
u'main.eventassignment': {
'Meta': {'object_name': 'EventAssignment'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 21, 0, 0)'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']", 'unique': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'locations': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Location']", 'symmetrical': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'})
},
u'main.eventhitstats': {
'Meta': {'object_name': 'EventHitStats'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']", 'unique': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 21, 0, 0)'}),
'shortcode': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'total_hits': ('django.db.models.fields.IntegerField', [], {})
},
u'main.eventoldslug': {
'Meta': {'object_name': 'EventOldSlug'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '215'})
},
u'main.eventrevision': {
'Meta': {'object_name': 'EventRevision'},
'additional_links': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'call_info': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'channels': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Channel']", 'symmetrical': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 21, 0, 0)'}),
'description': ('django.db.models.fields.TextField', [], {}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'picture': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Picture']", 'null': 'True', 'blank': 'True'}),
'placeholder_img': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'recruitmentmessage': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.RecruitmentMessage']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'short_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Tag']", 'symmetrical': 'False', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'})
},
u'main.eventtweet': {
'Meta': {'object_name': 'EventTweet'},
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'include_placeholder': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'send_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 21, 0, 0)'}),
'sent_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'tweet_id': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'})
},
u'main.location': {
'Meta': {'ordering': "['name']", 'object_name': 'Location'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'region': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['main.Region']", 'null': 'True', 'blank': 'True'}),
'timezone': ('django.db.models.fields.CharField', [], {'max_length': '250'})
},
u'main.locationdefaultenvironment': {
'Meta': {'unique_together': "(('location', 'privacy', 'template'),)", 'object_name': 'LocationDefaultEnvironment'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Location']"}),
'privacy': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '40'}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Template']"}),
'template_environment': ('airmozilla.main.fields.EnvironmentField', [], {})
},
u'main.participant': {
'Meta': {'object_name': 'Participant'},
'blog_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'clear_token': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'cleared': ('django.db.models.fields.CharField', [], {'default': "'no'", 'max_length': '15', 'db_index': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'participant_creator'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'department': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'irc': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'photo': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '65', 'blank': 'True'}),
'team': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'topic_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'twitter': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'})
},
u'main.picture': {
'Meta': {'object_name': 'Picture'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 21, 0, 0)'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'picture_event'", 'null': 'True', 'to': u"orm['main.Event']"}),
'file': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'height': ('django.db.models.fields.PositiveIntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 21, 0, 0)', 'auto_now': 'True', 'blank': 'True'}),
'modified_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'notes': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'size': ('django.db.models.fields.PositiveIntegerField', [], {}),
'width': ('django.db.models.fields.PositiveIntegerField', [], {})
},
u'main.recruitmentmessage': {
'Meta': {'ordering': "['text']", 'object_name': 'RecruitmentMessage'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 21, 0, 0)'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 21, 0, 0)', 'auto_now': 'True', 'blank': 'True'}),
'modified_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
u'main.region': {
'Meta': {'ordering': "['name']", 'object_name': 'Region'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
u'main.suggestedevent': {
'Meta': {'object_name': 'SuggestedEvent'},
'accepted': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']", 'null': 'True', 'blank': 'True'}),
'additional_links': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'call_info': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'channels': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Channel']", 'symmetrical': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 21, 0, 0)'}),
'description': ('django.db.models.fields.TextField', [], {}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'first_submitted': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Location']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'participants': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Participant']", 'symmetrical': 'False'}),
'picture': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Picture']", 'null': 'True', 'blank': 'True'}),
'placeholder_img': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'popcorn_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'privacy': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '40'}),
'remote_presenters': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'review_comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'short_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '215', 'blank': 'True'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'created'", 'max_length': '40'}),
'submitted': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['main.Tag']", 'symmetrical': 'False', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'upcoming': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'upload': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'upload'", 'null': 'True', 'to': u"orm['uploads.Upload']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'main.suggestedeventcomment': {
'Meta': {'object_name': 'SuggestedEventComment'},
'comment': ('django.db.models.fields.TextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 21, 0, 0)'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'suggested_event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.SuggestedEvent']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
u'main.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'main.template': {
'Meta': {'ordering': "['name']", 'object_name': 'Template'},
'content': ('django.db.models.fields.TextField', [], {}),
'default_archive_template': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'default_popcorn_template': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'main.urlmatch': {
'Meta': {'object_name': 'URLMatch'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'string': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'use_count': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'main.urltransform': {
'Meta': {'object_name': 'URLTransform'},
'find': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'match': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.URLMatch']"}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'replace_with': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'main.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'contributor': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'main.vidlysubmission': {
'Meta': {'object_name': 'VidlySubmission'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Event']"}),
'hd': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'submission_error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'submission_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 1, 21, 0, 0)'}),
'tag': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'token_protection': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
u'uploads.upload': {
'Meta': {'object_name': 'Upload'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'event'", 'null': 'True', 'to': u"orm['main.Event']"}),
'file_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mime_type': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'size': ('django.db.models.fields.BigIntegerField', [], {}),
'suggested_event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'suggested_event'", 'null': 'True', 'to': u"orm['main.SuggestedEvent']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '400'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
}
}
complete_apps = ['main']