repo_name (string) | path (string) | copies (string) | size (string) | text (string) | license (string, 15 classes) | hash (int64) | line_mean (float64) | line_max (int64) | alpha_frac (float64) | autogenerated (bool) | ratio (float64) | config_test (bool) | has_no_keywords (bool) | few_assignments (bool)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
MontpellierRessourcesImagerie/openmicroscopy | components/tools/OmeroWeb/omeroweb/webadmin/forms.py | 1 | 23531 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2008-2014 University of Dundee.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Aleksandra Tarkowska <A(dot)Tarkowska(at)dundee(dot)ac(dot)uk>, 2008.
#
# Version: 1.0
#
import logging
try:
from collections import OrderedDict # Python 2.7+ only
except ImportError:
pass
from django import forms
from django.forms.widgets import Textarea
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
from omeroweb.connector import Server
from omeroweb.custom_forms import NonASCIIForm
from custom_forms import ServerModelChoiceField, GroupModelChoiceField
from custom_forms import GroupModelMultipleChoiceField, OmeNameField
from custom_forms import ExperimenterModelMultipleChoiceField, MultiEmailField
logger = logging.getLogger(__name__)
#################################################################
# Non-model Form
class LoginForm(NonASCIIForm):
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
self.fields['server'] = ServerModelChoiceField(
Server, empty_label=None)
self.fields.keyOrder = ['server', 'username', 'password']
username = forms.CharField(
max_length=50, widget=forms.TextInput(attrs={
'size': 22, 'autofocus': 'autofocus'}))
password = forms.CharField(
max_length=50,
widget=forms.PasswordInput(attrs={'size': 22, 'autocomplete': 'off'}))
def clean_username(self):
if (self.cleaned_data['username'] == 'guest'):
raise forms.ValidationError("Guest account is not supported.")
return self.cleaned_data['username']
class ForgottonPasswordForm(NonASCIIForm):
server = ServerModelChoiceField(Server, empty_label=None)
username = forms.CharField(
max_length=50,
widget=forms.TextInput(attrs={'size': 28, 'autocomplete': 'off'}))
email = forms.EmailField(
widget=forms.TextInput(attrs={'size': 28, 'autocomplete': 'off'}))
ROLE_CHOICES = (
('user', 'User'),
('administrator', 'Administrator'),
('restricted_administrator', 'Administrator with restricted privileges')
)
class RoleRenderer(forms.RadioSelect.renderer):
"""Allows disabling of 'administrator' Radio button."""
def render(self):
midList = []
for x, wid in enumerate(self):
disabled = self.attrs.get('disabled')
if ROLE_CHOICES[x][0] == 'administrator':
if hasattr(self, 'disable_admin'):
disabled = getattr(self, 'disable_admin')
if disabled:
wid.attrs['disabled'] = True
midList.append(force_unicode(wid))
finalList = mark_safe(u'<ul id="id_role">\n%s\n</ul>'
% u'\n'.join([u'<li>%s</li>'
% w for w in midList]))
return finalList
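# A minimal wiring sketch (pre-Django-1.11 renderer API, as used below in
# ExperimenterForm): the renderer is passed to the RadioSelect widget and the
# 'administrator' option can then be greyed out for restricted admins:
#
# role = forms.ChoiceField(
# choices=ROLE_CHOICES,
# widget=forms.RadioSelect(renderer=RoleRenderer))
# role.widget.renderer.disable_admin = True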
class ExperimenterForm(NonASCIIForm):
def __init__(self, name_check=False, email_check=False,
experimenter_is_me_or_system=False,
experimenter_me=False,
can_modify_user=True,
user_privileges=[],
experimenter_root=False,
*args, **kwargs):
super(ExperimenterForm, self).__init__(*args, **kwargs)
self.name_check = name_check
self.email_check = email_check
self.user_privileges = user_privileges
try:
self.fields['other_groups'] = GroupModelMultipleChoiceField(
queryset=kwargs['initial']['groups'],
initial=kwargs['initial']['other_groups'], required=False,
label="Groups")
except:
self.fields['other_groups'] = GroupModelMultipleChoiceField(
queryset=kwargs['initial']['groups'], required=False,
label="Groups")
try:
self.fields['default_group'] = GroupModelChoiceField(
queryset=kwargs['initial']['my_groups'],
initial=kwargs['initial']['default_group'],
empty_label=u"", required=False)
except:
try:
self.fields['default_group'] = GroupModelChoiceField(
queryset=kwargs['initial']['my_groups'],
empty_label=u"", required=False)
except:
self.fields['default_group'] = GroupModelChoiceField(
queryset=list(), empty_label=u"", required=False)
# 'Role' is disabled if experimenter is 'admin' or self,
# so required=False to avoid validation error.
self.fields['role'] = forms.ChoiceField(
choices=ROLE_CHOICES,
widget=forms.RadioSelect(renderer=RoleRenderer),
required=False,
initial='user')
# If current user is restricted Admin, can't create full Admin
restricted_admin = "ReadSession" not in self.user_privileges
self.fields['role'].widget.renderer.disable_admin = restricted_admin
if ('with_password' in kwargs['initial'] and
kwargs['initial']['with_password']):
self.fields['password'] = forms.CharField(
max_length=50,
widget=forms.PasswordInput(attrs={'size': 30,
'autocomplete': 'off'}))
self.fields['confirmation'] = forms.CharField(
max_length=50,
widget=forms.PasswordInput(attrs={'size': 30,
'autocomplete': 'off'}))
fields_key_order = [
'omename', 'password', 'confirmation', 'first_name',
'middle_name', 'last_name', 'email', 'institution',
'role', 'active', 'other_groups', 'default_group']
else:
fields_key_order = [
'omename', 'first_name', 'middle_name', 'last_name',
'email', 'institution', 'role', 'active',
'other_groups', 'default_group']
ordered_fields = [(k, self.fields[k]) for k in fields_key_order]
roles = [('Sudo', 'Sudo'),
# combine WriteFile/ManagedRepo/Owned roles into 'Write'
('Write', 'Write Data'),
# combine DeleteFile/ManagedRepo/Owned roles into 'Delete'
('Delete', 'Delete Data'),
('Chgrp', 'Chgrp'),
('Chown', 'Chown'),
('ModifyGroup', 'Create and Edit Groups'),
('ModifyUser', 'Create and Edit Users'),
('ModifyGroupMembership', 'Add Users to Groups'),
('Script', 'Upload Scripts')]
for role in roles:
# If current user is light-admin, ignore privileges they don't have
# So they can't add/remove these from experimenter
# We don't disable them - (not in form data and will be removed)
ordered_fields.append(
(role[0], forms.BooleanField(
required=False,
label=role[1],
widget=forms.CheckboxInput(
attrs={'class': 'privilege',
'disabled': role[0] not in user_privileges})
))
)
# Django 1.8: Form.fields uses OrderedDict from the collections module.
self.fields = OrderedDict(ordered_fields)
if experimenter_me or experimenter_root:
self.fields['omename'].widget.attrs['readonly'] = True
name = "yourself"
if experimenter_root:
name = "'root' user"
self.fields['omename'].widget.attrs['title'] = \
"You can't edit Username of %s" % name
self.fields['role'].widget.attrs['disabled'] = True
self.fields['active'].widget.attrs['disabled'] = True
self.fields['active'].widget.attrs['title'] = \
"You cannot disable %s" % name
# If we can't modify user, ALL fields are disabled
if not can_modify_user:
for field in self.fields.values():
field.widget.attrs['disabled'] = True
omename = OmeNameField(
max_length=250,
widget=forms.TextInput(attrs={'size': 30, 'autocomplete': 'off'}),
label="Username")
first_name = forms.CharField(
max_length=250,
widget=forms.TextInput(attrs={'size': 30, 'autocomplete': 'off'}))
middle_name = forms.CharField(max_length=250, widget=forms.TextInput(
attrs={'size': 30, 'autocomplete': 'off'}), required=False)
last_name = forms.CharField(
max_length=250,
widget=forms.TextInput(attrs={'size': 30, 'autocomplete': 'off'}))
email = forms.EmailField(
widget=forms.TextInput(attrs={'size': 30, 'autocomplete': 'off'}),
required=False)
institution = forms.CharField(
max_length=250,
widget=forms.TextInput(attrs={'size': 30, 'autocomplete': 'off'}),
required=False)
active = forms.BooleanField(required=False)
def clean_confirmation(self):
if self.cleaned_data.get('password'):
if len(self.cleaned_data.get('password')) < 3:
raise forms.ValidationError(
'Password must be at least 3 characters long.')
if (self.cleaned_data.get('password') or
self.cleaned_data.get('confirmation')):
if (self.cleaned_data.get('password') !=
self.cleaned_data.get('confirmation')):
raise forms.ValidationError('Passwords do not match')
else:
return self.cleaned_data.get('password')
def clean_omename(self):
if self.name_check:
raise forms.ValidationError('This username already exists.')
return self.cleaned_data.get('omename')
def clean_email(self):
if self.email_check:
raise forms.ValidationError('This email already exists.')
return self.cleaned_data.get('email')
def clean_default_group(self):
if (self.cleaned_data.get('default_group') is None or
len(self.cleaned_data.get('default_group')) <= 0):
raise forms.ValidationError('No default group selected.')
else:
return self.cleaned_data.get('default_group')
def clean_other_groups(self):
if (self.cleaned_data.get('other_groups') is None or
len(self.cleaned_data.get('other_groups')) <= 0):
raise forms.ValidationError(
'User must be a member of at least one group.')
else:
return self.cleaned_data.get('other_groups')
PERMISSION_CHOICES = (
('0', 'Private'),
('1', 'Read-Only'),
('2', 'Read-Annotate'),
('3', 'Read-Write'),
)
class GroupForm(NonASCIIForm):
def __init__(self, name_check=False, group_is_system=False,
can_modify_group=True, can_add_member=True, *args, **kwargs):
super(GroupForm, self).__init__(*args, **kwargs)
self.name_check = name_check
if can_modify_group:
self.fields['name'] = forms.CharField(
max_length=100,
widget=forms.TextInput(attrs={'size': 25,
'autocomplete': 'off'}))
self.fields['description'] = forms.CharField(
max_length=250, required=False,
widget=forms.TextInput(attrs={'size': 25,
'autocomplete': 'off'}))
if can_add_member:
try:
if kwargs['initial']['owners']:
pass
self.fields['owners'] = ExperimenterModelMultipleChoiceField(
queryset=kwargs['initial']['experimenters'],
initial=kwargs['initial']['owners'], required=False)
except:
self.fields['owners'] = ExperimenterModelMultipleChoiceField(
queryset=kwargs['initial']['experimenters'],
required=False)
try:
if kwargs['initial']['members']:
pass
self.fields['members'] = ExperimenterModelMultipleChoiceField(
queryset=kwargs['initial']['experimenters'],
initial=kwargs['initial']['members'], required=False)
except:
self.fields['members'] = ExperimenterModelMultipleChoiceField(
queryset=kwargs['initial']['experimenters'],
required=False)
if can_modify_group:
self.fields['permissions'] = forms.ChoiceField(
choices=PERMISSION_CHOICES, widget=forms.RadioSelect(),
required=True, label="Permissions")
if group_is_system:
self.fields['name'].widget.attrs['readonly'] = True
self.fields['name'].widget.attrs['title'] = \
"Changing of system group name would be un-doable"
self.fields.keyOrder = [
'name', 'description', 'owners', 'members', 'permissions']
# If we can't modify group, disable fields
if not can_modify_group:
for name, field in self.fields.items():
if name not in ('owners', 'members'):
field.widget.attrs['disabled'] = True
def clean_name(self):
if self.name_check:
raise forms.ValidationError('This name already exists.')
return self.cleaned_data.get('name')
class GroupOwnerForm(forms.Form):
def __init__(self, *args, **kwargs):
super(GroupOwnerForm, self).__init__(*args, **kwargs)
try:
if kwargs['initial']['owners']:
pass
self.fields['owners'] = ExperimenterModelMultipleChoiceField(
queryset=kwargs['initial']['experimenters'],
initial=kwargs['initial']['owners'], required=False)
except:
self.fields['owners'] = ExperimenterModelMultipleChoiceField(
queryset=kwargs['initial']['experimenters'], required=False)
try:
if kwargs['initial']['members']:
pass
self.fields['members'] = ExperimenterModelMultipleChoiceField(
queryset=kwargs['initial']['experimenters'],
initial=kwargs['initial']['members'], required=False)
except:
self.fields['members'] = ExperimenterModelMultipleChoiceField(
queryset=kwargs['initial']['experimenters'], required=False)
self.fields.keyOrder = ['owners', 'members', 'permissions']
permissions = forms.ChoiceField(
choices=PERMISSION_CHOICES, widget=forms.RadioSelect(), required=True,
label="Permissions")
class MyAccountForm(NonASCIIForm):
def __init__(self, email_check=False, *args, **kwargs):
super(MyAccountForm, self).__init__(*args, **kwargs)
self.email_check = email_check
try:
if kwargs['initial']['default_group']:
pass
self.fields['default_group'] = GroupModelChoiceField(
queryset=kwargs['initial']['groups'],
initial=kwargs['initial']['default_group'],
empty_label=None)
except:
self.fields['default_group'] = GroupModelChoiceField(
queryset=kwargs['initial']['groups'],
empty_label=None)
self.fields.keyOrder = [
'omename', 'first_name', 'middle_name', 'last_name', 'email',
'institution', 'default_group']
omename = forms.CharField(
max_length=50,
widget=forms.TextInput(attrs={'onfocus': 'this.blur()', 'size': 30,
'autocomplete': 'off'}),
label="Username")
first_name = forms.CharField(
max_length=250,
widget=forms.TextInput(attrs={'size': 30, 'autocomplete': 'off'}))
middle_name = forms.CharField(
max_length=250,
widget=forms.TextInput(attrs={'size': 30, 'autocomplete': 'off'}),
required=False)
last_name = forms.CharField(
max_length=250,
widget=forms.TextInput(attrs={'size': 30, 'autocomplete': 'off'}))
email = forms.EmailField(
widget=forms.TextInput(attrs={'size': 30, 'autocomplete': 'off'}),
required=False)
institution = forms.CharField(
max_length=250,
widget=forms.TextInput(attrs={'size': 30, 'autocomplete': 'off'}),
required=False)
def clean_email(self):
if self.email_check:
raise forms.ValidationError('This email already exists.')
return self.cleaned_data.get('email')
class ContainedExperimentersForm(NonASCIIForm):
def __init__(self, *args, **kwargs):
super(ContainedExperimentersForm, self).__init__(*args, **kwargs)
try:
if kwargs['initial']['members']:
pass
self.fields['members'] = ExperimenterModelMultipleChoiceField(
queryset=kwargs['initial']['experimenters'],
initial=kwargs['initial']['members'],
required=False)
except:
self.fields['members'] = ExperimenterModelMultipleChoiceField(
queryset=kwargs['initial']['experimenters'],
required=False)
self.fields.keyOrder = ['members']
class UploadPhotoForm(forms.Form):
photo = forms.FileField(required=False)
def clean_photo(self):
if self.cleaned_data.get('photo') is None:
raise forms.ValidationError(
'No image selected. Supported image formats (file extensions'
' allowed): jpeg, jpg, gif, png. The maximum image size'
' allowed is 200KB.')
if not self.cleaned_data.get(
'photo').content_type.startswith("image"):
raise forms.ValidationError(
'Supported image formats (file extensions allowed):'
' jpeg, jpg, gif, png.')
if self.cleaned_data.get('photo').size > 204800:
raise forms.ValidationError(
'The maximum image size allowed is 200KB.')
return self.cleaned_data.get('photo')
class ChangePassword(NonASCIIForm):
old_password = forms.CharField(
max_length=50,
widget=forms.PasswordInput(attrs={'size': 30, 'autocomplete': 'off'}),
label="Current password")
password = forms.CharField(
max_length=50,
widget=forms.PasswordInput(attrs={'size': 30, 'autocomplete': 'off'}),
label="New password")
confirmation = forms.CharField(
max_length=50,
widget=forms.PasswordInput(attrs={'size': 30, 'autocomplete': 'off'}),
label="Confirm password")
def clean_confirmation(self):
if (self.cleaned_data.get('password') or
self.cleaned_data.get('confirmation')):
if len(self.cleaned_data.get('password')) < 3:
raise forms.ValidationError('Password must be at least 3'
' characters long.')
if (self.cleaned_data.get('password') !=
self.cleaned_data.get('confirmation')):
raise forms.ValidationError('Passwords do not match')
else:
return self.cleaned_data.get('password')
class EnumerationEntry(NonASCIIForm):
new_entry = forms.CharField(
max_length=250,
widget=forms.TextInput(attrs={'size': 30}))
class EnumerationEntries(NonASCIIForm):
def __init__(self, entries, *args, **kwargs):
super(EnumerationEntries, self).__init__(*args, **kwargs)
for i, e in enumerate(entries):
try:
if kwargs['initial']['entries']:
self.fields[str(e.id)] = forms.CharField(
max_length=250,
initial=e.value,
widget=forms.TextInput(attrs={'size': 30}),
label=i+1)
else:
self.fields[str(e.id)] = forms.CharField(
max_length=250,
widget=forms.TextInput(attrs={'size': 30}),
label=i+1)
except:
self.fields[str(e.id)] = forms.CharField(
max_length=250,
widget=forms.TextInput(attrs={'size': 30}),
label=i+1)
self.fields.keyOrder = [str(k) for k in self.fields.keys()]
class EmailForm(forms.Form):
"""
Form to gather recipients, subject and message for sending email
announcements
"""
error_css_class = 'field-error'
required_css_class = 'field-required'
# Define these as None just so I can order them
everyone = forms.BooleanField(required=False, label='All Users')
experimenters = forms.TypedMultipleChoiceField(
required=False,
coerce=int,
label='Users'
)
groups = forms.TypedMultipleChoiceField(
required=False,
coerce=int
)
# TODO CC isn't really CC. Maybe change label or change functionality
cc = MultiEmailField(required=False)
subject = forms.CharField(max_length=100, required=True)
message = forms.CharField(widget=Textarea, required=True)
# Include/Exclude inactive users
inactive = forms.BooleanField(label='Include inactive users',
required=False)
def __init__(self, experimenters, groups, conn, request, *args, **kwargs):
super(EmailForm, self).__init__(*args, **kwargs)
# Process Experimenters/Groups into choices (lists of tuples)
self.fields['experimenters'].choices = [
(experimenter.id, experimenter.firstName +
' ' + experimenter.lastName + ' (' + experimenter.omeName + ')' +
(' - Inactive' if not experimenter.isActive() else ''))
for experimenter in experimenters]
self.fields['groups'].choices = [
(group.id, group.name) for group in groups]
self.conn = conn
self.request = request
def clean(self):
cleaned_data = super(EmailForm, self).clean()
everyone = cleaned_data.get("everyone")
experimenters = cleaned_data.get("experimenters")
groups = cleaned_data.get("groups")
cc = cleaned_data.get("cc")
# If nobody addressed, throw an error
if not cc and not everyone and not experimenters and not groups:
raise forms.ValidationError("At least one addressee must be "
"specified in one or more of 'all',"
" 'user', 'group' or 'cc'")
return cleaned_data
| gpl-2.0 | 3,148,617,623,475,897,000 | 38.283806 | 79 | 0.571119 | false | 4.440649 | false | false | false |
oaelhara/numbbo | code-postprocessing/bbob_pproc/readalign.py | 1 | 16406 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Helper routines to read in data files.
The terms horizontal and vertical below refer to the horizontal
(fixed-target) and vertical (fixed-budget) views. When considering
convergence graphs of function values over time, we can view them as:
* costs for different fixed targets represented by horizontal cuts.
* function values for different fixed budgets represented by vertical
cuts.
COCO collects experimental data with respect to these two complementary
views. This module provides data structures and methods for dealing with
the experimental data.
"""
from __future__ import absolute_import
import os, sys
import numpy
import warnings
from pdb import set_trace
#GLOBAL VARIABLES
idxEvals = 0 # index of the column where to find the evaluations
# Single objective case
idxFSingle = 2 # index of the column where to find the function values
nbPtsFSingle = 5 # nb of target function values for each decade.
# Bi-objective case
idxFBi = 1 # index of the column where to find the function values
nbPtsFBi = 10 # nb of target function values for each decade.
#CLASS DEFINITIONS
class MultiReader(list):
"""List of data arrays to be aligned.
The main purpose of this class is to be used as a single container
of the data arrays to be aligned by :py:meth:`alignData()` in the
parent module.
A data array is defined as an array where rows correspond to
recordings at different moments of an experiment. Elements of these
rows correspond to different measures.
These data arrays can be aligned along the time or the function
values for instance.
This class is partly abstract. Some methods have to be defined by
inheriting classes depending on the wanted alignment:
* :py:meth:`isFinished()`, True when all the data is read.
* :py:meth:`getInitialValue()`, returns the initial alignment
value.
* :py:meth:`newCurrentValue()`, returns the next alignment value.
* :py:meth:`align()`, process all the elements of self to make
them aligned.
Some attributes have to be defined as well :py:attr:`idx`,
the index of the column with alignment values in the data array,
:py:attr:`idxData`, the index of the column with the actual data.
"""
# TODO: this class and all inheriting classes may have to be redesigned for
# other kinds of problems to work.
# idx: index of the column in the data array of the alignment value.
# idxData: index of the column in the data array for the data of concern.
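# A minimal sketch (hypothetical subclass) of what an alignment-specific
# reader adds on top of MultiReader, mirroring VMultiReader/HMultiReader below:
#
# class TimeAlignedReader(MultiReader):
# idx = 0 # column holding the alignment value
# idxData = 2 # column holding the data of concern
# def isFinished(self): ...
# def getInitialValue(self): ...
# def newCurrentValue(self): ...
# def align(self, currentValue): ...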
def __init__(self, data, isHArray=False):
for i in data:
if len(i) > 0: # ie. if the data array is not empty.
self.append(self.SingleReader(i, isHArray))
def currentLine(self):
"""Aggregates currentLines information."""
return numpy.array(list(i.currentLine[self.idxData] for i in self))
def currentValues(self):
"""Gets the list of the current alignment values."""
return list(i.currentLine[self.idx] for i in self)
def nextValues(self):
"""Gets the list of the next alignment values."""
return list(i.nextLine[self.idx] for i in self if not i.isFinished)
#def isFinished(self):
"""When all the data is read."""
#pass
#def getInitialValue(self):
"""Returns the initial alignment value."""
#pass
#def newCurrentValue(self):
"""Returns the next alignment value."""
#pass
#def align(self, currentValue):
"""Process all the elements of self to make them aligned."""
#pass
class SingleReader:
"""Single data array reader class."""
def __init__(self, data, isHArray=False):
if len(data) == 0:
raise ValueError, 'Empty data array.'
self.data = numpy.array(data)
self.it = self.data.__iter__()
self.isNearlyFinished = False
self.isFinished = False
self.currentLine = None
self.nextLine = self.it.next()
if isHArray:
self.idxEvals = range(1, numpy.shape(data)[1])
else:
self.idxEvals = idxEvals
def next(self):
"""Returns the next (last if undefined) line of the array data."""
if not self.isFinished:
if not self.isNearlyFinished: # the next line is still defined
self.currentLine = self.nextLine.copy()
# Update nextLine
try:
self.nextLine = self.it.next()
except StopIteration:
self.isNearlyFinished = True
else:
self.isFinished = True
self.currentLine[self.idxEvals] = numpy.nan
#TODO: the line above was not valid for the MultiArrayReader
return self.currentLine
class VMultiReader(MultiReader):
"""List of data arrays to be aligned vertically.
Aligned vertically means that the numbers of function evaluations are
the closest from below, or equal, to the alignment number of function
evaluations.
"""
idx = idxEvals # the alignment value is the number of function evaluations.
def __init__(self, data, isBiobjective):
super(VMultiReader, self).__init__(data)
self.idxData = idxFBi if isBiobjective else idxFSingle # the data of concern are the function values.
def isFinished(self):
return all(i.isFinished for i in self)
def getInitialValue(self):
for i in self:
i.next()
res = self.currentValues()
return min(res)
def newCurrentValue(self):
res = self.nextValues()
if res:
return min(self.nextValues())
else:
return None
def align(self, currentValue):
for i in self:
while not i.isFinished:
if i.nextLine[self.idx] > currentValue:
break
i.next()
return numpy.insert(self.currentLine(), 0, currentValue)
class VMultiReaderNew(MultiReader):
"""List of data arrays to be aligned vertically.
Aligned vertically means that the numbers of function evaluations are
the closest from below, or equal, to the alignment number of function
evaluations.
"""
idx = idxEvals # the alignment value is the number of function evaluations.
def __init__(self, data, isBiobjective):
super(VMultiReaderNew, self).__init__(data)
self.idxData = idxFBi if isBiobjective else idxFSingle # the data of concern are the function values.
def isFinished(self):
return all(i.isFinished for i in self)
def getAlignedValues(self, selectedValues):
res = selectedValues()
# iterate until you find the same evaluation number in all functions
while res and min(res) < max(res) and len(res) == len(self):
index = res.index(min(res))
self[index].next()
res = selectedValues()
if self[index].isFinished:
break
if res and min(res) == max(res) and len(res) == len(self):
return min(res)
else:
return None
def getInitialValue(self):
for i in self:
i.next()
return self.getAlignedValues(self.currentValues)
def newCurrentValue(self):
return self.getAlignedValues(self.nextValues)
def align(self, currentValue):
for i in self:
while not i.isFinished:
if i.nextLine[self.idx] > currentValue:
break
i.next()
return numpy.insert(self.currentLine(), 0, currentValue)
class HMultiReader(MultiReader):
"""List of data arrays to be aligned horizontally.
Aligned horizontally means all the function values are less than
(or equal to) the current alignment function value.
"""
idxData = idxEvals # the data of concern are the number of function evals.
def __init__(self, data, isBiobjective):
super(HMultiReader, self).__init__(data)
# the alignment value is the function value.
self.idx = idxFBi if isBiobjective else idxFSingle
self.nbPtsF = nbPtsFBi if isBiobjective else nbPtsFSingle
self.idxCurrentF = numpy.inf # Minimization
# idxCurrentF is a float for the extreme case where it is infinite.
# else it is an integer and then is the 'i' in 10**(i/nbPtsF)
def isFinished(self):
"""Is finished when we found the last alignment value reached."""
currentValue = numpy.power(10, self.idxCurrentF / self.nbPtsF)
if currentValue == 0:
return True
# It can be more than one line for the previous alignment value.
# We iterate until we find a better value or to the end of the lines.
for i in self:
while i.nextLine[self.idx] > currentValue and not i.isFinished:
i.next()
return not any(i.nextLine[self.idx] <= currentValue for i in self)
def getInitialValue(self):
for i in self:
i.next()
fvalues = self.currentValues()
self.idxCurrentF = numpy.ceil(numpy.log10(max(fvalues) if max(fvalues) > 0 else 1e-19) * self.nbPtsF)
# Returns the smallest 10^i/nbPtsF value larger than max(Fvalues)
return numpy.power(10, self.idxCurrentF / self.nbPtsF)
def newCurrentValue(self):
self.idxCurrentF -= 1
return numpy.power(10, self.idxCurrentF / self.nbPtsF)
def align(self, currentValue):
fvalues = []
for i in self:
while not i.isFinished:
if i.currentLine[self.idx] <= currentValue:
break
i.next()
if i.currentLine[self.idx] <= currentValue:
fvalues.append(i.currentLine[self.idx])
#This should not happen
if not fvalues:
raise ValueError, 'Value %g is not reached.' % currentValue
if max(fvalues) <= 0.:
self.idxCurrentF = -numpy.inf
currentValue = 0.
else:
self.idxCurrentF = min(self.idxCurrentF,
numpy.ceil(numpy.log10(max(fvalues)) * self.nbPtsF))
# Above line may return: Warning: divide by zero encountered in
# log10 in the case of negative fvalues.
# In the case of negative values for fvalues, self.idxCurrentF
# should be -numpy.inf at the condition that
# numpy.power(10, -inf) == 0 is true
# The update of idxCurrentF is done so all the intermediate
# function value trigger reached are not written, only the smallest
currentValue = numpy.power(10, self.idxCurrentF / self.nbPtsF)
return numpy.insert(self.currentLine(), 0, currentValue)
class ArrayMultiReader(MultiReader):
"""Class of *aligned* data arrays to be aligned together.
This class is used for dealing with the output of
:py:class:`MultiReader`:
* From *raw* data arrays, :py:class:`MultiReader` generates aligned
data arrays (first column is the alignment value, subsequent
columns are aligned data).
* This class also generates aligned data arrays but from other
aligned data arrays.
"""
idx = 0 # We expect the alignment value to be the 1st column.
def __init__(self, data, isHArray=False):
#super(ArrayMultiReader, self).__init__(data, True)
MultiReader.__init__(self, data, isHArray)
#for i in self:
#i.nbRuns = (numpy.shape(i.data)[1] - 1)
def currentLine(self):
"""Aggregates currentLines information."""
res = []
res.extend(list(i.currentLine[1:] for i in self))
return numpy.hstack(res)
class VArrayMultiReader(ArrayMultiReader, VMultiReader):
"""Wrapper class of *aligned* data arrays to be aligned vertically."""
def __init__(self, data):
ArrayMultiReader.__init__(self, data)
#TODO: Should this use super?
class VArrayMultiReaderNew(ArrayMultiReader, VMultiReader):
"""Wrapper class of *aligned* data arrays to be aligned vertically."""
def __init__(self, data):
ArrayMultiReader.__init__(self, data)
#TODO: Should this use super?
class HArrayMultiReader(ArrayMultiReader, HMultiReader):
"""Wrapper class of *aligned* data arrays to be aligned horizontally."""
def __init__(self, data, isBiobjective):
ArrayMultiReader.__init__(self, data, isHArray=True)
#TODO: Should this use super?
self.nbPtsF = nbPtsFBi if isBiobjective else nbPtsFSingle
self.idxCurrentF = numpy.inf #Minimization
#FUNCTION DEFINITIONS
def alignData(data, isBiobjective):
"""Aligns the data from a list of data arrays.
This method returns an array for which the alignment value is the
first column and the aligned values are in subsequent columns.
"""
#TODO: is template dependent.
idxF = idxFBi if isBiobjective else idxFSingle
res = []
currentValue = data.getInitialValue()
#set_trace()
if data.isFinished():
res.append(data.align(currentValue))
while not data.isFinished() and currentValue:
res.append(data.align(currentValue))
currentValue = data.newCurrentValue()
return (numpy.vstack(res), numpy.array(list(i.nextLine[idxEvals] for i in data)),
numpy.array(list(i.nextLine[idxF] for i in data)))
# Hack: at this point nextLine contains all information on the last line
# of the data.
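# A minimal usage sketch (hypothetical file name) combining split() below with
# the reader classes above:
#
# data_sets = split(['bbobexp_f1_DIM5.dat'], isBiobjective=False)
# aligned, maxevals, finalfunvals = alignData(
# HMultiReader(data_sets, isBiobjective=False), isBiobjective=False)
# # aligned[:, 0] holds the target function values; subsequent columns hold
# # the number of evaluations each run needed to reach them.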
def alignArrayData(data):
"""Aligns the data from a list of aligned arrays.
This method returns an array for which the alignment value is the first
column and the aligned values are in subsequent columns.
"""
#TODO: is template dependent.
res = []
currentValue = data.getInitialValue()
#set_trace()
if data.isFinished():
res.append(data.align(currentValue))
while not data.isFinished():
res.append(data.align(currentValue))
currentValue = data.newCurrentValue()
return numpy.vstack(res)
# Hack: at this point nextLine contains all information on the last line
# of the data.
def openfile(filePath):
if not os.path.isfile(filePath):
if ('win32' in sys.platform) and len(filePath) > 259:
raise IOError(2, 'The path is too long for the file "%s".' % filePath)
else:
raise IOError(2, 'The file "%s" does not exist.' % filePath)
return open(filePath, 'r')
def split(dataFiles, isBiobjective, dim=None):
"""Split a list of data files into arrays corresponding to data sets."""
dataSets = []
for fil in dataFiles:
with openfile(fil) as f:
# This doesn't work on Windows.
# content = numpy.loadtxt(fil, comments='%')
lines = f.readlines()
content = []
# Save values in array content. Check for nan and inf.
for line in lines:
# skip if comment
if line.startswith('%'):
if content:
dataSets.append(numpy.vstack(content))
content = []
if isBiobjective and len(dataSets) >= 5:
break
continue
# else remove end-of-line sign
# and split into single strings
data = line.strip('\n').split()
if dim and len(data) != dim + 5:
warnings.warn('Incomplete line %s in ' % (line) +
'data file %s: ' % (fil))
continue
for id in xrange(len(data)):
if data[id] in ('Inf', 'inf'):
data[id] = numpy.inf
elif data[id] in ('-Inf', '-inf'):
data[id] = -numpy.inf
elif data[id] in ('NaN', 'nan'):
data[id] = numpy.nan
else:
data[id] = float(data[id])
content.append(numpy.array(data))
# Check that it always has the same length?
if content:
dataSets.append(numpy.vstack(content))
return dataSets
| bsd-3-clause | 387,460,668,223,755,600 | 33.538947 | 109 | 0.61508 | false | 4.178808 | false | false | false |
opencord/voltha | voltha/coordinator.py | 1 | 23681 | #
# Copyright 2017 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Consul-based coordinator services """
from consul import ConsulException
from consul.twisted import Consul
from requests import ConnectionError
from structlog import get_logger
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks, returnValue, Deferred
from twisted.internet.error import DNSLookupError
from zope.interface import implementer
from leader import Leader
from common.utils.asleep import asleep
from common.utils.message_queue import MessageQueue
from voltha.registry import IComponent
from worker import Worker
from simplejson import dumps, loads
from common.utils.deferred_utils import DeferredWithTimeout, TimeOutError
log = get_logger()
class StaleMembershipEntryException(Exception):
pass
@implementer(IComponent)
class Coordinator(object):
"""
An app shall instantiate only one Coordinator (singleton).
A single instance of this object shall take care of all external
interactions with consul and, via consul, all coordination activities with its
clustered peers. Roles include:
- registering an ephemeral membership entry (k/v record) in consul
- participating in a symmetric leader election, and potentially assuming
the leader's role. What leadership entails is not a concern for the
coordinator; it simply instantiates (and shuts down) a leader class
when it gains (or loses) leadership.
"""
CONNECT_RETRY_INTERVAL_SEC = 1
RETRY_BACKOFF = [0.05, 0.1, 0.2, 0.5, 1, 2, 5]
# Public methods:
def __init__(self,
internal_host_address,
external_host_address,
instance_id,
rest_port,
config,
consul='localhost:8500',
container_name_regex='^.*\.([0-9]+)\..*$'):
log.info('initializing-coordinator')
self.config = config['coordinator']
self.worker_config = config['worker']
self.leader_config = config['leader']
self.membership_watch_relatch_delay = config.get(
'membership_watch_relatch_delay', 0.1)
self.tracking_loop_delay = self.config.get(
'tracking_loop_delay', 1)
self.session_renewal_timeout = self.config.get(
'session_renewal_timeout', 5)
self.session_renewal_loop_delay = self.config.get(
'session_renewal_loop_delay', 3)
self.membership_maintenance_loop_delay = self.config.get(
'membership_maintenance_loop_delay', 5)
self.session_time_to_live = self.config.get(
'session_time_to_live', 10)
self.prefix = self.config.get('voltha_kv_prefix', 'service/voltha')
self.leader_prefix = '/'.join((self.prefix, self.config.get(
self.config['leader_key'], 'leader')))
self.membership_prefix = '/'.join((self.prefix, self.config.get(
self.config['membership_key'], 'members'), ''))
self.assignment_prefix = '/'.join((self.prefix, self.config.get(
self.config['assignment_key'], 'assignments'), ''))
self.workload_prefix = '/'.join((self.prefix, self.config.get(
self.config['workload_key'], 'work'), ''))
self.core_store_prefix = '/'.join((self.prefix, self.config.get(
self.config['core_store_key'], 'data/core')))
self.core_store_assignment_key = self.core_store_prefix + \
'/assignment'
self.core_storage_suffix = 'core_store'
self.retries = 0
self.instance_id = instance_id
self.internal_host_address = internal_host_address
self.external_host_address = external_host_address
self.rest_port = rest_port
self.membership_record_key = self.membership_prefix + self.instance_id
self.session_id = None
self.i_am_leader = False
self.leader_id = None # will be the instance id of the current leader
self.shutting_down = False
self.leader = None
self.membership_callback = None
self.worker = Worker(self.instance_id, self)
self.host = consul.split(':')[0].strip()
self.port = int(consul.split(':')[1].strip())
# TODO need to handle reconnect events properly
self.consul = Consul(host=self.host, port=self.port)
self.container_name_regex = container_name_regex
self.wait_for_leader_deferreds = []
self.peers_mapping_queue = MessageQueue()
def start(self):
log.debug('starting')
reactor.callLater(0, self._async_init)
log.info('started')
return self
@inlineCallbacks
def stop(self):
log.debug('stopping')
self.shutting_down = True
yield self._delete_session() # this will delete the leader lock too
yield self.worker.stop()
if self.leader is not None:
yield self.leader.stop()
self.leader = None
log.info('stopped')
def wait_for_a_leader(self):
"""
Async wait till a leader is detected/elected. The deferred will be
called with the leader's instance_id
:return: Deferred.
"""
d = Deferred()
if self.leader_id is not None:
d.callback(self.leader_id)
return d
else:
self.wait_for_leader_deferreds.append(d)
return d
# Wait for a core data id to be assigned to this voltha instance
@inlineCallbacks
def get_core_store_id_and_prefix(self):
core_store_id = yield self.worker.get_core_store_id()
returnValue((core_store_id, self.core_store_prefix))
def recv_peers_map(self):
return self.peers_mapping_queue.get()
def publish_peers_map_change(self, msg):
self.peers_mapping_queue.put(msg)
# Proxy methods for consul with retry support
def kv_get(self, *args, **kw):
return self._retry('GET', *args, **kw)
def kv_put(self, *args, **kw):
return self._retry('PUT', *args, **kw)
def kv_delete(self, *args, **kw):
return self._retry('DELETE', *args, **kw)
# Methods exposing key membership information
@inlineCallbacks
def get_members(self):
"""Return list of all members"""
_, members = yield self.kv_get(self.membership_prefix, recurse=True)
returnValue([member['Key'][len(self.membership_prefix):]
for member in members])
# Private (internal) methods:
@inlineCallbacks
def _async_init(self):
yield self._create_session()
yield self._create_membership_record()
yield self._start_leader_tracking()
yield self.worker.start()
def _backoff(self, msg):
wait_time = self.RETRY_BACKOFF[min(self.retries,
len(self.RETRY_BACKOFF) - 1)]
self.retries += 1
log.info(msg, retry_in=wait_time)
return asleep(wait_time)
def _clear_backoff(self):
if self.retries:
log.info('reconnected-to-consul', after_retries=self.retries)
self.retries = 0
@inlineCallbacks
def _create_session(self):
@inlineCallbacks
def _create_session():
consul = yield self.get_consul()
# create consul session
self.session_id = yield consul.session.create(
behavior='release', ttl=self.session_time_to_live,
lock_delay=1)
log.info('created-consul-session', session_id=self.session_id)
self._start_session_tracking()
yield self._retry(_create_session)
@inlineCallbacks
def _delete_session(self):
try:
yield self.consul.session.destroy(self.session_id)
except Exception as e:
log.exception('failed-to-delete-session',
session_id=self.session_id)
@inlineCallbacks
def _create_membership_record(self):
yield self._do_create_membership_record_with_retries()
reactor.callLater(0, self._maintain_membership_record)
@inlineCallbacks
def _maintain_membership_record(self):
try:
while 1:
valid_membership = yield self._assert_membership_record_valid()
if not valid_membership:
log.info('recreating-membership-before',
session=self.session_id)
yield self._do_create_membership_record_with_retries()
log.info('recreating-membership-after',
session=self.session_id)
else:
log.debug('valid-membership', session=self.session_id)
# Async sleep before checking the membership record again
yield asleep(self.membership_maintenance_loop_delay)
except Exception, e:
log.exception('unexpected-error-leader-tracking', e=e)
finally:
# except in shutdown, the loop must continue (after a short delay)
if not self.shutting_down:
reactor.callLater(self.membership_watch_relatch_delay,
self._maintain_membership_record)
def _create_membership_record_data(self):
member_record = dict()
member_record['status'] = 'alive'
member_record['host_address'] = self.external_host_address
return member_record
@inlineCallbacks
def _assert_membership_record_valid(self):
try:
log.info('membership-record-before')
is_timeout, (_, record) = yield \
self.coordinator_get_with_timeout(
key=self.membership_record_key,
index=0,
timeout=5)
if is_timeout:
returnValue(False)
log.info('membership-record-after', record=record)
if record is None or \
'Session' not in record or \
record['Session'] != self.session_id:
log.info('membership-record-change-detected',
old_session=self.session_id,
record=record)
returnValue(False)
else:
returnValue(True)
except Exception as e:
log.exception('membership-validation-exception', e=e)
returnValue(False)
@inlineCallbacks
def _do_create_membership_record_with_retries(self):
while 1:
log.info('recreating-membership', session=self.session_id)
result = yield self._retry(
'PUT',
self.membership_record_key,
dumps(self._create_membership_record_data()),
acquire=self.session_id)
if result:
log.info('new-membership-record-created',
session=self.session_id)
break
else:
log.warn('cannot-create-membership-record')
yield self._backoff('stale-membership-record')
def _start_session_tracking(self):
reactor.callLater(0, self._session_tracking_loop)
@inlineCallbacks
def _session_tracking_loop(self):
@inlineCallbacks
def _redo_session():
log.info('_redo_session-before')
yield self._delete_session()
# Create a new consul connection/session with the configured TTL
try:
self.consul = Consul(host=self.host, port=self.port)
self.session_id = yield self.consul.session.create(
behavior='release',
ttl=self.session_time_to_live,
lock_delay=1)
log.info('new-consul-session', session=self.session_id)
except Exception as e:
log.exception('could-not-create-a-consul-session', e=e)
@inlineCallbacks
def _renew_session(m_callback):
try:
log.debug('_renew_session-before')
consul_ref = self.consul
result = yield consul_ref.session.renew(
session_id=self.session_id)
log.info('just-renewed-session', result=result)
if not m_callback.called:
# Triggering callback will cancel the timeout timer
log.info('trigger-callback-to-cancel-timeout-timer')
m_callback.callback(result)
else:
# Timeout event has already been called. Just ignore
# this event
log.info('renew-called-after-timeout',
new_consul_ref=self.consul,
old_consul_ref=consul_ref)
except Exception, e:
# Let the invoking method receive a timeout
log.exception('could-not-renew-session', e=e)
try:
while 1:
log.debug('session-tracking-start')
rcvd = DeferredWithTimeout(
timeout=self.session_renewal_timeout)
_renew_session(rcvd)
try:
_ = yield rcvd
except TimeOutError as e:
log.info('session-renew-timeout', e=e)
# Redo the session
yield _redo_session()
except Exception as e:
log.exception('session-renew-exception', e=e)
else:
log.debug('successfully-renewed-session')
# Async sleep before the next session tracking
yield asleep(self.session_renewal_loop_delay)
except Exception as e:
log.exception('renew-exception', e=e)
finally:
reactor.callLater(self.session_renewal_loop_delay,
self._session_tracking_loop)
def _start_leader_tracking(self):
reactor.callLater(0, self._leadership_tracking_loop)
@inlineCallbacks
def _leadership_tracking_loop(self):
try:
# Attempt to acquire leadership lock. True indicates success;
# False indicates there is already a leader. It's instance id
# is then the value under the leader key service/voltha/leader.
# attempt acquire leader lock
log.info('leadership-attempt-before')
result = yield self._retry('PUT',
self.leader_prefix,
self.instance_id,
acquire=self.session_id)
log.info('leadership-attempt-after')
# read it back before being too happy; seeing our session id is a
# proof and now we have the change id that we can use to reliably
# track any changes. In an unlikely scenario where the leadership
# key gets wiped out administratively since the previous line,
# the returned record can be None. Handle it.
(index, record) = yield self._retry('GET',
self.leader_prefix)
log.info('leader-prefix',
i_am_leader=result, index=index, record=record)
if record is not None:
if result is True:
if record['Session'] == self.session_id:
yield self._assert_leadership()
else:
pass # confusion; need to retry leadership
else:
leader_id = record['Value']
yield self._assert_nonleadership(leader_id)
# if record was none, we shall try leadership again
last = record
while last is not None:
# this shall return only when update is made to leader key
# or expires after 5 seconds wait
is_timeout, (tmp_index, updated) = yield \
self.coordinator_get_with_timeout(
key=self.leader_prefix,
index=index,
timeout=5)
# Timeout means either there is a lost connectivity to
# consul or there are no change to that key. Do nothing.
if is_timeout:
continue
# After timeout event the index returned from
# coordinator_get_with_timeout is None. If we are here it's
# not a timeout, therefore the index is a valid one.
index=tmp_index
if updated is None or updated != last:
log.info('leader-key-change',
index=index, updated=updated, last=last)
# leadership has changed or vacated (or forcefully
# removed), apply now
# If I was previously the leader then assert a non-
# leadership role before going for election
if self.i_am_leader:
log.info('leaving-leadership',
leader=self.instance_id)
yield self._assert_nonleadership(self.instance_id)
break
last = updated
except Exception, e:
log.exception('unexpected-error-leader-tracking', e=e)
finally:
# except in shutdown, the loop must continue (after a short delay)
if not self.shutting_down:
reactor.callLater(self.tracking_loop_delay,
self._leadership_tracking_loop)
@inlineCallbacks
def _assert_leadership(self):
"""(Re-)assert leadership"""
if not self.i_am_leader:
self.i_am_leader = True
self._set_leader_id(self.instance_id)
yield self._just_gained_leadership()
@inlineCallbacks
def _assert_nonleadership(self, leader_id):
"""(Re-)assert non-leader role"""
# update leader_id anyway
self._set_leader_id(leader_id)
if self.i_am_leader:
self.i_am_leader = False
yield self._just_lost_leadership()
def _set_leader_id(self, leader_id):
self.leader_id = leader_id
deferreds, self.wait_for_leader_deferreds = \
self.wait_for_leader_deferreds, []
for d in deferreds:
d.callback(leader_id)
def _just_gained_leadership(self):
log.info('became-leader')
self.leader = Leader(self)
return self.leader.start()
def _just_lost_leadership(self):
log.info('lost-leadership')
return self._halt_leader()
def _halt_leader(self):
if self.leader:
d = self.leader.stop()
self.leader = None
return d
def get_consul(self):
return self.consul
@inlineCallbacks
def _retry(self, operation, *args, **kw):
while 1:
try:
consul = yield self.get_consul()
log.info('start', operation=operation, args=args)
if operation == 'GET':
result = yield consul.kv.get(*args, **kw)
elif operation == 'PUT':
for name, value in kw.items():
if name == 'acquire':
if value != self.session_id:
log.info('updating-session-in-put-operation',
old_session=value,
new_session=self.session_id)
kw['acquire'] = self.session_id
break
result = yield consul.kv.put(*args, **kw)
elif operation == 'DELETE':
result = yield consul.kv.delete(*args, **kw)
else:
# Default case - consider operation as a function call
result = yield operation(*args, **kw)
self._clear_backoff()
break
except ConsulException, e:
log.exception('consul-not-up',
operation=operation,
args=args,
session=self.consul.Session,
e=e)
yield self._backoff('consul-not-up')
except ConnectionError, e:
log.exception('cannot-connect-to-consul',
operation=operation,
args=args,
session=self.consul.Session,
e=e)
yield self._backoff('cannot-connect-to-consul')
except DNSLookupError, e:
log.info('dns-lookup-failed', operation=operation, args=args,
host=self.host)
yield self._backoff('dns-lookup-failed')
except StaleMembershipEntryException, e:
log.exception('stale-membership-record-in-the-way',
operation=operation,
args=args,
session=self.consul.Session,
e=e)
yield self._backoff('stale-membership-record-in-the-way')
except Exception, e:
if not self.shutting_down:
log.exception(e)
yield self._backoff('unknown-error')
log.info('end', operation=operation, args=args)
returnValue(result)
@inlineCallbacks
def coordinator_get_with_timeout(self, key, timeout, **kw):
"""
Query consul with a timeout
:param key: Key to query
:param timeout: timeout value
:param kw: additional key-value params
:return: (is_timeout, (index, result)).
"""
@inlineCallbacks
def _get(key, m_callback):
try:
(index, result) = yield self._retry('GET', key, **kw)
if not m_callback.called:
log.debug('got-result-cancelling-timer')
m_callback.callback((index, result))
except Exception as e:
log.exception('got-exception', e=e)
try:
rcvd = DeferredWithTimeout(timeout=timeout)
_get(key, rcvd)
try:
result = yield rcvd
log.debug('result-received', result=result)
returnValue((False, result))
except TimeOutError as e:
log.debug('timeout-or-no-data-change', consul_key=key)
except Exception as e:
log.exception('exception', e=e)
except Exception as e:
log.exception('exception', e=e)
returnValue((True, (None, None)))
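# Typical call pattern (mirroring the membership and leadership watches
# above): a consul blocking query that either reports a change or times out:
#
# is_timeout, (index, record) = yield self.coordinator_get_with_timeout(
# key=self.leader_prefix, index=index, timeout=5)
# if not is_timeout:
# ... # react to the (possibly changed) key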
| apache-2.0 | -2,272,918,664,994,395,000 | 38.206954 | 79 | 0.550737 | false | 4.464744 | true | false | false |
deepeshmittal/daytona | Scheduler+Agent/system_metrics_gather.py | 1 | 29943 | # This file implement different thread routines which spaws on agent startup and continue to run until agnet is up.
# These threads continuously query stats from the linux system and write designated log file for a particular test if
# it is currently running on agent
#!/usr/bin/env python
import os
import common
import subprocess
import re
import action
import logging
import time
# Global
sys_logger = None
iostat_file_ext = "_iostat_block_devices.plt"
network_io_file_ext = "_network_devices.plt"
system_metrics_interval = '5'
docker_stat_header = "NAME CONTAINER CPU % MEM %"
# Bash Commands
date_cmd = ['date', '-u', '+%Y-%m-%dT%H:%M:%SZ']
top_cmd = ['top', '-b', '-i', '-d', system_metrics_interval]
top_get_header = ['top', '-b', '-n', '1', '-i']
iostat_cmd = ['iostat', '-dtx', system_metrics_interval]
iostat_get_header = ['iostat', '-dtx']
sar_get_header = {'cpu': ['sar', '-u', '1', '1'],
'task': ['sar', '-w', '1', '1'],
'nfs': ['sar', '-n', 'NFS', '1', '1'],
'mem': ['sar', '-r', '1', '1'],
'network_io': ['sar', '-n', 'DEV', '1', '1']
}
docker_version = ['docker', '-v']
docker_command = "( date -u +'%Y-%m-%dT%H:%M:%SZ' && docker stats -a --format " \
"'table {{.Name}}\t{{.Container}}\t{{.CPUPerc}}\t{{.MemPerc}}\t' --no-stream )"
sar_cmd = ['sar', '-n', 'DEV', '-n', 'NFS', '-u', '-r', '-w', system_metrics_interval]
get_pid = ["ps", "-eo", "pid,cmd,%cpu", "--sort=-%cpu"]
grep2 = ["grep", "-v", "grep"]
awk = ["awk", "FNR == 1 {print $1}"]
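# These pieces are meant to be chained (with a test-specific grep, an
# assumption based on the 'grep2' name) into the shell pipeline:
#
# ps -eo pid,cmd,%cpu --sort=-%cpu | grep <process> | grep -v grep \
# | awk 'FNR == 1 {print $1}'
#
# i.e. print the PID of the highest-CPU process matching <process>.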
def loggersetup(filename):
"""
Set up a logger object for capturing system-metrics gathering activity in the given debug file 'filename'
"""
if os.path.isfile(filename):
os.remove(filename)
logger = logging.getLogger("system_metrics")
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler(filename)
fh.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
formatter = logging.Formatter('%(asctime)s %(levelname)-6s {%(filename)s %(lineno)d} %(message)-100s',
'%Y-%m-%d %H:%M:%S')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
logger.addHandler(fh)
logger.addHandler(ch)
logger.propagate = False
return logger
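# A minimal usage sketch (hypothetical path); the module-level sys_logger is
# expected to be initialised like this during agent startup:
#
# sys_logger = loggersetup('/tmp/system_metrics_gather.log')
# sys_logger.debug('system metrics gather started')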
def top_gather(self):
"""
This method implements the thread routine for querying top output at a fixed interval. If any test is in a
running state on this agent, this routine appends new stat values to top_output.txt for that test
"""
running_queue = {}
# execute top batch command
p1 = subprocess.Popen(top_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
while True:
output = p1.stdout.readline()
if output == '' and p1.poll() is not None:
break
if output:
# Read new output
output = output.rstrip()
# if output line starts with "top" then it need to dump current timestamp value. It also dump list of test
# currently in running state in seperate list. As this is the new output sequence, we want to start writing
# subsequent logs for currently running tests. Hence it won't check running test list until new output
# sequence
if output.startswith('top'):
p2 = subprocess.Popen(date_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
timestamp = p2.communicate()[0].strip()
action.action_lock.acquire()
running_queue = action.running_tests
action.action_lock.release()
for testid, test in running_queue.iteritems():
if test.status == "RUNNING":
top_file = test.statsdir + "top_output.txt"
if os.path.isfile(top_file):
# If the file exists then append this new output sequence to it with the current TS
with open(top_file, 'a') as fh:
fh.write("\n" + timestamp + "\n")
fh.write(output + "\n")
sys_logger.debug("Generating top output for test : " + str(testid))
else:
# If the file doesn't exist then this is a new test just started on the agent; create
# top_output.txt and dump this new output sequence into it with the current TS
with open(top_file, 'w') as fh:
fh.write(timestamp + "\n")
fh.write(output + "\n")
sys_logger.debug("Starting top output for test : " + str(testid))
continue
# Continue writing the output sequence to the files of the running tests captured at the start of the sequence
for testid, test in running_queue.iteritems():
if test.status == "RUNNING":
top_file = test.statsdir + "top_output.txt"
if os.path.isfile(top_file):
with open(top_file, 'a') as fh:
fh.write(output + "\n")
def iostat_gather(self):
"""
This method implements the thread routine for querying iostat output at a fixed interval. If any test is in a
running state on this agent, this routine appends to a separate per-device file it creates for that test
"""
iostat_header = None
device_header = 0
device_list = []
p1 = subprocess.Popen(iostat_get_header, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = p1.communicate()[0].strip()
output = output.split("\n")
# Check header and device list from iostat output
for header in output:
header = header.strip()
if header.startswith("Device"):
header = re.sub(' +', ' ', header)
header = header.replace(' ', ',')
header = header.replace("Device:", "Time")
iostat_header = header
device_header = 1
continue
if device_header:
header = re.sub(' +', ' ', header)
header = header.split(' ')
device_list.append(header[0])
# Start IOSTAT batch command for continuous output
p2 = subprocess.Popen(iostat_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
running_queue = {}
timestamp = 0
try:
while True:
output = p2.stdout.readline()
if output == '' and p2.poll() is not None:
break
if output:
output = output.strip()
output = re.sub(' +', ' ', output)
output = output.replace(' ', ',')
# if output line starts with "Device" then it need to dump current timestamp value. It also dump list
# of test currently in running state in seperate list. As this is the new output sequence, we want to
# start writing subsequent logs for currently running tests. Hence it won't check running test list
# until new output sequence
if output.startswith("Device"):
p3 = subprocess.Popen(date_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
timestamp = p3.communicate()[0].strip()
action.action_lock.acquire()
running_queue = action.running_tests
action.action_lock.release()
continue
output = output.split(",")
output_device = output[0]
output[0] = str(timestamp)
output = ",".join(output)
# Continue writing the output sequence to the files of the running tests captured at the start of the sequence
if output_device in device_list:
for testid, test in running_queue.iteritems():
if test.status == "RUNNING":
iostat_file_name = output_device + iostat_file_ext
iostat_file = test.statsdir + iostat_file_name
if os.path.isfile(iostat_file):
# If the file exists then append this new output sequence to it with the current TS
sys_logger.debug("Generating iostat output in " + iostat_file_name + " for test : "
+ str(testid))
with open(iostat_file, 'a') as fh:
fh.write(output + "\n")
else:
# If the file doesn't exist then this is a new test just started on the agent; create the
# file and dump the IOSTAT header into it with the current TS
with open(iostat_file, 'w') as fh:
sys_logger.debug("Starting " + iostat_file_name + " for test : " + str(testid))
fh.write(iostat_header + "\n")
fh.write(output + "\n")
except Exception as e:
sys_logger.error(e)
def sar_gather(self):
header_row = 2 # In SAR output the header is at row index 2; modify accordingly
# getting cpu.plt header
p = subprocess.Popen(sar_get_header['cpu'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
output = p.communicate()[0].strip()
output = output.split("\n")[header_row]
output = re.sub(' +', ' ', output)
output = output.split(" ")
del output[:3]
cpu_plt_header = ",".join(output)
# getting task.plt header
p = subprocess.Popen(sar_get_header['task'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
output = p.communicate()[0].strip()
output = output.split("\n")[header_row]
output = re.sub(' +', ' ', output)
output = output.split(" ")
del output[:2]
task_plt_header = ",".join(output)
# getting mem.plt header
p = subprocess.Popen(sar_get_header['mem'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
output = p.communicate()[0].strip()
output = output.split("\n")[header_row]
output = re.sub(' +', ' ', output)
output = output.split(" ")
del output[:2]
mem_plt_header = ",".join(output)
# getting nfs.plt header
p = subprocess.Popen(sar_get_header['nfs'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
output = p.communicate()[0].strip()
output = output.split("\n")[header_row]
output = re.sub(' +', ' ', output)
output = output.split(" ")
del output[:2]
nfs_plt_header = ",".join(output)
# getting network_io.plt header
p = subprocess.Popen(sar_get_header['network_io'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
output = p.communicate()[0].strip()
header = output.split("\n")[header_row]
header = re.sub(' +', ' ', header)
header = header.split(" ")
del header[:3]
net_io_plt_header = ",".join(header)
# starting SAR gather
p = subprocess.Popen(sar_cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
# Flags for marking the output type based on header in output sequence
print_cpu_plt = 0
print_mem_plt = 0
print_task_plt = 0
print_net_io_plt = 0
print_nfs_plt = 0
while True:
output = p.stdout.readline()
if output == '' and p.poll() is not None:
break
if output:
output = output.strip()
output = re.sub(' +', ' ', output)
output = output.replace(' ', ',')
if cpu_plt_header in output:
                # Set the CPU usage output flag so subsequent lines are printed to cpu.plt. This is also the start of
                # a new output sequence, hence dump the current timestamp value
print_cpu_plt = 1
p3 = subprocess.Popen(date_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
timestamp = p3.communicate()[0].strip()
continue
elif task_plt_header in output:
                # Set the task and context switch output flag; print subsequent lines to task.plt
print_task_plt = 1
continue
elif nfs_plt_header in output:
                # Set the NFS activity output flag; print subsequent lines to nfs.plt
print_nfs_plt = 1
continue
elif mem_plt_header in output:
                # Set the memory utilization output flag; print subsequent lines to mem.plt
print_mem_plt = 1
continue
elif net_io_plt_header in output:
                # Set the network IO activity output flag; print subsequent lines to a separate file for each IO device
print_net_io_plt = 1
continue
elif output == "":
                # Set all flags to zero when a blank line occurs; this marks the end of the previously set flag
print_cpu_plt = 0
print_mem_plt = 0
print_task_plt = 0
print_net_io_plt = 0
print_nfs_plt = 0
continue
            # Dump the list of tests running on the agent into running_queue
action.action_lock.acquire()
running_queue = action.running_tests
action.action_lock.release()
# Print appropriate plt files based on output flags
if print_cpu_plt:
output = output.split(",")
del output[:3]
for testid, test in running_queue.iteritems():
if test.status == "RUNNING":
cpu_plt_file = test.statsdir + "cpu.plt"
if os.path.isfile(cpu_plt_file):
sys_logger.debug("Generating cpu.plt for test : " + str(testid))
with open(cpu_plt_file, 'a') as fh:
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
else:
sys_logger.debug("Starting cpu.plt for test : " + str(testid))
with open(cpu_plt_file, 'w') as fh:
header = "Time," + cpu_plt_header
fh.write(header + "\n")
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
if print_task_plt:
output = output.split(",")
del output[:2]
for testid, test in running_queue.iteritems():
if test.status == "RUNNING":
task_plt_file = test.statsdir + "task.plt"
if os.path.isfile(task_plt_file):
sys_logger.debug("Generating task.plt for test : " + str(testid))
with open(task_plt_file, 'a') as fh:
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
else:
sys_logger.debug("Starting task.plt for test : " + str(testid))
with open(task_plt_file, 'w') as fh:
header = "Time," + task_plt_header
fh.write(header + "\n")
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
if print_mem_plt:
output = output.split(",")
del output[:2]
for testid, test in running_queue.iteritems():
if test.status == "RUNNING":
mem_plt_file = test.statsdir + "mem.plt"
if os.path.isfile(mem_plt_file):
sys_logger.debug("Generating mem.plt for test : " + str(testid))
with open(mem_plt_file, 'a') as fh:
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
else:
sys_logger.debug("Starting mem.plt for test : " + str(testid))
with open(mem_plt_file, 'w') as fh:
header = "Time," + mem_plt_header
fh.write(header + "\n")
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
if print_nfs_plt:
output = output.split(",")
del output[:2]
for testid, test in running_queue.iteritems():
if test.status == "RUNNING":
nfs_plt_file = test.statsdir + "nfs.plt"
if os.path.isfile(nfs_plt_file):
sys_logger.debug("Generating nfs.plt for test : " + str(testid))
with open(nfs_plt_file, 'a') as fh:
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
else:
sys_logger.debug("Starting nfs.plt for test : " + str(testid))
with open(nfs_plt_file, 'w') as fh:
header = "Time," + nfs_plt_header
fh.write(header + "\n")
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
if print_net_io_plt:
output = output.split(",")
del output[:2]
device = output[0]
del output[:1]
for testid, test in running_queue.iteritems():
if test.status == "RUNNING":
net_io_plt_file_name = device + network_io_file_ext
net_io_plt_file = test.statsdir + net_io_plt_file_name
if os.path.isfile(net_io_plt_file):
sys_logger.debug("Generating " + net_io_plt_file_name + " for test : " + str(testid))
with open(net_io_plt_file, 'a') as fh:
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
else:
sys_logger.debug("Starting " + net_io_plt_file_name + " for test : " + str(testid))
with open(net_io_plt_file, 'w') as fh:
header = "Time," + net_io_plt_header
fh.write(header + "\n")
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
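    # Illustrative cpu.plt contents written by the branch above (the values are
    # hypothetical; the header is taken from `sar` output with the leading
    # columns stripped):
    #
    #   Time,%user,%nice,%system,%iowait,%steal,%idle
    #   2017-01-01 12:00:00,3.02,0.00,0.74,0.01,0.00,96.23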
def docker_stat_gather(self):
# Checking docker version
try:
p1 = subprocess.Popen(docker_version, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
version = p1.communicate()[0].strip()
version = re.findall("\d+\.\d+", version)[0]
version = float(version)
if version < 10.0:
# Docker version less than 10 is not supported
sys_logger.error("Docker version less than 10, not supported !! ")
sys_logger.error("Aborting docker stat gather thread !! ")
quit()
except Exception:
# Docker is not installed, abort this thread
sys_logger.error("Docker not installed !! ")
sys_logger.error("Aborting docker stat gather thread !! ")
quit()
# Starting docker stats
        # Spawn a separate thread for collecting docker stats, since collection takes some time
while True:
thread = common.FuncThread(collect_docker_stats, True)
thread.start()
time.sleep(float(system_metrics_interval))
def collect_docker_stats(self):
p1 = subprocess.Popen(docker_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
(output, err) = p1.communicate()
action.action_lock.acquire()
running_queue = action.running_tests
action.action_lock.release()
if err:
sys_logger.error("Not able to collect docker stats")
sys_logger.error(str(err.strip()))
quit()
output = output.strip()
output = output.split("\n")
for testid, test in running_queue.iteritems():
if test.status == "RUNNING":
docker_stat_file = test.statsdir + "docker_stat.txt"
if os.path.isfile(docker_stat_file):
sys_logger.debug("Generating docker_stat.txt for test : " + str(testid))
with open(docker_stat_file, 'a') as fh:
for line in output:
if line.startswith("NAME"):
continue
line = line.strip()
# line = re.sub(' +', ' ', line)
# line = line.replace(' ', ',')
fh.write(line + "\n")
fh.write("\n")
else:
sys_logger.debug("Starting docker_stat.txt for test : " + str(testid))
with open(docker_stat_file, 'w') as fh:
fh.write(docker_stat_header + "\n")
for line in output:
if line.startswith("NAME"):
continue
line = line.strip()
# line = re.sub(' +', ' ', line)
# line = line.replace(' ', ',')
fh.write(line + "\n")
fh.write("\n")
def strace_gather(self, testid, strace_config):
"""
STRACE profiler collector based on configuration provided in strace_config for a given testid
"""
delay = float(strace_config['delay'])
duration = strace_config['duration']
process = strace_config['process']
sys_logger.debug("Starting STRACE for Test " + str(testid) + " in " + str(delay) + " secs")
# Start STRACE collection after delay time provided by user
time.sleep(delay)
test = action.get_test(testid)
strace_output_file = test.statsdir + "strace_output.txt"
        # PID selection is based on the process name provided by the user; if there are multiple PIDs for the same
        # process, the most active one in terms of CPU usage is chosen
sys_logger.debug("Setting up STRACE for process : " + process)
grep1 = ["grep", process]
p1 = subprocess.Popen(get_pid, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p2 = subprocess.Popen(grep1, stdin=p1.stdout, stdout=subprocess.PIPE)
p3 = subprocess.Popen(grep2, stdin=p2.stdout, stdout=subprocess.PIPE)
p4 = subprocess.Popen(awk, stdin=p3.stdout, stdout=subprocess.PIPE)
pid = p4.communicate()[0].strip()
if not pid:
msg = "No active PID found for given process : " + process
sys_logger.debug(msg)
if test.status == "RUNNING":
with open(strace_output_file, 'w') as fh:
fh.write(msg + "\n")
else:
sys_logger.debug("PID selected for process " + process + " : " + pid)
strace_cmd = ["timeout", duration, "strace", "-p", pid, "-c", "-S", "time", "-o", strace_output_file]
sys_logger.debug("Executing Strace for test " + str(testid))
sys_logger.debug("Strace command : " + str(strace_cmd))
p5 = subprocess.Popen(strace_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p5.wait()
sys_logger.debug("Appending PID information in output file")
perl_cmd = ['perl', '-pi', '-e',
'print "Strace Process : ' + process + ' | PID : ' + pid + ' \\n\\n" if $. == 1',
strace_output_file]
subprocess.Popen(perl_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
sys_logger.debug("Strace complete for test : " + str(testid))
def perf_gather(self, testid, perf_config):
"""
PERF profiler collector based on configuration provided in perf_config for a given testid
"""
delay = float(perf_config['delay'])
duration = perf_config['duration']
sys_logger.debug("Starting PERF for Test " + str(testid) + " in " + str(delay) + " secs")
time.sleep(delay)
test = action.get_test(testid)
perf_output_file = test.statsdir + "perf_output.txt"
# Starting system wide perf data collection
perf_system_wide_cmd = ['perf', 'stat', '-e',
'cycles,instructions,LLC-load-misses,LLC-prefetch-misses,LLC-store-misses', '-a', '-o',
perf_output_file, "sleep", duration]
if test.status == "RUNNING":
sys_logger.debug("Executing system-wide PERF")
sys_logger.debug("PERF command : " + str(perf_system_wide_cmd))
p = subprocess.Popen(perf_system_wide_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
sys_logger.debug("Finished system-wide PERF")
error = p.communicate()[1].strip()
if error:
sys_logger.debug(error)
with open(perf_output_file, 'w') as fh:
fh.write(error + "\n")
return
# Configure perf for process level data collection, if process name is provided
if "process" in perf_config:
process = perf_config['process']
sys_logger.debug("Setting up PERF for process : " + process)
grep1 = ["grep", process]
p1 = subprocess.Popen(get_pid, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p2 = subprocess.Popen(grep1, stdin=p1.stdout, stdout=subprocess.PIPE)
p3 = subprocess.Popen(grep2, stdin=p2.stdout, stdout=subprocess.PIPE)
p4 = subprocess.Popen(awk, stdin=p3.stdout, stdout=subprocess.PIPE)
pid = p4.communicate()[0].strip()
if not pid:
msg = "No active PID found for given process : " + process
sys_logger.debug(msg)
if os.path.isfile(perf_output_file):
with open(perf_output_file, 'a') as fh:
fh.write(msg + "\n")
else:
msg = "PID selected for process " + process + " : " + pid
sys_logger.debug(msg)
perf_process_cmd = ['perf', 'stat', '-e', 'cycles:u,instructions:u', '-a', '-p', pid, '-o',
perf_output_file, '--append', 'sleep', duration]
sys_logger.debug("Executing PERF for process " + process)
sys_logger.debug("PERF command : " + str(perf_process_cmd))
p5 = subprocess.Popen(perf_process_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p5.wait()
error = p5.communicate()[1].strip()
if error:
sys_logger.debug(error)
sys_logger.debug("Finished PERF on process")
sys_logger.debug("PERF complete for test : " + str(testid))
def init_sar_iostat_top():
"""
    The agent process invokes this method on startup. It spawns 4 threads for system metrics collection. The
    threads are:
1. top_gather - For TOP output collection
2. iostat_gather - For iostat output collection
3. sar_gather - For SAR data collection
4. docker_stat_gather - For docker stat of all active containers
"""
global sys_logger
logger_file = os.getcwd() + "/system_metrics_gather_debug.out"
sys_logger = loggersetup(logger_file)
sys_logger.debug("Starting system metrics gather threads")
sys_logger.debug("Starting top gather")
t1 = common.FuncThread(top_gather, True)
t1.start()
sys_logger.debug("Starting iostat gather")
t2 = common.FuncThread(iostat_gather, True)
t2.start()
sys_logger.debug("Starting SAR gather")
t3 = common.FuncThread(sar_gather, True)
t3.start()
sys_logger.debug("Starting docker stat gather")
t4 = common.FuncThread(docker_stat_gather, True)
t4.start()
def perf_strace_gather(testid, perf_config=None, strace_config=None):
"""
    The agent invokes this procedure on test startup to configure the profilers described in the test details
"""
sys_logger.debug("Starting Profilers setup for test ID : " + str(testid))
sys_logger.debug("Perf configuration details")
if "process" in perf_config:
sys_logger.debug(
"Delay - " + perf_config['delay'] + " Duration - " + perf_config['duration'] + " Process - " + perf_config[
'process'])
else:
sys_logger.debug("Delay - " + perf_config['delay'] + " Duration - " + perf_config['duration'])
t1 = common.FuncThread(perf_gather, True, testid, perf_config)
t1.start()
if strace_config is not None:
sys_logger.debug("Strace configuration details")
sys_logger.debug(
"Delay - " + strace_config['delay'] + " Duration - " + strace_config['duration'] + " Process - " +
strace_config['process'])
t2 = common.FuncThread(strace_gather, True, testid, strace_config)
t2.start()
else:
sys_logger.debug("Strace not configured ")
| apache-2.0 | 4,758,093,547,575,734,000 | 44.575342 | 120 | 0.526734 | false | 4.154711 | true | false | false |
exord/activist | tools.py | 1 | 2677 | import numpy as np
import pylab as plt
def integrate(x, y, weight, xmin, xmax, vary=None):
"""
    Return the integrated flux of array y (already integrated in
    each pixel) with respect to x, with variance vary and
    weight function weight, between the limits xmin and xmax.
"""
# Keep only elements of x, y, and weight within the interval,
# include fractional pixels
deltax = np.diff(x)
    # Drop the first element of x for compatibility with deltax
x = x[1:]
cond = (x > xmin - deltax/2.0) * (x <= xmax + deltax/2.0)
# Compute fraction of pixel within interval
fraction_left = 0.5 - (xmin - x[cond])/deltax[cond]
fraction_right = 0.5 - (x[cond] - xmax)/deltax[cond]
fraction_left = np.where(fraction_left > 1, 1.0, fraction_left)
fraction_right = np.where(fraction_right > 1, 1.0, fraction_right)
fraction = np.minimum(fraction_left, fraction_right)
# Sum contributions of pixels inside interval, considering fractions
# when necessary
summed_y = np.sum(y[1:][cond] * weight[1:][cond] * fraction)
summed_weight = np.sum(weight[1:][cond] * fraction)
integral = np.divide(summed_y, summed_weight)
if vary is not None:
# Also compute error
summed_var = np.sum(vary[1:][cond] * (weight[1:][cond] * fraction)**2)
var_integral = np.divide(summed_var, summed_weight**2)
else:
var_integral = None
return integral, var_integral
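# Minimal usage sketch (synthetic arrays, not from the original module):
#
#   x = np.linspace(4000.0, 4100.0, 1001)          # wavelength grid
#   y = np.ones_like(x)                            # per-pixel integrated flux
#   w = np.ones_like(x)                            # uniform weights
#   flux, var = integrate(x, y, w, 4010.0, 4020.0,
#                         vary=0.01 * np.ones_like(x))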
def plot_windows(ww, y, windows):
ax = plt.subplots(1, 1)[1]
for win in windows:
indices = np.where(np.logical_and(ww > windows[win]['wwmin'],
ww < windows[win]['wwmax']))
for order in np.unique(indices[0]):
ax.plot(ww[order], y[order], 'k')
for ind in enumerate(indices):
ax.plot(ww[ind], y[ind], 'r', lw=2)
return
def load_rdbfile(rdb_file, sepchar='\t'):
"""
Loads data from an rdb file.
"""
# read file
f = open(rdb_file, 'r')
lines = f.readlines()
f.close()
header = lines[0].split()
#
fmt_all = dict((header[i], i) for i in range(len(header)))
data = {}
for line in lines[2:]:
if line.startswith('#'):
continue
for fmt in fmt_all.keys():
elem = line.split(sepchar)[fmt_all[fmt]]
try:
elem = float(elem)
except ValueError:
pass
if fmt in data:
data[fmt].append(elem)
else:
data[fmt] = [elem, ]
for dd in data:
data[dd] = np.array(data[dd])
return data
__author__ = 'Rodrigo F. Diaz'
| mit | 6,237,362,165,617,631,000 | 25.77 | 78 | 0.570041 | false | 3.472114 | false | false | false |
PoornimaNayak/autotest-client-tests | linux-tools/binutils/binutils.py | 1 | 1398 | #!/bin/python
import os, subprocess
import logging
from autotest.client import test
from autotest.client.shared import error
class binutils(test.test):
"""
Autotest module for testing basic functionality
of binutils
@author Xu Zheng [email protected]
"""
version = 1
nfail = 0
path = ''
def initialize(self, test_path=''):
"""
Sets the overall failure counter for the test.
"""
self.nfail = 0
ret_val = subprocess.Popen(['make', 'all'], cwd="%s/binutils" %(test_path))
ret_val.communicate()
if ret_val.returncode != 0:
self.nfail += 1
        logging.info('\n Test initialized successfully')
def run_once(self, test_path=''):
"""
Trigger test run
"""
try:
os.environ["LTPBIN"] = "%s/shared" %(test_path)
ret_val = subprocess.Popen(['./gprof.sh'], cwd="%s/binutils" %(test_path))
ret_val.communicate()
if ret_val.returncode != 0:
self.nfail += 1
except error.CmdError, e:
self.nfail += 1
logging.error("Test Failed: %s", e)
def postprocess(self):
if self.nfail != 0:
logging.info('\n nfails is non-zero')
raise error.TestError('\nTest failed')
else:
logging.info('\n Test completed successfully ')
| gpl-2.0 | 1,499,941,769,291,629,800 | 25.884615 | 86 | 0.552217 | false | 3.915966 | true | false | false |
abhishekkr/nix-bootstrapper | commands/__init__.py | 1 | 2456 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2011 Openstack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import logging
from common import agent_logger
cmd_list = {}
def command_add(cmd_name, data_keys=[]):
def wrap(_func):
agent_logger.log(logging.error, str(data_keys))
if cmd_name in cmd_list.keys():
agent_logger.log(logging.error,
"%s already exists in feature list. Duplicacy." %
cmd_name)
else:
_data_keys = data_keys
if isinstance(_data_keys, str):
_data_keys = _data_keys.split()
with open("/tmp/pylog", "a") as fyl:
fyl.write(repr(_data_keys))
cmd_list[cmd_name] = {"func": _func,
"data_keys": _data_keys}
agent_logger.log(logging.info,
"%s added to feature list with keys: %s." %
(cmd_name, _data_keys))
return _func
return wrap
def _dict_value_or_none(dictionary, key):
if isinstance(dictionary, dict):
        if key in dictionary:
return dictionary[key]
return None
def run(cmd, get_value_for):
try:
data_keys = cmd_list[cmd]["data_keys"]
data_values = {}
for _data_key in data_keys:
data_values[_data_key] = get_value_for(_data_key)
agent_logger.log(logging.info, ">>>> %s" % repr(data_keys))
agent_logger.log(logging.info, ">>>> %s" % repr(data_values))
config_result = cmd_list[cmd]["func"](data_values)
agent_logger.log(logging.info, "Running '%s'" % cmd,
data=data_values, result=config_result)
return config_result
except Exception, e:
agent_logger.log(logging.error, e.message)
return None
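# Hedged usage sketch; the command name, keys and callback below are invented
# for illustration, but real features register themselves the same way:
#
#   @command_add("configure-network", data_keys=["interface", "ip_address"])
#   def configure_network(data_values):
#       return "configured %s" % data_values["interface"]
#
#   values = {"interface": "eth0", "ip_address": "10.0.0.2"}
#   run("configure-network", lambda key: values[key])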
| apache-2.0 | 7,277,540,905,514,252,000 | 31.315789 | 79 | 0.578176 | false | 3.807752 | false | false | false |
Holzhaus/msm | msmgui/assistants/invoicing.py | 1 | 12358 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This file is part of MSM.
MSM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
MSM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with MSM. If not, see <http://www.gnu.org/licenses/>.
"""
import logging
logger = logging.getLogger( __name__ )
import datetime
import threading
import dateutil.parser
from gi.repository import Gtk, GObject, GLib
from core import paths
import core.database
from core.errors import InvoiceError
import msmgui.widgets.invoicetable
from msmgui.widgets.base import ScopedDatabaseObject
class InvoicingAssistant( GObject.GObject, ScopedDatabaseObject ):
__gsignals__ = { 'saved': ( GObject.SIGNAL_RUN_FIRST, None, ( int, ) ) }
def __init__( self ):
ScopedDatabaseObject.__init__( self )
GObject.GObject.__init__( self )
# Build GUI
self.builder = Gtk.Builder()
self.builder.add_from_file( paths.data("ui","assistants","invoicing.glade" ))
self._assistant = self.builder.get_object( "content" )
self._assistant.set_modal( True )
self._invoicetable = msmgui.widgets.invoicetable.InvoiceTable()
self._invoicetable.active_only = False
self.builder.get_object( "invoicetablebox" ).add( self._invoicetable )
self._assistant.set_forward_page_func( self.page_forward_func )
# Connect Signals
self.builder.connect_signals( self )
def set_parent( self, parent ):
self._assistant.set_transient_for( parent )
def show( self ):
invoice_date = datetime.date.today()
maturity_date = invoice_date + datetime.timedelta( days=14 )
self.builder.get_object( "invoice_date_entry" ).set_text( invoice_date.strftime("%x") )
self.builder.get_object( "invoice_maturitydate_entry" ).set_text( maturity_date.strftime("%x") )
self.builder.get_object( "invoice_accountingenddate_entry" ).set_text( invoice_date.strftime("%x") )
self.builder.get_object( "content" ).show_all()
class Page:
""" Page Enum """
Intro, Select, Details, Generate, Confirm, Save, Summary = range( 7 )
def page_forward_func( self, page ):
"""
        Function called when the forward button is pressed.
Arguments:
page:
integer index of the current page
returns:
integer index of the next page to display
"""
if page == InvoicingAssistant.Page.Intro and self.builder.get_object( "contracts_all_radiobutton" ).get_active():
return InvoicingAssistant.Page.Details
elif page == InvoicingAssistant.Page.Generate and len( self._invoicetable.get_contents() ) == 0:
return InvoicingAssistant.Page.Summary
else:
return page + 1
"""
Page prepare funcs
"""
def page_generate_prepare_func( self, assistant, page ):
class ThreadObject( GObject.GObject, threading.Thread ):
__gsignals__ = {
'start': ( GObject.SIGNAL_RUN_FIRST, None, () ),
'stop': ( GObject.SIGNAL_RUN_FIRST, None, ( int, int ) )
}
def __init__( self, contracts, invoice_options, gui_objects ):
GObject.GObject.__init__( self )
threading.Thread.__init__( self )
self.contracts = contracts
self.invoice_options = invoice_options
self.gui_objects = gui_objects
self.invoices = []
def run( self ):
GLib.idle_add( self._gui_start )
local_session = core.database.Database.get_scoped_session()
i = 1
num_contracts = len( self.contracts )
for unmerged_contract in self.contracts:
contract = local_session.merge( unmerged_contract ) # add them to the local session
try:
invoice = contract.add_invoice( **self.invoice_options )
except InvoiceError as err:
logger.critical( "Error adding invoice: %r", err )
invoice = None
if invoice is not None:
self.invoices.append( invoice )
i += 1
GLib.idle_add( self._gui_update, i, num_contracts )
local_session.expunge_all() # expunge everything afterwards
local_session.remove()
GLib.idle_add( self._gui_stop, len( self.invoices ), num_contracts )
def _gui_start( self ):
invoicingassistant, spinner, label, assistant, page, invoicetable = self.gui_objects
label.set_text( "Generiere Rechnungen..." )
spinner.start()
def _gui_update( self, contract_current, contract_total ):
invoicingassistant, spinner, label, assistant, page, invoicetable = self.gui_objects
label.set_text( "Generiere Rechnungen... (Vertrag {}/{})".format( contract_current, contract_total ) )
def _gui_stop( self, num_invoices, num_contracts ):
invoicingassistant, spinner, label, assistant, page, invoicetable = self.gui_objects
merged_contracts = []
for unmerged_contract in self.contracts:
contract = invoicingassistant.session.merge( unmerged_contract ) # Readd the object to the main thread session
merged_contracts.append( contract )
self.contracts = merged_contracts
invoicetable.clear()
def gen( invoicetable, invoices, session, step=10 ):
treeview = invoicetable.builder.get_object( "invoices_treeview" )
model = invoicetable.builder.get_object( "invoices_liststore" )
treeview.freeze_child_notify()
sort_settings = model.get_sort_column_id()
model.set_default_sort_func( lambda *unused: 0 )
model.set_sort_column_id( -1, Gtk.SortType.ASCENDING )
i = 0
for unmerged_invoice in invoices:
invoice = session.merge( unmerged_invoice )
invoicetable.add_invoice( invoice )
i += 1
# change something
if i % step == 0:
# freeze/thaw not really necessary here as sorting is wrong because of the
# default sort function
yield True
if sort_settings != ( None, None ):
model.set_sort_column_id( *sort_settings )
treeview.thaw_child_notify()
yield False
g = gen( invoicetable, self.invoices, invoicingassistant.session )
if next( g ): # run once now, remaining iterations when idle
GLib.idle_add( next, g )
label.set_text( "Fertig! {} Rechnungen aus {} Verträgen generiert.".format( num_invoices, num_contracts ) )
spinner.stop()
assistant.set_page_complete( page, True )
def parse_date( text ):
new_date = None
if text:
try:
new_date = dateutil.parser.parse( text, dayfirst=True )
except Exception as error:
logger.warning( 'Invalid date entered: %s (%r)', text, error )
else:
return new_date.date()
assistant.set_page_complete( page, False )
spinner = self.builder.get_object( "generate_spinner" )
label = self.builder.get_object( "generate_label" )
gui_objects = ( self, spinner, label, assistant, page, self._invoicetable )
self._session.close()
contracts = core.database.Contract.get_all( session=self.session ) # We expunge everything, use it inside the thread and readd it later
self._session.expunge_all()
invoice_date = parse_date( self.builder.get_object( "invoice_date_entry" ).get_text().strip() )
if not invoice_date:
invoice_date = datetime.date.today()
maturity_date = parse_date( self.builder.get_object( "invoice_maturitydate_entry" ).get_text().strip() )
if not maturity_date:
maturity_date = invoice_date + datetime.timedelta( days=14 )
accounting_enddate = parse_date( self.builder.get_object( "invoice_accountingenddate_entry" ).get_text().strip() )
if not accounting_enddate:
accounting_enddate = invoice_date
self.invoice_generator_threadobj = ThreadObject( contracts, {"date":invoice_date, "maturity_date":maturity_date, "accounting_enddate": accounting_enddate}, gui_objects )
self.invoice_generator_threadobj.start()
def page_save_prepare_func( self, assistant, page ):
class ThreadObject( GObject.GObject, threading.Thread ):
__gsignals__ = {
'start': ( GObject.SIGNAL_RUN_FIRST, None, () ),
'stop': ( GObject.SIGNAL_RUN_FIRST, None, ( int, int ) )
}
def __init__( self, invoices, gui_objects ):
GObject.GObject.__init__( self )
threading.Thread.__init__( self )
self.gui_objects = gui_objects
self.invoices = invoices
def run( self ):
GLib.idle_add( lambda: self._gui_start() )
local_session = core.database.Database.get_scoped_session()
for invoice in self.invoices:
local_session.add( invoice ) # add them to the local session
local_session.commit()
local_session.remove() # expunge everything afterwards
GLib.idle_add( lambda: self._gui_stop( len( self.invoices ) ) )
def _gui_start( self ):
spinner, label, assistant, page, window = self.gui_objects
label.set_text( "Speichere Rechnungen..." )
spinner.start()
def _gui_stop( self, num_invoices ):
spinner, label, assistant, page, window = self.gui_objects
assistant.commit()
label.set_text( "Fertig! {} Rechnungen gespeichert.".format( num_invoices ) )
spinner.stop()
assistant.set_page_complete( page, True )
window.emit( "saved", num_invoices )
assistant.set_page_complete( page, False )
spinner = self.builder.get_object( "save_spinner" )
label = self.builder.get_object( "save_label" )
gui_objects = ( spinner, label, assistant, page, self )
invoices = self.invoice_generator_threadobj.invoices
self._session.expunge_all()
self._session.close()
threadobj = ThreadObject( invoices, gui_objects )
threadobj.start()
"""
Callbacks
"""
def hide_cb( self, assistant ):
self._session.rollback()
self._session.close()
def close_cb( self, assistant ):
assistant.hide()
def cancel_cb( self, assistant ):
assistant.hide()
def apply_cb( self, assistant ):
pass
def prepare_cb( self, assistant, page ):
if page == assistant.get_nth_page( InvoicingAssistant.Page.Intro ):
assistant.set_page_complete( page, True )
elif page == assistant.get_nth_page( InvoicingAssistant.Page.Details ):
assistant.set_page_complete( page, True )
elif page == assistant.get_nth_page( InvoicingAssistant.Page.Generate ):
self.page_generate_prepare_func( assistant, page )
elif page == assistant.get_nth_page( InvoicingAssistant.Page.Save ):
self.page_save_prepare_func( assistant, page )
| gpl-3.0 | 7,313,680,218,967,222,000 | 50.920168 | 177 | 0.582423 | false | 4.200204 | false | false | false |
mogria/rtsh | srv/tickSystem.py | 1 | 1024 | import glob
import time
from printWithFlush import p
from model.storage import Storage
from model.builder import Builder
from commandQueueProcessor import CommandQueueProcessor
TICK_INTERVAL_SEC = 1
class TickSystem(object):
def __init__(self, players):
self._players = players
def process_user_commands(self):
for player_name in self._players:
cqp = CommandQueueProcessor(player_name)
cqp.processCommands()
def get_unit_files(self):
return glob.glob("/world/**/unit-*.json", recursive=True)
def units_tick(self):
unit_files = self.get_unit_files()
for f in unit_files:
with Storage.from_file(f) as u:
u.move()
if isinstance(u, Builder):
u.build()
def start(self):
tick = 0
while True:
time.sleep(TICK_INTERVAL_SEC)
p("tick ", tick)
tick += 1
self.process_user_commands()
self.units_tick()
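# Example startup (player names are placeholders): processes queued player
# commands and advances every unit once per TICK_INTERVAL_SEC.
#
#   TickSystem(["alice", "bob"]).start()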
| gpl-2.0 | -7,344,670,052,374,880,000 | 23.97561 | 65 | 0.586914 | false | 4.079681 | false | false | false |
thunderhoser/GewitterGefahr | gewittergefahr/gg_io/netcdf_io.py | 1 | 2048 | """IO methods for NetCDF files."""
import os
import gzip
import shutil
import tempfile
from netCDF4 import Dataset
from gewittergefahr.gg_utils import error_checking
GZIP_FILE_EXTENSION = '.gz'
def open_netcdf(netcdf_file_name, raise_error_if_fails=False):
"""Attempts to open NetCDF file.
Code for handling gzip files comes from jochen at the following
StackOverflow page: https://stackoverflow.com/posts/45356133/revisions
:param netcdf_file_name: Path to input file.
:param raise_error_if_fails: Boolean flag. If raise_error_if_fails = True
and file cannot be opened, this method will throw an error.
:return: netcdf_dataset: Instance of `NetCDF4.Dataset`, containing all data
from the file. If raise_error_if_fails = False and file could not be
opened, this will be None.
:raises: IOError: if file could not be opened and raise_error_if_fails =
True.
"""
error_checking.assert_file_exists(netcdf_file_name)
error_checking.assert_is_boolean(raise_error_if_fails)
gzip_as_input = netcdf_file_name.endswith(GZIP_FILE_EXTENSION)
if gzip_as_input:
gzip_file_object = gzip.open(netcdf_file_name, 'rb')
netcdf_temporary_file_object = tempfile.NamedTemporaryFile(delete=False)
netcdf_file_name = netcdf_temporary_file_object.name
success = False
try:
shutil.copyfileobj(gzip_file_object, netcdf_temporary_file_object)
success = True
except:
if raise_error_if_fails:
raise
gzip_file_object.close()
netcdf_temporary_file_object.close()
if not success:
os.remove(netcdf_file_name)
return None
try:
netcdf_dataset = Dataset(netcdf_file_name)
except IOError:
if raise_error_if_fails:
if gzip_as_input:
os.remove(netcdf_file_name)
raise
netcdf_dataset = None
if gzip_as_input:
os.remove(netcdf_file_name)
return netcdf_dataset
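# Example usage (the path is illustrative); gzipped files are transparently
# unpacked to a temporary copy before opening:
#
#   dataset = open_netcdf('/data/example.nc.gz', raise_error_if_fails=True)
#   if dataset is not None:
#       print(list(dataset.variables.keys()))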
| mit | -2,323,290,946,649,447,400 | 31 | 80 | 0.657715 | false | 3.703436 | false | false | false |
ken-muturi/pombola | pombola/hansard/migrations/0010_auto__add_field_source_list_page.py | 3 | 8132 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Source.list_page'
db.add_column(u'hansard_source', 'list_page',
self.gf('django.db.models.fields.CharField')(max_length=50, null=True, blank=True),
keep_default=False)
# Set the list_page property for those sources where we can
        # make a reasonable guess at which list page it came from:
for source in orm.Source.objects.all():
if 'plone/national-assembly' in source.url:
source.list_page = 'national-assembly'
source.save()
elif 'plone/senate' in source.url:
source.list_page = 'senate'
source.save()
# For any sources where this didn't help, and there is an
# associated sitting and venue, use that venue to set
# list_page:
venue_slug_to_list_page = {
'national_assembly': 'national-assembly',
'senate': 'senate',
}
for venue in orm.Venue.objects.all():
for source in orm.Source.objects.filter(sitting__venue=venue, list_page__isnull=True):
source.list_page = venue_slug_to_list_page[venue.slug]
source.save()
def backwards(self, orm):
# Deleting field 'Source.list_page'
db.delete_column(u'hansard_source', 'list_page')
models = {
u'core.person': {
'Meta': {'ordering': "['sort_name']", 'object_name': 'Person'},
'_biography_rendered': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'_summary_rendered': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'additional_name': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'biography': ('markitup.fields.MarkupField', [], {'default': "''", 'no_rendered_field': 'True', 'blank': 'True'}),
'can_be_featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_of_birth': ('django_date_extensions.fields.ApproximateDateField', [], {'max_length': '10', 'blank': 'True'}),
'date_of_death': ('django_date_extensions.fields.ApproximateDateField', [], {'max_length': '10', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'family_name': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'given_name': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'honorific_prefix': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'honorific_suffix': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'legal_name': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'national_identity': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '200'}),
'sort_name': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'summary': ('markitup.fields.MarkupField', [], {'default': "''", 'no_rendered_field': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'hansard.alias': {
'Meta': {'ordering': "['alias']", 'object_name': 'Alias'},
'alias': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignored': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Person']", 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'})
},
'hansard.entry': {
'Meta': {'ordering': "['sitting', 'text_counter']", 'object_name': 'Entry'},
'content': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page_number': ('django.db.models.fields.IntegerField', [], {'blank': 'True'}),
'sitting': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['hansard.Sitting']"}),
'speaker': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'hansard_entries'", 'null': 'True', 'to': u"orm['core.Person']"}),
'speaker_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'speaker_title': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'text_counter': ('django.db.models.fields.IntegerField', [], {}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
'hansard.sitting': {
'Meta': {'ordering': "['-start_date']", 'object_name': 'Sitting'},
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'end_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['hansard.Source']"}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'start_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
'venue': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['hansard.Venue']"})
},
'hansard.source': {
'Meta': {'ordering': "['-date', 'name']", 'object_name': 'Source'},
'date': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_processing_attempt': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_processing_success': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'list_page': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '1000'})
},
'hansard.venue': {
'Meta': {'ordering': "['slug']", 'object_name': 'Venue'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
}
}
complete_apps = ['hansard']
| agpl-3.0 | -8,070,837,183,725,730,000 | 68.504274 | 175 | 0.548942 | false | 3.59823 | false | false | false |
cloudify-cosmo/cloudify-nsx-plugin | tests/platformtests/test_security.py | 1 | 15821 | ########
# Copyright (c) 2017 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
#    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#    See the License for the specific language governing permissions and
#    limitations under the License.
# Stdlib imports
import os
# Third party imports
import unittest
import mock
import pytest
# Cloudify imports
from cloudify.workflows import local
from cloudify import mocks as cfy_mocks
from cloudify.state import current_ctx
from cloudify_cli import constants as cli_constants
import cloudify_nsx.library.nsx_common as common
import cloudify_nsx.library.nsx_security_tag as nsx_security_tag
import cloudify_nsx.library.nsx_security_group as nsx_security_group
import cloudify_nsx.library.nsx_security_policy as nsx_security_policy
class SecurityTest(unittest.TestCase):
def setUp(self):
super(SecurityTest, self).setUp()
self.local_env = None
self.ext_inputs = {
# prefix for run
'node_name_prefix': os.environ.get('NODE_NAME_PREFIX', ""),
# nsx inputs
'nsx_ip': os.environ.get('NSX_IP'),
'nsx_user': os.environ.get('NSX_USER'),
'nsx_password': os.environ.get('NSX_PASSWORD'),
}
if (
            not self.ext_inputs['nsx_ip'] or
            not self.ext_inputs['nsx_user'] or
not self.ext_inputs['nsx_password']
):
self.skipTest("You dont have credentials for nsx")
blueprints_path = os.path.split(os.path.abspath(__file__))[0]
self.blueprints_path = os.path.join(
blueprints_path,
'resources'
)
self._regen_ctx()
# credentials
self.client_session = common.nsx_login({
'nsx_auth': {
'username': self.ext_inputs['nsx_user'],
'password': self.ext_inputs['nsx_password'],
'host': self.ext_inputs['nsx_ip']
}
})
def _regen_ctx(self):
self.fake_ctx = cfy_mocks.MockCloudifyContext()
instance = mock.Mock()
instance.runtime_properties = {}
self.fake_ctx._instance = instance
node = mock.Mock()
self.fake_ctx._node = node
node.properties = {}
node.runtime_properties = {}
current_ctx.set(self.fake_ctx)
def tearDown(self):
current_ctx.clear()
if self.local_env:
try:
self.local_env.execute(
'uninstall',
task_retries=50,
task_retry_interval=3,
)
except Exception as ex:
print str(ex)
super(SecurityTest, self).tearDown()
@pytest.mark.external
def test_security_tag(self):
"""Platform check: security tag"""
# set blueprint name
blueprint = os.path.join(
self.blueprints_path,
'security_tag.yaml'
)
        # check that the security tag does not already exist
resource_id, _ = nsx_security_tag.get_tag(
self.client_session,
self.ext_inputs['node_name_prefix'] + "secret_tag"
)
self.assertIsNone(resource_id)
# cfy local init
self.local_env = local.init_env(
blueprint,
inputs=self.ext_inputs,
name=self._testMethodName,
ignored_modules=cli_constants.IGNORED_LOCAL_WORKFLOW_MODULES)
# cfy local execute -w install
self.local_env.execute(
'install',
task_retries=50,
task_retry_interval=3,
)
# check security tag properties
resource_id, info = nsx_security_tag.get_tag(
self.client_session,
self.ext_inputs['node_name_prefix'] + "secret_tag"
)
self.assertIsNotNone(resource_id)
self.assertIsNotNone(info)
self.assertEqual(
info['name'], self.ext_inputs['node_name_prefix'] + "secret_tag"
)
self.assertEqual(info['description'], "What can i say?")
# cfy local execute -w uninstall
self.local_env.execute(
'uninstall',
task_retries=50,
task_retry_interval=3,
)
# must be deleted
resource_id, _ = nsx_security_tag.get_tag(
self.client_session,
self.ext_inputs['node_name_prefix'] + "secret_tag"
)
self.assertIsNone(resource_id)
self.local_env = None
@pytest.mark.external
def test_security_tag_vm_bind(self):
"""Platform check: bind security tag to vm"""
inputs = {k: self.ext_inputs[k] for k in self.ext_inputs}
# Define inputs related to this function
inputs.update({
'name_of_tag': str(os.environ.get('NAME_OF_TAG', 'tag_name')),
# vcenter inputs
'vcenter_ip': os.environ.get('VCENTER_IP'),
'vcenter_user': os.environ.get('VCENTER_USER'),
'vcenter_password': os.environ.get('VCENTER_PASSWORD'),
})
# update custom params
if os.environ.get('VCENTER_PORT'):
inputs['vcenter_port'] = str(os.environ.get(
'VCENTER_PORT'
))
# update custom params
if os.environ.get('VCENTER_DATACENTER'):
inputs['vcenter_datacenter'] = os.environ.get(
'VCENTER_DATACENTER'
)
if os.environ.get('VCENTER_RESOURCE_POOL'):
inputs['vcenter_resource_pool'] = os.environ.get(
'VCENTER_RESOURCE_POOL'
)
if os.environ.get('VCENTER_TEMPLATE'):
inputs['template_name'] = os.environ.get('VCENTER_TEMPLATE')
if (
            not inputs['vcenter_ip'] or
            not inputs['vcenter_user'] or
not inputs['vcenter_password']
):
self.skipTest("You dont have credentials for vcenter")
# set blueprint name
blueprint = os.path.join(
self.blueprints_path,
'security_tag_vm.yaml'
)
        # check that the security tag does not already exist
resource_id, _ = nsx_security_tag.get_tag(
self.client_session,
inputs['node_name_prefix'] + inputs['name_of_tag']
)
self.assertIsNone(resource_id)
# cfy local init
self.local_env = local.init_env(
blueprint,
inputs=inputs,
name=self._testMethodName,
ignored_modules=cli_constants.IGNORED_LOCAL_WORKFLOW_MODULES)
# cfy local execute -w install
self.local_env.execute(
'install',
task_retries=4,
task_retry_interval=3,
)
# check security tag properties
resource_id, info = nsx_security_tag.get_tag(
self.client_session,
inputs['node_name_prefix'] + inputs['name_of_tag']
)
self.assertIsNotNone(resource_id)
self.assertIsNotNone(info)
self.assertEqual(
info['name'],
inputs['node_name_prefix'] + inputs['name_of_tag']
)
self.assertEqual(
info['description'],
"Example security tag which will be assigned to example VM"
)
# cfy local execute -w uninstall
self.local_env.execute(
'uninstall',
task_retries=50,
task_retry_interval=3,
)
# must be deleted
resource_id, _ = nsx_security_tag.get_tag(
self.client_session,
inputs['node_name_prefix'] + inputs['name_of_tag']
)
self.assertIsNone(resource_id)
@pytest.mark.external
def test_security_group(self):
"""Platform check: security group"""
inputs = {k: self.ext_inputs[k] for k in self.ext_inputs}
# Define inputs related to this function
inputs['security_group_name'] = os.environ.get(
'SECURITY_GROUP_NAME', "security_group_name"
)
inputs['nested_security_group_name'] = os.environ.get(
'NESTED_SECURITY_GROUP_NAME', "nested_security_group_name"
)
# set blueprint name
blueprint = os.path.join(
self.blueprints_path,
'security_groups.yaml'
)
        # check that the security groups do not already exist
resource_id, _ = nsx_security_group.get_group(
self.client_session,
'globalroot-0',
inputs['node_name_prefix'] + inputs['security_group_name']
)
self.assertIsNone(resource_id)
resource_id, _ = nsx_security_group.get_group(
self.client_session,
'globalroot-0',
inputs['node_name_prefix'] + inputs['nested_security_group_name']
)
self.assertIsNone(resource_id)
# cfy local init
self.local_env = local.init_env(
blueprint,
inputs=inputs,
name=self._testMethodName,
ignored_modules=cli_constants.IGNORED_LOCAL_WORKFLOW_MODULES)
# cfy local execute -w install
self.local_env.execute(
'install',
task_retries=4,
task_retry_interval=3,
)
# check security groups properties
resource_id, main_properties = nsx_security_group.get_group(
self.client_session,
'globalroot-0',
inputs['node_name_prefix'] + inputs['security_group_name']
)
self.assertIsNotNone(resource_id)
nested_resource_id, nested_properties = nsx_security_group.get_group(
self.client_session,
'globalroot-0',
inputs['node_name_prefix'] + inputs['nested_security_group_name']
)
self.assertIsNotNone(nested_resource_id)
self.assertEqual(
main_properties['member']['name'],
inputs['node_name_prefix'] + inputs['nested_security_group_name']
)
self.assertEqual(
main_properties['member']['objectId'],
nested_resource_id
)
self.assertFalse(nested_properties.get('member'))
# cfy local execute -w uninstall
self.local_env.execute(
'uninstall',
task_retries=50,
task_retry_interval=3,
)
# must be deleted
resource_id, _ = nsx_security_group.get_group(
self.client_session,
'globalroot-0',
inputs['node_name_prefix'] + inputs['security_group_name']
)
self.assertIsNone(resource_id)
resource_id, _ = nsx_security_group.get_group(
self.client_session,
'globalroot-0',
inputs['node_name_prefix'] + inputs['nested_security_group_name']
)
self.assertIsNone(resource_id)
@pytest.mark.external
def test_security_policy(self):
"""Platform check: security policy"""
inputs = {k: self.ext_inputs[k] for k in self.ext_inputs}
# Define inputs related to this function
inputs['policy_name'] = os.environ.get(
'POLICY_NAME', 'policy_name'
)
# set blueprint name
blueprint = os.path.join(
self.blueprints_path,
'security_policy.yaml'
)
        # check that the security policy does not already exist
resource_id, policy = nsx_security_policy.get_policy(
self.client_session,
inputs['node_name_prefix'] + inputs['policy_name']
)
self.assertIsNone(resource_id)
# cfy local init
self.local_env = local.init_env(
blueprint,
inputs=inputs,
name=self._testMethodName,
ignored_modules=cli_constants.IGNORED_LOCAL_WORKFLOW_MODULES)
# cfy local execute -w install
self.local_env.execute(
'install',
task_retries=4,
task_retry_interval=3,
)
# check security policy properties
resource_id, policy = nsx_security_policy.get_policy(
self.client_session,
inputs['node_name_prefix'] + inputs['policy_name']
)
self.assertIsNotNone(resource_id)
self.assertIsNotNone(policy)
# cfy local execute -w uninstall
self.local_env.execute(
'uninstall',
task_retries=50,
task_retry_interval=3,
)
# must be deleted
resource_id, policy = nsx_security_policy.get_policy(
self.client_session,
inputs['node_name_prefix'] + inputs['policy_name']
)
self.assertIsNone(resource_id)
@pytest.mark.external
def test_security_policy_bind(self):
"""Platform check: bind security policy to security group"""
inputs = {k: self.ext_inputs[k] for k in self.ext_inputs}
# Define inputs related to this function
inputs['security_group_name'] = os.environ.get(
'SECURITY_GROUP_NAME', "security_group_name"
)
inputs['policy_name'] = os.environ.get(
'POLICY_NAME', 'policy_name'
)
# set blueprint name
blueprint = os.path.join(
self.blueprints_path,
'bind_policy_group.yaml'
)
        # check that the security policy does not already exist
resource_id, policy = nsx_security_policy.get_policy(
self.client_session,
inputs['node_name_prefix'] + inputs['policy_name']
)
self.assertIsNone(resource_id)
        # check that the security group does not already exist
resource_id, _ = nsx_security_group.get_group(
self.client_session,
'globalroot-0',
inputs['node_name_prefix'] + inputs['security_group_name']
)
self.assertIsNone(resource_id)
# cfy local init
self.local_env = local.init_env(
blueprint,
inputs=inputs,
name=self._testMethodName,
ignored_modules=cli_constants.IGNORED_LOCAL_WORKFLOW_MODULES)
# cfy local execute -w install
self.local_env.execute(
'install',
task_retries=4,
task_retry_interval=3,
)
# check security policy properties
resource_id, policy = nsx_security_policy.get_policy(
self.client_session,
inputs['node_name_prefix'] + inputs['policy_name']
)
self.assertIsNotNone(resource_id)
self.assertIsNotNone(policy)
# check security group
resource_id, _ = nsx_security_group.get_group(
self.client_session,
'globalroot-0',
inputs['node_name_prefix'] + inputs['security_group_name']
)
self.assertIsNotNone(resource_id)
# cfy local execute -w uninstall
self.local_env.execute(
'uninstall',
task_retries=50,
task_retry_interval=3,
)
# must be deleted
resource_id, policy = nsx_security_policy.get_policy(
self.client_session,
inputs['node_name_prefix'] + inputs['policy_name']
)
self.assertIsNone(resource_id)
resource_id, _ = nsx_security_group.get_group(
self.client_session,
'globalroot-0',
inputs['node_name_prefix'] + inputs['security_group_name']
)
self.assertIsNone(resource_id)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 8,010,831,182,299,763,000 | 29.660853 | 79 | 0.563997 | false | 4.042156 | true | false | false |
TravisCG/SI_scripts | exonreadcount.py | 1 | 1804 | #!/usr/bin/python
"""
This script is an HTSeq alternative.
It counts reads in regions, but if there
are overlapping regions the read is counted in both
regions. A read is also counted if it only partially overlaps
"""
import sys
import gzip
class Exon:
def __init__(self, fields):
self.chrx = fields[0]
self.start = int(fields[3])
self.stop = int(fields[4])
self.exonnum = int(fields[17].replace('"', "")[:-1])
self.genesym = fields[19].replace('"', "")[:-1]
self.tr = fields[13].replace('"', "")[:-1]
self.count = 0
def incAllExons(pos, length, array, lowindex, hiindex):
if hiindex - lowindex < 2:
# found
for i in range(lowindex, len(array)):
if array[i].start < pos + length and array[i].stop > pos:
array[i].count += 1
if array[i].start > pos + length:
break
else:
midindex = lowindex + (hiindex - lowindex) / 2
if array[lowindex].start < pos + length and array[midindex].stop > pos:
incAllExons(pos, length, array, lowindex, midindex)
else:
incAllExons(pos, length, array, midindex, hiindex)
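# Sketch of the recursion above (numbers are illustrative): for a read covering
# [100, 150) against exons sorted by start, the search bisects [lowindex,
# hiindex] until fewer than two exons remain, then walks forward from lowindex
# incrementing .count on every exon overlapping the read, stopping at the first
# exon that starts past pos + length.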
gtf = dict()
gtffile = gzip.open(sys.argv[1])
for line in gtffile:
if line.startswith("#"):
continue
fields = line.split()
if fields[2] != "exon":
continue
exon = Exon(fields)
if fields[0] not in gtf:
gtf[fields[0]] = list()
gtf[fields[0]].append(exon)
gtffile.close()
for i in gtf:
gtf[i] = sorted(gtf[i], key = lambda x:x.start)
readcount = 0
for i in sys.stdin:
fields = i.rstrip().split("\t")
if fields[4] == "255":
pos = int(fields[3])
chrx = fields[2]
length = len(fields[9])
readcount += 1
incAllExons(pos, length, gtf[chrx], 0, len(gtf[chrx]) - 1)
for i in gtf:
for exon in gtf[i]:
print "%s_%s:%d\t%d" % (exon.genesym, exon.tr, exon.exonnum, exon.count)
print "__readcount\t" + str(readcount)
| gpl-3.0 | 8,617,532,160,333,807,000 | 25.529412 | 74 | 0.637472 | false | 2.641288 | false | false | false |
fclerg/inora | Gateway/lib/eventsender.py | 1 | 1160 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Lib for handling message sending to the Inora server"""
import time
import requests
import lib.inoralogger
LOGGING = lib.inoralogger.InoraLogger.get_logger()
class EventSender(object):
"""Class to send messages over IP"""
def __init__(self, ip_address, port):
self.__ip_address = ip_address
self.__port = port
def send_https_message(self, message):
"""sending a message over https"""
url = 'https://' + self.__ip_address + ':' + str(self.__port)
headers = {'Content-Length': str(len(message))}
        # retry loop in case of a connection exception
while True:
try:
LOGGING.debug(" Sending poll result over Https. dst_IP:%s port:%s", self.__ip_address, self.__port)
requests.post(url, data=message, headers=headers, verify=False, timeout=30)
except (requests.exceptions.Timeout, requests.exceptions.ConnectionError) as exception:
LOGGING.error(exception)
LOGGING.info("Retrying to connect...")
time.sleep(3)
continue
break
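# Example (address and payload are placeholders):
#
#   sender = EventSender("192.0.2.10", 8443)
#   sender.send_https_message('{"event": "poll_result"}')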
| gpl-3.0 | 2,685,681,894,963,935,700 | 37.666667 | 115 | 0.602586 | false | 4.09894 | false | false | false |
boltzj/movies-in-sf | app/utils/import_data.py | 1 | 4974 | from app import db
import csv
from app.models.movie import Movie
from app.models.location import Location
from app.models.director import Director
from app.models.writer import Writer
from app.models.actor import Actor
def import_data_from_database():
"""
Build dictionaries from database
:return:
"""
# Init dictionaries
movies, actors, writers, directors, locations = {}, {}, {}, {}, {}
for movie in Movie.query.all():
# Save director information
movies[movie.name] = movie.id
for actor in Actor.query.all():
# Save actor information
actors[actor.name] = actor.id
for writer in Writer.query.all():
# Save writer information
writers[writer.name] = writer.id
for director in Director.query.all():
# Save director information
directors[director.name] = director.id
for location in Location.query.all():
locations[(location, location.movie_id)] = location.id
return movies, actors, writers, directors, locations
def import_data_from_csv(file_path):
"""
Import data from a csv file into database
:return:
"""
try:
with open(file_path) as csv_file:
reader = csv.reader(csv_file, delimiter=',')
# Init dictionaries
movies, actors, writers, directors, locations = import_data_from_database()
            # Sanity-check the header row before importing
header = next(reader)
if header[0] != 'Title' or header[1] != 'Release Year':
return "Bad File.."
for row in reader:
# Read CSV line
name = row[0].strip()
location = row[2]
fun_facts = row[3]
# Movie already exists create new location
if name in movies:
if '' != location:
new_location = Location(location, fun_facts, movies[name])
db.session.add(new_location)
continue
# Read more information from csv line about movie
release_year = row[1]
production = row[4]
distributor = row[5]
director = row[6]
writer = row[7]
movie_actors = [row[8], row[9], row[10]]
# Create a new Movie
movie = Movie(name, release_year, production, distributor)
# Add director
if '' != director:
if director not in directors:
director = Director(director)
db.session.add(director)
db.session.flush()
# Save director id in local dictionary
directors[director.name] = director.id
# add director_id to movie
movie.add_director(director.id)
else:
movie.add_director(directors[director])
# Add writer
if '' != writer:
if writer not in writers:
writer = Writer(writer)
db.session.add(writer)
db.session.flush()
# Save director information
writers[writer.name] = writer.id
# add director_id to movie
movie.add_writer(writer.id)
else:
movie.add_writer(writers[writer])
# Add Actors
for actor_name in movie_actors:
if actor_name != '':
if actor_name not in actors:
actor = Actor(actor_name)
db.session.add(actor)
db.session.flush()
# Save director information
actors[actor_name] = actor.id
# add actor to movie
movie.add_actor(actor)
else:
movie.add_actor(actor_name)
# Add Movie in DB
db.session.add(movie)
db.session.flush()
# Store movie id in local dictionary
movies[name] = movie.id
# Create new Location, if not empty and does not exist
if '' != location:
if (location, movie.id) not in locations:
new_location = Location(location, fun_facts, movie.id)
db.session.add(new_location)
db.session.flush()
locations[(location, movie.id)] = new_location.id
# Commit imported data
db.session.commit()
except FileNotFoundError:
print("File : `" + file_path + '` not found')
| mit | 6,835,127,686,174,911,000 | 32.38255 | 87 | 0.484721 | false | 4.847953 | false | false | false |
jason-weirather/py-seq-tools | seqtools/cli/utilities/bam_to_bed_depth.py | 1 | 4235 | """ Convert a BAM or a SAM into a bed depth file
The file is a TSV format with the fields
1. Chromosome
2. Start (0-index)
3. End (1-index)
4. Read depth
The file is ordered and covers all regions covered by alignments
"""
import argparse, sys, os
from shutil import rmtree
from multiprocessing import cpu_count, Lock, Pool
from tempfile import mkdtemp, gettempdir
from seqtools.format.sam.bam.files import BAMFile
from seqtools.format.sam import SAMStream
from seqtools.stream import LocusStream
from seqtools.range.multi import ranges_to_coverage, sort_genomic_ranges
current = 0
glock = Lock()
results = {}
of = None
def main(args):
bf = None
if args.input != '-':
bf = BAMFile(args.input)
else:
bf = SAMStream(sys.stdin)
ls = LocusStream(bf)
if args.output:
args.output = open(args.output,'w')
else:
args.output = sys.stdout
global of
of = args.output
z = 0
if args.threads > 1:
p = Pool(processes=args.threads)
for entries in ls:
bedarray = []
for e in entries.payload:
if not e.is_aligned(): continue
tx = e.get_target_transcript(min_intron=args.minimum_intron_size)
for exon in tx.exons:
bedarray.append(exon.copy())
if len(bedarray) == 0: continue
if args.threads > 1:
p.apply_async(get_output,args=(bedarray,z,),callback=do_output)
else:
r = get_output(bedarray,z)
do_output(r)
z += 1
if args.threads > 1:
p.close()
p.join()
# Temporary working directory step 3 of 3 - Cleanup
if not args.specific_tempdir:
rmtree(args.tempdir)
args.output.close()
def do_output(outputs):
global glock
global current
global of
global results
glock.acquire()
oline = outputs[0]
z = outputs[1]
results[z] = oline
while current in results:
prev = current
of.write(results[current])
del results[prev]
current += 1
glock.release()
def get_output(bedarray,z):
sarray = sort_genomic_ranges(bedarray[:])
covs = ranges_to_coverage(bedarray)
olines = ''
for c in covs:
olines += c.chr+"\t"+str(c.start-1)+"\t"+str(c.end)+"\t"+str(c.payload)+"\n"
return [olines,z]
def do_inputs():
# Setup command line inputs
parser=argparse.ArgumentParser(description="Convert a sorted bam file (all alignments) into a bed file with depth. If you want to limit it to primary alignments you better filter the bam.",formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('input',help="BAM file or - for SAM stdin")
parser.add_argument('-o','--output',help="OUTPUTFILE or STDOUT if not set")
parser.add_argument('--minimum_intron_size',default=68,type=int,help="any gaps smaller than this we close")
parser.add_argument('--threads',type=int,default=cpu_count(),help="INT number of threads to run. Default is system cpu count")
# Temporary working directory step 1 of 3 - Definition
group = parser.add_mutually_exclusive_group()
group.add_argument('--tempdir',default=gettempdir(),help="The temporary directory is made and destroyed here.")
group.add_argument('--specific_tempdir',help="This temporary directory will be used, but will remain after executing.")
args = parser.parse_args()
# Temporary working directory step 2 of 3 - Creation
setup_tempdir(args)
return args
def setup_tempdir(args):
if args.specific_tempdir:
if not os.path.exists(args.specific_tempdir):
os.makedirs(args.specific_tempdir.rstrip('/'))
args.tempdir = args.specific_tempdir.rstrip('/')
if not os.path.exists(args.specific_tempdir.rstrip('/')):
sys.stderr.write("ERROR: Problem creating temporary directory\n")
sys.exit()
else:
args.tempdir = mkdtemp(prefix="weirathe.",dir=args.tempdir.rstrip('/'))
if not os.path.exists(args.tempdir.rstrip('/')):
sys.stderr.write("ERROR: Problem creating temporary directory\n")
sys.exit()
if not os.path.exists(args.tempdir):
sys.stderr.write("ERROR: Problem creating temporary directory\n")
sys.exit()
return
def external_cmd(cmd):
cache_argv = sys.argv
sys.argv = cmd
args = do_inputs()
main(args)
sys.argv = cache_argv
if __name__=="__main__":
args = do_inputs()
main(args)
| apache-2.0 | -9,172,658,648,721,654,000 | 30.37037 | 247 | 0.68902 | false | 3.440292 | false | false | false |
BytesGalore/PetersRIOT | dist/tools/compile_test/compile_test.py | 3 | 3666 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
# Copyright (C) 2014 René Kijewski <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from __future__ import print_function
from itertools import groupby
from os import devnull, environ, listdir
from os.path import abspath, dirname, isfile, join
from subprocess import CalledProcessError, check_call, PIPE, Popen
from sys import exit, stdout
riotbase = environ.get('RIOTBASE') or abspath(join(dirname(abspath(__file__)), '../' * 3))
null = open(devnull, 'w', 0)
success = []
failed = []
exceptions = []
def is_tracked(application_folder):
if not isfile(join(application_folder, 'Makefile')):
return False
try:
check_call(('git', 'ls-files', '--error-unmatch', 'Makefile'),
stdin=null, stdout=null, stderr=null, cwd=application_folder)
except CalledProcessError:
return False
else:
return True
def get_lines(readline, prefix):
while 1:
result = readline()
if not result:
break
elif not result.startswith(prefix):
continue
result = result[len(prefix):].rstrip().split(' .. ')[::-1]
if len(result) == 2:
stdout.write('.')
stdout.flush()
yield result
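# A matched line looks like (illustrative): "Building for native .. SUCCESS";
# stripping the prefix and splitting on ' .. ' yields the reversed pair
# ['SUCCESS', 'native'], i.e. (outcome, board).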
for folder in ('examples', 'tests'):
print('Building all applications in: \033[1;34m{}\033[0m'.format(folder))
applications = listdir(join(riotbase, folder))
applications = filter(lambda app: is_tracked(join(riotbase, folder, app)), applications)
applications = sorted(applications)
for nth, application in enumerate(applications, 1):
stdout.write('\tBuilding application: \033[1;34m{}\033[0m ({}/{}) '.format(application, nth, len(applications)))
stdout.flush()
try:
subprocess = Popen(('make', 'buildtest'),
bufsize=1, stdin=null, stdout=PIPE, stderr=null,
cwd=join(riotbase, folder, application))
lines = get_lines(subprocess.stdout.readline, 'Building for ')
lines = groupby(sorted(lines), lambda (outcome, board): outcome)
for group, results in lines:
print('\n\t\t{}: {}'.format(group, ', '.join(sorted(board for outcome, board in results))))
returncode = subprocess.wait()
(failed if returncode else success).append(application)
except Exception, e:
print('\n\t\tException: {}'.format(e))
exceptions.append(application)
finally:
try:
subprocess.kill()
except:
pass
print('Outcome:')
for color, group in (('2', 'success'), ('1', 'failed'), ('4', 'exceptions')):
applications = locals()[group]
if applications:
print('\t\033[1;3{}m{}\033[0m: {}'.format(color, group, ', '.join(applications)))
if exceptions:
exit(2)
elif failed:
exit(1)
else:
exit(0)
| lgpl-2.1 | 5,454,383,394,964,891,000 | 34.582524 | 120 | 0.629741 | false | 4.023052 | false | false | false |
macarthur-lab/xbrowse | seqr/views/utils/phenotips_utils.py | 1 | 4706 | import json
import logging
import requests
import settings
from seqr.views.utils.proxy_request_utils import proxy_request
logger = logging.getLogger(__name__)
def delete_phenotips_patient(project, individual):
"""Deletes patient from PhenoTips for the given patient_id.
Args:
project (Model): seqr Project - used to retrieve PhenoTips credentials
individual (Model): seqr Individual
Raises:
PhenotipsException: if api call fails
"""
if phenotips_patient_exists(individual):
url = phenotips_patient_url(individual)
auth_tuple = get_phenotips_uname_and_pwd_for_project(project.phenotips_user_id, read_only=False)
return make_phenotips_api_call('DELETE', url, auth_tuple=auth_tuple, expected_status_code=204)
def phenotips_patient_url(individual):
if individual.phenotips_patient_id:
return '/rest/patients/{0}'.format(individual.phenotips_patient_id)
else:
return '/rest/patients/eid/{0}'.format(individual.phenotips_eid)
def phenotips_patient_exists(individual):
return individual.phenotips_patient_id or individual.phenotips_eid
def create_phenotips_user(username, password):
"""Creates a new user in PhenoTips"""
headers = { "Content-Type": "application/x-www-form-urlencoded" }
data = { 'parent': 'XWiki.XWikiUsers' }
url = '/rest/wikis/xwiki/spaces/XWiki/pages/{username}'.format(username=username)
make_phenotips_api_call(
'PUT',
url,
http_headers=headers,
data=data,
auth_tuple=(settings.PHENOTIPS_ADMIN_UNAME, settings.PHENOTIPS_ADMIN_PWD),
parse_json_resonse=False,
expected_status_code=[201, 202],
)
data = {
'className': 'XWiki.XWikiUsers',
'property#password': password,
#'property#first_name': first_name,
#'property#last_name': last_name,
#'property#email': email_address,
}
url = '/rest/wikis/xwiki/spaces/XWiki/pages/{username}/objects'.format(username=username)
return make_phenotips_api_call(
'POST',
url,
data=data,
auth_tuple=(settings.PHENOTIPS_ADMIN_UNAME, settings.PHENOTIPS_ADMIN_PWD),
parse_json_resonse=False,
expected_status_code=201,
)
def make_phenotips_api_call(
method,
url,
http_headers=None,
data=None,
auth_tuple=None,
expected_status_code=200,
parse_json_resonse=True,
verbose=False):
"""Utility method for making an API call and then parsing & returning the json response.
Args:
method (string): 'GET' or 'POST'
url (string): url path, starting with '/' (eg. '/bin/edit/data/P0000001')
data (string): request body - used for POST, PUT, and other such requests.
auth_tuple (tuple): ("username", "password") pair
expected_status_code (int or list): expected server response code
parse_json_resonse (bool): whether to parse and return the json response
verbose (bool): whether to print details about the request & response
Returns:
json object or None if response content is empty
"""
try:
response = proxy_request(None, url, headers=http_headers or {}, method=method, scheme='http', data=data,
auth_tuple=auth_tuple, host=settings.PHENOTIPS_SERVER, verbose=verbose)
except requests.exceptions.RequestException as e:
raise PhenotipsException(e.message)
if (isinstance(expected_status_code, int) and response.status_code != expected_status_code) or (
isinstance(expected_status_code, list) and response.status_code not in expected_status_code):
raise PhenotipsException("Unable to retrieve %s. response code = %s: %s" % (
url, response.status_code, response.reason_phrase))
if parse_json_resonse:
if not response.content:
return {}
try:
return json.loads(response.content)
except ValueError as e:
logger.error("Unable to parse PhenoTips response for %s request to %s" % (method, url))
raise PhenotipsException("Unable to parse response for %s:\n%s" % (url, e))
else:
return dict(response.items())
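# Illustrative call (hypothetical patient id and credentials):
#   make_phenotips_api_call('GET', '/rest/patients/P0000001',
#                           auth_tuple=('uname', 'pwd'))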
def get_phenotips_uname_and_pwd_for_project(phenotips_user_id, read_only=False):
"""Return the PhenoTips username and password for this seqr project"""
if not phenotips_user_id:
raise ValueError("Invalid phenotips_user_id: " + str(phenotips_user_id))
uname = phenotips_user_id + ('_view' if read_only else '')
pwd = phenotips_user_id + phenotips_user_id
return uname, pwd
class PhenotipsException(Exception):
pass
| agpl-3.0 | 6,222,624,996,166,021,000 | 34.651515 | 112 | 0.659584 | false | 3.625578 | false | false | false |
codefisher/tbutton_web | lbutton/migrations/0004_auto_20170823_0029.py | 1 | 1045 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-23 00:29
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('lbutton', '0003_auto_20150314_2025'),
]
operations = [
migrations.AddField(
model_name='linkbutton',
name='chrome_file',
field=models.FilePathField(null=True, path='/home/michael/WebSites/dev/git/codefisher_org/../www/media/lbutton/chrome'),
),
migrations.AddField(
model_name='linkbutton',
name='firefox_file',
field=models.FilePathField(null=True, path='/home/michael/WebSites/dev/git/codefisher_org/../www/media/lbutton/firefox'),
),
migrations.AddField(
model_name='linkbutton',
name='time',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
]
| mit | 5,305,851,959,424,343,000 | 31.65625 | 133 | 0.615311 | false | 3.813869 | false | false | false |
kobotoolbox/kobocat | onadata/apps/restservice/tasks.py | 1 | 1102 | # coding: utf-8
import logging
from celery import shared_task
from django.conf import settings
from onadata.apps.restservice.models import RestService
@shared_task(bind=True)
def service_definition_task(self, rest_service_id, data):
"""
Tries to send data to the endpoint of the hook
It retries 3 times maximum.
- after 2 minutes,
- after 20 minutes,
- after 200 minutes
:param self: Celery.Task.
:param rest_service_id: RestService primary key.
:param data: dict.
"""
try:
rest_service = RestService.objects.get(pk=rest_service_id)
service = rest_service.get_service_definition()()
service.send(rest_service.service_url, data)
except Exception as e:
logger = logging.getLogger("console_logger")
logger.error("service_definition_task - {}".format(str(e)), exc_info=True)
# Countdown is in seconds
countdown = 120 * (10 ** self.request.retries)
# Max retries is 3 by default.
raise self.retry(countdown=countdown, max_retries=settings.REST_SERVICE_MAX_RETRIES)
return True
| bsd-2-clause | 5,079,829,175,311,263,000 | 30.485714 | 92 | 0.676951 | false | 3.761092 | false | false | false |
jptomo/rpython-lang-scheme | rpython/jit/backend/llsupport/regalloc.py | 1 | 27235 | import os
from rpython.jit.metainterp.history import Const, REF, JitCellToken
from rpython.rlib.objectmodel import we_are_translated, specialize
from rpython.jit.metainterp.resoperation import rop, AbstractValue
from rpython.rtyper.lltypesystem import lltype
from rpython.rtyper.lltypesystem.lloperation import llop
try:
from collections import OrderedDict
except ImportError:
OrderedDict = dict # too bad
class TempVar(AbstractValue):
def __init__(self):
pass
def __repr__(self):
return "<TempVar at %s>" % (id(self),)
class NoVariableToSpill(Exception):
pass
class Node(object):
def __init__(self, val, next):
self.val = val
self.next = next
def __repr__(self):
        return '<Node %d %r>' % (self.val, self.next)
class LinkedList(object):
def __init__(self, fm, lst=None):
# assume the list is sorted
if lst is not None:
node = None
for i in range(len(lst) - 1, -1, -1):
item = lst[i]
node = Node(item, node)
self.master_node = node
else:
self.master_node = None
self.fm = fm
def append(self, size, item):
key = self.fm.get_loc_index(item)
if size == 2:
self._append(key)
self._append(key + 1)
else:
assert size == 1
self._append(key)
def _append(self, key):
if self.master_node is None or self.master_node.val > key:
self.master_node = Node(key, self.master_node)
else:
node = self.master_node
prev_node = self.master_node
while node and node.val < key:
prev_node = node
node = node.next
prev_node.next = Node(key, node)
@specialize.arg(1)
def foreach(self, function, arg):
node = self.master_node
while node is not None:
function(arg, node.val)
node = node.next
def pop(self, size, tp, hint=-1):
if size == 2:
return self._pop_two(tp) # 'hint' ignored for floats on 32-bit
assert size == 1
if not self.master_node:
return None
node = self.master_node
#
if hint >= 0:
# Look for and remove the Node with the .val matching 'hint'.
# If not found, fall back to removing the first Node.
# Note that the loop below ignores the first Node, but
# even if by chance it is the one with the correct .val,
# it will be the one we remove at the end anyway.
prev_node = node
while prev_node.next:
if prev_node.next.val == hint:
node = prev_node.next
prev_node.next = node.next
break
prev_node = prev_node.next
else:
self.master_node = node.next
else:
self.master_node = node.next
#
return self.fm.frame_pos(node.val, tp)
def _candidate(self, node):
return (node.val & 1 == 0) and (node.val + 1 == node.next.val)
def _pop_two(self, tp):
node = self.master_node
if node is None or node.next is None:
return None
if self._candidate(node):
self.master_node = node.next.next
return self.fm.frame_pos(node.val, tp)
prev_node = node
node = node.next
while True:
if node.next is None:
return None
if self._candidate(node):
# pop two
prev_node.next = node.next.next
return self.fm.frame_pos(node.val, tp)
node = node.next
def len(self):
node = self.master_node
c = 0
while node:
node = node.next
c += 1
return c
def __len__(self):
""" For tests only
"""
return self.len()
def __repr__(self):
if not self.master_node:
return 'LinkedList(<empty>)'
node = self.master_node
l = []
while node:
l.append(str(node.val))
node = node.next
return 'LinkedList(%s)' % '->'.join(l)
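# Illustrative behaviour (fm being any FrameManager-like object):
#   fl = LinkedList(fm, [0, 1, 3])   # stays sorted: LinkedList(0->1->3)
#   fl._append(2)                    # -> LinkedList(0->1->2->3)
#   fl.pop(2, FLOAT)                 # pops the even-aligned pair 0,1
#                                    # (FLOAT is an illustrative type tag)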
class FrameManager(object):
""" Manage frame positions
start_free_depth is the start where we can allocate in whatever order
we like.
"""
def __init__(self, start_free_depth=0, freelist=None):
self.bindings = {}
self.current_frame_depth = start_free_depth
self.hint_frame_pos = {}
self.freelist = LinkedList(self, freelist)
def get_frame_depth(self):
return self.current_frame_depth
def get(self, box):
return self.bindings.get(box, None)
def loc(self, box):
"""Return or create the frame location associated with 'box'."""
# first check if it's already in the frame_manager
try:
return self.bindings[box]
except KeyError:
pass
return self.get_new_loc(box)
def get_new_loc(self, box):
size = self.frame_size(box.type)
hint = self.hint_frame_pos.get(box, -1)
# frame_depth is rounded up to a multiple of 'size', assuming
# that 'size' is a power of two. The reason for doing so is to
# avoid obscure issues in jump.py with stack locations that try
# to move from position (6,7) to position (7,8).
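        # E.g. with size == 2 and an odd current depth of 5, index 5 goes
        # back on the free list and the new location starts at the even
        # index 6 (the depth grows to 8).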
newloc = self.freelist.pop(size, box.type, hint)
if newloc is None:
#
index = self.get_frame_depth()
if index & 1 and size == 2:
# we can't allocate it at odd position
self.freelist._append(index)
newloc = self.frame_pos(index + 1, box.type)
self.current_frame_depth += 3
index += 1 # for test
else:
newloc = self.frame_pos(index, box.type)
self.current_frame_depth += size
#
if not we_are_translated(): # extra testing
testindex = self.get_loc_index(newloc)
assert testindex == index
#
self.bindings[box] = newloc
if not we_are_translated():
self._check_invariants()
return newloc
def bind(self, box, loc):
pos = self.get_loc_index(loc)
size = self.frame_size(box.type)
self.current_frame_depth = max(pos + size, self.current_frame_depth)
self.bindings[box] = loc
def finish_binding(self):
all = [0] * self.get_frame_depth()
for b, loc in self.bindings.iteritems():
size = self.frame_size(b.type)
pos = self.get_loc_index(loc)
for i in range(pos, pos + size):
all[i] = 1
self.freelist = LinkedList(self) # we don't care
for elem in range(len(all)):
if not all[elem]:
self.freelist._append(elem)
if not we_are_translated():
self._check_invariants()
def mark_as_free(self, box):
try:
loc = self.bindings[box]
except KeyError:
return # already gone
del self.bindings[box]
size = self.frame_size(box.type)
self.freelist.append(size, loc)
if not we_are_translated():
self._check_invariants()
def _check_invariants(self):
all = [0] * self.get_frame_depth()
for b, loc in self.bindings.iteritems():
            size = self.frame_size(b.type)
pos = self.get_loc_index(loc)
for i in range(pos, pos + size):
assert not all[i]
all[i] = 1
node = self.freelist.master_node
while node is not None:
assert not all[node.val]
all[node.val] = 1
node = node.next
@staticmethod
def _gather_gcroots(lst, var):
lst.append(var)
# abstract methods that need to be overwritten for specific assemblers
def frame_pos(loc, type):
raise NotImplementedError("Purely abstract")
@staticmethod
def frame_size(type):
return 1
@staticmethod
def get_loc_index(loc):
raise NotImplementedError("Purely abstract")
@staticmethod
def newloc(pos, size, tp):
""" Reverse of get_loc_index
"""
raise NotImplementedError("Purely abstract")
class RegisterManager(object):
""" Class that keeps track of register allocations
"""
box_types = None # or a list of acceptable types
all_regs = []
no_lower_byte_regs = []
save_around_call_regs = []
frame_reg = None
def __init__(self, longevity, frame_manager=None, assembler=None):
self.free_regs = self.all_regs[:]
self.free_regs.reverse()
self.longevity = longevity
self.temp_boxes = []
if not we_are_translated():
self.reg_bindings = OrderedDict()
else:
self.reg_bindings = {}
self.bindings_to_frame_reg = {}
self.position = -1
self.frame_manager = frame_manager
self.assembler = assembler
def is_still_alive(self, v):
# Check if 'v' is alive at the current position.
# Return False if the last usage is strictly before.
return self.longevity[v][1] >= self.position
def stays_alive(self, v):
# Check if 'v' stays alive after the current position.
# Return False if the last usage is before or at position.
return self.longevity[v][1] > self.position
def next_instruction(self, incr=1):
self.position += incr
def _check_type(self, v):
if not we_are_translated() and self.box_types is not None:
assert isinstance(v, TempVar) or v.type in self.box_types
def possibly_free_var(self, v):
""" If v is stored in a register and v is not used beyond the
current position, then free it. Must be called at some
point for all variables that might be in registers.
"""
self._check_type(v)
if isinstance(v, Const):
return
if v not in self.longevity or self.longevity[v][1] <= self.position:
if v in self.reg_bindings:
self.free_regs.append(self.reg_bindings[v])
del self.reg_bindings[v]
if self.frame_manager is not None:
self.frame_manager.mark_as_free(v)
def possibly_free_vars(self, vars):
""" Same as 'possibly_free_var', but for all v in vars.
"""
for v in vars:
self.possibly_free_var(v)
def possibly_free_vars_for_op(self, op):
for i in range(op.numargs()):
self.possibly_free_var(op.getarg(i))
def free_temp_vars(self):
self.possibly_free_vars(self.temp_boxes)
self.temp_boxes = []
def _check_invariants(self):
if not we_are_translated():
# make sure no duplicates
assert len(dict.fromkeys(self.reg_bindings.values())) == len(self.reg_bindings)
rev_regs = dict.fromkeys(self.reg_bindings.values())
for reg in self.free_regs:
assert reg not in rev_regs
assert len(rev_regs) + len(self.free_regs) == len(self.all_regs)
else:
assert len(self.reg_bindings) + len(self.free_regs) == len(self.all_regs)
assert len(self.temp_boxes) == 0
if self.longevity:
for v in self.reg_bindings:
assert self.longevity[v][1] > self.position
def try_allocate_reg(self, v, selected_reg=None, need_lower_byte=False):
""" Try to allocate a register, if we have one free.
need_lower_byte - if True, allocate one that has a lower byte reg
(e.g. eax has al)
selected_reg - if not None, force a specific register
returns allocated register or None, if not possible.
"""
self._check_type(v)
assert not isinstance(v, Const)
if selected_reg is not None:
res = self.reg_bindings.get(v, None)
if res is not None:
if res is selected_reg:
return res
else:
del self.reg_bindings[v]
self.free_regs.append(res)
if selected_reg in self.free_regs:
self.free_regs = [reg for reg in self.free_regs
if reg is not selected_reg]
self.reg_bindings[v] = selected_reg
return selected_reg
return None
if need_lower_byte:
loc = self.reg_bindings.get(v, None)
if loc is not None and loc not in self.no_lower_byte_regs:
return loc
for i in range(len(self.free_regs) - 1, -1, -1):
reg = self.free_regs[i]
if reg not in self.no_lower_byte_regs:
if loc is not None:
self.free_regs[i] = loc
else:
del self.free_regs[i]
self.reg_bindings[v] = reg
return reg
return None
try:
return self.reg_bindings[v]
except KeyError:
if self.free_regs:
loc = self.free_regs.pop()
self.reg_bindings[v] = loc
return loc
def _spill_var(self, v, forbidden_vars, selected_reg,
need_lower_byte=False):
v_to_spill = self._pick_variable_to_spill(v, forbidden_vars,
selected_reg, need_lower_byte=need_lower_byte)
loc = self.reg_bindings[v_to_spill]
del self.reg_bindings[v_to_spill]
if self.frame_manager.get(v_to_spill) is None:
newloc = self.frame_manager.loc(v_to_spill)
self.assembler.regalloc_mov(loc, newloc)
return loc
def _pick_variable_to_spill(self, v, forbidden_vars, selected_reg=None,
need_lower_byte=False):
""" Slightly less silly algorithm.
"""
cur_max_age = -1
candidate = None
for next in self.reg_bindings:
reg = self.reg_bindings[next]
if next in forbidden_vars:
continue
if selected_reg is not None:
if reg is selected_reg:
return next
else:
continue
if need_lower_byte and reg in self.no_lower_byte_regs:
continue
max_age = self.longevity[next][1]
if cur_max_age < max_age:
cur_max_age = max_age
candidate = next
if candidate is None:
raise NoVariableToSpill
return candidate
def force_allocate_reg(self, v, forbidden_vars=[], selected_reg=None,
need_lower_byte=False):
""" Forcibly allocate a register for the new variable v.
It must not be used so far. If we don't have a free register,
spill some other variable, according to algorithm described in
'_pick_variable_to_spill'.
Will not spill a variable from 'forbidden_vars'.
"""
self._check_type(v)
if isinstance(v, TempVar):
self.longevity[v] = (self.position, self.position)
loc = self.try_allocate_reg(v, selected_reg,
need_lower_byte=need_lower_byte)
if loc:
return loc
loc = self._spill_var(v, forbidden_vars, selected_reg,
need_lower_byte=need_lower_byte)
prev_loc = self.reg_bindings.get(v, None)
if prev_loc is not None:
self.free_regs.append(prev_loc)
self.reg_bindings[v] = loc
return loc
def force_allocate_frame_reg(self, v):
""" Allocate the new variable v in the frame register."""
self.bindings_to_frame_reg[v] = None
def force_spill_var(self, var):
self._sync_var(var)
try:
loc = self.reg_bindings[var]
del self.reg_bindings[var]
self.free_regs.append(loc)
except KeyError:
pass # 'var' is already not in a register
def loc(self, box, must_exist=False):
""" Return the location of 'box'.
"""
self._check_type(box)
if isinstance(box, Const):
return self.convert_to_imm(box)
try:
return self.reg_bindings[box]
except KeyError:
if box in self.bindings_to_frame_reg:
return self.frame_reg
if must_exist:
return self.frame_manager.bindings[box]
return self.frame_manager.loc(box)
def return_constant(self, v, forbidden_vars=[], selected_reg=None):
""" Return the location of the constant v. If 'selected_reg' is
not None, it will first load its value into this register.
"""
self._check_type(v)
assert isinstance(v, Const)
immloc = self.convert_to_imm(v)
if selected_reg:
if selected_reg in self.free_regs:
self.assembler.regalloc_mov(immloc, selected_reg)
return selected_reg
loc = self._spill_var(v, forbidden_vars, selected_reg)
self.free_regs.append(loc)
self.assembler.regalloc_mov(immloc, loc)
return loc
return immloc
def make_sure_var_in_reg(self, v, forbidden_vars=[], selected_reg=None,
need_lower_byte=False):
""" Make sure that an already-allocated variable v is in some
register. Return the register. See 'force_allocate_reg' for
the meaning of the optional arguments.
"""
self._check_type(v)
if isinstance(v, Const):
return self.return_constant(v, forbidden_vars, selected_reg)
prev_loc = self.loc(v, must_exist=True)
if prev_loc is self.frame_reg and selected_reg is None:
return prev_loc
loc = self.force_allocate_reg(v, forbidden_vars, selected_reg,
need_lower_byte=need_lower_byte)
if prev_loc is not loc:
self.assembler.regalloc_mov(prev_loc, loc)
return loc
def _reallocate_from_to(self, from_v, to_v):
reg = self.reg_bindings[from_v]
del self.reg_bindings[from_v]
self.reg_bindings[to_v] = reg
def _move_variable_away(self, v, prev_loc):
if self.free_regs:
loc = self.free_regs.pop()
self.reg_bindings[v] = loc
self.assembler.regalloc_mov(prev_loc, loc)
else:
loc = self.frame_manager.loc(v)
self.assembler.regalloc_mov(prev_loc, loc)
def force_result_in_reg(self, result_v, v, forbidden_vars=[]):
""" Make sure that result is in the same register as v.
The variable v is copied away if it's further used. The meaning
of 'forbidden_vars' is the same as in 'force_allocate_reg'.
"""
self._check_type(result_v)
self._check_type(v)
if isinstance(v, Const):
if self.free_regs:
loc = self.free_regs.pop()
else:
loc = self._spill_var(v, forbidden_vars, None)
self.assembler.regalloc_mov(self.convert_to_imm(v), loc)
self.reg_bindings[result_v] = loc
return loc
if v not in self.reg_bindings:
prev_loc = self.frame_manager.loc(v)
loc = self.force_allocate_reg(v, forbidden_vars)
self.assembler.regalloc_mov(prev_loc, loc)
assert v in self.reg_bindings
if self.longevity[v][1] > self.position:
# we need to find a new place for variable v and
# store result in the same place
loc = self.reg_bindings[v]
del self.reg_bindings[v]
if self.frame_manager.get(v) is None:
self._move_variable_away(v, loc)
self.reg_bindings[result_v] = loc
else:
self._reallocate_from_to(v, result_v)
loc = self.reg_bindings[result_v]
return loc
def _sync_var(self, v):
if not self.frame_manager.get(v):
reg = self.reg_bindings[v]
to = self.frame_manager.loc(v)
self.assembler.regalloc_mov(reg, to)
# otherwise it's clean
def before_call(self, force_store=[], save_all_regs=0):
""" Spill registers before a call, as described by
'self.save_around_call_regs'. Registers are not spilled if
they don't survive past the current operation, unless they
are listed in 'force_store'. 'save_all_regs' can be 0 (default),
1 (save all), or 2 (save default+PTRs).
"""
for v, reg in self.reg_bindings.items():
if v not in force_store and self.longevity[v][1] <= self.position:
# variable dies
del self.reg_bindings[v]
self.free_regs.append(reg)
continue
if save_all_regs != 1 and reg not in self.save_around_call_regs:
if save_all_regs == 0:
continue # we don't have to
if v.type != REF:
continue # only save GC pointers
self._sync_var(v)
del self.reg_bindings[v]
self.free_regs.append(reg)
def after_call(self, v):
""" Adjust registers according to the result of the call,
which is in variable v.
"""
self._check_type(v)
r = self.call_result_location(v)
if not we_are_translated():
assert r not in self.reg_bindings.values()
self.reg_bindings[v] = r
self.free_regs = [fr for fr in self.free_regs if fr is not r]
return r
# abstract methods, override
def convert_to_imm(self, c):
""" Platform specific - convert a constant to imm
"""
raise NotImplementedError("Abstract")
def call_result_location(self, v):
""" Platform specific - tell where the result of a call will
be stored by the cpu, according to the variable type
"""
raise NotImplementedError("Abstract")
def get_scratch_reg(self, type, forbidden_vars=[], selected_reg=None):
""" Platform specific - Allocates a temporary register """
raise NotImplementedError("Abstract")
class BaseRegalloc(object):
""" Base class on which all the backend regallocs should be based
"""
def _set_initial_bindings(self, inputargs, looptoken):
""" Set the bindings at the start of the loop
"""
locs = []
base_ofs = self.assembler.cpu.get_baseofs_of_frame_field()
for box in inputargs:
assert not isinstance(box, Const)
loc = self.fm.get_new_loc(box)
locs.append(loc.value - base_ofs)
if looptoken.compiled_loop_token is not None: # <- for tests
looptoken.compiled_loop_token._ll_initial_locs = locs
def next_op_can_accept_cc(self, operations, i):
op = operations[i]
next_op = operations[i + 1]
opnum = next_op.getopnum()
if (opnum != rop.GUARD_TRUE and opnum != rop.GUARD_FALSE
and opnum != rop.COND_CALL):
return False
if next_op.getarg(0) is not op:
return False
if self.longevity[op][1] > i + 1:
return False
if opnum != rop.COND_CALL:
if op in operations[i + 1].getfailargs():
return False
else:
if op in operations[i + 1].getarglist()[1:]:
return False
return True
def locs_for_call_assembler(self, op):
descr = op.getdescr()
assert isinstance(descr, JitCellToken)
if op.numargs() == 2:
self.rm._sync_var(op.getarg(1))
return [self.loc(op.getarg(0)), self.fm.loc(op.getarg(1))]
else:
return [self.loc(op.getarg(0))]
def compute_vars_longevity(inputargs, operations):
# compute a dictionary that maps variables to index in
# operations that is a "last-time-seen"
# returns a pair longevity/useful. Non-useful variables are ones that
# never appear in the assembler or it does not matter if they appear on
# stack or in registers. Main example is loop arguments that go
# only to guard operations or to jump or to finish
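    # Example: for "v = op0(...); guard_true(v); op2(...)" the longevity
    # of v is (0, 1): defined at index 0, last used at index 1.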
last_used = {}
last_real_usage = {}
for i in range(len(operations)-1, -1, -1):
op = operations[i]
if op.type != 'v':
if op not in last_used and op.has_no_side_effect():
continue
opnum = op.getopnum()
for j in range(op.numargs()):
arg = op.getarg(j)
if isinstance(arg, Const):
continue
if arg not in last_used:
last_used[arg] = i
if opnum != rop.JUMP and opnum != rop.LABEL:
if arg not in last_real_usage:
last_real_usage[arg] = i
if op.is_guard():
for arg in op.getfailargs():
if arg is None: # hole
continue
assert not isinstance(arg, Const)
if arg not in last_used:
last_used[arg] = i
#
longevity = {}
for i, arg in enumerate(operations):
if arg.type != 'v' and arg in last_used:
assert not isinstance(arg, Const)
assert i < last_used[arg]
longevity[arg] = (i, last_used[arg])
del last_used[arg]
for arg in inputargs:
assert not isinstance(arg, Const)
if arg not in last_used:
longevity[arg] = (-1, -1)
else:
longevity[arg] = (0, last_used[arg])
del last_used[arg]
assert len(last_used) == 0
if not we_are_translated():
produced = {}
for arg in inputargs:
produced[arg] = None
for op in operations:
for arg in op.getarglist():
if not isinstance(arg, Const):
assert arg in produced
produced[op] = None
return longevity, last_real_usage
def is_comparison_or_ovf_op(opnum):
from rpython.jit.metainterp.resoperation import opclasses
cls = opclasses[opnum]
# hack hack: in theory they are instance method, but they don't use
# any instance field, we can use a fake object
class Fake(cls):
pass
op = Fake()
return op.is_comparison() or op.is_ovf()
def valid_addressing_size(size):
return size == 1 or size == 2 or size == 4 or size == 8
def get_scale(size):
assert valid_addressing_size(size)
if size < 4:
return size - 1 # 1, 2 => 0, 1
else:
return (size >> 2) + 1 # 4, 8 => 2, 3
def not_implemented(msg):
msg = '[llsupport/regalloc] %s\n' % msg
if we_are_translated():
llop.debug_print(lltype.Void, msg)
raise NotImplementedError(msg)
| mit | -2,670,520,486,654,573,600 | 34.930079 | 91 | 0.549807 | false | 3.866411 | false | false | false |
raags/ipmitool | ipmi/ipmicli.py | 1 | 5984 | from __future__ import print_function
import re, sys, site, getpass, socket, argparse, collections
from threading import Thread
from ipmi import ipmitool
# Todo: add logging
class Runner(Thread):
"""
Build ipmitool object and run through tasks as per requested command
"""
ipmi_map = { "reboot" : "chassis power reset",
"pxe" : "chassis bootdev pxe",
"fix" : "chassis bootdev cdrom",
"disk" : "chassis bootdev disk",
"status": "chassis power status",
"off": "chassis power off",
"on": "chassis power on" }
def __init__(self, console, password, command="disk", username="root"):
"""
:param console: The console dns or ip address
:param command: The ipmi command to execute specified in `ipmi_map`
:param username: Console username
:param password: Console password
"""
Thread.__init__(self)
self.console = console
self.command = command
self.username = username
self.password = password
try:
socket.inet_aton(self.console)
self.consoleip = socket.gethostbyname(self.console)
except socket.error:
try:
self.consoleip = socket.gethostbyname(self.console)
except socket.gaierror:
raise NameError('Console Ip or dns name is invalid')
self.error = None
self.output = None
self.status = None
def ipmi_method(self, command):
"""Use ipmitool to run commands with ipmi protocol
"""
ipmi = ipmitool(self.console, self.password, self.username)
if command == "reboot":
self.ipmi_method(command="status")
if self.output == "Chassis Power is off":
command = "on"
ipmi.execute(self.ipmi_map[command])
if ipmi.status:
self.error = ipmi.error.strip()
else:
self.output = ipmi.output.strip()
self.status = ipmi.status
def run(self):
"""Start thread run here
"""
try:
if self.command == "pxer":
self.ipmi_method(command="pxe")
if self.status == 0 or self.status == None:
self.command = "reboot"
else:
return
self.ipmi_method(self.command)
except Exception as e:
self.error = str(e)
#raise
def print_report(runner_results):
"""
Print collated report with output and errors if any
"""
error_report = collections.defaultdict(list)
output_report = collections.defaultdict(list)
success_report = list()
for runner_info in runner_results:
hostname = runner_info['console']
error = runner_info['error']
output = runner_info['output']
if error:
error_report[error].append(hostname)
elif output:
output_report[output].append(hostname)
else:
success_report.append(hostname)
if error_report:
print("Errors : ")
for error in error_report:
print("{0} -- [{1}] {2}".format(error.strip(), len(error_report[error]), ", ".join(error_report[error])))
print()
if output_report:
for output in output_report:
print("{0} -- [{1}] {2}".format(output, len(output_report[output]), ", ".join(output_report[output])))
if success_report:
print("Completed config on {0} hosts".format(len(success_report)))
def main():
parser = argparse.ArgumentParser(
description="Run ipmitool commands on consoles")
group = parser.add_argument_group("Host selectors")
group.add_argument("-H", "--host", help="Console Ip or Dns Name")
group.add_argument("-f", "--file", help="File with list of Consoles")
group.add_argument("-u", "--username", default='root', help="Console username to use")
parser.add_argument("-v", "--verbose", help="Verbose output", action="store_true")
parser.add_argument("command", choices=["pxer", "pxe", "disk", "reboot", "off", "on", "status"],
help= "pxer - set to PXE and reboot host")
args = parser.parse_args()
if args.file:
try:
host_list = open(args.file).read().split()
except IOError as err:
print("Error: cannot open {0} ({1})".format(hostfile, err))
exit(1)
elif args.host:
host_list = [args.host]
else:
parser.print_usage()
sys.exit(1)
# Confirm with user for host power changes
if args.command == "reboot" or args.command == "off" or args.command == "pxer":
print("Power will be changed for the following hosts: ")
for host in host_list:
print(host)
choice = raw_input("Do you want to proceed? (y/n): ")
if not choice == "y":
exit(1)
# Get console password
passwd = getpass.getpass()
if not passwd:
print("Please provide the console password")
exit(1)
runner_list = []
for host in host_list:
runner_thread = Runner(host, command=args.command, username=args.username, password=passwd)
runner_thread.start()
runner_list.append(runner_thread)
runner_results = list()
for runner in runner_list:
runner.join()
runner_info = { 'console': runner.console, 'error': runner.error, 'output': runner.output }
runner_results.append(runner_info)
print_report(runner_results)
if __name__ == "__main__":
main()
# vim: autoindent tabstop=4 expandtab smarttab shiftwidth=4 softtabstop=4 tw=0
| apache-2.0 | -1,883,669,676,904,443,100 | 32.80791 | 117 | 0.550802 | false | 4.243972 | false | false | false |
hkaj/birdy_server | core/auth.py | 1 | 1549 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
import xml.etree.ElementTree as ET
from flask import abort, Flask, escape, request, session
from passlib.hash import sha256_crypt
from core.db import Retriever
def check_auth(session):
if 'username' in session:
xml_res = ET.fromstring("<response></response>")
msg = ET.Element('message')
# escape will protect against XSS if we decide to render this
msg.text = "%s - You are authenticated." % escape(session['username'])
xml_res.append(msg)
return ET.tostring(xml_res)
return None
def login(request, session):
xml_res = ET.fromstring("<response></response>")
login, passwd = request.form['usernm'], request.form['userpwd']
db_info = json.loads(Retriever(
['login_user', 'password'],
'utilisateur',
"login_user='%s'" % login
).fetch())
if not db_info:
abort(401)
# if the user exists and the password matches
elif 'password' in db_info.keys() and sha256_crypt.verify(passwd, db_info['password']):
session['username'] = login
msg = ET.Element('message')
msg.text = '%s - You are now authenticated.' % escape(login)
xml_res.append(msg)
return ET.tostring(xml_res)
else:
abort(401)
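# Passwords are expected to be stored as sha256_crypt hashes, e.g. created
# with sha256_crypt.encrypt("s3cret") (illustrative; newer passlib spells
# this .hash()).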
def logout(session):
session.pop('username', None)
xml_res = ET.fromstring("<response></response>")
msg = ET.Element('message')
msg.text = 'Log out.'
xml_res.append(msg)
return ET.tostring(xml_res)
| gpl-3.0 | 8,671,701,160,201,255,000 | 29.98 | 91 | 0.630084 | false | 3.670616 | false | false | false |
bitprophet/releases | tests/_util.py | 1 | 3526 | from docutils.nodes import list_item, paragraph
from mock import Mock
import six
from releases import (
Issue,
issues_role,
Release,
release_role,
construct_releases,
)
from releases.util import make_app, changelog2dict
def inliner(app=None):
app = app or make_app()
return Mock(document=Mock(settings=Mock(env=Mock(app=app))))
# Obtain issue() object w/o wrapping all parse steps
def issue(type_, number, **kwargs):
text = str(number)
if kwargs.get("backported", False):
text += " backported"
if kwargs.get("major", False):
text += " major"
if kwargs.get("spec", None):
text += " (%s)" % kwargs["spec"]
app = kwargs.get("app", None)
return issues_role(
name=type_,
rawtext="",
text=text,
lineno=None,
inliner=inliner(app=app),
)[0][0]
# Even shorter shorthand!
def b(number, **kwargs):
return issue("bug", str(number), **kwargs)
def f(number, **kwargs):
return issue("feature", str(number), **kwargs)
def s(number, **kwargs):
return issue("support", str(number), **kwargs)
def entry(i):
"""
Easy wrapper for issue/release objects.
Default is to give eg an issue/release object that gets wrapped in a LI->P.
May give your own (non-issue/release) object to skip auto wrapping. (Useful
since entry() is often called a few levels deep.)
"""
if not isinstance(i, (Issue, Release)):
return i
return list_item("", paragraph("", "", i))
def release(number, **kwargs):
app = kwargs.get("app", None)
nodes = release_role(
name=None,
rawtext="",
text="%s <2013-11-20>" % number,
lineno=None,
inliner=inliner(app=app),
)[0]
return list_item("", paragraph("", "", *nodes))
def release_list(*entries, **kwargs):
skip_initial = kwargs.pop("skip_initial", False)
entries = list(entries) # lol tuples
# Translate simple objs into changelog-friendly ones
for index, item in enumerate(entries):
if isinstance(item, six.string_types):
entries[index] = release(item)
else:
entries[index] = entry(item)
# Insert initial/empty 1st release to start timeline
if not skip_initial:
entries.append(release("1.0.0"))
return entries
def releases(*entries, **kwargs):
app = kwargs.pop("app", None) or make_app()
return construct_releases(release_list(*entries, **kwargs), app)[0]
def setup_issues(self):
self.f = f(12)
self.s = s(5)
self.b = b(15)
self.mb = b(200, major=True)
self.bf = f(27, backported=True)
self.bs = s(29, backported=True)
def expect_releases(entries, release_map, skip_initial=False, app=None):
kwargs = {"skip_initial": skip_initial}
# Let high level tests tickle config settings via make_app()
if app is not None:
kwargs["app"] = app
changelog = changelog2dict(releases(*entries, **kwargs))
snapshot = dict(changelog)
err = "Got unexpected contents for {}: wanted {}, got {}"
err += "\nFull changelog: {!r}\n"
for rel, issues in six.iteritems(release_map):
found = changelog.pop(rel)
msg = err.format(rel, issues, found, snapshot)
assert set(found) == set(issues), msg
# Sanity: ensure no leftover issue lists exist (empty ones are OK)
for key in list(changelog.keys()):
if not changelog[key]:
del changelog[key]
assert not changelog, "Found leftovers: {}".format(changelog)
| bsd-2-clause | 957,454,023,392,588,300 | 27.435484 | 79 | 0.621384 | false | 3.609007 | false | false | false |
Crespo911/pyspace | pySPACE/missions/operations/shuffle.py | 3 | 13660 | """ Take combinations of datasets in the summary for training and test each
The input of this
operation has to contain several comparable datasets of the same type.
Depending on whether the input datasets contain split data, the behavior
of this operation differs slightly.
.. note:: This operation creates an output directory with links,
not duplicated files!
If the input datasets are not split, the result of this operation
contains one dataset for every pair of datasets of the *input_path*.
For instance, if the input consists of the three datasets "A", "B",
"C", the result will at least contain the 6 datasets "A_vs_B",
"A_vs_C", "B_vs_A", "B_vs_C, "C_vs_A", "C_vs_B". The result dataset "A_vs_B"
uses the feature vectors from dataset "A" as training data and the feature
vectors from dataset "B" as test data.
If the input datasets contain split data, additionally the input
datasets are copied to the result directory so that this would contain
9 datasets. The dataset "X_vs_Y" contains the train data from dataset X
from the respective split for training and the test data from dataset Y for
testing.
A typical operation specification file might look like this
Specification file Parameters
+++++++++++++++++++++++++++++
type
----
Has to be set to *shuffle* to use this operation!
(*obligatory, shuffle*)
input_path
----------
Location of the input data
(*obligatory*)
dataset_constraints
-------------------
Optionally, constraints can be passed to the operation that specify which
datasets are combined based on the dataset name. For instance, the
constraint
'"%(dataset_name1)s".strip("}{").split("}{")[1:] == "%(dataset_name2)s".strip("}{").split("}{")[1:]'
would cause that only datasets are combined,
that were created by the same
preprocessing with the same parameterization.
(*optional, default: []*)
Exemplary Call
++++++++++++++
.. code-block:: yaml
type: shuffle
input_path: "operation_results/2009_8_13_15_8_57"
dataset_constraints:
# Combine only datasets that have been created using the same parameterization
- '"%(dataset_name1)s".strip("}{").split("}{")[1:] == "%(dataset_name2)s".strip("}{").split("}{")[1:]'
"""
import sys
import os
import glob
import time
import yaml
if sys.version_info[0] == 2 and sys.version_info[1] < 6:
import processing
else:
import multiprocessing as processing
import logging
import pySPACE
from pySPACE.missions.operations.base import Operation, Process
from pySPACE.tools.filesystem import create_directory
from pySPACE.resources.dataset_defs.base import BaseDataset
from pySPACE.tools.filesystem import get_author
class ShuffleOperation(Operation):
""" Forwards processing to process
.. todo:: Precalculate one process for each shuffling
"""
def __init__(self, processes, operation_spec, result_directory,
number_processes, create_process=None):
super(ShuffleOperation, self).__init__(processes, operation_spec,
result_directory)
self.number_processes = number_processes
self.create_process = create_process
@classmethod
def create(cls, operation_spec, result_directory, debug = False, input_paths=[]):
""" Factory method that creates a ShuffleOperation
A factory method that creates a ShuffleOperation based on the
information given in the operation specification operation_spec
"""
assert(operation_spec["type"] == "shuffle")
# Determine constraints on datasets that are combined
dataset_constraints = []
if "dataset_constraints" in operation_spec:
dataset_constraints.extend(operation_spec["dataset_constraints"])
# Create the ShuffleProcess (shuffling is not distributed over different
# processes)
number_processes = 1
processes = processing.Queue()
cls._createProcesses(processes, result_directory, input_paths,
dataset_constraints)
# create and return the shuffle operation object
return cls(processes, operation_spec, result_directory, number_processes)
@classmethod
def _createProcesses(cls, processes, result_directory, input_datasets,
dataset_constraints):
"""Function that creates the shuffle process.
Create the ShuffleProcess (shuffling is not distributed over different
processes)
"""
# Create the shuffle process and put it in the execution queue
processes.put(ShuffleProcess(input_datasets, result_directory,
dataset_constraints))
# give executing process the sign that creation is now finished
processes.put(False)
def consolidate(self):
""" Consolidation of the operation's results """
# Just do nothing
pass
class ShuffleProcess(Process):
""" The shuffle process
Combines datasets that fulfill all *dataset_constraints*
"""
def __init__(self, input_dataset, result_directory, dataset_constraints):
super(ShuffleProcess, self).__init__()
self.input_datasets = input_dataset
self.result_directory = result_directory
self.dataset_constraints = dataset_constraints
def __call__(self):
""" Executes this process on the respective modality """
############## Prepare benchmarking ##############
super(ShuffleProcess, self).pre_benchmarking()
for dataset_dir1 in self.input_datasets:
for dataset_dir2 in self.input_datasets:
dataset_name1 = dataset_dir1.split(os.sep)[-2]
dataset_name2 = dataset_dir2.split(os.sep)[-2]
# Check if the input data is split
splitted = len(glob.glob(os.sep.join([dataset_dir1, "data_run0",
"*"]))) > 1
# Check that all constraints are fulfilled for this pair of
# input datasets
if not all(eval(constraint_template % {'dataset_name1': dataset_name1,
'dataset_name2': dataset_name2})
for constraint_template in self.dataset_constraints):
continue
if dataset_name1 == dataset_name2:
if splitted:
# Copy the data
os.symlink(dataset_dir1,
os.sep.join([self.result_directory,
dataset_name1]))
continue
# Determine names of the original data sets the input
# datasets are based on
base_dataset1 = dataset_name1.strip("}{").split("}{")[0]
base_dataset2 = dataset_name2.strip("}{").split("}{")[0]
# Determine target dataset name and create directory
# for it
mixed_base_dataset = "%s_vs_%s" % (base_dataset1,
base_dataset2)
target_dataset_name = dataset_name1.replace(base_dataset1,
mixed_base_dataset)
target_dataset_dir = os.sep.join([self.result_directory,
target_dataset_name])
create_directory(os.sep.join([target_dataset_dir, "data_run0"]))
if splitted:
# For each split, copy the train data from dataset 1 and
# the test data from dataset 2 to the target dataset
for source_train_file_name in glob.glob(os.sep.join([dataset_dir1,
"data_run0",
"*_sp*_train.*"])):
                        # TODO: We have $n$ train sets and $n$ test sets, we
# could use all $n*n$ combinations
target_train_file_name = source_train_file_name.replace(dataset_dir1,
target_dataset_dir)
if source_train_file_name.endswith("arff"):
self._copy_arff_file(source_train_file_name,
target_train_file_name,
base_dataset1,
mixed_base_dataset)
else:
os.symlink(source_train_file_name,
target_train_file_name)
source_test_file_name = source_train_file_name.replace(dataset_dir1,
dataset_dir2)
source_test_file_name = source_test_file_name.replace("train.",
"test.")
target_test_file_name = target_train_file_name.replace("train.",
"test.")
if source_train_file_name.endswith("arff"):
self._copy_arff_file(source_test_file_name,
target_test_file_name,
base_dataset2,
mixed_base_dataset)
else:
os.symlink(source_test_file_name,
target_test_file_name)
else:
# Use the data set from dataset 1 as training set and
# the data set from dataset 2 as test data
for source_train_file_name in glob.glob(os.sep.join([dataset_dir1,
"data_run0",
"*_sp*_test.*"])):
target_train_file_name = source_train_file_name.replace("test.",
"train.")
target_train_file_name = target_train_file_name.replace(dataset_dir1,
target_dataset_dir)
if source_train_file_name.endswith("arff"):
self._copy_arff_file(source_train_file_name,
target_train_file_name,
base_dataset1,
mixed_base_dataset)
else:
os.symlink(source_train_file_name,
target_train_file_name)
source_test_file_name = source_train_file_name.replace(dataset_dir1,
dataset_dir2)
target_test_file_name = target_train_file_name.replace("train.",
"test.")
if source_train_file_name.endswith("arff"):
self._copy_arff_file(source_test_file_name,
target_test_file_name,
base_dataset2,
mixed_base_dataset)
else:
os.symlink(source_test_file_name,
target_test_file_name)
# Write metadata.yaml based on input meta data
input_dataset1_meta = BaseDataset.load_meta_data(dataset_dir1)
output_dataset_meta = dict(input_dataset1_meta)
output_dataset_meta['train_test'] = True
output_dataset_meta['date'] = time.strftime("%Y%m%d_%H_%M_%S")
output_dataset_meta['author'] = get_author()
BaseDataset.store_meta_data(target_dataset_dir,output_dataset_meta)
############## Clean up after benchmarking ##############
super(ShuffleProcess, self).post_benchmarking()
def _copy_arff_file(self, input_arff_file_name, target_arff_file_name,
input_dataset_name, target_dataset_name):
""" Copy the arff files and adjust the relation name in the arff file"""
file = open(input_arff_file_name, 'r')
content = file.readlines()
file.close()
content[0] = content[0].replace(input_dataset_name,
target_dataset_name)
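        # e.g. an "@relation SetA" header line becomes
        # "@relation SetA_vs_SetB" in the shuffled copy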
file = open(target_arff_file_name, 'w')
file.writelines(content)
file.close()
| gpl-3.0 | 2,813,379,751,357,658,600 | 45.148649 | 114 | 0.49978 | false | 5.07618 | true | false | false |
tinytunafish/PING-Mailbox | Mailbox.py | 1 | 1251 | #!/usr/bin/env python3
import os
import pingTracker
from time import strftime
#Set all variables here
API = "o.fdUi5rBICoNHmCf02ANsN5evuPvmZJDv"
pingTrack = pingTracker.pingTracker()
def addToDayCounter(dayArgs):
    if dayArgs == "Monday":
        pingTrack.addToMonday()
    elif dayArgs == "Tuesday":
        pingTrack.addToTuesday()
    elif dayArgs == "Wednesday":
        pingTrack.addToWednesday()
    elif dayArgs == "Thursday":
        pingTrack.addToThursday()
    elif dayArgs == "Friday":
        # assumes the tracker exposes addToFriday(), matching the other days
        pingTrack.addToFriday()
    elif dayArgs == "Saturday":
        pingTrack.addToSaturday()
    elif dayArgs == "Sunday":
        pingTrack.addToSunday()
def pushSummary():
TTL = "Mailbox Alert: Delivery Summary"
MSG = pingTrack.statMostDay()
osCMD = "sh /home/pi/PING-Mailbox/PingPush.sh '%s' '%s' '%s'" % (API, TTL, MSG)
os.system(osCMD)
pingTrack.resetStats()
def pushMsg():
time = strftime("%I:%M %p")
day = strftime("%A")
TTL = "Mailbox Alert: You have recieved a package"
MSG = "You have recieved a package at %s on %s" % (time, day)
osCMD = "sh /home/pi/PING-Mailbox/PingPush.sh '%s' '%s' '%s'" % (API,TTL, MSG)
os.system(osCMD)
addToDayCounter(day)
#TODO: Save how many and at what time each package arrived
#TODO: After 5 packages delivered, send logistic summary notification
| mit | -6,346,126,935,643,209,000 | 24.02 | 80 | 0.701039 | false | 2.64482 | false | false | false |
dchaplinsky/declarations.com.ua | declarations_site/catalog/management/commands/export_aggregated_csv.py | 1 | 2335 | from csv import DictWriter
from datetime import datetime
from elasticsearch_dsl.query import Q
from django.core.management.base import BaseCommand
from catalog.elastic_models import NACPDeclaration
AGGREGATED_FIELD_NAME = 'aggregated'
class Command(BaseCommand):
    help = ('Export aggregated values from NACP declarations '
            '(annual only with corrected declarations resolved by default) into CSV format')
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
def add_arguments(self, parser):
parser.add_argument(
'--export_all',
dest='export_all',
default=False,
action='store_true',
help='Export all declarations (all types, both corrected and originals)',
)
parser.add_argument(
'--filter_future_declarations',
dest='filter_future_declarations',
default=False,
action='store_true',
help='Export only declarations submitted for previous years',
)
parser.add_argument(
'destination',
help='Path to csv file',
)
def handle(self, *args, **options):
to_export = NACPDeclaration.search().source(
include=[AGGREGATED_FIELD_NAME]).query("exists", field=AGGREGATED_FIELD_NAME)
if not options["export_all"]:
to_export = to_export.query(
"bool",
must=[Q("term", intro__doc_type="Щорічна")],
must_not=[Q("exists", field="corrected_declarations")]
)
if options["filter_future_declarations"]:
to_export = to_export.query(
"range",
intro__declaration_year={
"lt": datetime.now().year
}
)
w = None
with open(options["destination"], "w") as fp:
for i, d in enumerate(to_export.scan()):
row = d[AGGREGATED_FIELD_NAME].to_dict()
row['id'] = d.meta.id
if not w:
w = DictWriter(fp, fieldnames=row.keys())
w.writeheader()
w.writerow(row)
if i % 10000 == 0 and i:
self.stdout.write("{} declarations exported".format(i))
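# Typical invocation (a sketch; the management command name follows this
# module's file name, export_aggregated_csv):
#   python manage.py export_aggregated_csv /tmp/aggregated.csv
#   python manage.py export_aggregated_csv --export_all /tmp/aggregated_all.csv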
| mit | 536,416,296,476,105,340 | 31.333333 | 89 | 0.54811 | false | 4.400756 | false | false | false |
LeiDai/meep_metamaterials | effparam.py | 1 | 46957 | #!/usr/bin/env python
#-*- coding: utf-8 -*-
"""#{{{
Plots reflection and transmission of a metamaterial structure
Tries to calculate its effective parameters [Smith2002], avoiding branch jumps
Enables to save to several types of output (cartesian graphs, polar graphs, nice PDF graphs...)
Exports the effective parameters to another data file for further processing
About this script:
* Written in 2012-13 by Filip Dominec (dominecf at the server of fzu.cz).
* Being distributed under the GPL license, this script is free as speech after five beers.
* You are encouraged to use and modify it as you need. Feel free to write me if needed.
* Hereby I thank to the MEEP/python_meep authors and people of meep mailing list who helped me a lot.
TODOs:
* Guess the correct branch for N (using Kramers-Kronig relations?)
* Fix the passivity criterion for Im N > 0, Re Z > 0
"""
import numpy as np
import sys, os, re, matplotlib
matplotlib.use('Agg') ## Enable plotting even in the GNU screen session?
import matplotlib.pyplot as plt
plt.ioff() ## is this useful?
from scipy.optimize import fsolve, fmin
c = 2.99792458e8 # speed of light
## == User settings for postprocessing and plotting ==
frequnit, frequnitname = 1e12, "THz"
#}}}
N_init_branch = -3
N_init_sign = 1
autocorrect_signs = True
Z_init_sign = -1
check_hilbert = 1 ## Verifies if Kramers-Kronig relations hold for N ###XXX
legend_enable = 0
brillouin_boundaries = 1 ## Plots thin lines where the N would exceed the allowed
## range for 0-th Bloch mode
autobranch = 0
plot_publi = 0 ## prepares nice small graphs for publication
savedat = 1 ## saves eff params to PKGraph-compatible ascii file
savedat_wd = 1 ## uses the working directory to save the eff params
plot_polar = 0 ## plots them to polar graphs for diagnostics
plot_bands = 0 ## plots them to k-omega graphs for diagnostics
plot_expe = 1 ## if 'r.dat', 't.dat', 'N.dat', 'Z.dat', 'eps.dat' or 'mu.dat' available, overlay them
find_plasma_frequency = 0 ## find frequencies where epsilon crosses zero
plot_freq_min = None
#plot_freq_max = None
plot_freq_max = 3e12
padding = None
np.seterr(all='ignore') ## do not print warnings for negative-number logarithms etc.
## == </user settings> ==
## == Auxiliary functions ==
def get_simulation_name(argindex=1): #{{{
"""Get the name of the last simulation run.
Priority: 1) parameter, 2) last_simulation_name.txt, 3) working directory"""
cwd = os.getcwd()
if len(sys.argv)>argindex and sys.argv[argindex] != "-" and __name__ == "__main__":
print "Parameter passed:", sys.argv[argindex]
last_simulation_name = sys.argv[argindex]
elif os.path.exists(os.path.join(cwd, 'last_simulation_name.txt')):
print "Loading from", os.path.join(cwd, 'last_simulation_name.txt')
last_simulation_name = os.path.join(cwd, open(os.path.join(cwd, 'last_simulation_name.txt'),'r').read().strip())
else:
print "Error: No input file provided and 'last_simulation_name.txt' not found!"
last_simulation_name = cwd
if (last_simulation_name[-4:] == ".dat"): last_simulation_name = last_simulation_name[:-4] # strip the .dat extension
return last_simulation_name
#}}}
def load_rt(filename, layer_thickness=None, plot_freq_min=None, plot_freq_max=None, truncate=True, padding=None): #{{{
""" Loads the reflection and transmission spectra and simulation settings
Returns:
* frequency axis
* reflection s11 and transmission s12 as complex np arrays
Compatible with the PKGraph text data file with polar data:
* parameters in header like: #param name,value
* column identification like: #column Ydata
* data columns in ascii separated by space
Expects polar data with columns: frequency, s11 ampli, s11 phase, s12 ampli, s12 phase
"""
with open(filename+'.dat') as datafile:
for line in datafile:
if line[0:1] in "0123456789": break # end of file header
value = line.replace(",", " ").split()[-1] # the value of the parameter will be separated by space or comma
if ("layer_thickness" in line) and (layer_thickness == None): d = float(value)
if ("plot_freq_min" in line) and (plot_freq_min == None): plot_freq_min = float(value)
if ("plot_freq_max" in line) and (plot_freq_max == None): plot_freq_max = float(value)
if ("param padding" in line) and (padding == None): padding = float(value)
xlim = (plot_freq_min, plot_freq_max)
(freq, s11amp, s11phase, s12amp, s12phase) = \
map(lambda a: np.array(a, ndmin=1), np.loadtxt(filename+".dat", unpack=True))
## Limit the frequency range to what will be plotted (recommended)
if truncate and len(freq)>1:
(d0,d1) = np.interp((plot_freq_min, plot_freq_max), freq, range(len(freq)))
(freq, s11amp, s11phase, s12amp, s12phase) = \
map(lambda a: a[int(d0):int(d1)], (freq, s11amp, s11phase, s12amp, s12phase))
return freq, s11amp, s11phase, s12amp, s12phase, xlim, (d, plot_freq_min, plot_freq_max, padding)
#}}}
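## A minimal sketch of the file header load_rt() parses (values illustrative;
## the parameter lines may separate name and value by comma or space):
##   #param layer_thickness,1.000000e-04
##   #param plot_freq_min,0.000000e+00
##   #param plot_freq_max,3.000000e+12
##   #param padding,0.000000e+00
## followed by five space-separated columns per data row:
##   frequency  |s11|  phase(s11)  |s12|  phase(s12)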
def shiftmp(freq, s11, shiftplanes):#{{{
""" Adjusts the reflection phase like if the monitor planes were not centered.
For symmetric metamaterial cell, this function is not needed. The symmetry requires that
the monitor planes in front of and behind the mm cell are centered.
However, for an asymmetric metamaterial, the correct position has to be found. Otherwise
the Fresnel inversion gives negative imaginary part of N and/or negative real part of Z, which
is quite spooky for passive medium.
Even such metamaterials, however, may be properly homogenized if we define the
position of monitor planes as a function of frequency. We can assume that:
1) This optimum shift shall hold for all simulations with one or more unit cells.
2) When the wave direction is reversed (to get s21, s22 parameters), the shift should be negated.
These rules should enable us to homogenize any asymmetric non-chiral metamaterial.
Note that this shifting is still an experimental technique and has to be tested out thoroughly.
"""
return np.array(s11) * np.exp(1j*np.array(shiftplanes)/(c/freq) * 2*np.pi * 2)
#}}}
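## Quick sanity check of the sign convention above (a minimal sketch): moving
## the monitor plane by a quarter wavelength adds half a turn to the
## round-trip phase, i.e. it flips the sign of the reflection coefficient.
_f_chk = np.array([1e12])
assert np.allclose(shiftmp(_f_chk, np.array([1.+0j]), (c/_f_chk)/4), -1.)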
def find_maxima(x, y, minimum_value=.1):#{{{
"""
Returns the x points where
1) y has a local maximum (i. e. dx/dy goes negative) AND
2) where y is above minimum_value
"""
d = y[1:-1] - y[0:-2] ## naïve first derivative
maxima = x[1:][np.sign(d[0:-2])-np.sign(d[1:-1]) + np.sign(y[2:-2]-minimum_value)==3]
return maxima
#}}}
def reasonable_ticks(a): #{{{
""" Define the grid and ticks a bit denser than by default """
x=np.trunc(np.log10(a)); y=a/10**x/10
return (10**x, 2*10**x,5*10**x)[np.int(3*y)]
#}}}
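## reasonable_ticks() snaps the requested spacing onto a 1/2/5-per-decade
## grid, e.g. (a minimal sketch):
assert np.isclose(reasonable_ticks(3e11), 1e11) and np.isclose(reasonable_ticks(7e11), 5e11)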
## == Homogenisation functions (processing whole np.array at once) ==
def polar2complex(amp, phase): return amp*np.exp(1j*phase) #{{{
#}}}
def unwrap_ofs(p, ofs):#{{{
""" Similar to np.unwrap, but take into account the initial offset.
Increment this offset if needed, and return it as the second return value.
"""
return np.unwrap(p)+ofs, (np.unwrap(p)-p)[-1]+ofs
#}}}
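## unwrap_ofs() allows the phase to be unwrapped chunk by chunk: overlap the
## chunks by one sample and pass the offset returned for one chunk into the
## call for the next, and the curve stays continuous across the seam.
## A minimal sketch:
_p_chk = np.linspace(0, 5*np.pi, 200) % (2*np.pi)        # wrapped phase ramp
_u1_chk, _ofs_chk = unwrap_ofs(_p_chk[:100], 0)
_u2_chk = unwrap_ofs(_p_chk[99:], _ofs_chk)[0]           # one-sample overlap
assert np.allclose(np.hstack([_u1_chk, _u2_chk[1:]]), np.linspace(0, 5*np.pi, 200))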
def rt2n(frequency, s11, s12, d, init_branch=0, init_sign=1, uo=None): #{{{
""" Invert Fresnel equations to obtain complex refractive index N, with autodetection of arccosine branch#{{{
Accepts:
* s11 - np.array of reflection,
* s12 - np.array of transmission,
* d - layer thickness
Returns: a tuple of three np.arrays
* the retrieved effective index of refraction,
* the arccos() branch used for its calculation,
* the debug information
Technical details are commented in the code.
This algorithm implements the method for effective refractive index retrieval from the
s11 and s12 scattering coefficients [Smith2002].
Such calculation is not unambiguous due to multiple branches of complex arccosine. If the branches
of the solution are not switched at proper frequencies, the index of refraction often becomes discontinuous
and generally wrong.
This function fixes this problem by the analysis of the arccos() behaviour. It requires that the r(f) and t(f)
are supplied as whole spectra. It is then possible to trace the argument passed to arccos() and properly choose
the correct branch for whole spectral region.
Limitations of this procedure:
* If structure is highly reflective at lowest frequencies (metallic wires etc.), the N branch cannot be determined
reliably. To fix this, increase 'plot_freq_min' (the start of computed frequency range), or provide init_branch.
Initial branch choosing is not implemented. Its value may be optionally provided in the argument init_branch and
init_sign. The user should choose theme so that the initial values for
i) the curves are continuous
ii) and: Im(N) > 0 (for a nonamplifying medium)
* The np.unwrap() function requires that the frequency is sampled fine enough. If the branch is wrongly detected
at sharp resonances, there are good news: You probably do not have to run the simulation longer; often is
sufficient to pad the time-domain data with zeroes.
* For some simulations, there is a weird _continuous_ branch transition at higher frequencies for thicker
metamaterial samples. The returned index of refraction breaks Kramers-Kronig relations.
However, the Hilbert transform of the imaginary part of N" gives proper data. Single-cell simulation also gives
proper data...
Putting the layers far apart alleviates this for 2 cells: can it be related to higher-order Bloch modes?
"""#}}}
    if uo is None: uo = [0, 0, 0, 0]  ## fresh offsets; a shared mutable default would leak between calls
    ## Argument passed to arccos():
arg = (1+0j-s11**2+s12**2)/2/(s12)
## Count passing through complex arccos() branch cuts in the complex plane:
lu, uo[0] = unwrap_ofs(np.angle(arg + 1. + 0e-3j) + np.pi, uo[0])
ru, uo[1] = unwrap_ofs(np.angle(arg - 1. + 0e-3j), uo[1])
lbc = np.floor(lu/2/np.pi)
rbc = np.floor(ru/2/np.pi)
anl = (-1)**(lbc) ## left cut: (-inf .. -1]
anr = (-1)**(rbc) ## right cut: [1 .. +inf)
## Retrieve the sign and branch of the arccos()
sign = anr*anl*init_sign
lbr, uo[2] = unwrap_ofs(np.angle(-anr + 1j*anl) + np.pi, uo[2])
rbr, uo[3] = unwrap_ofs(np.angle(+anr - 1j*anl) + np.pi, uo[3])
branch = np.floor(lbr/2/np.pi) + np.floor(rbr/2/np.pi) + 1 + init_branch
#branch = np.floor(np.unwrap(np.angle(rbc + 1j*lbc))/2/np.pi) + \
#np.floor(np.unwrap(np.angle(-rbc - 1j*lbc))/2/np.pi) + 1 + init_branch
## Standard Fresnel inversion:
k = 2*np.pi * frequency/c # the wave vector
N = np.conj((np.arccos(arg)*sign + 2*np.pi*branch) / (k*d))
#if abs(frequency[-1]-387.3e9)<1e9: ## debug
#print "f branch uo", frequency, branch, uo
return N, uo, (branch, sign, arg, anr, anl)
""" For diagnostics, you may also wish to plot these values:#{{{
#argLog = np.e**(1j*np.angle(arg))*np.log(1+abs(arg)) ## shrinked graph to see the topology
plt.plot(freq, arg.real, color="#aaaaaa", label=u"$arg$'", lw=1)
plt.plot(freq, arg.imag, color="#aaaaaa", label=u"$arg$'", lw=1, ls='--')
#plt.plot(freq, argLog.real, color="#000000", label=u"$arg$'", lw=1)
#plt.plot(freq, argLog.imag, color="#000000", label=u"$arg$'", lw=1, ls="--")
#plt.plot(freq, np.ones_like(freq)*np.log(2), color="#bbbbbb", label=u"$arg$'", lw=1)
#plt.plot(freq, -np.ones_like(freq)*np.log(2), color="#bbbbbb", label=u"$arg$'", lw=1)
#plt.plot(freq, anr, color="#aaaaff", label=u"$anr$'", lw=1)
#plt.plot(freq, anl, color="#aaffaa", label=u"$anr$'", lw=1)
#plt.plot(freq, anr_trunc, color="#0000ff", label=u"$anrR$'", lw=1)
#plt.plot(freq, anl_trunc*.9, color="#00dd00", label=u"$anrR$'", lw=1)
#plt.plot(freq, branch*.8, color="#dd0000", label=u"$turn$'", lw=2)
#plt.plot(freq, sign*.7, color="#ffbb00", label=u"$sign$'", lw=2)
"""#}}}
#}}}
def rt2z(s11, s12, init_sign=1, uo=0):#{{{
""" Invert Fresnel equations to obtain complex impedance Z
This function complements the refractive index obtained by rt2n() with the effective impedance.
The computation is much easier, because the only unambiguous function is the complex square root.
It allows two solutions differing by their sign. To prevent discontinuities, we calculate the
square root in polar notation.
Initial sign may be supplied by the user.
Returns complex impedance as np.array
"""
#def get_phase(complex_data):
#""" Unwraps and shifts the phase from Fourier transformation """
#if len(complex_data) <= 1: return np.angle(complex_data)
#phase, uo = unwrap,ofs(np.angle(complex_data), uo)
#center_phase = phase[min(5, len(phase)-1)] ## 5 is chosen to avoid zero freq.
#return phase-(round(center_phase/2/np.pi)*2*np.pi)
## Calculate square root arguments
Zarg1=((1+s11)**2 - s12**2)
Zarg2=((1-s11)**2 - s12**2)
## Get impedance from polar notation of (Zarg1/Zarg2)
Zamp = abs(Zarg1 / Zarg2)**.5 ## amplitude of square root
if hasattr(Zarg1, '__len__') and len(Zarg1)>1:
Zphase, uo = unwrap_ofs(np.angle(Zarg1/Zarg2), uo) ## phase of square root (without discontinuities) TODO
else:
Zphase = np.angle(Zarg1/Zarg2)
uo = 0
Z = np.conj(np.exp(1j*Zphase/2) * Zamp) * init_sign
return Z, uo
"""
### Possible experimental improvements:
EnforceZrePos = True
FlipZByPhaseMagic = True
Zrealflipper = 1 ## unphysical if not 1
Zconjugator = 1
## Exception to the Re(Z)>0 rule:
Z_turnaround = (-1)**np.round(Zphase/np.pi)
if FlipZByPhaseMagic:
Z = Z * Z_turnaround
## For testing only
Z = (Z.real * Zrealflipper + 1j*Z.imag * Zconjugator)
if EnforceZrePos:
Z *= np.sign(Z.real)
"""
#}}}
def nz2epsmu(N, Z):#{{{
""" Accepts index of refraction and impedance, returns effective permittivity and permeability"""
return N/Z, N*Z
#}}}
def epsmu2nz(eps, mu):#{{{
""" Accepts permittivity and permeability, returns effective index of refraction and impedance"""
N = np.sqrt(eps*mu)
N *= np.sign(N.imag)
Z = np.sqrt(mu / eps)
return N, Z
#}}}
def nz2rt(freq, N, Z, d):#{{{
""" Returns the complex reflection and transmission parameters for a metamaterial slab.
Useful for reverse calculation of eps and mu (to check results)
Accepts:
* frequency array,
* effective refractive index N,
* effective impedance Z,
* vacuum wave vector k and
* thickness d of the layer.
"""
## Direct derivation from infinite sum of internal reflections
k = 2*np.pi * freq/c # the wave vector
t1 = 2 / (1+Z) # transmission of front interface
t2 = 2*Z / (Z+1) # transmission of back interface
t1prime = Z*t1
r1=(Z-1)/(Z+1) # reflection of front interface
r2=(1-Z)/(1+Z) # reflection of back interface
s12 = t1*t2*np.exp(1j*k*N*d) / (1 + r1*r2*np.exp(2j*k*N*d))
s11 = r1 + t1prime*t1*r2*np.exp(2j*k*N*d)/(1+r1*r2*np.exp(2j*k*N*d))
return s11, s12
"""
Note: these results may be also re-expressed using goniometric functions.
Equations from Smith2002 or Cai-Shalaev, mathematically equivalent to those above
(only Smith's s11 has negative sign convention).
s12new = 1/(np.cos(N*k*d) - .5j*(Z+1/Z)*np.sin(N*k*d))
s11new = -s12new * .5j*(Z-1/Z)*np.sin(N*k*d)
TODO: implement also for other surrounding media than vacuum.
"""
#}}}
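## Round-trip sanity check (a minimal sketch): generate r and t of a known
## homogeneous slab with nz2rt() and recover N and Z with rt2n()/rt2z().
## nz2rt() is written in the physics exp(+i k N d) convention while the
## retrieval conjugates its result (FDTD exp(-i omega t) data), so the
## synthetic s-parameters are conjugated before being fed back. init_branch=-1
## cancels the +1 offset the branch detector assigns in this simple band
## (0 < Re(Nkd) < pi).
_f_rt = np.linspace(1e9, 5e11, 100)
_N0, _Z0, _d0 = 2 + .02j, 2 + .1j, 1e-4
_s11_rt, _s12_rt = nz2rt(_f_rt, _N0*np.ones_like(_f_rt), _Z0*np.ones_like(_f_rt), _d0)
_N_rt = rt2n(_f_rt, np.conj(_s11_rt), np.conj(_s12_rt), _d0, init_branch=-1, init_sign=1)[0]
_Z_rt = rt2z(np.conj(_s11_rt), np.conj(_s12_rt), init_sign=1)[0]
assert np.allclose(_N_rt, _N0) and np.allclose(_Z_rt, _Z0)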
## == Auxiliary functions for monitor-plane fitting ==
def error_func(N1,Z1,N2,Z2,lastdif=0,p0=[0]):#{{{
""" Used for optimization: tries to match N1,N2 and Z1,Z2, avoiding forbidden values """
return abs(N1-N2) + abs(Z1-Z2) + \
lastdif + (abs(p0[0])*1e4)**2 +\
(abs(np.imag(N1))-np.imag(N1))*100 + (abs(np.imag(N2))-np.imag(N2))*100 + \
(abs(np.real(Z1))-np.real(Z1))*100 + (abs(np.real(Z2))-np.real(Z2))*100
#}}}
def eval_point(p0):#{{{
freq_p = freq[i-1:i+1]
s11p1 = shiftmp(freq[i-1:i+1], s11[i-1:i+1], p0[0])
s12p1 = s11[i-1:i+1]
new_N1, Nuo1x = rt2n(freq_p, s11p1, s12p1, d, init_branch=0, uo=Nuo1)[0:2]
new_Z1, Zuo1x = rt2z(s11p1, s12[i-1:i+1], uo=Zuo1)
s11p2 = shiftmp(freq[i-1:i+1], s11_2[i-1:i+1], p0[0])
s12p2 = s11_2[i-1:i+1]
new_N2, Nuo2x = rt2n(freq_p, s11p2, s12p2, d2, init_branch=0, uo=Nuo2)[0:2]
new_Z2, Zuo2x = rt2z(s11p2, s12[i-1:i+1], uo=Zuo2)
    lastdif = abs(p0s[-1]-p0[0])*1e5 if not np.isnan(p0s[-1]) else 0
return error_func(new_N1[1], new_Z1[1], new_N2[1], new_Z2[1], lastdif=lastdif)
#}}}
## --- Calculation --------------------------------------------
## Get reflection and transmission data
last_simulation_name = get_simulation_name()
freq, s11amp, s11phase, s12amp, s12phase, xlim, (d, plot_freq_min, plot_freq_max, padding) = load_rt(last_simulation_name,
plot_freq_min=plot_freq_min, plot_freq_max=plot_freq_max, truncate=True, padding=padding)
## Compensating the additional padding of the monitor planes
s11 = shiftmp(freq, polar2complex(s11amp, s11phase), padding*np.ones_like(freq))
s12 = shiftmp(freq, polar2complex(s12amp, s12phase), padding*np.ones_like(freq))
## Calculate N, Z and try to correct the signs (TODO use K-K branch selection!)
if len(freq)>2:
N, N_uo, N_debug = rt2n(freq, s11, s12, d, init_branch=N_init_branch, init_sign=N_init_sign)
#print "N before correctio1", N[0:10]
Z, Z_uo = rt2z(s11, s12, init_sign=Z_init_sign)
if autocorrect_signs:
## Fix N sign so that N.imag > 0
if sum(np.clip(N.imag,-10., 10.))<0:
N *= -1
## Fix N branch so that N.real does not diverge at low frequencies
ii = 3
det_branch = np.round(2*np.real(N[ii]*freq[ii]/c*d))
#print "N before correction", N[0:10]
N -= det_branch / (freq/c*d)/2
#print "N after correction", N[0:10]
## Fixing Z sign so that Z.real > 0
#Z *= np.sign(Z.real)
if sum(np.clip(Z.real,-10., 10.))<0:
Z *= -1
#Z, Z_uo = rt2z(s11, s12, init_sign=Z_init_sign)
else:
N = np.zeros_like(freq)
Z = np.zeros_like(freq)
#}}}
## Detect resonances
losses = 1-abs(s11)**2-abs(s12)**2
loss_maxima = np.array(find_maxima(freq,losses))
print "Detected loss maxima at frequencies:", loss_maxima
np.savetxt("last_found_modes.dat", loss_maxima)
## Get epsilon and mu
eps, mu = nz2epsmu(N, Z)
## Verify the results by back-calculating s11, s12
s11backcalc, s12backcalc = nz2rt(freq, N, Z, d)
## Build the debug plots
arg = (1+0j-s11**2+s12**2)/2/(s12)
argLog = np.e**(1j*np.angle(arg))*np.log(1+abs(arg)) ## shrinked graph to see the topology
## --- Plotting to cartesian graphs -------------------------------------------- #{{{
plt.figure(figsize=(15,15))
xticks = np.arange(xlim[0], xlim[1], reasonable_ticks((xlim[1]-xlim[0])/3))
xnumbers = [("%.2f"%(f/frequnit) if abs(f%reasonable_ticks((xlim[1]-xlim[0])/5))<(frequnit/1000) else "") for f in xticks]
marker = "s" if (len(freq) < 20) else "" # Use point markers for short data files
subplot_number = 4
## Plot reflection and transmission amplitudes
plt.subplot(subplot_number, 1, 1)
plt.plot(freq, s11amp, marker=marker, color="#AA4A00", label=u'$|s_{11}|$')
plt.plot(freq, s12amp, marker=marker, color="#004AAA", label=u'$|s_{12}|$')
plt.plot(freq, losses, color="#aaaaaa", label=u'loss')
if plot_expe and os.path.exists('t.dat'):
tf, ty = np.loadtxt('t.dat', usecols=list(range(2)), unpack=True)
plt.plot(tf*frequnit, ty, lw=0, color='#004AAA', marker='o')
if plot_expe and os.path.exists('../t00kVcm_Comsol.dat'): ## XXX
tf, ty = np.loadtxt('../t00kVcm_Comsol.dat', usecols=list(range(2)), unpack=True)
plt.plot(tf*frequnit, ty, lw=0, color='#4A00AA', marker='o')
if plot_expe and os.path.exists('../t90kVcm_Comsol.dat'):
tf, ty = np.loadtxt('../t90kVcm_Comsol.dat', usecols=list(range(2)), unpack=True)
plt.plot(tf*frequnit, ty, lw=0, color='#00AA4A', marker='s')
plt.ylabel(u"Amplitude"); plt.ylim((-0.1,1.1)); plt.xlim(xlim)
#plt.xticks(xticks, xnumbers); plt.minorticks_on(); plt.grid(True)
if legend_enable: plt.legend(loc="upper right");
#for lm in loss_maxima: plt.axvspan(lm,lm+1e8, color='r')
## Plot r and t phase
# (Note: phase decreases with frequency, because meep uses the E=E0*exp(-i omega t) convention )
plt.subplot(subplot_number, 1, 2)
plt.plot(freq, np.unwrap(np.angle(s11))/np.pi, marker=marker, color="#AA4A00", label=u'$\\phi(s_{11})/\\pi$')
plt.plot(freq, np.unwrap(np.angle(s12))/np.pi, marker=marker, color="#004AAA", label=u'$\\phi(s_{12})/\\pi$')
#
#plt.plot(freq, np.unwrap(np.angle(s12))/np.pi + np.unwrap(np.angle(s11))/np.pi, marker=marker, color="#888AAA", label=u'$(\\phi(s_{11})+\\phi(s_{11}))/\\pi$')
#plt.plot(freq, np.unwrap(np.angle(s12))/np.pi - np.unwrap(np.angle(s11))/np.pi, marker=marker, color="#AA8A88", label=u'$(\\phi(s_{11})-\\phi(s_{11}))/\\pi$')
#plt.plot(freq, 2*np.unwrap(np.angle(s12))/np.pi + np.unwrap(np.angle(s11))/np.pi, marker=marker, color="#8A88AA", label=u'$(2\\phi(s_{11})+\\phi(s_{11}))/\\pi$')
#plt.plot(freq, 2*np.unwrap(np.angle(s12))/np.pi - np.unwrap(np.angle(s11))/np.pi, marker=marker, color="#8AAA88", label=u'$(2\\phi(s_{11})-\\phi(s_{11}))/\\pi$')
#plt.plot(freq, np.unwrap(np.angle(s12))/np.pi + 2*np.unwrap(np.angle(s11))/np.pi, marker=marker, color="#88AA8A", label=u'$(\\phi(s_{11})+2\\phi(s_{11}))/\\pi$')
#plt.plot(freq, np.unwrap(np.angle(s12))/np.pi - 2*np.unwrap(np.angle(s11))/np.pi, marker=marker, color="#AA888A", label=u'$(\\phi(s_{11})-2\\phi(s_{11}))/\\pi$')
# Optional: debugging curves(branch, sign, arg, anr, anl)
if len(freq)>2:
#plt.plot(freq, N_debug[0]*.95, color="#dd0000", label=u"$br$", lw=1.6)
#plt.plot(freq, N_debug[1]*.90, color="#dd8800", label=u"$si$", lw=1.6)
#plt.plot(freq, N_debug[2].real, color="#00dd00", label=u"$arg^'$", lw=.6, ls='-')
#plt.plot(freq, N_debug[2].imag, color="#00dd00", label=u"$arg^{''}$", lw=.6, ls='--')
#plt.plot(freq, np.sign(N_debug[2].imag), color="#008800", label=u"sign$arg^{''}$", lw=.3, ls='-')
#plt.plot(freq, np.arccos(N_debug[2]).real, color="#0000dd", label=u"arccos$arg^'$", lw=1.6, ls='-')
#plt.plot(freq, np.log10(np.pi-np.arccos(N_debug[2]).real), color="#0000dd", label=u"arccos$arg^'$", lw=.6, ls='-')
#plt.plot(freq, np.arccos(N_debug[2]).imag, color="#0000dd", label=u"arccos$arg^{''}$", lw=1.6, ls='--')
#plt.plot(freq, np.log10(abs(N_debug[2].imag)), color="#000000", label=u"log$arg^{''}$", lw=.6, ls='--')
#plt.plot(freq, abs(N_debug[2] - (1+0j)), color="#0088dd", label=u"$|arg-1|$", lw=2, ls='-')
#plt.plot(freq, abs(N_debug[2] + (1+0j)), color="#8800dd", label=u"$|arg+1|$", lw=2, ls='-')
#plt.plot(freq, np.log10(abs(N_debug[2] - (1+0j))), color="#0088dd", label=u"", lw=1, ls='-')
#plt.plot(freq, np.log10(abs(N_debug[2] + (1+0j))), color="#8800dd", label=u"", lw=1, ls='-')
#plt.plot(freq, np.sign(N_debug[2].imag), color="#00dd00", label=u"$sgn arg^{''}$", lw=.6, ls=':')
plt.plot(freq, -np.ones_like(freq), color="k", label=u"", lw=.3, ls='-')
plt.plot(freq, np.ones_like(freq), color="k", label=u"", lw=.3, ls='-')
if autobranch:
# Detection of key points in the spectrum (PBG boundaries, branch skips etc.)
def find_maxima(x, y, minimum_value=.1):
"""
Returns the x points where
1) y has a local maximum (i. e. dx/dy goes negative) AND
2) where y is above minimum_value
"""
d = y[1:-1] - y[0:-2] ## naïve first derivative
maxima = x[1:][np.sign(d[0:-2])-np.sign(d[1:-1]) + np.sign(y[2:-2]-minimum_value)==3]
return maxima
def find_maxima_indices(x, y, minimum_value=.1):
"""
Returns the x points where
1) y has a local maximum (i. e. dx/dy goes negative) AND
2) where y is above minimum_value
"""
d = y[1:-1] - y[0:-2] ## naïve first derivative
maximai = np.arange(1,len(x), dtype=np.dtype(np.int16))[np.sign(d[0:-2])-np.sign(d[1:-1]) + np.sign(y[2:-2]-minimum_value)==3]
return maximai
argPmin = find_maxima_indices(freq, -abs(N_debug[2] - (1+0j)), minimum_value=-np.inf)
argNmin = find_maxima_indices(freq, -abs(N_debug[2] + (1+0j)), minimum_value=-np.inf)
## (todo) check: maybe required, maybe not
#argNmax = find_maxima_indices(freq, abs(N_debug[2] + (1+0j)), minimum_value=-np.inf)
#plt.plot(freq[argNmax], np.zeros_like(argNmax), marker='o', color="#dd0000")
#allindices = np.hstack([np.array([0]), argPmin, argNmin, argNmax])
## Concatenate & sort all indices of interesting points
allindices = np.hstack([np.array([0]), argPmin, argNmin])
allindices.sort()
## Remove duplicate indices
allindices = np.hstack([allindices[0], [x[0] for x in zip(allindices[1:],allindices[:-1]) if x[0]!=x[1]]])
plt.plot(freq[allindices], np.zeros_like(allindices), marker='x', color="k")
## Scan through all photonic bands/bandgaps, seleting the correct N branch
print 'allindices', allindices
#N_init_branch = 0
print 'N_init_sign', N_init_sign
#N_init_sign = -1
#pN_uo = [0,0,0,0]
pN_uo = [2*np.pi,2*np.pi,2*np.pi,0]
det_branch = 0
#for i in [0]: ## whole spectrum
#i1 = 0
#i2 = len(freq)-1
for i in range(len(allindices)-1): ## spectrum by chunks
for q in (0,1):
if q==0:
print 'LONG ',
i1 = allindices[i]
i2 = allindices[i+1]-1
#i2 = allindices[i+1]+1 ## .. works for 'q in [0]'
else:
print 'SHORT',
i1 = allindices[i+1]-1
i2 = allindices[i+1]+1
if i1>=i2: continue
pfreq = freq[i1:i2]
if not q and pfreq[0] > 600e9: break
pts = np.arange(10000)[i1:i2]; print pts[0], pts[-1],; print pfreq[0]/1e9,
ps11 = s11[i1:i2]
ps12 = s12[i1:i2]
print 'start=', np.array(pN_uo)/np.pi,
## Plot oldschool N
pN_uo_old = pN_uo
pN, pN_uo, pN_debug = rt2n(pfreq, ps11, ps12, d, init_branch=N_init_branch, init_sign=N_init_sign, uo=pN_uo)
#if q!=0: pN_uo = pN_uo_old
print 'end=', np.array(pN_uo)/np.pi
if i == 0:
try:
#print len(pN)
ii = 0
det_branch = np.round(2*np.real(pN[ii]*freq[ii]/c*d))
#print 'det_branch', det_branch
except:
pass
#print "N before correction", N[0:10]
pN -= det_branch / (pfreq/c*d)/2
plt.plot(pfreq, pN.real, lw=1.2, marker='o', markersize=2)
#plt.plot(pfreq, pN.imag, lw=.8, ls='--')
## Plot oldschool UO
            #plt.plot(pfreq, np.ones_like(pfreq)*pN_uo_old[0]/10, lw=3, c='#8888ff')
#plt.plot(pfreq, np.ones_like(pfreq)*pN_uo_old[1]/10, lw=3, c='#88ff88', ls='-')
#plt.plot(pfreq, np.ones_like(pfreq)*pN_uo_old[2]/10, lw=3, c='#ff8888', ls='-')
#plt.plot(pfreq, np.ones_like(pfreq)*pN_uo_old[3]/10, lw=3, c='#88ffff', ls='-')
plt.ylabel(u"Phase"); None
plt.ylim((-15,15))
plt.xlim(xlim) # XXX
#plt.xlim((00e9, 440e9))
plt.xticks(xticks, xnumbers); plt.minorticks_on(); plt.grid(True)
if legend_enable: plt.legend();
## Plot Z, N and figure-of-merit
plt.subplot(subplot_number, 1, 3)
if brillouin_boundaries:
for i in range(1,4):
plt.plot(freq, c/(2*freq*d)*i, color="#000000", label=u'', ls='-', lw=.5, alpha=.5)
plt.plot(freq, -c/(2*freq*d)*i, color="#000000", label=u'', ls='-', lw=.5, alpha=.5)
if check_hilbert and len(freq)>1:
import scipy.fftpack
N[0] = N[1] ## avoid NaN
#np.kaiser(len(N), 5)
N_KK = scipy.fftpack.hilbert(N.real + 1j*abs(N.imag)) / 1j
plt.plot(freq, np.real(N_KK), color="#FF9900", label=u"$N^{'}_{KK}$", alpha=1)
plt.plot(freq, np.imag(N_KK), color="#FF9900", label=u'$N^{''}_{KK}$', ls='--', alpha=1)
plt.plot(freq, np.real(N_KK)-np.real(N), color="#99FF00", label=u"$\\Delta N^{'}_{KK}$", alpha=.5)
plt.plot(freq, np.imag(N_KK)-np.imag(N), color="#99FF00", label=u'$\\Delta N^{''}_{KK}$', ls='--', alpha=.5)
Z[0] = Z[1]
Z_KK = scipy.fftpack.hilbert(Z.real + 1j*Z.imag) / 1j ## Why minus needed?
#plt.plot(freq, np.real(Z_KK), color="#0099FF", label=u"$Z^{'}_{KK}$", alpha=.3)
#plt.plot(freq, np.imag(Z_KK), color="#4499FF", label=u'$Z^{''}_{KK}$', ls='--', alpha=.3)
DZr = np.real(Z_KK)-np.real(Z)
DZi = np.imag(Z_KK)-np.imag(Z)
#plt.plot(freq, DZr, color="#DDDD00", label=u"$\\Delta Z^{'}_{KK}$", alpha=.3)
#plt.plot(freq, DZi, color="#DDDD44", label=u'$\\Delta Z^{''}_{KK}$', ls='--', alpha=.3)
#plt.plot(freq[1:], (DZr[1:]+DZr[:-1])/2, color="#DDDD00", label=u"$\\Delta Z^{'}_{KK}$", alpha=.31)
#plt.plot(freq[1:], (DZi[1:]+DZi[:-1])/2, color="#DDDD44", label=u'$\\Delta Z^{''}_{KK}$', ls='--', alpha=.31)
plt.plot(freq, np.real(N), color="#33AA00", label=u"$N$'")
plt.plot(freq, np.imag(N), color="#33AA33", label=u'$N$"', ls='--')
plt.plot(freq, np.real(Z), color="#0044DD", label=u"$Z$'")
plt.plot(freq, np.imag(Z), color="#4466DD", label=u'$Z$"', ls='--')
plt.plot(freq, np.log(-(np.real(N)/np.imag(N)))/np.log(10),
color="#FF9922", ls=":", label=u"$N^'<0$ FOM")
plt.plot(freq, np.log((np.real(N)/np.imag(N)))/np.log(10), \
color="#BB22FF", ls=":", label=u"$N^'>0$ FOM")
plt.ylabel(u"Value");
plt.ylim((-5., 15.));
plt.xlim(xlim);
plt.xticks(xticks, xnumbers); plt.minorticks_on(); plt.grid(True)
if legend_enable: plt.legend();
## 4) Plot epsilon and mu
plt.subplot(subplot_number, 1, 4)
if find_plasma_frequency:
try:
from scipy.optimize import fsolve
x, y = freq, eps.real
estimates = x[np.where(np.diff(np.sign(y)))[0]]
print "Plasma frequency (eps=0) at:", fsolve(lambda x0: np.interp(x0, x, y), estimates)
except:
print "Plasma frequency (epsilon(f) == 0) detection failed"
plt.xlabel(u"Frequency [%s]" % frequnitname)
if plot_expe and os.path.exists('eps.dat'):
tf, ty = np.loadtxt('eps.dat', usecols=list(range(2)), unpack=True)
plt.plot(tf*frequnit, ty, lw=0, color='#AA0088', marker='o') ## XXX
plt.plot(tf*frequnit, -ty, lw=0, color='#AA8888', marker='s') ## XXX
#plt.plot(tf , ty, lw=0, color='#AA0088', marker='o') ## XXX
if plot_expe and os.path.exists('mu.dat'):
tf, ty = np.loadtxt('mu.dat', usecols=list(range(2)), unpack=True)
plt.plot(tf*frequnit, ty, lw=0, color='#AA8800', marker='o') ## XXX
plt.plot(tf*frequnit, -ty, lw=0, color='#AA8888', marker='s') ## XXX
#plt.plot(tf , ty, lw=0, color='#AA0088', marker='o') ## XXX
if check_hilbert and len(freq)>1:
import scipy.fftpack
eps[0] = 0 ## avoid NaN
eps_KK = scipy.fftpack.hilbert(eps.real + 1j*abs(eps.imag)) / 1j
plt.plot(freq, np.real(eps_KK), color="#FF9900", label=u"$eps^{'}_{KK}$", alpha=.5)
plt.plot(freq, np.imag(eps_KK), color="#FF9900", label=u'$eps^{''}_{KK}$', ls='--', alpha=.5)
plt.plot(freq, np.real(eps_KK)-np.real(eps), color="#FF0099", label=u"$eps^{'}_{KK}$", alpha=.5)
plt.plot(freq, np.imag(eps_KK)-np.imag(eps), color="#FF0099", label=u'$eps^{''}_{KK}$', ls='--', alpha=.5)
mu[0] = 0
    mu_KK = scipy.fftpack.hilbert(mu.real + 1j*abs(mu.imag)) / 1j
plt.plot(freq, np.real(mu_KK), color="#0099FF", label=u"$mu^{'}_{KK}$", alpha=.5)
plt.plot(freq, np.imag(mu_KK), color="#4499FF", label=u'$mu^{''}_{KK}$', ls='--', alpha=.5)
plt.plot(freq, np.real(mu_KK)-np.real(mu), color="#0099FF", label=u"$mu^{'}_{KK}$", alpha=.5)
plt.plot(freq, np.imag(mu_KK)-np.imag(mu), color="#4499FF", label=u'$mu^{''}_{KK}$', ls='--', alpha=.5)
plt.plot(freq, np.real(eps), color="#AA0088", label=u"$\\varepsilon_{eff}$'")
plt.plot(freq, np.imag(eps), color="#AA66DD", label=u'$\\varepsilon_{eff}$"', ls='--')
plt.plot(freq, np.real(mu), color="#AA8800", label=u"$\\mu_{eff}$'")
plt.plot(freq, np.imag(mu), color="#AA8844", label=u'$\\mu_{eff}$"', ls='--')
plt.ylabel(u"Value"); plt.ylim((-1000.,1000.)); plt.yscale('symlog', linthreshy=10.); plt.xlim(xlim)
#plt.xticks(xticks, xnumbers); plt.minorticks_on(); plt.grid(True)
if legend_enable: plt.legend();
## 5) Verification of calculated data by calculating reflection and transmission again
plt.subplot(subplot_number, 1, 1)
plt.plot(freq, abs(s11backcalc), color="#FA9962", label=u'$|s_{11FD}|$', ls='--')
plt.plot(freq, abs(s12backcalc), color="#6299FA", label=u'$|s_{12FD}|$', ls='--')
plt.xticks(xticks, xnumbers); plt.minorticks_on(); plt.grid(1)
plt.xlim(xlim); plt.ylim((-0.1,1.1))
## Final plotting
plt.savefig(last_simulation_name+".png", bbox_inches='tight')
#}}}
## --- Plotting to k-omega graph -------------------------------------------- #{{{
if plot_bands and not os.path.exists("band"): os.mkdir("band")
if plot_bands and os.path.isdir("band"):
plt.figure(figsize=(8,8))
plt.plot(np.arcsin(np.sin(np.real(N*freq*d/c) * np.pi)) / np.pi, freq, color="#33AA00", label=u"$k$'")
plt.plot(np.imag(N*freq*d/c), freq, color="#33AA33", label=u'$\\kappa$', ls='--')
## Detection of bandgap: ratio of the real to the imaginary part of complex wavenumber
## the real part however may reach borders of Brillouin zone: we will use its sine
try:
realpart = np.arcsin(np.sin(np.pi * 2*np.real(N*freq/c*d)))
imagpart = np.abs(np.imag(N*freq/c*d))
pbg_indicator = np.sign(abs(realpart) - abs(imagpart))
## starts and ends of band-gap
pbg_starts = np.interp(np.where(pbg_indicator[1:] < pbg_indicator[0:-1]), range(len(freq)), freq)[0]
pbg_ends = np.interp(np.where(pbg_indicator[1:] > pbg_indicator[0:-1]), range(len(freq)), freq)[0]
## Fix the un-started and un-ended bandgaps (TODO)
#print len(pbg_starts), len(pbg_ends)
if len(pbg_starts) < len(pbg_ends): pbg_starts = np.concatenate([np.array([0]), pbg_starts])
#print len(pbg_starts), len(pbg_ends)
if len(pbg_starts) > len(pbg_ends): pbg_starts = pbg_starts[:-1]
#print pbg_ends, pbg_starts
for start, end in np.vstack([pbg_starts, pbg_ends]).T:
#print start, end
plt.axhspan(start, end, color='#FFDD00', alpha=.1)
except:
print "Bandgap detection failed"
plt.ylabel(u"frequency");
plt.xlabel(u"wavenumber $ka/\\pi$");
plt.xlim((-.5, .5));
plt.xticks(xticks, xnumbers); plt.minorticks_on();
plt.grid(True)
if legend_enable: plt.legend(loc="upper right");
## Final plotting
splitpath = os.path.split(last_simulation_name)
outfile = os.path.join(splitpath[0], "band", splitpath[1]+"_band.png")
plt.savefig(outfile, bbox_inches='tight')
#}}}
## --- Nice plotting to PDF ----------------------------------------------------------------------------------#{{{
if plot_publi and not os.path.exists("publi"): os.mkdir("publi")
if plot_publi:
#matplotlib.rc('text', usetex=True)
#matplotlib.rc('text.latex', preamble = \
#'\usepackage{amsmath}, \usepackage{yfonts}, \usepackage{txfonts}, \usepackage{lmodern},')
# ../../effparam_clean.py SphereWireYaki_resolution=4.00e-06_comment=XYS_simtime=5.00e-10_wlth=2.40e-05_wzofs=0.00e+00_monzd=1.00e-04_cells=1.00e+00_spacing=9.00e-05_monzc=0.00e+00_radius=3.75e-05_wtth=6.00e-06
matplotlib.rc('text', usetex=True)
matplotlib.rc('font', size=14)
matplotlib.rc('text.latex', preamble = \
'\usepackage{amsmath}, \usepackage{palatino},\usepackage{upgreek}')
matplotlib.rc('font',**{'family':'serif','serif':['palatino, times']}) ## select fonts
fig = plt.figure(figsize=(8,8)); toplot = {'rt':1, 'N':1, 'eps':1, 'mu':1, 'Z':0} ## For XYS, XYSAs
fig.subplots_adjust(left=.05, bottom=.05, right=.99, top=.99, wspace=.05, hspace=.05) ## XXX
#plt.figure(figsize=(6,6)); toplot = {'rt':1, 'N':0, 'eps':1, 'mu':1, 'Z':0} ## For XYS, XYSAs
#plt.figure(figsize=(6,5)); toplot = {'rt':1, 'N':0, 'eps':0, 'mu':1, 'Z':0} ## For S
#plt.figure(figsize=(6,5)); toplot = {'rt':1, 'N':0, 'eps':1, 'mu':0, 'Z':0} ## For XY
subplot_count = sum(toplot.values())
subplot_index = 1
subplot_columns = [1,1,1,1,1]
## ---- r, t -----
if toplot['rt']:
ax= plt.subplot(subplot_count, subplot_columns[subplot_index], subplot_index)
#plt.title(u"Dielectric spheres $r=%d\\;\\upmu$m" % 25)
#plt.title(u"Dielectric spheres in wire mesh")
plt.title(u"Wire mesh")
ax.label_outer()
plt.grid()
plt.plot(freq, s11amp, marker=marker, color="#880000", label=u'$|r|$', lw=1)
plt.plot(freq, s12amp, marker=marker, color="#0088ff", label=u'$|t|$', lw=1)
plt.ylabel(u"Amplitude");
if plot_expe and os.path.exists('t.dat'):
tf, ty = np.loadtxt('t.dat', usecols=list(range(2)), unpack=True)
plt.plot(tf*frequnit, ty, lw=0, color='#004AAA', marker='o', ms=2, label=u'$|t|$ exp')
subplot_index += 1
plt.xticks(xticks, xnumbers); plt.minorticks_on();
plt.xlim(xlim); plt.ylim((0,1.)); plt.legend(loc='center right');
## Todo allow plotting phase! (And in the 'cartesian' plot, too)
## ---- N -----
if toplot['N']:
ax = plt.subplot(subplot_count, subplot_columns[subplot_index], subplot_index)
ax.label_outer()
plt.grid()
plt.ylabel(u"Index of refraction $N_{\\text{eff}}$");
for ii in np.arange(-5, 5):
plt.plot(freq, ii*c/freq/d, color="#000000", label=u"", lw=.2)
plt.plot(freq, (ii+.5)*c/freq/d, color="#777777", label=u"", lw=.2)
#TODO if plot_expe and os.path.exists('k.dat'):
#tf, ty = np.loadtxt('t.dat', usecols=list(range(2)), unpack=True)
#plt.plot(tf*frequnit, ty, lw=0, color='#004AAA', marker='o', ms=2, label=u'$|t|$ exp')
plt.plot(freq, np.real(N), color="#448800", label=u"$N'$")
plt.plot(freq, np.imag(N), color="#448800", label=u"$N''$", ls='--')
if check_hilbert and len(freq)>1:
plt.plot(freq, np.real(N_KK), color="#dd88aa", label=u"")
plt.plot(freq, np.imag(N_KK), color="#dd88aa", label=u"", ls='--')
plt.xticks(xticks, xnumbers); plt.minorticks_on()
plt.xlim(xlim); plt.ylim((-5,5)); plt.legend(loc='lower right');
subplot_index += 1
## ----- EPS -----
if toplot['eps']:
ax = plt.subplot(subplot_count, subplot_columns[subplot_index], subplot_index)
ax.label_outer()
plt.grid()
plt.ylabel(u"Permittivity $\\varepsilon_{\\text{eff}}$")
plt.plot(freq, np.real(eps), color="#660044", label=u"$\\varepsilon'$")
plt.plot(freq, np.imag(eps), color="#660044", label=u"$\\varepsilon''$", ls='--')
plt.plot(freq, 1-(1100e9/freq)**2, color="#888888", label=u"$1-\\frac{f_p^2}{f^2}$", ls='-') ## Drude model
plt.xticks(xticks, xnumbers); plt.minorticks_on()
plt.xlim(xlim); plt.ylim((-12.,3.)); plt.legend(loc='lower right');
subplot_index += 1
## ----- MU -----
if toplot['mu']:
ax = plt.subplot(subplot_count, subplot_columns[subplot_index], subplot_index)
ax.label_outer()
plt.grid()
plt.ylabel(u"Permeability $\\mu_{\\text{eff}}$");
plt.plot(freq, np.real(mu), color="#663300", label=u"$\\mu'$")
plt.plot(freq, np.imag(mu), color="#663300", label=u"$\\mu''$", ls='--')
plt.xticks(xticks, xnumbers); plt.minorticks_on();
plt.xlim(xlim);
plt.ylim((-5,10));
plt.legend(loc='upper right');
subplot_index += 1
### ----- Z -----
if toplot['Z']:
ax = plt.subplot(subplot_number, 1, subplot_index)
ax.label_outer()
plt.ylabel(u"Impedance"); plt.ylim((-2.,4.))
plt.plot(freq, np.real(Z), color="#004488", label=u"$Z'$")
plt.plot(freq, np.imag(Z), color="#004488", label=u"$Z''$", ls='--')
plt.xticks(xticks, xnumbers); plt.minorticks_on();
plt.xlim(xlim); plt.legend(loc=(.03,.6));
subplot_index += 1
plt.xlabel(u"Frequency [%s]" % frequnitname)
#plt.xlim((0, 1.8))
#plt.grid()
splitpath = os.path.split(last_simulation_name)
outfile = os.path.join(splitpath[0], "publi", splitpath[1]+"_publi.pdf")
plt.savefig(outfile, bbox_inches='tight')
#}}}
## --- Save data to /tmp/effparam.dat ------------------------------------------#{{{
## This is again in the PKGraph ascii format; see loadrt() docstring for further info
if savedat_wd:
if not os.path.exists("effparam"): os.mkdir("effparam")
splitpath = os.path.split(last_simulation_name)
savedatfile = os.path.join(splitpath[0], "effparam", splitpath[1]+"_effparam.dat")
else:
savedatfile = "/tmp/effparam.dat"
if savedat or savedat_wd:
header = ""
## Copy parameters
with open(last_simulation_name+".dat") as datafile:
for line in datafile:
if (line[:1]=="#") and (not "olumn" in line): header+=line
with open(savedatfile, "w") as outfile:
## Post-fixing the older files from rtsim to PKGraph
if not "itle" in header: outfile.write("#title Simulation %s\n" % last_simulation_name.split("_")[0])
if not "arameters" in header: outfile.write("#Parameters Parameters\n")
header = re.sub("Parameter", "param", header)
## Write column headers
outfile.write(header)
outfile.write("#x-column Frequency [Hz]\n#Column |r|\n#Column r phase\n#Column |t|\n#Column t phase\n" + \
"#Column real N\n#Column imag N\n#Column real Z\n#Column imag Z\n" + \
"#Column real eps\n#Column imag eps\n#Column real mu\n#Column imag mu\n")
## Write column data
np.savetxt(outfile, zip(freq, s11amp, s11phase, s12amp, s12phase,
N.real, N.imag, Z.real, Z.imag, eps.real, eps.imag, mu.real, mu.imag), fmt="%.8e")
#}}}
## --- Plot polar ------------------------------------------------------------#{{{
if plot_polar and not os.path.exists("polar"): os.mkdir("polar")
if plot_polar and os.path.isdir("polar"):
## Truncate the arrays (optional)
#(d0,d1) = np.interp((500e9, 650e9), freq, range(len(freq)))
#(freq, s11, s12, N, Z, eps, mu, arg, argLog) = \
#map(lambda a: a[int(d0):int(d1)], (freq, s11, s12, N, Z, eps, mu, arg, argLog))
print "Plotting polar..."
from matplotlib.collections import LineCollection
lims={"s11":(-1,1), "s12":(-1,1), "N":(-10,10), "Z":(-5,5),
"mu":(-10,10), "eps":(-10,10), "arg":(-3,3), "argLog":(-10,10) }
datalist=(s11, s12, N, Z, eps, mu, arg, argLog)
plotlabels=("s11", "s12", "N", "Z", "eps", "mu", "arg", "argLog")
freqlabels = np.append(loss_maxima[loss_maxima<plot_freq_max], freq[-1])
fig = plt.figure(figsize=(11,22))
subplot_number = len(datalist)
for (subpl, data, plotlabel) in zip(range(subplot_number), datalist, plotlabels):
plt.subplot(4,2,subpl+1)
if plotlabel.startswith('s'):
plt.plot(np.sin(np.linspace(0,2*np.pi)), np.cos(np.linspace(0,2*np.pi)), c='#888888')
plt.plot(np.sin(np.linspace(0,2*np.pi))/2+.5, np.cos(np.linspace(0,2*np.pi))/2, c='#aaaaaa')
plt.plot(np.sin(np.linspace(0,2*np.pi))+1, np.cos(np.linspace(0,2*np.pi))+1, c='#aaaaaa')
plt.plot(np.sin(np.linspace(0,2*np.pi))+1, np.cos(np.linspace(0,2*np.pi))-1, c='#aaaaaa')
x = data.real; y = data.imag
t = np.linspace(0, 10, len(freq))
points = np.array([x, y]).T.reshape(-1, 1, 2)
segments = np.concatenate([points[:-1], points[1:]], axis=1)
lc = LineCollection(segments, cmap=plt.get_cmap('jet'), norm=plt.Normalize(0, 10))
lc.set_array(t)
lc.set_linewidth(2)
plt.gca().add_collection(lc)
## Add black points to every xtick
xpoints = np.interp(xticks, freq, x)
ypoints = np.interp(xticks, freq, y)
for xpoint, ypoint in zip(xpoints, ypoints):
plt.plot(xpoint, ypoint, marker="o", markersize=3, color="#000000", label='')
## Annotate resonant frequencies
xpoints = np.interp(freqlabels, freq, x.real)
ypoints = np.interp(freqlabels, freq, y.real)
freqlabelstxt = [("%d" % (fr*1000/frequnit)) for fr in freqlabels]
for label, xpoint, ypoint in zip(freqlabelstxt, xpoints, ypoints):
plt.annotate(label, xy = (xpoint, ypoint), xytext = (-10, 10),
textcoords = 'offset points', ha = 'right', va = 'bottom',
bbox = dict(boxstyle = 'round,pad=.15', fc = 'white', alpha = 0.5),
arrowprops = dict(arrowstyle = '->', connectionstyle = 'arc3,rad=0'))
plt.plot(xpoint, ypoint, marker="o", markersize=2, color="#000000", label='')
lim = lims[plotlabel]
plt.xlim(lim); plt.ylim(lim); plt.grid(True); plt.title(plotlabel)
## Final plotting
splitpath = os.path.split(last_simulation_name)
outfile = os.path.join(splitpath[0], "polar", splitpath[1]+"_polar.png")
plt.savefig(outfile, bbox_inches='tight')
#}}}
## ----------------------------------------------------------------------------------------------------
## Notes
# arh = (1+0j+s11**2-s12**2)/2/(s11); np.round(get_phase(arh)/np.pi/2) ## This is a Z.imag zero-pass detector
def get_cmdline_parameters():#{{{
    # (optional) Manual N branch override; defaults are returned when the
    # corresponding command-line argument is absent or "-"
    global last_simulation_name
    branch_offset = np.zeros(len(freq))
    Nsign = np.ones(len(freq))
    if len(sys.argv)>2 and sys.argv[2] != "-" and __name__ == "__main__":
        print "Setting branch:", sys.argv[2]
        branch_offset = np.ones(len(freq))*int(sys.argv[2])
        last_simulation_name += "_BRANCH=%s" % sys.argv[2]
    if len(sys.argv)>3 and sys.argv[3] != "-" and __name__ == "__main__":
        print "Setting branch sign:", sys.argv[3]
        Nsign = np.ones(len(freq))*int(sys.argv[3])
        last_simulation_name += "_SIGN=%s" % sys.argv[3]
    return branch_offset, Nsign#}}}
| mit | -5,967,803,258,731,536,000 | 48.063741 | 214 | 0.595008 | false | 2.8792 | false | false | false |
avinetworks/avi-heat | avi/heat/resources/vs.py | 1 | 87514 | # GENERATED FILE - DO NOT EDIT THIS FILE UNLESS YOU ARE A WIZZARD
#pylint: skip-file
from heat.engine import properties
from heat.engine import constraints
from heat.engine import attributes
from heat.common.i18n import _
from avi.heat.avi_resource import AviResource
from avi.heat.avi_resource import AviNestedResource
from options import *
from common import *
from pool import *
from debug_se import *
from analytics_policy import *
from vs_datascript import *
from application_policy import *
from auth import *
from rate import *
from gslb import *
from dns import *
from dns_policy import *
from content_rewrite_profile import *
from traffic_clone_profile import *
from error_page import *
from l4_policy import *
class ServicePoolSelector(object):
# all schemas
service_port_schema = properties.Schema(
properties.Schema.NUMBER,
_("Pool based destination port"),
required=True,
update_allowed=True,
)
service_pool_uuid_schema = properties.Schema(
properties.Schema.STRING,
_(" You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
service_protocol_schema = properties.Schema(
properties.Schema.STRING,
_("Destination protocol to match for the pool selection. If not specified, it will match any protocol."),
required=False,
update_allowed=True,
constraints=[
constraints.AllowedValues(['PROTOCOL_TYPE_TCP_FAST_PATH', 'PROTOCOL_TYPE_TCP_PROXY', 'PROTOCOL_TYPE_UDP_FAST_PATH', 'PROTOCOL_TYPE_UDP_PROXY']),
],
)
service_pool_group_uuid_schema = properties.Schema(
properties.Schema.STRING,
_(" You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
service_port_range_end_schema = properties.Schema(
properties.Schema.NUMBER,
_("(Introduced in: 17.2.4) The end of the Service port number range. (Default: 0)"),
required=False,
update_allowed=True,
)
# properties list
PROPERTIES = (
'service_port',
'service_pool_uuid',
'service_protocol',
'service_pool_group_uuid',
'service_port_range_end',
)
# mapping of properties to their schemas
properties_schema = {
'service_port': service_port_schema,
'service_pool_uuid': service_pool_uuid_schema,
'service_protocol': service_protocol_schema,
'service_pool_group_uuid': service_pool_group_uuid_schema,
'service_port_range_end': service_port_range_end_schema,
}
# for supporting get_avi_uuid_by_name functionality
field_references = {
'service_pool_uuid': 'pool',
'service_pool_group_uuid': 'poolgroup',
}
unique_keys = {
'my_key': 'service_port,service_protocol,service_port_range_end',
}
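# The two *_uuid fields above accept either a raw Avi UUID or a name lookup
# with the documented 'get_avi_uuid_by_name:' prefix. A sketch of one selector
# entry as it could appear in a Heat template (the enclosing property name and
# the pool name are assumptions, not taken from this file):
#
#   - service_port: 8080
#     service_protocol: PROTOCOL_TYPE_TCP_PROXY
#     service_pool_uuid: get_avi_uuid_by_name:my-backend-pool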
class VirtualServiceResource(object):
# all schemas
num_vcpus_schema = properties.Schema(
properties.Schema.NUMBER,
_(""),
required=False,
update_allowed=True,
)
memory_schema = properties.Schema(
properties.Schema.NUMBER,
_(""),
required=False,
update_allowed=True,
)
is_exclusive_schema = properties.Schema(
properties.Schema.BOOLEAN,
_(""),
required=False,
update_allowed=True,
)
scalein_primary_schema = properties.Schema(
properties.Schema.BOOLEAN,
_(""),
required=False,
update_allowed=True,
)
num_se_schema = properties.Schema(
properties.Schema.NUMBER,
_(""),
required=False,
update_allowed=True,
)
scalein_se_uuid_schema = properties.Schema(
properties.Schema.STRING,
_(""),
required=False,
update_allowed=True,
)
num_standby_se_schema = properties.Schema(
properties.Schema.NUMBER,
_(""),
required=False,
update_allowed=True,
)
# properties list
PROPERTIES = (
'num_vcpus',
'memory',
'is_exclusive',
'scalein_primary',
'num_se',
'scalein_se_uuid',
'num_standby_se',
)
# mapping of properties to their schemas
properties_schema = {
'num_vcpus': num_vcpus_schema,
'memory': memory_schema,
'is_exclusive': is_exclusive_schema,
'scalein_primary': scalein_primary_schema,
'num_se': num_se_schema,
'scalein_se_uuid': scalein_se_uuid_schema,
'num_standby_se': num_standby_se_schema,
}
class Service(object):
# all schemas
port_schema = properties.Schema(
properties.Schema.NUMBER,
_("The Virtual Service's port number."),
required=True,
update_allowed=True,
)
enable_ssl_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("Enable SSL termination and offload for traffic from clients. (Default: False)"),
required=False,
update_allowed=True,
)
override_network_profile_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("Override the network profile for this specific service port. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
port_range_end_schema = properties.Schema(
properties.Schema.NUMBER,
_("The end of the Virtual Service's port number range. (Default: 0)"),
required=False,
update_allowed=True,
)
override_application_profile_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.2.4) Enable application layer specific features for the this specific service. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
# properties list
PROPERTIES = (
'port',
'enable_ssl',
'override_network_profile_uuid',
'port_range_end',
'override_application_profile_uuid',
)
# mapping of properties to their schemas
properties_schema = {
'port': port_schema,
'enable_ssl': enable_ssl_schema,
'override_network_profile_uuid': override_network_profile_uuid_schema,
'port_range_end': port_range_end_schema,
'override_application_profile_uuid': override_application_profile_uuid_schema,
}
# for supporting get_avi_uuid_by_name functionality
field_references = {
'override_network_profile_uuid': 'networkprofile',
'override_application_profile_uuid': 'applicationprofile',
}
class PerformanceLimits(object):
# all schemas
max_concurrent_connections_schema = properties.Schema(
properties.Schema.NUMBER,
_("The maximum number of concurrent client conections allowed to the Virtual Service."),
required=False,
update_allowed=True,
)
max_throughput_schema = properties.Schema(
properties.Schema.NUMBER,
_("The maximum throughput per second for all clients allowed through the client side of the Virtual Service."),
required=False,
update_allowed=True,
)
# properties list
PROPERTIES = (
'max_concurrent_connections',
'max_throughput',
)
# mapping of properties to their schemas
properties_schema = {
'max_concurrent_connections': max_concurrent_connections_schema,
'max_throughput': max_throughput_schema,
}
class SidebandProfile(object):
# all schemas
ip_item_schema = properties.Schema(
properties.Schema.MAP,
_("IP Address of the sideband server."),
schema=IpAddr.properties_schema,
required=True,
update_allowed=False,
)
ip_schema = properties.Schema(
properties.Schema.LIST,
_("IP Address of the sideband server."),
schema=ip_item_schema,
required=False,
update_allowed=True,
)
sideband_max_request_body_size_schema = properties.Schema(
properties.Schema.NUMBER,
_("Maximum size of the request body that will be sent on the sideband. (Units: BYTES) (Default: 1024)"),
required=False,
update_allowed=True,
)
# properties list
PROPERTIES = (
'ip',
'sideband_max_request_body_size',
)
# mapping of properties to their schemas
properties_schema = {
'ip': ip_schema,
'sideband_max_request_body_size': sideband_max_request_body_size_schema,
}
# for supporting get_avi_uuid_by_name functionality
field_references = {
'ip': getattr(IpAddr, 'field_references', {}),
}
unique_keys = {
'ip': getattr(IpAddr, 'unique_keys', {}),
}
class TLSTicket(object):
# all schemas
name_schema = properties.Schema(
properties.Schema.STRING,
_(""),
required=True,
update_allowed=True,
)
aes_key_schema = properties.Schema(
properties.Schema.STRING,
_(""),
required=True,
update_allowed=True,
)
hmac_key_schema = properties.Schema(
properties.Schema.STRING,
_(""),
required=True,
update_allowed=True,
)
# properties list
PROPERTIES = (
'name',
'aes_key',
'hmac_key',
)
# mapping of properties to their schemas
properties_schema = {
'name': name_schema,
'aes_key': aes_key_schema,
'hmac_key': hmac_key_schema,
}
class IPNetworkSubnet(object):
# all schemas
network_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("Network for VirtualService IP allocation with Vantage as the IPAM provider. Network should be created before this is configured. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
subnet_schema = properties.Schema(
properties.Schema.MAP,
_("Subnet for VirtualService IP allocation with Vantage or Infoblox as the IPAM provider. Only one of subnet or subnet_uuid configuration is allowed."),
schema=IpAddrPrefix.properties_schema,
required=False,
update_allowed=True,
)
subnet_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("Subnet UUID or Name or Prefix for VirtualService IP allocation with AWS or OpenStack as the IPAM provider. Only one of subnet or subnet_uuid configuration is allowed."),
required=False,
update_allowed=True,
)
subnet6_schema = properties.Schema(
properties.Schema.MAP,
_("(Introduced in: 18.1.1) Subnet for VirtualService IPv6 allocation with Vantage or Infoblox as the IPAM provider. Only one of subnet or subnet_uuid configuration is allowed."),
schema=IpAddrPrefix.properties_schema,
required=False,
update_allowed=True,
)
subnet6_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 18.1.1) Subnet UUID or Name or Prefix for VirtualService IPv6 allocation with AWS or OpenStack as the IPAM provider. Only one of subnet or subnet_uuid configuration is allowed."),
required=False,
update_allowed=True,
)
# properties list
PROPERTIES = (
'network_uuid',
'subnet',
'subnet_uuid',
'subnet6',
'subnet6_uuid',
)
# mapping of properties to their schemas
properties_schema = {
'network_uuid': network_uuid_schema,
'subnet': subnet_schema,
'subnet_uuid': subnet_uuid_schema,
'subnet6': subnet6_schema,
'subnet6_uuid': subnet6_uuid_schema,
}
# for supporting get_avi_uuid_by_name functionality
field_references = {
'subnet': getattr(IpAddrPrefix, 'field_references', {}),
'subnet6': getattr(IpAddrPrefix, 'field_references', {}),
'network_uuid': 'network',
}
unique_keys = {
'subnet': getattr(IpAddrPrefix, 'unique_keys', {}),
'subnet6': getattr(IpAddrPrefix, 'unique_keys', {}),
}
class VsSeVnic(object):
# all schemas
mac_schema = properties.Schema(
properties.Schema.STRING,
_(""),
required=True,
update_allowed=True,
)
type_schema = properties.Schema(
properties.Schema.STRING,
_(""),
required=True,
update_allowed=True,
constraints=[
constraints.AllowedValues(['VNIC_TYPE_BE', 'VNIC_TYPE_FE']),
],
)
lif_schema = properties.Schema(
properties.Schema.STRING,
_(""),
required=False,
update_allowed=True,
)
# properties list
PROPERTIES = (
'mac',
'type',
'lif',
)
# mapping of properties to their schemas
properties_schema = {
'mac': mac_schema,
'type': type_schema,
'lif': lif_schema,
}
class VsApicExtension(AviResource):
resource_name = "vsapicextension"
# all schemas
avi_version_schema = properties.Schema(
properties.Schema.STRING,
_("Avi Version to use for the object. Default is 16.4.2. If you plan to use any fields introduced after 16.4.2, then this needs to be explicitly set."),
required=False,
update_allowed=True,
)
txn_uuid_schema = properties.Schema(
properties.Schema.STRING,
_(""),
required=True,
update_allowed=True,
)
se_uuid_schema = properties.Schema(
properties.Schema.STRING,
_(""),
required=False,
update_allowed=True,
)
vnic_item_schema = properties.Schema(
properties.Schema.MAP,
_(""),
schema=VsSeVnic.properties_schema,
required=True,
update_allowed=False,
)
vnic_schema = properties.Schema(
properties.Schema.LIST,
_(""),
schema=vnic_item_schema,
required=False,
update_allowed=True,
)
# properties list
PROPERTIES = (
'avi_version',
'txn_uuid',
'se_uuid',
'vnic',
)
# mapping of properties to their schemas
properties_schema = {
'avi_version': avi_version_schema,
'txn_uuid': txn_uuid_schema,
'se_uuid': se_uuid_schema,
'vnic': vnic_schema,
}
# for supporting get_avi_uuid_by_name functionality
field_references = {
'vnic': getattr(VsSeVnic, 'field_references', {}),
}
unique_keys = {
'vnic': getattr(VsSeVnic, 'unique_keys', {}),
}
class SeVipInterfaceList(object):
# all schemas
vip_intf_mac_schema = properties.Schema(
properties.Schema.STRING,
_(""),
required=True,
update_allowed=True,
)
vlan_id_schema = properties.Schema(
properties.Schema.NUMBER,
_(" (Default: 0)"),
required=False,
update_allowed=True,
)
vip_intf_ip_schema = properties.Schema(
properties.Schema.MAP,
_(""),
schema=IpAddr.properties_schema,
required=False,
update_allowed=True,
)
is_portchannel_schema = properties.Schema(
properties.Schema.BOOLEAN,
_(" (Default: False)"),
required=False,
update_allowed=True,
)
vip_intf_ip6_schema = properties.Schema(
properties.Schema.MAP,
_(""),
schema=IpAddr.properties_schema,
required=False,
update_allowed=True,
)
# properties list
PROPERTIES = (
'vip_intf_mac',
'vlan_id',
'vip_intf_ip',
'is_portchannel',
'vip_intf_ip6',
)
# mapping of properties to their schemas
properties_schema = {
'vip_intf_mac': vip_intf_mac_schema,
'vlan_id': vlan_id_schema,
'vip_intf_ip': vip_intf_ip_schema,
'is_portchannel': is_portchannel_schema,
'vip_intf_ip6': vip_intf_ip6_schema,
}
# for supporting get_avi_uuid_by_name functionality
field_references = {
'vip_intf_ip6': getattr(IpAddr, 'field_references', {}),
'vip_intf_ip': getattr(IpAddr, 'field_references', {}),
}
unique_keys = {
'vip_intf_ip6': getattr(IpAddr, 'unique_keys', {}),
'vip_intf_ip': getattr(IpAddr, 'unique_keys', {}),
}
class SeList(object):
# all schemas
se_uuid_schema = properties.Schema(
properties.Schema.STRING,
_(" You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=True,
update_allowed=True,
)
is_primary_schema = properties.Schema(
properties.Schema.BOOLEAN,
_(" (Default: True)"),
required=False,
update_allowed=True,
)
is_standby_schema = properties.Schema(
properties.Schema.BOOLEAN,
_(" (Default: False)"),
required=False,
update_allowed=True,
)
is_connected_schema = properties.Schema(
properties.Schema.BOOLEAN,
_(" (Default: True)"),
required=False,
update_allowed=True,
)
delete_in_progress_schema = properties.Schema(
properties.Schema.BOOLEAN,
_(" (Default: False)"),
required=False,
update_allowed=True,
)
vcpus_schema = properties.Schema(
properties.Schema.NUMBER,
_(" (Default: 2)"),
required=False,
update_allowed=True,
)
memory_schema = properties.Schema(
properties.Schema.NUMBER,
_(" (Default: 2001)"),
required=False,
update_allowed=True,
)
vip_intf_mac_schema = properties.Schema(
properties.Schema.STRING,
_(""),
required=False,
update_allowed=True,
)
vip_subnet_mask_schema = properties.Schema(
properties.Schema.NUMBER,
_(" (Default: 32)"),
required=False,
update_allowed=True,
)
vnic_item_schema = properties.Schema(
properties.Schema.MAP,
_(""),
schema=VsSeVnic.properties_schema,
required=True,
update_allowed=False,
)
vnic_schema = properties.Schema(
properties.Schema.LIST,
_(""),
schema=vnic_item_schema,
required=False,
update_allowed=True,
)
pending_download_schema = properties.Schema(
properties.Schema.BOOLEAN,
_(" (Default: False)"),
required=False,
update_allowed=True,
)
sec_idx_schema = properties.Schema(
properties.Schema.NUMBER,
_(" (Default: 1)"),
required=False,
update_allowed=True,
)
download_selist_only_schema = properties.Schema(
properties.Schema.BOOLEAN,
_(" (Default: False)"),
required=False,
update_allowed=True,
)
vlan_id_schema = properties.Schema(
properties.Schema.NUMBER,
_(" (Default: 0)"),
required=False,
update_allowed=True,
)
snat_ip_schema = properties.Schema(
properties.Schema.MAP,
_(""),
schema=IpAddr.properties_schema,
required=False,
update_allowed=True,
)
vip_intf_ip_schema = properties.Schema(
properties.Schema.MAP,
_(""),
schema=IpAddr.properties_schema,
required=False,
update_allowed=True,
)
vip_intf_list_item_schema = properties.Schema(
properties.Schema.MAP,
_(""),
schema=SeVipInterfaceList.properties_schema,
required=True,
update_allowed=False,
)
vip_intf_list_schema = properties.Schema(
properties.Schema.LIST,
_(""),
schema=vip_intf_list_item_schema,
required=False,
update_allowed=True,
)
floating_intf_ip_item_schema = properties.Schema(
properties.Schema.MAP,
_(""),
schema=IpAddr.properties_schema,
required=True,
update_allowed=False,
)
floating_intf_ip_schema = properties.Schema(
properties.Schema.LIST,
_(""),
schema=floating_intf_ip_item_schema,
required=False,
update_allowed=True,
)
is_portchannel_schema = properties.Schema(
properties.Schema.BOOLEAN,
_(" (Default: False)"),
required=False,
update_allowed=True,
)
scalein_in_progress_schema = properties.Schema(
properties.Schema.BOOLEAN,
_(" (Default: False)"),
required=False,
update_allowed=True,
)
admin_down_requested_schema = properties.Schema(
properties.Schema.BOOLEAN,
_(" (Default: False)"),
required=False,
update_allowed=True,
)
at_curr_ver_schema = properties.Schema(
properties.Schema.BOOLEAN,
_(" (Default: True)"),
required=False,
update_allowed=True,
)
version_schema = properties.Schema(
properties.Schema.STRING,
_(""),
required=False,
update_allowed=True,
)
gslb_download_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("(Introduced in: 17.1.1) This flag indicates whether the gslb, ghm, gs objects have been pushed to the DNS-VS's SE. (Default: False)"),
required=False,
update_allowed=True,
)
geo_download_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("(Introduced in: 17.1.1) This flag indicates whether the geo-files have been pushed to the DNS-VS's SE. (Default: False)"),
required=False,
update_allowed=True,
)
geodb_download_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("(Introduced in: 17.1.2) This flag indicates whether the geodb object has been pushed to the DNS-VS's SE. (Default: False)"),
required=False,
update_allowed=True,
)
attach_ip_success_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("(Introduced in: 17.2.3) (Default: False)"),
required=False,
update_allowed=True,
)
attach_ip_status_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.2.3) "),
required=False,
update_allowed=True,
)
vip6_subnet_mask_schema = properties.Schema(
properties.Schema.NUMBER,
_("(Introduced in: 18.1.1) (Default: 128)"),
required=False,
update_allowed=True,
)
# properties list
PROPERTIES = (
'se_uuid',
'is_primary',
'is_standby',
'is_connected',
'delete_in_progress',
'vcpus',
'memory',
'vip_intf_mac',
'vip_subnet_mask',
'vnic',
'pending_download',
'sec_idx',
'download_selist_only',
'vlan_id',
'snat_ip',
'vip_intf_ip',
'vip_intf_list',
'floating_intf_ip',
'is_portchannel',
'scalein_in_progress',
'admin_down_requested',
'at_curr_ver',
'version',
'gslb_download',
'geo_download',
'geodb_download',
'attach_ip_success',
'attach_ip_status',
'vip6_subnet_mask',
)
# mapping of properties to their schemas
properties_schema = {
'se_uuid': se_uuid_schema,
'is_primary': is_primary_schema,
'is_standby': is_standby_schema,
'is_connected': is_connected_schema,
'delete_in_progress': delete_in_progress_schema,
'vcpus': vcpus_schema,
'memory': memory_schema,
'vip_intf_mac': vip_intf_mac_schema,
'vip_subnet_mask': vip_subnet_mask_schema,
'vnic': vnic_schema,
'pending_download': pending_download_schema,
'sec_idx': sec_idx_schema,
'download_selist_only': download_selist_only_schema,
'vlan_id': vlan_id_schema,
'snat_ip': snat_ip_schema,
'vip_intf_ip': vip_intf_ip_schema,
'vip_intf_list': vip_intf_list_schema,
'floating_intf_ip': floating_intf_ip_schema,
'is_portchannel': is_portchannel_schema,
'scalein_in_progress': scalein_in_progress_schema,
'admin_down_requested': admin_down_requested_schema,
'at_curr_ver': at_curr_ver_schema,
'version': version_schema,
'gslb_download': gslb_download_schema,
'geo_download': geo_download_schema,
'geodb_download': geodb_download_schema,
'attach_ip_success': attach_ip_success_schema,
'attach_ip_status': attach_ip_status_schema,
'vip6_subnet_mask': vip6_subnet_mask_schema,
}
# for supporting get_avi_uuid_by_name functionality
field_references = {
'vnic': getattr(VsSeVnic, 'field_references', {}),
'vip_intf_ip': getattr(IpAddr, 'field_references', {}),
'vip_intf_list': getattr(SeVipInterfaceList, 'field_references', {}),
'snat_ip': getattr(IpAddr, 'field_references', {}),
'se_uuid': 'serviceengine',
'floating_intf_ip': getattr(IpAddr, 'field_references', {}),
}
unique_keys = {
'vnic': getattr(VsSeVnic, 'unique_keys', {}),
'vip_intf_ip': getattr(IpAddr, 'unique_keys', {}),
'snat_ip': getattr(IpAddr, 'unique_keys', {}),
'floating_intf_ip': getattr(IpAddr, 'unique_keys', {}),
'vip_intf_list': getattr(SeVipInterfaceList, 'unique_keys', {}),
}
class VipDbExtension(object):
# all schemas
vip_id_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.1.1) "),
required=False,
update_allowed=True,
)
se_list_item_schema = properties.Schema(
properties.Schema.MAP,
_("(Introduced in: 17.1.1) "),
schema=SeList.properties_schema,
required=True,
update_allowed=False,
)
se_list_schema = properties.Schema(
properties.Schema.LIST,
_("(Introduced in: 17.1.1) "),
schema=se_list_item_schema,
required=False,
update_allowed=True,
)
requested_resource_schema = properties.Schema(
properties.Schema.MAP,
_("(Introduced in: 17.1.1) "),
schema=VirtualServiceResource.properties_schema,
required=False,
update_allowed=True,
)
first_se_assigned_time_schema = properties.Schema(
properties.Schema.MAP,
_("(Introduced in: 17.1.1) "),
schema=TimeStamp.properties_schema,
required=False,
update_allowed=True,
)
num_additional_se_schema = properties.Schema(
properties.Schema.NUMBER,
_("(Introduced in: 17.1.1) "),
required=False,
update_allowed=True,
)
# properties list
PROPERTIES = (
'vip_id',
'se_list',
'requested_resource',
'first_se_assigned_time',
'num_additional_se',
)
# mapping of properties to their schemas
properties_schema = {
'vip_id': vip_id_schema,
'se_list': se_list_schema,
'requested_resource': requested_resource_schema,
'first_se_assigned_time': first_se_assigned_time_schema,
'num_additional_se': num_additional_se_schema,
}
# for supporting get_avi_uuid_by_name functionality
field_references = {
'se_list': getattr(SeList, 'field_references', {}),
'first_se_assigned_time': getattr(TimeStamp, 'field_references', {}),
'requested_resource': getattr(VirtualServiceResource, 'field_references', {}),
}
unique_keys = {
'se_list': getattr(SeList, 'unique_keys', {}),
'first_se_assigned_time': getattr(TimeStamp, 'unique_keys', {}),
'requested_resource': getattr(VirtualServiceResource, 'unique_keys', {}),
}
class Vip(object):
# all schemas
vip_id_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.1.1) Unique ID associated with the vip."),
required=True,
update_allowed=True,
)
ip_address_schema = properties.Schema(
properties.Schema.MAP,
_("(Introduced in: 17.1.1) IPv4 Address of the Vip. For IPv6 address support please use ip6_address field"),
schema=IpAddr.properties_schema,
required=False,
update_allowed=True,
)
enabled_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("(Introduced in: 17.1.1) Enable or disable the Vip. (Default: True)"),
required=False,
update_allowed=True,
)
network_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.1.1) Manually override the network on which the Vip is placed. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
port_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.1.1) (internal-use) Network port assigned to the Vip IP address."),
required=False,
update_allowed=True,
)
subnet_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.1.1) If auto_allocate_ip is True, then the subnet for the Vip IP address allocation. This field is applicable only if the VirtualService belongs to an Openstack or AWS cloud, in which case it is mandatory, if auto_allocate is selected."),
required=False,
update_allowed=True,
)
subnet_schema = properties.Schema(
properties.Schema.MAP,
_("(Introduced in: 17.1.1) Subnet providing reachability for client facing Vip IP."),
schema=IpAddrPrefix.properties_schema,
required=False,
update_allowed=True,
)
discovered_networks_item_schema = properties.Schema(
properties.Schema.MAP,
_("(Introduced in: 17.1.1) Discovered networks providing reachability for client facing Vip IP."),
schema=DiscoveredNetwork.properties_schema,
required=True,
update_allowed=False,
)
discovered_networks_schema = properties.Schema(
properties.Schema.LIST,
_("(Introduced in: 17.1.1) Discovered networks providing reachability for client facing Vip IP."),
schema=discovered_networks_item_schema,
required=False,
update_allowed=True,
)
availability_zone_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.1.1) Availability-zone to place the Virtual Service."),
required=False,
update_allowed=True,
)
auto_allocate_ip_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("(Introduced in: 17.1.1) Auto-allocate VIP from the provided subnet. (Default: False)"),
required=False,
update_allowed=True,
)
floating_ip_schema = properties.Schema(
properties.Schema.MAP,
_("(Introduced in: 17.1.1) Floating IPv4 to associate with this Vip."),
schema=IpAddr.properties_schema,
required=False,
update_allowed=True,
)
auto_allocate_floating_ip_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("(Introduced in: 17.1.1) Auto-allocate floating/elastic IP from the Cloud infrastructure. (Default: False)"),
required=False,
update_allowed=True,
)
floating_subnet_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.1.1) If auto_allocate_floating_ip is True and more than one floating-ip subnets exist, then the subnet for the floating IP address allocation."),
required=False,
update_allowed=True,
)
avi_allocated_vip_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("(Introduced in: 17.1.1) (internal-use) VIP allocated by Avi in the Cloud infrastructure. (Default: False)"),
required=False,
update_allowed=True,
)
avi_allocated_fip_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("(Introduced in: 17.1.1) (internal-use) FIP allocated by Avi in the Cloud infrastructure. (Default: False)"),
required=False,
update_allowed=True,
)
ipam_network_subnet_schema = properties.Schema(
properties.Schema.MAP,
_("(Introduced in: 17.1.1) Subnet and/or Network for allocating VirtualService IP by IPAM Provider module."),
schema=IPNetworkSubnet.properties_schema,
required=False,
update_allowed=True,
)
ip6_address_schema = properties.Schema(
properties.Schema.MAP,
_("(Introduced in: 18.1.1) IPv6 Address of the Vip."),
schema=IpAddr.properties_schema,
required=False,
update_allowed=True,
)
subnet6_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 18.1.1) If auto_allocate_ip is True, then the subnet for the Vip IPv6 address allocation. This field is applicable only if the VirtualService belongs to an Openstack or AWS cloud, in which case it is mandatory, if auto_allocate is selected."),
required=False,
update_allowed=True,
)
subnet6_schema = properties.Schema(
properties.Schema.MAP,
_("(Introduced in: 18.1.1) Subnet providing reachability for client facing Vip IPv6."),
schema=IpAddrPrefix.properties_schema,
required=False,
update_allowed=True,
)
floating_ip6_schema = properties.Schema(
properties.Schema.MAP,
_("(Introduced in: 18.1.1) Floating IPv6 address to associate with this Vip."),
schema=IpAddr.properties_schema,
required=False,
update_allowed=True,
)
floating_subnet6_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 18.1.1) If auto_allocate_floating_ip is True and more than one floating-ip subnets exist, then the subnet for the floating IPv6 address allocation."),
required=False,
update_allowed=True,
)
auto_allocate_ip_type_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 18.1.1) Specifies whether to auto-allocate only a V4 address, only a V6 address, or one of each type. (Default: V4_ONLY)"),
required=False,
update_allowed=True,
constraints=[
constraints.AllowedValues(['V4_ONLY', 'V4_V6', 'V6_ONLY']),
],
)
# properties list
PROPERTIES = (
'vip_id',
'ip_address',
'enabled',
'network_uuid',
'port_uuid',
'subnet_uuid',
'subnet',
'discovered_networks',
'availability_zone',
'auto_allocate_ip',
'floating_ip',
'auto_allocate_floating_ip',
'floating_subnet_uuid',
'avi_allocated_vip',
'avi_allocated_fip',
'ipam_network_subnet',
'ip6_address',
'subnet6_uuid',
'subnet6',
'floating_ip6',
'floating_subnet6_uuid',
'auto_allocate_ip_type',
)
# mapping of properties to their schemas
properties_schema = {
'vip_id': vip_id_schema,
'ip_address': ip_address_schema,
'enabled': enabled_schema,
'network_uuid': network_uuid_schema,
'port_uuid': port_uuid_schema,
'subnet_uuid': subnet_uuid_schema,
'subnet': subnet_schema,
'discovered_networks': discovered_networks_schema,
'availability_zone': availability_zone_schema,
'auto_allocate_ip': auto_allocate_ip_schema,
'floating_ip': floating_ip_schema,
'auto_allocate_floating_ip': auto_allocate_floating_ip_schema,
'floating_subnet_uuid': floating_subnet_uuid_schema,
'avi_allocated_vip': avi_allocated_vip_schema,
'avi_allocated_fip': avi_allocated_fip_schema,
'ipam_network_subnet': ipam_network_subnet_schema,
'ip6_address': ip6_address_schema,
'subnet6_uuid': subnet6_uuid_schema,
'subnet6': subnet6_schema,
'floating_ip6': floating_ip6_schema,
'floating_subnet6_uuid': floating_subnet6_uuid_schema,
'auto_allocate_ip_type': auto_allocate_ip_type_schema,
}
# for supporting get_avi_uuid_by_name functionality
field_references = {
'subnet': getattr(IpAddrPrefix, 'field_references', {}),
'network_uuid': 'network',
'floating_ip6': getattr(IpAddr, 'field_references', {}),
'discovered_networks': getattr(DiscoveredNetwork, 'field_references', {}),
'floating_ip': getattr(IpAddr, 'field_references', {}),
'ipam_network_subnet': getattr(IPNetworkSubnet, 'field_references', {}),
'ip6_address': getattr(IpAddr, 'field_references', {}),
'subnet6': getattr(IpAddrPrefix, 'field_references', {}),
'ip_address': getattr(IpAddr, 'field_references', {}),
}
unique_keys = {
'subnet': getattr(IpAddrPrefix, 'unique_keys', {}),
'my_key': 'vip_id',
'floating_ip6': getattr(IpAddr, 'unique_keys', {}),
'discovered_networks': getattr(DiscoveredNetwork, 'unique_keys', {}),
'floating_ip': getattr(IpAddr, 'unique_keys', {}),
'ipam_network_subnet': getattr(IPNetworkSubnet, 'unique_keys', {}),
'ip6_address': getattr(IpAddr, 'unique_keys', {}),
'subnet6': getattr(IpAddrPrefix, 'unique_keys', {}),
'ip_address': getattr(IpAddr, 'unique_keys', {}),
}
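# Editor's note: a minimal, hedged sketch of a Vip entry as it might appear
# in a Heat template's 'vip' list. Field names come from PROPERTIES above;
# the concrete values (vip_id, addresses, mask) are illustrative
# assumptions, not Avi defaults:
#
#   vip:
#     - vip_id: '1'
#       auto_allocate_ip: true
#       ipam_network_subnet:
#         subnet:
#           ip_addr: {addr: 10.10.0.0, type: V4}
#           mask: 24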
class VirtualService(AviResource):
resource_name = "virtualservice"
# all schemas
avi_version_schema = properties.Schema(
properties.Schema.STRING,
_("Avi Version to use for the object. Default is 16.4.2. If you plan to use any fields introduced after 16.4.2, then this needs to be explicitly set."),
required=False,
update_allowed=True,
)
name_schema = properties.Schema(
properties.Schema.STRING,
_("Name for the Virtual Service."),
required=True,
update_allowed=True,
)
fqdn_schema = properties.Schema(
properties.Schema.STRING,
_("DNS resolvable, fully qualified domain name of the virtualservice. Only one of 'fqdn' and 'dns_info' configuration is allowed."),
required=False,
update_allowed=True,
)
ip_address_schema = properties.Schema(
properties.Schema.MAP,
_("(Deprecated in: 17.1.1) IP Address of the Virtual Service."),
schema=IpAddr.properties_schema,
required=False,
update_allowed=True,
)
enabled_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("Enable or disable the Virtual Service. (Default: True)"),
required=False,
update_allowed=True,
)
services_item_schema = properties.Schema(
properties.Schema.MAP,
_("List of Services defined for this Virtual Service."),
schema=Service.properties_schema,
required=True,
update_allowed=False,
)
services_schema = properties.Schema(
properties.Schema.LIST,
_("List of Services defined for this Virtual Service."),
schema=services_item_schema,
required=False,
update_allowed=True,
)
application_profile_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("Enable application layer specific features for the Virtual Service. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'. (Default: System-HTTP)"),
required=False,
update_allowed=True,
)
network_profile_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("Determines network settings such as protocol, TCP or UDP, and related options for the protocol. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'. (Default: System-TCP-Proxy)"),
required=False,
update_allowed=True,
)
server_network_profile_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("Determines the network settings profile for the server side of TCP proxied connections. Leave blank to use the same settings as the client to VS side of the connection. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
pool_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("The pool is an object that contains destination servers and related attributes such as load-balancing and persistence. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
se_group_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("The Service Engine Group to use for this Virtual Service. Moving to a new SE Group is disruptive to existing connections for this VS. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
network_security_policy_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("Network security policies for the Virtual Service. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
http_policies_item_schema = properties.Schema(
properties.Schema.MAP,
_("HTTP Policies applied on the data traffic of the Virtual Service"),
schema=HTTPPolicies.properties_schema,
required=True,
update_allowed=False,
)
http_policies_schema = properties.Schema(
properties.Schema.LIST,
_("HTTP Policies applied on the data traffic of the Virtual Service"),
schema=http_policies_item_schema,
required=False,
update_allowed=True,
)
dns_policies_item_schema = properties.Schema(
properties.Schema.MAP,
_("(Introduced in: 17.1.1) DNS Policies applied on the dns traffic of the Virtual Service"),
schema=DnsPolicies.properties_schema,
required=True,
update_allowed=False,
)
dns_policies_schema = properties.Schema(
properties.Schema.LIST,
_("(Introduced in: 17.1.1) DNS Policies applied on the dns traffic of the Virtual Service"),
schema=dns_policies_item_schema,
required=False,
update_allowed=True,
)
ssl_key_and_certificate_uuids_item_schema = properties.Schema(
properties.Schema.STRING,
_("Select or create one or two certificates, EC and/or RSA, that will be presented to SSL/TLS terminated connections."),
required=True,
update_allowed=False,
)
ssl_key_and_certificate_uuids_schema = properties.Schema(
properties.Schema.LIST,
_("Select or create one or two certificates, EC and/or RSA, that will be presented to SSL/TLS terminated connections. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
schema=ssl_key_and_certificate_uuids_item_schema,
required=False,
update_allowed=True,
)
ssl_profile_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("Determines the set of SSL versions and ciphers to accept for SSL/TLS terminated connections. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
performance_limits_schema = properties.Schema(
properties.Schema.MAP,
_("Optional settings that determine performance limits like max connections or bandwdith etc."),
schema=PerformanceLimits.properties_schema,
required=False,
update_allowed=True,
)
analytics_policy_schema = properties.Schema(
properties.Schema.MAP,
_("Determines analytics settings for the application."),
schema=AnalyticsPolicy.properties_schema,
required=False,
update_allowed=True,
)
network_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("(Deprecated in: 17.1.1) Manually override the network on which the Virtual Service is placed. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
vrf_context_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("Virtual Routing Context that the Virtual Service is bound to. This is used to provide the isolation of the set of networks the application is attached to. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
enable_autogw_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("Response traffic to clients will be sent back to the source MAC address of the connection, rather than statically sent to a default gateway. (Default: True)"),
required=False,
update_allowed=True,
)
port_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("(Deprecated in: 17.1.1) (internal-use) Network port assigned to the Virtual Service IP address."),
required=False,
update_allowed=True,
)
subnet_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("(Deprecated in: 17.1.1) It represents subnet for the Virtual Service IP address allocation when auto_allocate_ip is True.It is only applicable in OpenStack or AWS cloud. This field is required if auto_allocate_ip is True."),
required=False,
update_allowed=True,
)
analytics_profile_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("Specifies settings related to analytics. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'. (Default: System-Analytics-Profile)"),
required=False,
update_allowed=True,
)
discovered_network_uuid_item_schema = properties.Schema(
properties.Schema.STRING,
_("(Deprecated in: 17.1.1) (internal-use) Discovered networks providing reachability for client facing Virtual Service IP. This field is deprecated."),
required=True,
update_allowed=False,
)
discovered_network_uuid_schema = properties.Schema(
properties.Schema.LIST,
_("(Deprecated in: 17.1.1) (internal-use) Discovered networks providing reachability for client facing Virtual Service IP. This field is deprecated. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
schema=discovered_network_uuid_item_schema,
required=False,
update_allowed=True,
)
discovered_subnet_item_schema = properties.Schema(
properties.Schema.MAP,
_("(Deprecated in: 17.1.1) (internal-use) Discovered subnets providing reachability for client facing Virtual Service IP. This field is deprecated."),
schema=IpAddrPrefix.properties_schema,
required=True,
update_allowed=False,
)
discovered_subnet_schema = properties.Schema(
properties.Schema.LIST,
_("(Deprecated in: 17.1.1) (internal-use) Discovered subnets providing reachability for client facing Virtual Service IP. This field is deprecated."),
schema=discovered_subnet_item_schema,
required=False,
update_allowed=True,
)
host_name_xlate_schema = properties.Schema(
properties.Schema.STRING,
_("Translate the host name sent to the servers to this value. Translate the host name sent from servers back to the value used by the client."),
required=False,
update_allowed=True,
)
subnet_schema = properties.Schema(
properties.Schema.MAP,
_("(Deprecated in: 17.1.1) Subnet providing reachability for client facing Virtual Service IP."),
schema=IpAddrPrefix.properties_schema,
required=False,
update_allowed=True,
)
discovered_networks_item_schema = properties.Schema(
properties.Schema.MAP,
_("(Deprecated in: 17.1.1) (internal-use) Discovered networks providing reachability for client facing Virtual Service IP. This field is used internally by Avi, not editable by the user."),
schema=DiscoveredNetwork.properties_schema,
required=True,
update_allowed=False,
)
discovered_networks_schema = properties.Schema(
properties.Schema.LIST,
_("(Deprecated in: 17.1.1) (internal-use) Discovered networks providing reachability for client facing Virtual Service IP. This field is used internally by Avi, not editable by the user."),
schema=discovered_networks_item_schema,
required=False,
update_allowed=True,
)
vs_datascripts_item_schema = properties.Schema(
properties.Schema.MAP,
_("Datascripts applied on the data traffic of the Virtual Service"),
schema=VSDataScripts.properties_schema,
required=True,
update_allowed=False,
)
vs_datascripts_schema = properties.Schema(
properties.Schema.LIST,
_("Datascripts applied on the data traffic of the Virtual Service"),
schema=vs_datascripts_item_schema,
required=False,
update_allowed=True,
)
client_auth_schema = properties.Schema(
properties.Schema.MAP,
_("HTTP authentication configuration for protected resources."),
schema=HTTPClientAuthenticationParams.properties_schema,
required=False,
update_allowed=True,
)
weight_schema = properties.Schema(
properties.Schema.NUMBER,
_("The Quality of Service weight to assign to traffic transmitted from this Virtual Service. A higher weight will prioritize traffic versus other Virtual Services sharing the same Service Engines. (Default: 1)"),
required=False,
update_allowed=True,
)
delay_fairness_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("Select the algorithm for QoS fairness. This determines how multiple Virtual Services sharing the same Service Engines will prioritize traffic over a congested network. (Default: False)"),
required=False,
update_allowed=True,
)
max_cps_per_client_schema = properties.Schema(
properties.Schema.NUMBER,
_("Maximum connections per second per client IP. (Default: 0)"),
required=False,
update_allowed=True,
)
limit_doser_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("Limit potential DoS attackers who exceed max_cps_per_client significantly to a fraction of max_cps_per_client for a while. (Default: False)"),
required=False,
update_allowed=True,
)
type_schema = properties.Schema(
properties.Schema.STRING,
_("Specify if this is a normal Virtual Service, or if it is the parent or child of an SNI-enabled virtual hosted Virtual Service. (Default: VS_TYPE_NORMAL)"),
required=False,
update_allowed=False,
constraints=[
constraints.AllowedValues(['VS_TYPE_NORMAL', 'VS_TYPE_VH_CHILD', 'VS_TYPE_VH_PARENT']),
],
)
vh_parent_vs_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("Specifies the Virtual Service acting as Virtual Hosting (SNI) parent."),
required=False,
update_allowed=True,
)
vh_domain_name_item_schema = properties.Schema(
properties.Schema.STRING,
_("The exact name requested from the client's SNI-enabled TLS hello domain name field. If this is a match, the parent VS will forward the connection to this child VS."),
required=True,
update_allowed=False,
)
vh_domain_name_schema = properties.Schema(
properties.Schema.LIST,
_("The exact name requested from the client's SNI-enabled TLS hello domain name field. If this is a match, the parent VS will forward the connection to this child VS."),
schema=vh_domain_name_item_schema,
required=False,
update_allowed=True,
)
availability_zone_schema = properties.Schema(
properties.Schema.STRING,
_("(Deprecated in: 17.1.1) Availability-zone to place the Virtual Service."),
required=False,
update_allowed=True,
)
auto_allocate_ip_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("(Deprecated in: 17.1.1) Auto-allocate VIP from the provided subnet."),
required=False,
update_allowed=True,
)
floating_ip_schema = properties.Schema(
properties.Schema.MAP,
_("(Deprecated in: 17.1.1) Floating IP to associate with this Virtual Service."),
schema=IpAddr.properties_schema,
required=False,
update_allowed=True,
)
auto_allocate_floating_ip_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("(Deprecated in: 17.1.1) Auto-allocate floating/elastic IP from the Cloud infrastructure."),
required=False,
update_allowed=True,
)
floating_subnet_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("(Deprecated in: 17.1.1) If auto_allocate_floating_ip is True and more than one floating-ip subnets exist, then the subnet for the floating IP address allocation. This field is applicable only if the VirtualService belongs to an OpenStack or AWS cloud. In OpenStack or AWS cloud it is required when auto_allocate_floating_ip is selected."),
required=False,
update_allowed=True,
)
cloud_type_schema = properties.Schema(
properties.Schema.STRING,
_(" (Default: CLOUD_NONE)"),
required=False,
update_allowed=True,
constraints=[
constraints.AllowedValues(['CLOUD_APIC', 'CLOUD_AWS', 'CLOUD_AZURE', 'CLOUD_DOCKER_UCP', 'CLOUD_LINUXSERVER', 'CLOUD_MESOS', 'CLOUD_NONE', 'CLOUD_OPENSTACK', 'CLOUD_OSHIFT_K8S', 'CLOUD_RANCHER', 'CLOUD_VCA', 'CLOUD_VCENTER']),
],
)
avi_allocated_vip_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("(Deprecated in: 17.1.1) (internal-use) VIP allocated by Avi in the Cloud infrastructure."),
required=False,
update_allowed=True,
)
avi_allocated_fip_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("(Deprecated in: 17.1.1) (internal-use) FIP allocated by Avi in the Cloud infrastructure."),
required=False,
update_allowed=True,
)
connections_rate_limit_schema = properties.Schema(
properties.Schema.MAP,
_("Rate limit the incoming connections to this virtual service"),
schema=RateProfile.properties_schema,
required=False,
update_allowed=True,
)
requests_rate_limit_schema = properties.Schema(
properties.Schema.MAP,
_("Rate limit the incoming requests to this virtual service"),
schema=RateProfile.properties_schema,
required=False,
update_allowed=True,
)
use_bridge_ip_as_vip_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("Use Bridge IP as VIP on each Host in Mesos deployments (Default: False)"),
required=False,
update_allowed=True,
)
flow_dist_schema = properties.Schema(
properties.Schema.STRING,
_("Criteria for flow distribution among SEs. (Default: LOAD_AWARE)"),
required=False,
update_allowed=True,
constraints=[
constraints.AllowedValues(['CONSISTENT_HASH_SOURCE_IP_ADDRESS', 'CONSISTENT_HASH_SOURCE_IP_ADDRESS_AND_PORT', 'LOAD_AWARE']),
],
)
ign_pool_net_reach_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("Ignore Pool servers network reachability constraints for Virtual Service placement. (Default: False)"),
required=False,
update_allowed=True,
)
ssl_sess_cache_avg_size_schema = properties.Schema(
properties.Schema.NUMBER,
_("Expected number of SSL session cache entries (may be exceeded). (Default: 1024)"),
required=False,
update_allowed=True,
)
pool_group_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("The pool group is an object that contains pools. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
remove_listening_port_on_vs_down_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("Remove listening port if VirtualService is down (Default: False)"),
required=False,
update_allowed=True,
)
close_client_conn_on_config_update_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("(Introduced in: 17.2.4) close client connection on vs config update (Default: False)"),
required=False,
update_allowed=True,
)
bulk_sync_kvcache_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("(Introduced in: 18.1.1) (This is a beta feature). Sync Key-Value cache to the new SEs when VS is scaled out. For ex: SSL sessions are stored using VS's Key-Value cache. When the VS is scaled out, the SSL session information is synced to the new SE, allowing existing SSL sessions to be reused on the new SE. (Default: False)"),
required=False,
update_allowed=True,
)
description_schema = properties.Schema(
properties.Schema.STRING,
_(""),
required=False,
update_allowed=True,
)
cloud_uuid_schema = properties.Schema(
properties.Schema.STRING,
_(""),
required=False,
update_allowed=False,
)
east_west_placement_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("Force placement on all SE's in service group (Mesos mode only) (Default: False)"),
required=False,
update_allowed=True,
)
scaleout_ecmp_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("Disable re-distribution of flows across service engines for a virtual service. Enable if the network itself performs flow hashing with ECMP in environments such as GCP (Default: False)"),
required=False,
update_allowed=True,
)
microservice_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("Microservice representing the virtual service You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
service_pool_select_item_schema = properties.Schema(
properties.Schema.MAP,
_("Select pool based on destination port"),
schema=ServicePoolSelector.properties_schema,
required=True,
update_allowed=False,
)
service_pool_select_schema = properties.Schema(
properties.Schema.LIST,
_("Select pool based on destination port"),
schema=service_pool_select_item_schema,
required=False,
update_allowed=True,
)
created_by_schema = properties.Schema(
properties.Schema.STRING,
_("Creator name"),
required=False,
update_allowed=True,
)
cloud_config_cksum_schema = properties.Schema(
properties.Schema.STRING,
_("Checksum of cloud configuration for VS. Internally set by cloud connector"),
required=False,
update_allowed=True,
)
enable_rhi_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("Enable Route Health Injection using the BGP Config in the vrf context"),
required=False,
update_allowed=True,
)
snat_ip_item_schema = properties.Schema(
properties.Schema.MAP,
_("NAT'ted floating source IP Address(es) for upstream connection to servers"),
schema=IpAddr.properties_schema,
required=True,
update_allowed=False,
)
snat_ip_schema = properties.Schema(
properties.Schema.LIST,
_("NAT'ted floating source IP Address(es) for upstream connection to servers"),
schema=snat_ip_item_schema,
required=False,
update_allowed=True,
)
active_standby_se_tag_schema = properties.Schema(
properties.Schema.STRING,
_("This configuration only applies if the VirtualService is in Legacy Active Standby HA mode and Load Distribution among Active Standby is enabled. This field is used to tag the VirtualService so that VirtualServices with the same tag will share the same Active ServiceEngine. VirtualServices with different tags will have different Active ServiceEngines. If one of the ServiceEngine's in the ServiceEngineGroup fails, all VirtualServices will end up using the same Active ServiceEngine. Redistribution of the VirtualServices can be either manual or automated when the failed ServiceEngine recovers. Redistribution is based on the auto redistribute property of the ServiceEngineGroup. (Default: ACTIVE_STANDBY_SE_1)"),
required=False,
update_allowed=True,
constraints=[
constraints.AllowedValues(['ACTIVE_STANDBY_SE_1', 'ACTIVE_STANDBY_SE_2']),
],
)
flow_label_type_schema = properties.Schema(
properties.Schema.STRING,
_("Criteria for flow labelling. (Default: NO_LABEL)"),
required=False,
update_allowed=True,
constraints=[
constraints.AllowedValues(['APPLICATION_LABEL', 'NO_LABEL', 'SERVICE_LABEL']),
],
)
enable_rhi_snat_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("Enable Route Health Injection for Source NAT'ted floating IP Address using the BGP Config in the vrf context"),
required=False,
update_allowed=True,
)
static_dns_records_item_schema = properties.Schema(
properties.Schema.MAP,
_("List of static DNS records applied to this Virtual Service. These are static entries and no health monitoring is performed against the IP addresses."),
schema=DnsRecord.properties_schema,
required=True,
update_allowed=False,
)
static_dns_records_schema = properties.Schema(
properties.Schema.LIST,
_("List of static DNS records applied to this Virtual Service. These are static entries and no health monitoring is performed against the IP addresses."),
schema=static_dns_records_item_schema,
required=False,
update_allowed=True,
)
ipam_network_subnet_schema = properties.Schema(
properties.Schema.MAP,
_("(Deprecated in: 17.1.1) Subnet and/or Network for allocating VirtualService IP by IPAM Provider module."),
schema=IPNetworkSubnet.properties_schema,
required=False,
update_allowed=True,
)
dns_info_item_schema = properties.Schema(
properties.Schema.MAP,
_("Service discovery specific data including fully qualified domain name, type and Time-To-Live of the DNS record. Note that only one of fqdn and dns_info setting is allowed."),
schema=DnsInfo.properties_schema,
required=True,
update_allowed=False,
)
dns_info_schema = properties.Schema(
properties.Schema.LIST,
_("Service discovery specific data including fully qualified domain name, type and Time-To-Live of the DNS record. Note that only one of fqdn and dns_info setting is allowed."),
schema=dns_info_item_schema,
required=False,
update_allowed=True,
)
service_metadata_schema = properties.Schema(
properties.Schema.STRING,
_("Metadata pertaining to the Service provided by this virtual service. In Openshift/Kubernetes environments, egress pod info is stored. Any user input to this field will be overwritten by Avi Vantage."),
required=False,
update_allowed=True,
)
traffic_clone_profile_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.1.1) Server network or list of servers for cloning traffic. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
content_rewrite_schema = properties.Schema(
properties.Schema.MAP,
_("Profile used to match and rewrite strings in request and/or response body."),
schema=ContentRewriteProfile.properties_schema,
required=False,
update_allowed=True,
)
sideband_profile_schema = properties.Schema(
properties.Schema.MAP,
_("Sideband configuration to be used for this virtualservice.It can be used for sending traffic to sideband VIPs for external inspection etc."),
schema=SidebandProfile.properties_schema,
required=False,
update_allowed=True,
)
vip_item_schema = properties.Schema(
properties.Schema.MAP,
_("(Introduced in: 17.1.1) List of Virtual Service IPs. While creating a 'Shared VS',please use vsvip_ref to point to the shared entities."),
schema=Vip.properties_schema,
required=True,
update_allowed=False,
)
vip_schema = properties.Schema(
properties.Schema.LIST,
_("(Introduced in: 17.1.1) List of Virtual Service IPs. While creating a 'Shared VS',please use vsvip_ref to point to the shared entities."),
schema=vip_item_schema,
required=False,
update_allowed=True,
)
nsx_securitygroup_item_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.1.1) A list of NSX Service Groups representing the Clients which can access the Virtual IP of the Virtual Service"),
required=True,
update_allowed=False,
)
nsx_securitygroup_schema = properties.Schema(
properties.Schema.LIST,
_("(Introduced in: 17.1.1) A list of NSX Service Groups representing the Clients which can access the Virtual IP of the Virtual Service"),
schema=nsx_securitygroup_item_schema,
required=False,
update_allowed=True,
)
vsvip_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.1.1) Mostly used during the creation of Shared VS, this field refers to entities that can be shared across Virtual Services. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
waf_policy_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.2.1) WAF policy for the Virtual Service. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
sp_pool_uuids_item_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.2.2) GSLB pools used to manage site-persistence functionality. Each site-persistence pool contains the virtualservices in all the other sites, that is auto-generated by the GSLB manager. This is a read-only field for the user."),
required=True,
update_allowed=False,
)
sp_pool_uuids_schema = properties.Schema(
properties.Schema.LIST,
_("(Introduced in: 17.2.2) GSLB pools used to manage site-persistence functionality. Each site-persistence pool contains the virtualservices in all the other sites, that is auto-generated by the GSLB manager. This is a read-only field for the user. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
schema=sp_pool_uuids_item_schema,
required=False,
update_allowed=False,
)
use_vip_as_snat_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("(Introduced in: 17.1.9,17.2.3) Use the Virtual IP as the SNAT IP for health monitoring and sending traffic to the backend servers instead of the Service Engine interface IP. The caveat of enabling this option is that the VirtualService cannot be configued in an Active-Active HA mode. DNS based Multi VIP solution has to be used for HA & Non-disruptive Upgrade purposes. (Default: False)"),
required=False,
update_allowed=True,
)
error_page_profile_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.2.4) Error Page Profile to be used for this virtualservice.This profile is used to send the custom error page to the client generated by the proxy You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
l4_policies_item_schema = properties.Schema(
properties.Schema.MAP,
_("(Introduced in: 17.2.7) L4 Policies applied to the data traffic of the Virtual Service"),
schema=L4Policies.properties_schema,
required=True,
update_allowed=False,
)
l4_policies_schema = properties.Schema(
properties.Schema.LIST,
_("(Introduced in: 17.2.7) L4 Policies applied to the data traffic of the Virtual Service"),
schema=l4_policies_item_schema,
required=False,
update_allowed=True,
)
traffic_enabled_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("(Introduced in: 17.2.8) Knob to enable the Virtual Service traffic on its assigned service engines. This setting is effective only when the enabled flag is set to True. (Default: True)"),
required=False,
update_allowed=True,
)
apic_contract_graph_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.2.12,18.1.2) The name of the Contract/Graph associated with the Virtual Service. Should be in the <Contract name>:<Graph name> format. This is applicable only for Service Integration mode with Cisco APIC Controller "),
required=False,
update_allowed=True,
)
vsvip_cloud_config_cksum_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.2.9) Checksum of cloud configuration for VsVip. Internally set by cloud connector"),
required=False,
update_allowed=True,
)
azure_availability_set_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.2.12, 18.1.2) (internal-use)Applicable for Azure only. Azure Availability set to which this VS is associated. Internally set by the cloud connector"),
required=False,
update_allowed=False,
)
# properties list
PROPERTIES = (
'avi_version',
'name',
'fqdn',
'ip_address',
'enabled',
'services',
'application_profile_uuid',
'network_profile_uuid',
'server_network_profile_uuid',
'pool_uuid',
'se_group_uuid',
'network_security_policy_uuid',
'http_policies',
'dns_policies',
'ssl_key_and_certificate_uuids',
'ssl_profile_uuid',
'performance_limits',
'analytics_policy',
'network_uuid',
'vrf_context_uuid',
'enable_autogw',
'port_uuid',
'subnet_uuid',
'analytics_profile_uuid',
'discovered_network_uuid',
'discovered_subnet',
'host_name_xlate',
'subnet',
'discovered_networks',
'vs_datascripts',
'client_auth',
'weight',
'delay_fairness',
'max_cps_per_client',
'limit_doser',
'type',
'vh_parent_vs_uuid',
'vh_domain_name',
'availability_zone',
'auto_allocate_ip',
'floating_ip',
'auto_allocate_floating_ip',
'floating_subnet_uuid',
'cloud_type',
'avi_allocated_vip',
'avi_allocated_fip',
'connections_rate_limit',
'requests_rate_limit',
'use_bridge_ip_as_vip',
'flow_dist',
'ign_pool_net_reach',
'ssl_sess_cache_avg_size',
'pool_group_uuid',
'remove_listening_port_on_vs_down',
'close_client_conn_on_config_update',
'bulk_sync_kvcache',
'description',
'cloud_uuid',
'east_west_placement',
'scaleout_ecmp',
'microservice_uuid',
'service_pool_select',
'created_by',
'cloud_config_cksum',
'enable_rhi',
'snat_ip',
'active_standby_se_tag',
'flow_label_type',
'enable_rhi_snat',
'static_dns_records',
'ipam_network_subnet',
'dns_info',
'service_metadata',
'traffic_clone_profile_uuid',
'content_rewrite',
'sideband_profile',
'vip',
'nsx_securitygroup',
'vsvip_uuid',
'waf_policy_uuid',
'sp_pool_uuids',
'use_vip_as_snat',
'error_page_profile_uuid',
'l4_policies',
'traffic_enabled',
'apic_contract_graph',
'vsvip_cloud_config_cksum',
'azure_availability_set',
)
# mapping of properties to their schemas
properties_schema = {
'avi_version': avi_version_schema,
'name': name_schema,
'fqdn': fqdn_schema,
'ip_address': ip_address_schema,
'enabled': enabled_schema,
'services': services_schema,
'application_profile_uuid': application_profile_uuid_schema,
'network_profile_uuid': network_profile_uuid_schema,
'server_network_profile_uuid': server_network_profile_uuid_schema,
'pool_uuid': pool_uuid_schema,
'se_group_uuid': se_group_uuid_schema,
'network_security_policy_uuid': network_security_policy_uuid_schema,
'http_policies': http_policies_schema,
'dns_policies': dns_policies_schema,
'ssl_key_and_certificate_uuids': ssl_key_and_certificate_uuids_schema,
'ssl_profile_uuid': ssl_profile_uuid_schema,
'performance_limits': performance_limits_schema,
'analytics_policy': analytics_policy_schema,
'network_uuid': network_uuid_schema,
'vrf_context_uuid': vrf_context_uuid_schema,
'enable_autogw': enable_autogw_schema,
'port_uuid': port_uuid_schema,
'subnet_uuid': subnet_uuid_schema,
'analytics_profile_uuid': analytics_profile_uuid_schema,
'discovered_network_uuid': discovered_network_uuid_schema,
'discovered_subnet': discovered_subnet_schema,
'host_name_xlate': host_name_xlate_schema,
'subnet': subnet_schema,
'discovered_networks': discovered_networks_schema,
'vs_datascripts': vs_datascripts_schema,
'client_auth': client_auth_schema,
'weight': weight_schema,
'delay_fairness': delay_fairness_schema,
'max_cps_per_client': max_cps_per_client_schema,
'limit_doser': limit_doser_schema,
'type': type_schema,
'vh_parent_vs_uuid': vh_parent_vs_uuid_schema,
'vh_domain_name': vh_domain_name_schema,
'availability_zone': availability_zone_schema,
'auto_allocate_ip': auto_allocate_ip_schema,
'floating_ip': floating_ip_schema,
'auto_allocate_floating_ip': auto_allocate_floating_ip_schema,
'floating_subnet_uuid': floating_subnet_uuid_schema,
'cloud_type': cloud_type_schema,
'avi_allocated_vip': avi_allocated_vip_schema,
'avi_allocated_fip': avi_allocated_fip_schema,
'connections_rate_limit': connections_rate_limit_schema,
'requests_rate_limit': requests_rate_limit_schema,
'use_bridge_ip_as_vip': use_bridge_ip_as_vip_schema,
'flow_dist': flow_dist_schema,
'ign_pool_net_reach': ign_pool_net_reach_schema,
'ssl_sess_cache_avg_size': ssl_sess_cache_avg_size_schema,
'pool_group_uuid': pool_group_uuid_schema,
'remove_listening_port_on_vs_down': remove_listening_port_on_vs_down_schema,
'close_client_conn_on_config_update': close_client_conn_on_config_update_schema,
'bulk_sync_kvcache': bulk_sync_kvcache_schema,
'description': description_schema,
'cloud_uuid': cloud_uuid_schema,
'east_west_placement': east_west_placement_schema,
'scaleout_ecmp': scaleout_ecmp_schema,
'microservice_uuid': microservice_uuid_schema,
'service_pool_select': service_pool_select_schema,
'created_by': created_by_schema,
'cloud_config_cksum': cloud_config_cksum_schema,
'enable_rhi': enable_rhi_schema,
'snat_ip': snat_ip_schema,
'active_standby_se_tag': active_standby_se_tag_schema,
'flow_label_type': flow_label_type_schema,
'enable_rhi_snat': enable_rhi_snat_schema,
'static_dns_records': static_dns_records_schema,
'ipam_network_subnet': ipam_network_subnet_schema,
'dns_info': dns_info_schema,
'service_metadata': service_metadata_schema,
'traffic_clone_profile_uuid': traffic_clone_profile_uuid_schema,
'content_rewrite': content_rewrite_schema,
'sideband_profile': sideband_profile_schema,
'vip': vip_schema,
'nsx_securitygroup': nsx_securitygroup_schema,
'vsvip_uuid': vsvip_uuid_schema,
'waf_policy_uuid': waf_policy_uuid_schema,
'sp_pool_uuids': sp_pool_uuids_schema,
'use_vip_as_snat': use_vip_as_snat_schema,
'error_page_profile_uuid': error_page_profile_uuid_schema,
'l4_policies': l4_policies_schema,
'traffic_enabled': traffic_enabled_schema,
'apic_contract_graph': apic_contract_graph_schema,
'vsvip_cloud_config_cksum': vsvip_cloud_config_cksum_schema,
'azure_availability_set': azure_availability_set_schema,
}
# for supporting get_avi_uuid_by_name functionality
field_references = {
'client_auth': getattr(HTTPClientAuthenticationParams, 'field_references', {}),
'network_uuid': 'network',
'network_profile_uuid': 'networkprofile',
'dns_info': getattr(DnsInfo, 'field_references', {}),
'vs_datascripts': getattr(VSDataScripts, 'field_references', {}),
'content_rewrite': getattr(ContentRewriteProfile, 'field_references', {}),
'vip': getattr(Vip, 'field_references', {}),
'snat_ip': getattr(IpAddr, 'field_references', {}),
'waf_policy_uuid': 'wafpolicy',
'discovered_network_uuid': 'network',
'sideband_profile': getattr(SidebandProfile, 'field_references', {}),
'vrf_context_uuid': 'vrfcontext',
'subnet': getattr(IpAddrPrefix, 'field_references', {}),
'vsvip_uuid': 'vsvip',
'sp_pool_uuids': 'pool',
'ssl_profile_uuid': 'sslprofile',
'error_page_profile_uuid': 'errorpageprofile',
'traffic_clone_profile_uuid': 'trafficcloneprofile',
'se_group_uuid': 'serviceenginegroup',
'l4_policies': getattr(L4Policies, 'field_references', {}),
'requests_rate_limit': getattr(RateProfile, 'field_references', {}),
'application_profile_uuid': 'applicationprofile',
'pool_group_uuid': 'poolgroup',
'analytics_profile_uuid': 'analyticsprofile',
'performance_limits': getattr(PerformanceLimits, 'field_references', {}),
'http_policies': getattr(HTTPPolicies, 'field_references', {}),
'server_network_profile_uuid': 'networkprofile',
'floating_ip': getattr(IpAddr, 'field_references', {}),
'microservice_uuid': 'microservice',
'services': getattr(Service, 'field_references', {}),
'connections_rate_limit': getattr(RateProfile, 'field_references', {}),
'ip_address': getattr(IpAddr, 'field_references', {}),
'service_pool_select': getattr(ServicePoolSelector, 'field_references', {}),
'network_security_policy_uuid': 'networksecuritypolicy',
'discovered_networks': getattr(DiscoveredNetwork, 'field_references', {}),
'ssl_key_and_certificate_uuids': 'sslkeyandcertificate',
'ipam_network_subnet': getattr(IPNetworkSubnet, 'field_references', {}),
'discovered_subnet': getattr(IpAddrPrefix, 'field_references', {}),
'dns_policies': getattr(DnsPolicies, 'field_references', {}),
'static_dns_records': getattr(DnsRecord, 'field_references', {}),
'analytics_policy': getattr(AnalyticsPolicy, 'field_references', {}),
'pool_uuid': 'pool',
}
unique_keys = {
'client_auth': getattr(HTTPClientAuthenticationParams, 'unique_keys', {}),
'vs_datascripts': getattr(VSDataScripts, 'unique_keys', {}),
'content_rewrite': getattr(ContentRewriteProfile, 'unique_keys', {}),
'vip': getattr(Vip, 'unique_keys', {}),
'static_dns_records': getattr(DnsRecord, 'unique_keys', {}),
'sideband_profile': getattr(SidebandProfile, 'unique_keys', {}),
'requests_rate_limit': getattr(RateProfile, 'unique_keys', {}),
'subnet': getattr(IpAddrPrefix, 'unique_keys', {}),
'l4_policies': getattr(L4Policies, 'unique_keys', {}),
'performance_limits': getattr(PerformanceLimits, 'unique_keys', {}),
'http_policies': getattr(HTTPPolicies, 'unique_keys', {}),
'floating_ip': getattr(IpAddr, 'unique_keys', {}),
'services': getattr(Service, 'unique_keys', {}),
'connections_rate_limit': getattr(RateProfile, 'unique_keys', {}),
'ip_address': getattr(IpAddr, 'unique_keys', {}),
'service_pool_select': getattr(ServicePoolSelector, 'unique_keys', {}),
'discovered_networks': getattr(DiscoveredNetwork, 'unique_keys', {}),
'dns_info': getattr(DnsInfo, 'unique_keys', {}),
'ipam_network_subnet': getattr(IPNetworkSubnet, 'unique_keys', {}),
'discovered_subnet': getattr(IpAddrPrefix, 'unique_keys', {}),
'dns_policies': getattr(DnsPolicies, 'unique_keys', {}),
'snat_ip': getattr(IpAddr, 'unique_keys', {}),
'analytics_policy': getattr(AnalyticsPolicy, 'unique_keys', {}),
}
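# Editor's note: a hedged, minimal Heat snippet for this resource. It
# assumes the plugin is registered via resource_mapping() below and that a
# pool and a VsVip named 'web-pool' and 'web-vsvip' already exist; all
# values are illustrative, not defaults:
#
#   my_vs:
#     type: Avi::LBaaS::VirtualService
#     properties:
#       name: web-vs
#       services: [{port: 443, enable_ssl: true}]
#       pool_uuid: 'get_avi_uuid_by_name:web-pool'
#       vsvip_uuid: 'get_avi_uuid_by_name:web-vsvip'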
class VsVip(AviResource):
resource_name = "vsvip"
# all schemas
avi_version_schema = properties.Schema(
properties.Schema.STRING,
_("Avi Version to use for the object. Default is 16.4.2. If you plan to use any fields introduced after 16.4.2, then this needs to be explicitly set."),
required=False,
update_allowed=True,
)
name_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.1.1) Name for the VsVip object."),
required=True,
update_allowed=True,
)
vip_item_schema = properties.Schema(
properties.Schema.MAP,
_("(Introduced in: 17.1.1) List of Virtual Service IPs and other shareable entities."),
schema=Vip.properties_schema,
required=True,
update_allowed=False,
)
vip_schema = properties.Schema(
properties.Schema.LIST,
_("(Introduced in: 17.1.1) List of Virtual Service IPs and other shareable entities."),
schema=vip_item_schema,
required=False,
update_allowed=True,
)
dns_info_item_schema = properties.Schema(
properties.Schema.MAP,
_("(Introduced in: 17.1.1) Service discovery specific data including fully qualified domain name, type and Time-To-Live of the DNS record."),
schema=DnsInfo.properties_schema,
required=True,
update_allowed=False,
)
dns_info_schema = properties.Schema(
properties.Schema.LIST,
_("(Introduced in: 17.1.1) Service discovery specific data including fully qualified domain name, type and Time-To-Live of the DNS record."),
schema=dns_info_item_schema,
required=False,
update_allowed=True,
)
vrf_context_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.1.1) Virtual Routing Context that the Virtual Service is bound to. This is used to provide the isolation of the set of networks the application is attached to. You can either provide UUID or provide a name with the prefix 'get_avi_uuid_by_name:', e.g., 'get_avi_uuid_by_name:my_obj_name'."),
required=False,
update_allowed=True,
)
east_west_placement_schema = properties.Schema(
properties.Schema.BOOLEAN,
_("(Introduced in: 17.1.1) Force placement on all Service Engines in the Service Engine Group (Container clouds only) (Default: False)"),
required=False,
update_allowed=True,
)
cloud_uuid_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.1.1) "),
required=False,
update_allowed=False,
)
vsvip_cloud_config_cksum_schema = properties.Schema(
properties.Schema.STRING,
_("(Introduced in: 17.2.9) Checksum of cloud configuration for VsVip. Internally set by cloud connector"),
required=False,
update_allowed=True,
)
# properties list
PROPERTIES = (
'avi_version',
'name',
'vip',
'dns_info',
'vrf_context_uuid',
'east_west_placement',
'cloud_uuid',
'vsvip_cloud_config_cksum',
)
# mapping of properties to their schemas
properties_schema = {
'avi_version': avi_version_schema,
'name': name_schema,
'vip': vip_schema,
'dns_info': dns_info_schema,
'vrf_context_uuid': vrf_context_uuid_schema,
'east_west_placement': east_west_placement_schema,
'cloud_uuid': cloud_uuid_schema,
'vsvip_cloud_config_cksum': vsvip_cloud_config_cksum_schema,
}
# for supporting get_avi_uuid_by_name functionality
field_references = {
'vrf_context_uuid': 'vrfcontext',
'vip': getattr(Vip, 'field_references', {}),
'dns_info': getattr(DnsInfo, 'field_references', {}),
}
unique_keys = {
'vip': getattr(Vip, 'unique_keys', {}),
'dns_info': getattr(DnsInfo, 'unique_keys', {}),
}
def resource_mapping():
return {
'Avi::LBaaS::VsApicExtension': VsApicExtension,
'Avi::LBaaS::VirtualService': VirtualService,
'Avi::LBaaS::VsVip': VsVip,
}
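# Illustrative only (not part of the generated module): once this mapping is
# registered with Heat, a template can instantiate the mapped types, e.g.
#   resources:
#     my_vsvip:
#       type: Avi::LBaaS::VsVip
#       properties:
#         name: vip-1
#         vrf_context_uuid: 'get_avi_uuid_by_name:my-vrf'
# Property names follow the PROPERTIES tuple above; 'my-vrf' is a made-up name.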
| apache-2.0 | -6,141,921,522,261,341,000 | 38.296812 | 726 | 0.634036 | false | 3.9318 | false | false | false |
ikoryakovskiy/grlcfg | divyam_leosim_rl_sym_full.py | 1 | 6605 | from __future__ import division
import multiprocessing
import os
import os.path
import sys
import yaml, collections
import numpy as np
from time import sleep
import math
import argparse
import itertools
counter = None
counter_lock = multiprocessing.Lock()
proc_per_processor = 0
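# These module-level globals are re-populated inside every worker process by
# multiprocessing.Pool(initializer=init, ...); see do_multiprocessing_pool below.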
def flatten(x):
if isinstance(x, collections.Iterable):
return [a for i in x for a in flatten(i)]
else:
return [x]
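# Illustrative note (not in the original): flatten([(1, (2, 3)), [4]]) returns
# [1, 2, 3, 4]. Strings are Iterable too, so passing one would recurse forever;
# the callers below only pass tuples/lists of numbers.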
def main():
# parse arguments
parser = argparse.ArgumentParser(description="Parser")
parser.add_argument('-c', '--cores', type=int, help='specify maximum number of cores')
args = parser.parse_args()
if args.cores:
args.cores = min(multiprocessing.cpu_count(), args.cores)
else:
args.cores = min(multiprocessing.cpu_count(), 32)
print 'Using {} cores.'.format(args.cores)
prepare_multiprocessing()
# for walking with yaml files
_mapping_tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG
yaml.add_representer(collections.OrderedDict, dict_representer)
yaml.add_constructor(_mapping_tag, dict_constructor)
# Parameters
runs = range(30)
options = []
for r in itertools.product(runs): options.append(r)
options = [flatten(tupl) for tupl in options]
# Main
rl_run_param(args, ["leo/leosim_rl_sym_full/leosim_sarsa_walk_egreedy.yaml", "leo/leosim_rl_sym_full/leosim_sarsa_walk_ou.yaml"], options)
######################################################################################
def rl_run_param(args, list_of_cfgs, options):
list_of_new_cfgs = []
loc = "tmp"
if not os.path.exists(loc):
os.makedirs(loc)
for cfg in list_of_cfgs:
conf = read_cfg(cfg)
# after reading cfg can do anything with the name
fname, fext = os.path.splitext( cfg.replace("/", "_") )
for o in options:
str_o = "-".join(map(lambda x : "{:05d}".format(int(round(10000*x))), o[:-1])) # last element in 'o' is reserved for mp
str_o += "mp{}".format(o[-1])
print "Generating parameters: {}".format(str_o)
# create local filename
list_of_new_cfgs.append( "{}/{}-{}{}".format(loc, fname, str_o, fext) )
# modify options
conf['experiment']['output'] = "{}-{}".format(fname, str_o)
if "exporter" in conf['experiment']['environment']:
conf['experiment']['environment']['exporter']['file'] = "{}-{}".format(fname, str_o)
conf = remove_viz(conf)
write_cfg(list_of_new_cfgs[-1], conf)
#print list_of_new_cfgs
do_multiprocessing_pool(args, list_of_new_cfgs)
######################################################################################
def mp_run(cfg):
# Multiple copies can be run on one computer at the same time, which results in the same seed for a random generator.
# Thus we need to wait for a second or so between runs
global counter
global proc_per_processor
with counter_lock:
wait = counter.value
counter.value += 2
# wait for the specified number of seconds
#print 'floor {0}'.format(math.floor(wait / multiprocessing.cpu_count()))
#wait = wait % multiprocessing.cpu_count() + (1.0/proc_per_processor.value) * math.floor(wait / multiprocessing.cpu_count())
#print 'wait {0}'.format(wait)
sleep(wait)
print 'wait finished {0}'.format(wait)
# Run the experiment
code = os.system('./grld %s' % cfg)
if not code == 0:
errorString = "Exit code is '{0}' ({1})".format(code, cfg)
print errorString
f = open("bailing.out", "a")
try:
f.write(errorString + "\n")
finally:
f.close()
######################################################################################
def init(cnt, num):
''' store the counter for later use '''
global counter
global proc_per_processor
counter = cnt
proc_per_processor = num
######################################################################################
def do_multiprocessing_pool(args, list_of_new_cfgs):
"""Do multiprocesing"""
counter = multiprocessing.Value('i', 0)
proc_per_processor = multiprocessing.Value('d', math.ceil(len(list_of_new_cfgs)/args.cores))
print 'proc_per_processor {0}'.format(proc_per_processor.value)
pool = multiprocessing.Pool(args.cores, initializer = init, initargs = (counter, proc_per_processor))
pool.map(mp_run, list_of_new_cfgs)
pool.close()
######################################################################################
def prepare_multiprocessing():
# clean bailing.out file
f = open("bailing.out", "w")
f.close()
######################################################################################
def read_cfg(cfg):
"""Read configuration file"""
# check if file exists
yfile = '../qt-build/cfg/%s' % cfg
if os.path.isfile(yfile) == False:
print 'File %s not found' % yfile
sys.exit()
# open configuration
stream = file(yfile, 'r')
conf = yaml.load(stream)
stream.close()
return conf
######################################################################################
def write_cfg(outCfg, conf):
"""Write configuration file"""
# create local yaml configuration file
outfile = file(outCfg, 'w')
yaml.dump(conf, outfile)
outfile.close()
######################################################################################
def remove_viz(conf):
"""Remove everything in conf related to visualization"""
if "visualize" in conf['experiment']['environment']:
conf['experiment']['environment']['visualize'] = 0
if "target_env" in conf['experiment']['environment']:
if "visualize" in conf['experiment']['environment']['target_env']:
conf['experiment']['environment']['target_env']['visualize'] = 0
if "visualizer" in conf:
del conf["visualizer"]
if "visualization" in conf:
del conf["visualization"]
if "visualization2" in conf:
del conf["visualization2"]
return conf
######################################################################################
def dict_representer(dumper, data):
return dumper.represent_dict(data.iteritems())
######################################################################################
def dict_constructor(loader, node):
return collections.OrderedDict(loader.construct_pairs(node))
######################################################################################
if __name__ == "__main__":
main()
| gpl-3.0 | 2,749,423,586,040,896,000 | 34.896739 | 142 | 0.539894 | false | 4.13588 | false | false | false |
hbp-brain-charting/public_protocols | rsvp_language/rsvp_language_protocol/langexpy_script/instdisplay.py | 1 | 4700 | # -*- coding: utf-8 -*-
import os
import numpy as np
import pandas as pd
from confparser import load_config
from expyriment import stimuli, misc
def launch_instructions(instructions_ini, exp):
# Select .ini file for instructions
setting = load_config(instructions_ini)
# Define the pathway of the instructions file
instructions_fname = ''.join((setting["inst_filename"], ".csv"))
instructions_dir = os.path.abspath((setting["inputs_dir"]))
instructions_path = os.path.join(instructions_dir, instructions_fname)
# Generate a dataframe containing the instructions
df_inst = pd.read_csv(instructions_path, sep='|')
# Convert the dataframe into a list
instructions = df_inst.values.tolist()
# Convert each element of the dataframe into a string
instructions = [[''.join(instructions[i][j])
for j in np.arange(len(instructions[i]))]
for i in np.arange(len(df_inst))]
# Initialization of variable containing the value of the key pressed
found_key = 0
response_key = 0
# While "h" key to return to main menu is not pressed...
while not (found_key == misc.constants.K_h or response_key == 'h'):
# Read the instructions file, line by line
ldx = 0
while ldx < len(instructions):
line = instructions[ldx]
# ... and item by item
for word in line:
# For lines with one item
if word in ("no_item", "no_probe", "fdbk_yes",
"fdbk_no"):
pass
# For lines corresponding to the examples, i.e. containing
# more than one item
else:
text_display = stimuli.TextBox(
word.decode('utf-8'),
map(int, setting["box_size"]),
position=map(int, setting["box_position"]),
text_size=setting["txtsize"],
text_colour=map(int, setting["txtcolour"]))
text_display.present()
exp.clock.wait(300)
# Check whether "h" key was pressed
found_key = exp.keyboard.check([misc.constants.K_h])
# If yes, breaks the loop
if found_key == misc.constants.K_h:
break
# If "h" key was pressed during the presentation of the example,
# it breaks the loop and return to main menu
if found_key == misc.constants.K_h:
break
            # After the last word of a sentence example is displayed,
            # go straight to the next line of instructions
elif line[-1] not in ("no_item", "fdbk_yes", "fdbk_no"):
exp.clock.wait(300)
# Waits for the participant's response and gives feedback whether
# the answer was correct or not
elif line[-1] in ("fdbk_yes", "fdbk_no"):
response_key, _ = exp.keyboard.wait_char([setting["YES"],
setting["NO"], 'h'])
if response_key == 'h':
break
elif ((response_key == setting["YES"] and
line[-1] == "fdbk_yes") or
(response_key == setting["NO"] and
line[-1] == "fdbk_no")):
message_display = stimuli.TextLine(
"Correct!", text_size=setting["txtsize"],
text_colour=(0, 204, 0))
message_display.present()
exp.clock.wait(2000)
else:
message_display = stimuli.TextLine(
"Incorrect!", text_size=setting["txtsize"],
text_colour=(255, 0, 0))
message_display.present()
exp.clock.wait(2000)
            # Checks whether "ENTER", "LEFT" or "h" key was pressed.
            # If "ENTER", goes to the next line;
            # if "LEFT", goes to the previous slide;
            # if "h", returns to main menu.
else:
found_key, _ = exp.keyboard.wait([misc.constants.K_RETURN,
misc.constants.K_LEFT,
misc.constants.K_h])
if found_key == misc.constants.K_LEFT:
ldx = ldx - 2
if ldx < 0:
ldx = -1
elif found_key == misc.constants.K_h:
break
ldx = ldx + 1
| bsd-3-clause | -2,440,997,673,011,545,000 | 45.534653 | 78 | 0.494894 | false | 4.567541 | false | false | false |
larsimmisch/capisuite | scons-tools/filesubst.py | 1 | 1231 | # -*- python -*-
"""
File-Content Substitution builder for SCons
"""
__author__ = "Hartmut Goebel <[email protected]>"
import os, re
import SCons
def _action(target, source, env):
def _substitute(matchobj, env=env):
sym = matchobj.group(1)
try:
return env.subst(str(env[sym]))
except: # TypeError: # sym not a string
txt = matchobj.group(0) # the string matched
print 'Not substituting', txt
return txt
delim = re.escape(env['FILESUBSTDELIM'])
# compile a non-greedy pattern
subst_pattern = re.compile('%s(.*?)%s' % (delim, delim))
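    # Illustrative (comment added for clarity, not in the original): with the
    # default FILESUBSTDELIM '@', a template line 'prefix = @PREFIX@' becomes
    # 'prefix = /usr/local' when env['PREFIX'] is set; unknown symbols are
    # reported and left untouched by _substitute above.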
for t, s in zip(target, source):
t = str(t)
s = s.rstr()
text = open(s, 'rb').read()
text = subst_pattern.sub(_substitute, text)
open(t, 'wb').write(text)
os.chmod(t, os.stat(s)[0])
return None
def _strfunc(target, source, env):
return "generating '%s' from '%s'" % (target[0], source[0])
_builder = SCons.Builder.Builder(
action = SCons.Action.Action(_action, _strfunc),
src_suffix = '.in',
)
def generate(env):
env['BUILDERS']['FileSubst'] = _builder
env['FILESUBSTDELIM'] = '@'
def exists(env):
return 1
| gpl-2.0 | -7,621,911,325,678,123,000 | 25.191489 | 63 | 0.581641 | false | 3.265252 | false | false | false |
dknlght/dkodi | src/plugin.video.DKEvents/default.py | 1 | 5113 | import httplib
import urllib,urllib2,re,sys
import cookielib,os,string,cookielib,StringIO,gzip
import os,time,base64,logging
from t0mm0.common.net import Net
import xml.dom.minidom
import xbmcaddon,xbmcplugin,xbmcgui
from xml.dom.minidom import Document
__settings__ = xbmcaddon.Addon(id='plugin.video.DKEvents')
home = __settings__.getAddonInfo('path')
filename = xbmc.translatePath(os.path.join(home, 'resources', 'DKEvents.xml'))
def SearchXml(SearchText):
if os.path.isfile(filename)==False:
BuildXMl()
f = open(filename, "r")
text = f.read()
if SearchText=='-1':
match=re.compile('<movie name="[^A-Za-z](.+?)" url="(.+?)" year="(.+?)"/>', re.IGNORECASE).findall(text)
SearchText=""
else:
match=re.compile('<movie name="' + SearchText + '(.+?)" url="(.+?)" year="(.+?)"/>', re.IGNORECASE).findall(text)
for i in range(len(match)):
(mName,mNumber,vyear)=match[i]
addDir(SearchText+mName,mNumber,6,"")
def ParseXml(tagname):
f = open(filename, "r")
text = f.read()
xmlcontent=xml.dom.minidom.parseString(text)
items=xmlcontent.getElementsByTagName('channel')
print "calling " + tagname
for channelitem in items:
if(len(channelitem.getElementsByTagName('item'))>=1 and channelitem.getElementsByTagName('name')[0].childNodes[0].data==tagname):
chitems = channelitem.getElementsByTagName('item')
for itemXML in chitems:
vname=itemXML.getElementsByTagName('title')[0].childNodes[0].data.strip()
vurl=itemXML.getElementsByTagName('link')[0].childNodes[0].data.strip()
vimg=itemXML.getElementsByTagName('thumbnail')[0].childNodes[0].data.strip()
addLink(vname,vurl,3,vimg)
def GetXMLChannel():
f = open(filename, "r")
text = f.read()
xmlcontent=xml.dom.minidom.parseString(text)
items=xmlcontent.getElementsByTagName('channel')
for channelitem in items:
vname=channelitem.getElementsByTagName('name')[0].childNodes[0].data.strip()
addDir(vname,"",2,"")
def playVideo(url):
xbmcPlayer = xbmc.Player()
xbmcPlayer.play(url)
def addLink(name,url,mode,iconimage):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultVideo.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name } )
contextMenuItems = []
liz.addContextMenuItems(contextMenuItems, replaceItems=True)
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz)
return ok
def addNext(formvar,url,mode,iconimage):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&formvar="+str(formvar)+"&name="+urllib.quote_plus('Next >')
ok=True
liz=xbmcgui.ListItem('Next >', iconImage="DefaultVideo.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": 'Next >' } )
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
return ok
def addDir(name,url,mode,iconimage):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultVideo.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name } )
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
return ok
def get_params():
param=[]
paramstring=sys.argv[2]
if len(paramstring)>=2:
params=sys.argv[2]
cleanedparams=params.replace('?','')
if (params[len(params)-1]=='/'):
params=params[0:len(params)-2]
pairsofparams=cleanedparams.split('&')
param={}
for i in range(len(pairsofparams)):
splitparams={}
splitparams=pairsofparams[i].split('=')
if (len(splitparams))==2:
param[splitparams[0]]=splitparams[1]
return param
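# Illustrative (not in the original): for sys.argv[2] == '?url=http%3A%2F%2Fx&mode=3&name=Foo',
# get_params() returns {'url': 'http%3A%2F%2Fx', 'mode': '3', 'name': 'Foo'};
# the values are unquoted below via urllib.unquote_plus.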
params=get_params()
url=None
name=None
mode=None
formvar=None
try:
url=urllib.unquote_plus(params["url"])
except:
pass
try:
name=urllib.unquote_plus(params["name"])
except:
pass
try:
mode=int(params["mode"])
except:
pass
try:
formvar=int(params["formvar"])
except:
pass
sysarg=str(sys.argv[1])
if mode==None:
GetXMLChannel()
elif mode==2:
ParseXml(name)
elif mode==3:
playVideo(url)
xbmcplugin.endOfDirectory(int(sysarg))
| gpl-2.0 | 7,934,995,466,692,362,000 | 35.595588 | 145 | 0.582241 | false | 3.59817 | false | false | false |
paulorauber/rl | examples/grid_obstacles.py | 1 | 4292 | #!/usr/bin/python3
import numpy as np
from learning.model_free import Problem
from learning.model_free import sarsa
from learning.model_free import qlearning
from learning.model_free import mc_value_iteration
from learning.model_free import sarsa_lambda
from learning.model_free import q_lambda
from learning.model_building import dyna_q_learning
from learning.model_building import dyna_q_learning_last_visit
from learning.model_building import dyna_q_learning_stochastic
class GridObstacles(Problem):
def __init__(self):
self.m = 6
self.n = 9
self.obstacles = np.zeros((self.m, self.n), dtype=np.int)
self.obstacles[1:4, 2] = 1.0
self.obstacles[4, 5] = 1.0
self.obstacles[0:3, 7] = 1.0
self.start = self.coord_to_state(2, 0)
self.goal = self.coord_to_state(0, 8)
self.init_actions()
Problem.__init__(self, self.m * self.n, 4)
def sample_initial_state(self):
return self.start
def init_actions(self):
self._actions = []
for s in range(self.m * self.n):
s_actions = []
i, j = self.state_to_coord(s)
if self.valid_coord(i + 1, j):
s_actions.append(0)
if self.valid_coord(i - 1, j):
s_actions.append(1)
if self.valid_coord(i, j + 1):
s_actions.append(2)
if self.valid_coord(i, j - 1):
s_actions.append(3)
self._actions.append(s_actions)
self._action_offsets = [(1, 0), (-1, 0), (0, 1), (0, -1)]
def actions(self, s):
return self._actions[s]
def state_reward(self, s, a):
if a not in self._actions[s]:
raise Exception('State {0} does not allow action {1}'.format(s, a))
i, j = self.state_to_coord(s)
di, dj = self._action_offsets[a]
nexti, nextj = i + di, j + dj
nexts = self.coord_to_state(nexti, nextj)
if not self.is_final(s) and self.is_final(nexts):
return (nexts, 1.0)
else:
return (nexts, 0.0)
def is_final(self, s):
return s == self.goal
def state_to_coord(self, s):
        return (s // self.n, s % self.n)
def coord_to_state(self, i, j):
return i * self.n + j
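    # Illustrative (not in the original): with n = 9 columns, the start cell
    # (2, 0) maps to state 2*9 + 0 == 18, and state_to_coord(18) == (2, 0).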
def valid_coord(self, i, j):
return i >= 0 and i < self.m \
and j >= 0 and j < self.n \
and not self.obstacles[i, j]
def print_policy(self, pi):
pi = pi.reshape((self.m, self.n))
actions = ['v', '^', '>', '<']
for i in range(self.m):
for j in range(self.n):
                if self.is_final(self.coord_to_state(i, j)):
                    print("*", end=' ')
                elif self.start == self.coord_to_state(i, j):
                    print("*", end=' ')
                elif self.obstacles[i, j]:
                    print("-", end=' ')
                else:
                    print(actions[pi[i, j]], end=' ')
print('')
def print_values(self, v):
np.set_printoptions(precision=2)
print(v.reshape((self.m, self.n)))
def main():
problem = GridObstacles()
pi, v = sarsa(problem, 1000, epsilon=0.1, alpha=0.1, gamma=1.0)
problem.print_policy(pi)
problem.print_values(v)
pi, v = qlearning(problem, 1000, epsilon=0.1, alpha=0.1, gamma=1.0)
problem.print_policy(pi)
problem.print_values(v)
pi, v = sarsa_lambda(problem, 1000, epsilon=0.1, alpha=0.1, gamma=1.0)
problem.print_policy(pi)
problem.print_values(v)
pi, v = q_lambda(problem, 1000, epsilon=0.1, alpha=0.1, gamma=1.0)
problem.print_policy(pi)
problem.print_values(v)
pi, v = mc_value_iteration(problem, 1000, 1000, 0.2)
problem.print_policy(pi)
problem.print_values(v)
pi, v = dyna_q_learning(problem, 30, 50, epsilon=0.1, alpha=0.1, gamma=0.9)
problem.print_policy(pi)
problem.print_values(v)
pi, v = dyna_q_learning_last_visit(
problem, 30, 50, epsilon=0.1, alpha=0.1, gamma=0.9, kappa=0.00)
problem.print_policy(pi)
problem.print_values(v)
pi, v = dyna_q_learning_stochastic(
problem, 30, 50, epsilon=0.1, alpha=0.1, gamma=0.9)
problem.print_policy(pi)
problem.print_values(v)
if __name__ == "__main__":
main()
| mit | 111,703,887,809,199,260 | 26.512821 | 79 | 0.558714 | false | 3.08777 | false | false | false |
sbranko/Resp1 | IIS_NagiosParser.py | 2 | 2307 | import re,os.path,sys,datetime
#Time
t1 = '{0:%y%m%d}'.format(datetime.datetime.now())
#Files and search string in HTTP request
tmpFile = 'C:\\log\\tmpCounter'+ t1 + '.txt'
logfile = 'C:\\inetpub\\logs\\LogFiles\\W3SVC1\\u_ex' + t1 + '_x.log'
searchString = 'some_string_in_url'
#Creating temp file to track checked lines in log file
if not os.path.exists(tmpFile):
with open(tmpFile,'w') as t:
t.write('0')
if not os.path.exists(logfile ):
    print('Log file {} does not exist'.format(logfile))
sys.exit(1)
#Regexp for searched string: 200 0 0 218 => time of execution (218 ms, last value)
regex1 = re.compile(r'\s([0-9]+)\s([0-9]+)\s([0-9]+)\s([0-9]+)\s')
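#Illustrative (not from the original script): an IIS W3C log line ends with
#fields like '... 200 0 0 218' (sc-status sc-substatus sc-win32-status
#time-taken), so group(4) below is the time-taken value in milliseconds.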
#Line counter
c = 0
#Event counters
j = 0
m = 0
#Time execution array
timeExcecute = []
#Reading line in temp file where to start counting events (point where parsing stopped in previous check)
with open(tmpFile,'r') as t:
intl1 = int(t.readline())
#Parsing log file
with open(logfile,'r') as logIIS:
for line in logIIS:
#If line count is bigger and equal than point of last check time of executions will be added to array
if c >= intl1 :
if searchString in line:
                r1 = regex1.search(str(line))
                if r1:
                    timeExcecute.append(int(r1.group(4)))
c += 1
#Longer execution time count
for k in timeExcecute:
if k >= 30000:
m += 1
#Shorter execution time count (in miliseconds)
for i in timeExcecute:
if i >= 5000:
j += 1
#Writing count of checked lines in log file, next check counting of critical events will start from this line
with open(tmpFile,'w') as t:
t.write(str(c))
#Nagios plugin related part
if m >= 5:
print('URL {} execution time (30000 ms) is CRITICAL'.format(searchString))
sys.exit(2)
if m >= 3 and m < 5:
print('URL {} execution time (30000 ms) is WARNING'.format(searchString))
sys.exit(1)
if j >= 30:
print('URL {} execution time (5000 ms) is CRITICAL'.format(searchString))
sys.exit(2)
if j >= 20 and j < 30:
print('URL {} execution time (5000 ms) is WARNING'.format(searchString))
sys.exit(1)
else:
print('URL {} execution time is OK'.format(searchString))
sys.exit(0)
| apache-2.0 | 2,022,859,079,128,887,600 | 23.921348 | 109 | 0.619853 | false | 3.177686 | false | false | false |
levinas/assembly | lib/assembly/plugins/swap.py | 3 | 1567 | import os
from plugins import BaseAssembler
from yapsy.IPlugin import IPlugin
class SwapAssembler(BaseAssembler, IPlugin):
def run(self, reads):
## Swap only supports a single FastA file.
# Convert Fastq files
fasta_files = []
for f in self.get_files(reads):
if f.endswith('.fq') or f.endswith('.fastq'): #Convert
in_fasta = f.rpartition('.')[0] + '.fasta'
self.arast_popen([self.fastq_to_fasta, '-i', f, '-o', in_fasta])
if not os.path.getsize(in_fasta):
                    raise Exception('Error converting FastQ to FastA')
fasta_files.append(in_fasta)
else:
fasta_files.append(f)
# Concatenate multiple files
if len(fasta_files) == 1:
reads_file = fasta_files[0]
else:
reads_file = os.path.join(self.outpath, 'reads.fa')
with open(reads_file, 'w') as outfile:
for fa in fasta_files:
with open(fa) as reads:
for line in reads:
outfile.write(line)
## Run assembly
self.arast_popen(['mpirun', '-n', self.process_threads_allowed,
self.executable, '-k', self.k,'-o',
self.outpath + 'swap', '-i', reads_file])
contig = os.path.join(self.outpath, 'swap', 'CEContig.fasta')
if os.path.exists(contig):
return [contig]
else:
return []
| mit | 5,089,824,990,903,519,000 | 37.219512 | 84 | 0.502872 | false | 3.997449 | false | false | false |
angryrancor/ezdmb | View/mainwindow_ui.py | 1 | 11562 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/Users/justinvieira/Documents/ezdmb/View/mainwindow.ui'
#
# Created by: PyQt5 UI code generator 5.9.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(1124, 1333)
MainWindow.setMinimumSize(QtCore.QSize(1124, 1333))
MainWindow.setIconSize(QtCore.QSize(18, 18))
MainWindow.setDocumentMode(False)
self.centralWidget = QtWidgets.QWidget(MainWindow)
self.centralWidget.setStyleSheet("")
self.centralWidget.setObjectName("centralWidget")
self.gridLayout_2 = QtWidgets.QGridLayout(self.centralWidget)
self.gridLayout_2.setContentsMargins(11, 11, 11, 11)
self.gridLayout_2.setSpacing(6)
self.gridLayout_2.setObjectName("gridLayout_2")
self.label = QtWidgets.QLabel(self.centralWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label.sizePolicy().hasHeightForWidth())
self.label.setSizePolicy(sizePolicy)
self.label.setMinimumSize(QtCore.QSize(1050, 73))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(32)
font.setBold(True)
font.setUnderline(False)
font.setWeight(75)
self.label.setFont(font)
self.label.setFrameShape(QtWidgets.QFrame.WinPanel)
self.label.setAlignment(QtCore.Qt.AlignCenter)
self.label.setWordWrap(False)
self.label.setObjectName("label")
self.gridLayout_2.addWidget(self.label, 0, 0, 1, 1)
self.groupBox_2 = QtWidgets.QGroupBox(self.centralWidget)
self.groupBox_2.setMinimumSize(QtCore.QSize(1050, 94))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
self.groupBox_2.setPalette(palette)
self.groupBox_2.setTitle("")
self.groupBox_2.setFlat(False)
self.groupBox_2.setObjectName("groupBox_2")
self.label_2 = QtWidgets.QLabel(self.groupBox_2)
self.label_2.setGeometry(QtCore.QRect(130, 20, 981, 101))
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_2.sizePolicy().hasHeightForWidth())
self.label_2.setSizePolicy(sizePolicy)
self.label_2.setMinimumSize(QtCore.QSize(981, 61))
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(24)
font.setBold(False)
font.setItalic(True)
font.setWeight(50)
self.label_2.setFont(font)
self.label_2.setAutoFillBackground(False)
self.label_2.setStyleSheet("QGroupBox {\n"
" border: none;\n"
"}")
self.label_2.setFrameShape(QtWidgets.QFrame.NoFrame)
self.label_2.setFrameShadow(QtWidgets.QFrame.Plain)
self.label_2.setScaledContents(False)
self.label_2.setObjectName("label_2")
self.gridLayout_2.addWidget(self.groupBox_2, 1, 0, 1, 1)
self.gridLayout = QtWidgets.QGridLayout()
self.gridLayout.setSizeConstraint(QtWidgets.QLayout.SetMinAndMaxSize)
self.gridLayout.setSpacing(6)
self.gridLayout.setObjectName("gridLayout")
self.current_menu = QtWidgets.QLabel(self.centralWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.current_menu.sizePolicy().hasHeightForWidth())
self.current_menu.setSizePolicy(sizePolicy)
self.current_menu.setMinimumSize(QtCore.QSize(960, 954))
self.current_menu.setFrameShape(QtWidgets.QFrame.NoFrame)
self.current_menu.setText("")
self.current_menu.setPixmap(QtGui.QPixmap("default.jpg"))
self.current_menu.setScaledContents(True)
self.current_menu.setAlignment(QtCore.Qt.AlignCenter)
self.current_menu.setObjectName("current_menu")
self.gridLayout.addWidget(self.current_menu, 0, 0, 1, 1)
self.gridLayout_2.addLayout(self.gridLayout, 2, 0, 1, 1)
self.groupBox = QtWidgets.QGroupBox(self.centralWidget)
self.groupBox.setMinimumSize(QtCore.QSize(1112, 93))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 85, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
self.groupBox.setPalette(palette)
self.groupBox.setTitle("")
self.groupBox.setObjectName("groupBox")
self.pushButton_2 = QtWidgets.QPushButton(self.groupBox)
self.pushButton_2.setGeometry(QtCore.QRect(90, 20, 951, 91))
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_2.sizePolicy().hasHeightForWidth())
self.pushButton_2.setSizePolicy(sizePolicy)
self.pushButton_2.setMinimumSize(QtCore.QSize(951, 91))
font = QtGui.QFont()
font.setFamily("Arial Black")
font.setPointSize(24)
font.setBold(False)
font.setWeight(50)
self.pushButton_2.setFont(font)
self.pushButton_2.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.pushButton_2.setObjectName("pushButton_2")
self.gridLayout_2.addWidget(self.groupBox, 3, 0, 1, 1)
MainWindow.setCentralWidget(self.centralWidget)
self.menuBar = QtWidgets.QMenuBar(MainWindow)
self.menuBar.setGeometry(QtCore.QRect(0, 0, 1124, 17))
self.menuBar.setObjectName("menuBar")
MainWindow.setMenuBar(self.menuBar)
self.statusBar = QtWidgets.QStatusBar(MainWindow)
self.statusBar.setObjectName("statusBar")
MainWindow.setStatusBar(self.statusBar)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "DMB Configuration"))
self.label.setText(_translate("MainWindow", "Digital Menu Board Config"))
self.label_2.setText(_translate("MainWindow", "Current Menu:"))
self.pushButton_2.setText(_translate("MainWindow", "Display Settings"))
| lgpl-3.0 | -1,747,725,080,580,646,400 | 52.527778 | 109 | 0.695641 | false | 3.645019 | false | false | false |
mensi/cydra | plugins/activedirectory/cydraplugins/activedirectory/__init__.py | 1 | 6880 | # -*- coding: utf-8 -*-
#
# Copyright 2012 Manuel Stocker <[email protected]>
#
# This file is part of Cydra.
#
# Cydra is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Cydra is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cydra. If not, see http://www.gnu.org/licenses
import os.path
import re
import warnings
import ldap
from ldap.ldapobject import ReconnectLDAPObject
from cydra.component import Component, implements
from cydra.permission import User, Group
from cydra.permission.interfaces import IUserTranslator, IUserAuthenticator
import logging
logger = logging.getLogger(__name__)
LDAP_ESCAPES = {
'*': '\\2A',
'(': '\\28',
')': '\\29',
'\\': '\\5C',
'\0': '\\00',
}
_ldap_escape_pat = re.compile('|'.join(re.escape(k) for k in LDAP_ESCAPES.keys()))
def ldap_escape(s):
return _ldap_escape_pat.sub(lambda x: LDAP_ESCAPES[x.group()], s)
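# Illustrative (not in the original): ldap_escape('a*(b)') returns the string
# a\2A\28b\29, preventing LDAP filter metacharacter injection in get_safe().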
def force_unicode(txt):
try:
return unicode(txt)
except UnicodeDecodeError:
pass
orig = txt
if type(txt) != str:
txt = str(txt)
for args in [('utf-8',), ('latin1',), ('ascii', 'replace')]:
try:
return txt.decode(*args)
except UnicodeDecodeError:
pass
raise ValueError("Unable to force %s object %r to unicode" % (type(orig).__name__, orig))
class LdapLookup(object):
connection = None
uri = None
user = None
password = None
user_searchbase = ''
group_searchbase = ''
user_searchfilter = {'objectClass': 'user'}
group_searchfilter = {'objectClass': 'group'}
def __init__(self, **kw):
for key, item in kw.items():
if hasattr(self, key) and not key.startswith('_'):
setattr(self, key, item)
def connect(self):
try:
self.connection = ReconnectLDAPObject(self.uri)
if self.user is not None:
self.connection.simple_bind_s(self.user, self.password)
except:
logger.exception("LDAP connection failed")
return False
return True
def get_safe(self, basedn, **kw):
return self.get(basedn, **dict([(ldap_escape(k), ldap_escape(v)) for k, v in kw.iteritems()]))
def get(self, basedn, **kw):
search = '(&%s)' % ''.join(['(%s=%s)' % item for item in kw.iteritems()])
result = self.connection.search_s(basedn, ldap.SCOPE_SUBTREE, search)
return result
def get_dn(self, dn):
res = self.connection.search_s(dn, ldap.SCOPE_BASE, '(objectClass=*)')
if len(res) == 0:
return None
else:
return res[0]
def get_users(self):
return self.get(self.user_searchbase, **self.user_searchfilter)
def get_user(self, username):
search = self.user_searchfilter.copy()
if '@' in username:
search['userPrincipalName'] = username
else:
search['sAMAccountName'] = username
res = self.get_safe(self.user_searchbase, **search)
if len(res) == 0:
return None
else:
return res[0]
def get_groups(self):
return self.get(self.group_searchbase, **self.group_searchfilter)
def get_group(self, groupname):
search = self.group_searchfilter.copy()
search['name'] = groupname
res = self.get_safe(self.group_searchbase, **search)
if len(res) == 0:
return None
else:
return res[0]
class ADUser(User):
valid_for_authentication = True
supports_check_password = True
supports_set_password = False
def __init__(self, adusers, userid, **kwargs):
super(ADUser, self).__init__(adusers.compmgr, userid, **kwargs)
self._adusers = adusers
def check_password(self, password):
return self._adusers.user_password(self, password)
class ADUsers(Component):
implements(IUserAuthenticator)
implements(IUserTranslator)
def __init__(self):
config = self.get_component_config()
self.ldap = LdapLookup(**config)
if not self.ldap.connect():
raise Exception('Connection failed')
def username_to_user(self, username):
user = self._ldap_to_user(self.ldap.get_user(username))
if user is None:
logger.error("Translation failed for: %s" % username)
return user
def userid_to_user(self, userid):
if userid is None or userid == '*':
warnings.warn("You should not call this directly. Use cydra.get_user()", DeprecationWarning, stacklevel=2)
return self.compmgr.get_user(userid='*')
user = self._ldap_to_user(self.ldap.get_user(userid))
if user is None:
logger.error("Translation failed for: %s" % userid)
# since the client was looking for a specific ID,
# we return a dummy user object with empty data
return User(self.compmgr, userid, full_name='N/A')
else:
return user
def _ldap_to_user(self, data):
if data is None:
return None
dn, userobj = data
if 'memberOf' in userobj:
groups = [self._ldap_to_group(self.ldap.get_dn(x)) for x in userobj['memberOf']]
else:
groups = []
return ADUser(self,
userobj['userPrincipalName'][0],
username=userobj['sAMAccountName'][0],
full_name=force_unicode(userobj['displayName'][0]), groups=groups)
def groupid_to_group(self, groupid):
group = self._ldap_to_group(self.ldap.get_group(groupid))
if group is None:
logger.error("Group lookup error for %s", groupid)
return group
def _ldap_to_group(self, data):
if data is None:
return None
dn, groupobj = data
return Group(self.compmgr,
groupobj['name'][0],
name=groupobj['name'][0])
def user_password(self, user, password):
if not user or not password:
return False
logger.debug("Trying to perform AD auth for %r" % user)
try:
conn = ldap.initialize(self.get_component_config()['uri'])
conn.simple_bind_s(user.id, password)
conn.unbind_s()
except ldap.INVALID_CREDENTIALS:
logger.exception("Authentication failed")
return False
logger.debug("AD auth complete")
return True
| gpl-3.0 | 854,990,746,114,101,500 | 29.043668 | 118 | 0.601744 | false | 3.854342 | false | false | false |
czcorpus/kontext | scripts/fix/fixsubc_0.9-0.12.py | 1 | 1464 | import sys
import os
sys.path.insert(0, os.path.realpath('%s/..' % os.path.dirname(__file__)))
import autoconf
import plugins
from plugins import lindat_db
plugins.install_plugin('db', lindat_db, autoconf.settings)
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description='Rename subcorpora directories')
parser.add_argument('--old-user_dict-key', dest='old_key', default='username', help='The old '
'key used to name directories, e.g. username in 0.9')
parser.add_argument('--new-user_dict-key', dest='new_key', default='id', help='The new key '
'used to name directories, e.g. id in 0.12')
args = parser.parse_args()
subcpath = autoconf.settings.get('corpora', 'users_subcpath')
redis_db = plugins.runtime.DB.instance
db = redis_db.get_instance('auth')
keys = list([key for key in list(db.keys()) if key != '__user_count'])
users = {db.hash_get(key, args.old_key): db.hash_get_all(key) for key in keys}
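    # Illustrative (not in the original): a subcorpus directory named 'alice'
    # (old key 'username') gets renamed to, e.g., '3' when that user's hash
    # stores id == 3 (new key 'id').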
for user_subc_dir in [f for f in os.listdir(subcpath) if os.path.isdir(os.path.join(subcpath, f))]:
key = user_subc_dir
if args.old_key == 'id':
key = int(user_subc_dir)
user = users[key]
new_val = user[args.new_key]
if args.new_key == 'id':
new_val = str(user[args.new_key])
os.rename(os.path.join(subcpath, user_subc_dir), os.path.join(subcpath, new_val))
| gpl-2.0 | -5,520,816,718,659,871,000 | 40.828571 | 103 | 0.623634 | false | 3.168831 | false | false | false |
c22n/ion-channel-ABC | docs/examples/human-atrial/experiments/ical_sun.py | 1 | 9835 | from ionchannelABC.experiment import Experiment
import data.ical.Sun1997.data_Sun1997 as data
from ionchannelABC.protocol import availability_linear, recovery
import numpy as np
import myokit
import warnings
from scipy.optimize import OptimizeWarning
import scipy.optimize as so
Q10_cond = 1.6 # [Li1997]
Q10_tau_act = 1.7 # [Li1997]
Q10_tau_inact = 1.3 # [Li1997]
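# Q10 scaling follows the standard form rate(T2) = rate(T1) * Q10**((T2-T1)/10);
# how ionchannelABC applies the Q10/Q10_factor arguments below is assumed from
# that convention (Q10_factor=-1 is consistent with scaling time constants,
# i.e. inverse rates).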
fit_threshold = 0.9
#
# Inactivation kinetics [Sun1997]
#
sun_inact_kin_desc = """
Inactivation kinetics measured using a bi-exponential function
cf Fig 4B [Sun1997]
"""
vsteps_tf, tf, sd_tf = data.inact_tauf_Sun()
variances_tf = [sd_**2 for sd_ in sd_tf]
sun_inact_kin_tf_dataset = np.array([vsteps_tf, tf, variances_tf])
vsteps_ts, ts, sd_ts = data.inact_taus_Sun()
variances_ts = [sd_**2 for sd_ in sd_ts]
sun_inact_kin_ts_dataset = np.array([vsteps_ts, ts, variances_ts])
vsteps_rel_inact, rel_inact, sd_rel_inact = data.rel_inact_Sun()
variances_rel_inact = [sd_**2 for sd_ in sd_rel_inact]
sun_inact_kin_rel_inact_dataset = np.array([vsteps_rel_inact, rel_inact, variances_rel_inact])
# all voltage protocols are preceded by 500ms prepulse to -40mV from HP -80mV
tpre = 10000 # ms
tstep = 1000
vlower = -10
dv = 10
vupper = 30+dv
sun_inact_kin_protocol = myokit.Protocol()
for v in vsteps_tf:
sun_inact_kin_protocol.add_step(-80, tpre-500)
sun_inact_kin_protocol.add_step(-40, 500)
sun_inact_kin_protocol.add_step(v, 1000)
sun_conditions = {'phys.T': 296.15, # K
'ca_conc.Ca_o': 1} # mM
def sun_inact_kin_sum_stats(data, fast=True, slow=True):
def double_exp(t, tauh, taus, Ah, As, A0):
return A0 + Ah*np.exp(-t/tauh) + As*np.exp(-t/taus)
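    # Fit model: I(t) = A0 + Ah*exp(-t/tauh) + As*exp(-t/taus); the smaller
    # fitted time constant is reported as the fast one (tauf) and the larger
    # as the slow one (taus) further below.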
output_tf = []
output_ts = []
for d in data.split_periodic(11000, adjust=True, closed_intervals=False):
d = d.trim_left(10000, adjust=True)
current = d['ical.i_CaL']
time = d['engine.time']
index = np.argmax(np.abs(current))
# Set time zero to peak current
current = current[index:]
time = time[index:]
t0 = time[0]
time = [t-t0 for t in time]
with warnings.catch_warnings():
warnings.simplefilter('error', OptimizeWarning)
warnings.simplefilter('error', RuntimeWarning)
try:
current = [c/current[0] for c in current]
if len(time)<=1 or len(current)<=1:
raise Exception('Failed simulation')
popt, _ = so.curve_fit(double_exp,
time,
current,
p0=[10,200,0.5,0.5,0],
bounds=(0.,
[np.inf, np.inf, 1.0, 1.0, 1.0]),
max_nfev=1000)
fit = [double_exp(t,popt[0],popt[1],popt[2],popt[3],popt[4]) for t in time]
# Calculate r2
ss_res = np.sum((np.array(current)-np.array(fit))**2)
ss_tot = np.sum((np.array(current)-np.mean(np.array(current)))**2)
r2 = 1 - (ss_res / ss_tot)
tauf = min(popt[0],popt[1])
taus = max(popt[0],popt[1])
if r2 > fit_threshold:
if fast:
output_tf = output_tf+[tauf]
if slow:
output_ts = output_ts+[taus]
else:
raise RuntimeWarning('scipy.optimize.curve_fit found a poor fit')
except:
if fast:
output_tf = output_tf+[float('inf')]
if slow:
output_ts = output_ts+[float('inf')]
output = output_tf+output_ts
return output
def sun_inact_kin_sum_stats_tf(data):
return sun_inact_kin_sum_stats(data, fast=True, slow=False)
def sun_inact_kin_sum_stats_ts(data):
return sun_inact_kin_sum_stats(data, fast=False, slow=True)
def sun_rel_inact_sum_stats(data):
output = []
for d in data.split_periodic(11000, adjust=True, closed_intervals=False):
d = d.trim_left(10000, adjust=True)
current = d['ical.i_CaL']
peak = max(current, key=abs)
ss = current[-1]
try:
output = output + [1-ss/peak]
except:
output = output + [float('inf')]
return output
sun_inact_kin = Experiment(
dataset=[sun_inact_kin_tf_dataset,
sun_inact_kin_ts_dataset],
protocol=sun_inact_kin_protocol,
conditions=sun_conditions,
sum_stats=sun_inact_kin_sum_stats,
description=sun_inact_kin_desc,
Q10=Q10_tau_inact,
Q10_factor=-1)
sun_inact_kin_fast = Experiment(
dataset=sun_inact_kin_tf_dataset,
protocol=sun_inact_kin_protocol,
conditions=sun_conditions,
sum_stats=sun_inact_kin_sum_stats_tf,
description=sun_inact_kin_desc,
Q10=Q10_tau_inact,
Q10_factor=-1)
sun_inact_kin_slow = Experiment(
dataset=sun_inact_kin_ts_dataset,
protocol=sun_inact_kin_protocol,
conditions=sun_conditions,
sum_stats=sun_inact_kin_sum_stats_ts,
description=sun_inact_kin_desc,
Q10=Q10_tau_inact,
Q10_factor=-1)
sun_rel_inact = Experiment(
dataset=sun_inact_kin_rel_inact_dataset,
protocol=sun_inact_kin_protocol,
conditions=sun_conditions,
sum_stats=sun_rel_inact_sum_stats,
description=sun_inact_kin_desc,
Q10=None,
Q10_factor=0)
#
# Inactivation kinetics using monovalent cation [Sun1997]
#
sun_v_inact_kin_desc = """
Voltage-dependent inactivation kinetics measured using a bi-exponential function
cf Fig 6B-C [Sun1997]
"""
vsteps_tf, tf, sd_tf = data.inact_tauf_Sun()
variances_tf = [sd_**2 for sd_ in sd_tf]
sun_inact_kin_tf_dataset = np.array([vsteps_tf, tf, variances_tf])
vsteps_ts, ts, sd_ts = data.inact_taus_Sun()
variances_ts = [sd_**2 for sd_ in sd_ts]
sun_inact_kin_ts_dataset = np.array([vsteps_ts, ts, variances_ts])
vsteps_rel_inact, rel_inact, sd_rel_inact = data.rel_inact_Sun()
variances_rel_inact = [sd_**2 for sd_ in sd_rel_inact]
sun_inact_kin_rel_inact_dataset = np.array([vsteps_rel_inact, rel_inact, variances_rel_inact])
# all voltage protocols are preceded by 500ms prepulse to -40mV from HP -80mV
tpre = 10000 # ms
tstep = 1000
vlower = -10
dv = 10
vupper = 30+dv
sun_inact_kin_protocol = myokit.Protocol()
for v in vsteps_tf:
sun_inact_kin_protocol.add_step(-80, tpre-500)
sun_inact_kin_protocol.add_step(-40, 500)
sun_inact_kin_protocol.add_step(v, 1000)
sun_conditions = {'phys.T': 296.15, # K
'ca_conc.Ca_o': 1} # mM
def sun_inact_kin_sum_stats(data, fast=True, slow=True):
def double_exp(t, tauh, taus, Ah, As, A0):
return A0 + Ah*np.exp(-t/tauh) + As*np.exp(-t/taus)
output_tf = []
output_ts = []
for d in data.split_periodic(11000, adjust=True, closed_intervals=False):
d = d.trim_left(10000, adjust=True)
current = d['ical.i_CaL']
time = d['engine.time']
index = np.argmax(np.abs(current))
# Set time zero to peak current
current = current[index:]
time = time[index:]
t0 = time[0]
time = [t-t0 for t in time]
with warnings.catch_warnings():
warnings.simplefilter('error', OptimizeWarning)
warnings.simplefilter('error', RuntimeWarning)
try:
current = [c/current[0] for c in current]
if len(time)<=1 or len(current)<=1:
raise Exception('Failed simulation')
popt, _ = so.curve_fit(double_exp,
time,
current,
p0=[10,200,0.5,0.5,0],
bounds=(0.,
[np.inf, np.inf, 1.0, 1.0, 1.0]),
max_nfev=1000)
fit = [double_exp(t,popt[0],popt[1],popt[2],popt[3],popt[4]) for t in time]
# Calculate r2
ss_res = np.sum((np.array(current)-np.array(fit))**2)
ss_tot = np.sum((np.array(current)-np.mean(np.array(current)))**2)
r2 = 1 - (ss_res / ss_tot)
tauf = min(popt[0],popt[1])
taus = max(popt[0],popt[1])
if r2 > fit_threshold:
if fast:
output_tf = output_tf+[tauf]
if slow:
output_ts = output_ts+[taus]
else:
raise RuntimeWarning('scipy.optimize.curve_fit found a poor fit')
except:
if fast:
output_tf = output_tf+[float('inf')]
if slow:
output_ts = output_ts+[float('inf')]
output = output_tf+output_ts
return output
def sun_inact_kin_sum_stats_tf(data):
return sun_inact_kin_sum_stats(data, fast=True, slow=False)
def sun_inact_kin_sum_stats_ts(data):
return sun_inact_kin_sum_stats(data, fast=False, slow=True)
def sun_rel_inact_sum_stats(data):
output = []
for d in data.split_periodic(11000, adjust=True, closed_intervals=False):
d = d.trim_left(10000, adjust=True)
current = d['ical.i_CaL']
peak = max(current, key=abs)
ss = current[-1]
try:
output = output + [1-ss/peak]
except:
output = output + [float('inf')]
return output
sun_inact_kin = Experiment(
dataset=[sun_inact_kin_tf_dataset,
sun_inact_kin_ts_dataset],
protocol=sun_inact_kin_protocol,
conditions=sun_conditions,
sum_stats=sun_inact_kin_sum_stats,
description=sun_inact_kin_desc,
Q10=Q10_tau_inact,
Q10_factor=-1)
| gpl-3.0 | 1,906,253,293,606,763,300 | 32.797251 | 94 | 0.564108 | false | 3.203583 | false | false | false |
geodynamics/snac | Snac/snac2dx/snac_combine.py | 5 | 12995 | #!/usr/bin/env python
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# <LicenseText>
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
'''
Combine the converted Snac Data
usage: snac_combine.py modelname timestep nodex nodey nodez nprocx nprocy nprocz
'''
class Combine(object):
def __init__(self, grid):
# data storage
self.saved = {}
self.saved["positions"] = [0.0]*(grid['nox'] * grid['noy'] * grid['noz'])
self.saved["velocities"] = [0.0]*(grid['nox'] * grid['noy'] * grid['noz'])
self.saved["force"] = [0.0]*(grid['nox'] * grid['noy'] * grid['noz'])
self.saved["temperature"] = [0.0]*(grid['nox'] * grid['noy'] * grid['noz'])
self.saved["plstrain"] = [0.0]*((grid['nox']-1) * (grid['noy']-1) * (grid['noz']-1))
self.saved["phaseIndex"] = [0]*((grid['nox']-1) * (grid['noy']-1) * (grid['noz']-1))
self.saved["irheology"] = [0]*((grid['nox']-1) * (grid['noy']-1) * (grid['noz']-1))
self.saved["viscosity"] = [0.0]*((grid['nox']-1) * (grid['noy']-1) * (grid['noz']-1))
self.saved["stress"] = [0.0]*((grid['nox']-1) * (grid['noy']-1) * (grid['noz']-1))
self.saved["pressure"] = [0.0]*((grid['nox']-1) * (grid['noy']-1) * (grid['noz']-1))
self.saved["strain_rate"] = [0.0]*((grid['nox']-1) * (grid['noy']-1) * (grid['noz']-1))
        self.struct = {"velocities":"","force":"","strain_rate":"","stress":"","pressure":"","temperature":"","plstrain":"","viscosity":"","irheology":"","phaseIndex":""}
self.data = {"positions":[],"velocities":[],"strain_rate":[],"stress":[],"pressure":[],"temperature":[],"plstrain":[],"viscosity":[],"irheology":[],"phaseIndex":[]}
self.tempExist = False
self.apsExist = False
self.viscExist = False
self.irhExist = False
return
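    # readData() parses one per-processor OpenDX file. Each data section is
    # introduced by a header such as
    #   object 3 class array type float rank 1 shape 3 items 8000 data follows
    # preceded by a comment line like '# the positions array', which is why
    # the field name is read from lines[m-1].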
def readData(self, filename):
fp = file(filename, 'r')
lines = fp.readlines()
m=0
while 1:
if lines[m].startswith("object"):
ids = lines[m-1].split()
keywords = lines[m].split()
for i in range(len(keywords)):
if keywords[i] == "items":
items = int(keywords[i+1])
if ids[2] == "positions":
self.data["positions"] = lines[m+1:m+items+1]
m = m+items+1
elif ids[2] == "velocity":
self.data["velocities"] = lines[m+1:m+items+1]
m = m+items+1
elif ids[2] == "strain":
self.data["strain_rate"] = lines[m+1:m+items+1]
m = m+items+1
elif ids[2] == "stress":
self.data["stress"] = lines[m+1:m+items+1]
m = m+items+1
elif ids[2] == "pressure":
self.data["pressure"] = lines[m+1:m+items+1]
m = m+items+1
elif ids[2] == "force":
self.data["force"] = lines[m+1:m+items+1]
m = m+items+1
elif ids[2] == "phaseIndex":
self.data["phaseIndex"] = lines[m+1:m+items+1]
m = m+items+1
elif ids[2] == "temperature":
self.data["temperature"] = lines[m+1:m+items+1]
self.tempExist = True
m = m+items+1
elif ids[2] == "accumulated":
self.data["plstrain"] = lines[m+1:m+items+1]
self.apsExist = True
m = m+items+1
elif ids[2] == "viscosity":
self.data["viscosity"] = lines[m+1:m+items+1]
self.viscExist = True
m = m+items+1
elif ids[2] == "rheology":
self.data["irheology"] = lines[m+1:m+items+1]
self.irhExist = True
m = m+items+1
elif ids[1] == "construct":
break
else:
m = m + 1
if m >= len(lines):
break
else:
if m >= len(lines):
break
else:
m = m + 1
return self.data
def join(self, data, me, grid, cap):
# processor geometry
nprocx = int(cap['nprocx'])
nprocy = int(cap['nprocy'])
nprocz = int(cap['nprocz'])
mylocx = me % nprocx
mylocy = ((me - mylocx) / nprocx) % nprocy
mylocz = (((me - mylocx) / nprocx - mylocy) / nprocy) % nprocz
print me, nprocx,nprocy,nprocz, mylocx, mylocy, mylocz
# mesh geometry
nox = int(grid['nox'])
noy = int(grid['noy'])
noz = int(grid['noz'])
nex = nox - 1
ney = noy - 1
nez = noz - 1
mynox = 1 + (nox-1)/nprocx
mynoy = 1 + (noy-1)/nprocy
mynoz = 1 + (noz-1)/nprocz
mynex = mynox - 1
myney = mynoy - 1
mynez = mynoz - 1
if not len(data["positions"]) == mynox * mynoy * mynoz:
print mynox, mynoy, mynoz, mynox * mynoy * mynoz, len(data["positions"])
raise ValueError, "data size"
if not len(data["stress"]) == (mynox-1) * (mynoy-1) * (mynoz-1):
print (mynox-1),(mynoy-1),(mynoz-1), len(data["stress"])
raise ValueError, "data size"
mynxs = (mynox - 1) * mylocx
mynys = (mynoy - 1) * mylocy
mynzs = (mynoz - 1) * mylocz
myexs = mynex * mylocx
myeys = myney * mylocy
myezs = mynez * mylocz
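        # Map each local node (k, j, i) of this processor's block onto the
        # global row-major index m = k + j*nox + i*nox*noy, offset by the
        # block's global corner (mynxs, mynys, mynzs); element data below uses
        # the analogous (nex, ney) strides.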
n = 0
for i in range(mynzs, mynzs+mynoz):
for j in range(mynys, mynys + mynoy):
for k in range(mynxs, mynxs + mynox):
m = k + j * nox + i * nox * noy
self.saved["positions"][m] = data["positions"][n]
self.saved["velocities"][m] = data["velocities"][n]
self.saved["force"][m] = data["force"][n]
if self.tempExist:
self.saved["temperature"][m] = data["temperature"][n]
n += 1
n = 0
for i in range(myezs, myezs+mynez):
for j in range(myeys, myeys + myney):
for k in range(myexs, myexs + mynex):
m = k + j * nex + i * nex * ney
self.saved["strain_rate"][m] = data["strain_rate"][n]
self.saved["stress"][m] = data["stress"][n]
self.saved["pressure"][m] = data["pressure"][n]
self.saved["phaseIndex"][m] = data["phaseIndex"][n]
if self.apsExist:
self.saved["plstrain"][m] = data["plstrain"][n]
if self.viscExist:
self.saved["viscosity"][m] = data["viscosity"][n]
if self.irhExist:
self.saved["irheology"][m] = data["irheology"][n]
n += 1
return
def write(self, filename, grid, data, type, fp, count):
if type == "positions":
print >> fp, "\n# the positions array"
print >> fp, "object %d class array type float rank 1 shape 3 items %d data follows" % (count, grid['nox']*grid['noy']*grid['noz'])
fp.writelines(data[type])
return count + 1
elif type == "connections":
print >> fp, "\n# the regular connections"
print >> fp, "object %d class gridconnections counts %d %d %d" % (count, grid['noz'],grid['noy'],grid['nox'])
return count + 1
elif type == "velocities":
print >> fp, "\n# the velocities array"
print >> fp, "object %d class array type float rank 1 shape 3 items %d data follows" % (count, grid['nox']*grid['noy']*grid['noz'])
fp.writelines(data["velocities"])
self.struct[type] = '''object "velocities" class field
component "positions" value 1
component "connections" value 2
component "data" value %d
''' % (count)
return count + 1
elif type == "strain_rate":
print >> fp, "\n# the strain rate array"
print >> fp, "object %d class array type float rank 0 items %d data follows" % (count, (grid['nox']-1)*(grid['noy']-1)*(grid['noz']-1))
fp.writelines(data["strain_rate"])
print >> fp, 'attribute "dep" string "connections"'
self.struct[type] = '''object "strain_rate" class field
component "positions" value 1
component "connections" value 2
component "data" value %d
''' % (count)
return count + 1
elif type == "stress":
print >> fp, "\n# the stress array"
print >> fp, "object %d class array type float rank 0 items %d data follows" % (count, (grid['nox']-1)*(grid['noy']-1)*(grid['noz']-1))
fp.writelines(data["stress"])
print >> fp, 'attribute "dep" string "connections"'
self.struct[type] = '''object "stress" class field
component "positions" value 1
component "connections" value 2
component "data" value %d
''' % (count)
return count + 1
elif type == "pressure":
print >> fp, "\n# the pressure array"
print >> fp, "object %d class array type float rank 0 items %d data follows" % (count, (grid['nox']-1)*(grid['noy']-1)*(grid['noz']-1))
fp.writelines(data["pressure"])
print >> fp, 'attribute "dep" string "connections"'
self.struct[type] = '''object "pressure" class field
component "positions" value 1
component "connections" value 2
component "data" value %d
''' % (count)
return count + 1
elif type == "force":
print >> fp, "\n# the force array"
print >> fp, "object %d class array type float rank 1 shape 3 items %d data follows" % (count, grid['nox']*grid['noy']*grid['noz'])
fp.writelines(data["force"])
self.struct[type] = '''object "force" class field
component "positions" value 1
component "connections" value 2
component "data" value %d
''' % (count)
return count + 1
elif type == "phaseIndex":
print >> fp, "\n# the phaseIndex array"
print >> fp, "object %d class array type int rank 0 items %d data follows" % (count, (grid['nox']-1)*(grid['noy']-1)*(grid['noz']-1))
fp.writelines(data["phaseIndex"])
print >> fp, 'attribute "dep" string "connections"'
self.struct[type] = '''object "phaseIndex" class field
component "positions" value 1
component "connections" value 2
component "data" value %d
''' % (count)
return count + 1
elif type == "temperature":
print >> fp, "\n# the temperature array"
print >> fp, "object %d class array type float rank 0 items %d data follows" % (count, grid['nox']*grid['noy']*grid['noz'])
fp.writelines(data["temperature"])
self.struct[type] = '''object "temperature" class field
component "positions" value 1
component "connections" value 2
component "data" value %d
''' % (count)
return count + 1
elif type == "plstrain":
print >> fp, "\n# the accumulated plastic strain array"
print >> fp, "object %d class array type float rank 0 items %d data follows" % (count, (grid['nox']-1)*(grid['noy']-1)*(grid['noz']-1))
fp.writelines(data["plstrain"])
print >> fp, 'attribute "dep" string "connections"'
self.struct[type] = '''object "plstrain" class field
component "positions" value 1
component "connections" value 2
component "data" value %d
''' % (count)
return count + 1
elif type == "viscosity":
print >> fp, "\n# the viscosity array"
print >> fp, "object %d class array type float rank 0 items %d data follows" % (count, (grid['nox']-1)*(grid['noy']-1)*(grid['noz']-1))
fp.writelines(data["viscosity"])
print >> fp, 'attribute "dep" string "connections"'
self.struct[type] = '''object "viscosity" class field
component "positions" value 1
component "connections" value 2
component "data" value %d
''' % (count)
return count + 1
elif type == "irheology":
print >> fp, "\n# the irheology array"
print >> fp, "object %d class array type float rank 0 items %d data follows" % (count, (grid['nox']-1)*(grid['noy']-1)*(grid['noz']-1))
fp.writelines(data["irheology"])
print >> fp, 'attribute "dep" string "connections"'
self.struct[type] = '''object "irheology" class field
component "positions" value 1
component "connections" value 2
component "data" value %d
''' % (count)
return count + 1
elif type == "data_structure":
print >> fp, "\n# construct data structure"
for member in self.struct:
if self.struct[member] != "":
print >> fp, "%s" % (self.struct[member])
print >> fp, "object \"default\" class group"
for member in self.struct:
if self.struct[member] != "":
print >> fp, "member \"%s\" value \"%s\"" % (member,member)
print >> fp, "End"
return
if __name__ == '__main__':
import sys
if not len(sys.argv) == 9:
print __doc__
sys.exit(1)
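    # Example invocation (illustrative values; the script name is assumed and
    # the argument order follows the sys.argv parsing below):
    #   python combine.py out 1000 65 65 33 2 2 1
    # combines snac.<rank>.001000.dx from 2*2*1 processors into out.001000.dx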
prefix = sys.argv[1]
step = int(sys.argv[2])
grid = {}
grid['nox'] = int(sys.argv[3])
grid['noy'] = int(sys.argv[4])
grid['noz'] = int(sys.argv[5])
cap = {}
cap['nprocx'] = int(sys.argv[6])
cap['nprocy'] = int(sys.argv[7])
cap['nprocz'] = int(sys.argv[8])
nproc = cap['nprocx'] * cap['nprocy'] * cap['nprocz']
cb = Combine(grid)
for n in range(0, nproc):
filename = 'snac.%d.%06d.dx' % (n, step)
print 'reading', filename
data = cb.readData(filename)
cb.join(data, n, grid, cap)
filename = '%s.%06d.dx' % (prefix,step)
print 'writing', filename
fp = open(filename, 'w')
print >> fp, "# OpenDX DataFile Snac simulation output ASCII"
count = 1
count = cb.write(filename, grid, cb.saved, "positions", fp, count)
count = cb.write(filename, grid, cb.saved, "connections", fp, count)
count = cb.write(filename, grid, cb.saved, "velocities", fp, count)
count = cb.write(filename, grid, cb.saved, "strain_rate", fp, count)
count = cb.write(filename, grid, cb.saved, "stress", fp, count)
count = cb.write(filename, grid, cb.saved, "pressure", fp, count)
count = cb.write(filename, grid, cb.saved, "force", fp, count)
count = cb.write(filename, grid, cb.saved, "phaseIndex", fp, count)
if cb.tempExist:
count = cb.write(filename, grid, cb.saved, "temperature", fp, count)
if cb.apsExist:
count = cb.write(filename, grid, cb.saved, "plstrain", fp, count)
if cb.viscExist:
count = cb.write(filename, grid, cb.saved, "viscosity", fp, count)
if cb.irhExist:
count = cb.write(filename, grid, cb.saved, "irheology", fp, count)
cb.write(filename, grid, cb.saved, "data_structure", fp, count)
fp.close()
# End of file
| gpl-2.0 | -100,081,764,741,577,950 | 34.89779 | 166 | 0.602001 | false | 2.746777 | false | false | false |
t-cas/JumpSSH | jumpssh/exception.py | 2 | 1793 |
class SSHException(Exception):
"""Generic exception for jumpssh
Allow to chain exceptions keeping track of origin exception
"""
def __init__(self, msg, original_exception=None):
message = msg
if original_exception:
message += ": %s" % original_exception
super(SSHException, self).__init__(message)
self.__cause__ = original_exception
self.__suppress_context__ = True
class ConnectionError(SSHException):
"""Exception raised when unable to establish SSHSession with remote host"""
pass
class TimeoutError(SSHException):
"""Exception raised when remote command execution reached specified timeout"""
pass
class RestClientError(SSHException):
"""Exception raised when error occurs during rest ssh calls"""
pass
class RunCmdError(SSHException):
"""Exception raised when remote command return a non success exit code
:ivar int exit_code: The exit code from the run command.
    :ivar list(int) success_exit_code: List of expected success exit codes for the run command.
:ivar str command: The command that is generating this exception.
:ivar str error: The error captured from the command output.
"""
def __init__(self, exit_code, success_exit_code, command, error, runs_nb=1):
message = 'Command (%s) returned exit status (%s), expected [%s]' \
% (command, exit_code, ','.join(map(str, success_exit_code)))
if runs_nb > 1:
message += " after %s runs" % runs_nb
if error:
message += ": %s" % error
super(RunCmdError, self).__init__(message)
self.exit_code = exit_code
self.success_exit_code = success_exit_code
self.command = command
self.error = error
self.runs_nb = runs_nb
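# Usage sketch (hypothetical values, not part of the library itself):
#   raise RunCmdError(127, [0], 'ls /nope', 'No such file or directory')
# yields the message:
#   "Command (ls /nope) returned exit status (127), expected [0]: No such file or directory"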
| mit | 5,257,936,135,727,758,000 | 32.203704 | 82 | 0.643614 | false | 4.383863 | false | false | false |
ilastikdev/ilastik | ilastik/applets/splitBodyCarving/splitBodyCarvingSerializer.py | 4 | 3655 | ###############################################################################
# ilastik: interactive learning and segmentation toolkit
#
# Copyright (C) 2011-2014, the ilastik developers
# <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# In addition, as a special exception, the copyright holders of
# ilastik give you permission to combine ilastik with applets,
# workflows and plugins which are not covered under the GNU
# General Public License.
#
# See the LICENSE file for details. License information is also available
# on the ilastik web site at:
# http://ilastik.org/license.html
###############################################################################
from ilastik.applets.base.appletSerializer import getOrCreateGroup, deleteIfPresent
from ilastik.workflows.carving.carvingSerializer import CarvingSerializer
from opSplitBodyCarving import OpSplitBodyCarving
class SplitBodyCarvingSerializer(CarvingSerializer):
def __init__(self, topLevelOperator, *args, **kwargs):
super( SplitBodyCarvingSerializer, self ).__init__(topLevelOperator, *args, **kwargs)
self._topLevelOperator = topLevelOperator
# Set up dirty tracking...
def setDirty(*args):
self.__dirty = True
def doMulti(slot, index, size):
slot[index].notifyDirty(setDirty)
slot[index].notifyValueChanged(setDirty)
topLevelOperator.AnnotationFilepath.notifyInserted(doMulti)
topLevelOperator.AnnotationFilepath.notifyRemoved(setDirty)
def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
split_settings_grp = getOrCreateGroup(topGroup, "split_settings")
for laneIndex in range(len( self._topLevelOperator )):
lane_grp = getOrCreateGroup(split_settings_grp, "{}".format( laneIndex ))
opLaneView = self._topLevelOperator.getLane(laneIndex)
if opLaneView.AnnotationFilepath.ready():
annotation_filepath = opLaneView.AnnotationFilepath.value
deleteIfPresent( lane_grp, "annotation_filepath" )
lane_grp.create_dataset("annotation_filepath", data=annotation_filepath)
# Now save the regular the carving data.
super( SplitBodyCarvingSerializer, self )._serializeToHdf5( topGroup, hdf5File, projectFilePath )
self.__dirty = False
def _deserializeFromHdf5(self, topGroup, groupVersion, hdf5File, projectFilePath):
try:
split_settings_grp = topGroup["split_settings"]
except KeyError:
pass
else:
for laneIndex, grp_name in enumerate( sorted(split_settings_grp.keys()) ):
opLaneView = self._topLevelOperator.getLane(laneIndex)
lane_grp = split_settings_grp[grp_name]
try:
annotation_filepath = lane_grp["annotation_filepath"].value
except KeyError:
pass
else:
opLaneView.AnnotationFilepath.setValue( annotation_filepath )
# Now load the regular carving data.
super( SplitBodyCarvingSerializer, self )._deserializeFromHdf5( topGroup, groupVersion, hdf5File, projectFilePath )
self.__dirty = False
def isDirty(self):
return self.__dirty or super( SplitBodyCarvingSerializer, self ).isDirty()
| gpl-3.0 | 486,842,752,296,672,640 | 44.123457 | 123 | 0.641587 | false | 4.330569 | false | false | false |
hyphenliu/cnminlangwebcollect | src/crawler/siteInfoMain.py | 1 | 3436 | # -*- coding:utf-8 -*-
'''
Created on Feb 2, 2014
@author: Hyphen.Liu
'''
import Queue
import os
import globalvar.guiGlobalVar as ggv
import globalvar.crawlerGlobalVar as cgv
from crawler.siteInfoThread import SiteInfoThread
class SiteInfoMain():
'''
    :identify the page language for every collected hyperlink
    :for sites already identified as a target language, collect detailed site-construction information
'''
def __init__(self,flag,infile,outfile=None):
'''
        :initialize parameters
        :param flag: distinguishes site-info collection for search-engine results from breadth-first crawling. Values: engine, wide
        :param infile: text file holding the hyperlinks of the pages awaiting identification
        :param outfile: file the results are written to
'''
self.flag = flag
self.infile = infile
self.outfile = outfile
self.window = ggv.window
self.threadnum = ggv.systemSetList[1]
self.inqueue = Queue.Queue()
        self.outqueue = Queue.Queue()  # stores collected site results for the breadth-first scan; acts only as a counter when collecting sites found by that scan
        if self.flag == 'engine': m = '2/4'  # step 2
        else: m = '4/4'  # step 4
        self.window.SetStatusText(u'%s网页语言识别中..0.0%%' % m, 1)  # update the status bar text
        self.window.SetStatusText(u'收集网站数:0', 2)
        self.window.gauge.Show()  # make the progress gauge visible
self.siteinfos()
        if self.outfile: self.saveresults()  # stage two of the program needs to save its results
def siteinfos(self):
'''
        :detect each page's language and fetch detailed site info for the sites whose pages are recognized as the target language
        :param infile: preprocessed url document containing many domain urls together with their progressively stripped url segments
        '''
        if not os.path.isfile(self.infile): return None  # a force-terminated search-engine stage never creates infile; detect that case here
        ggv.scanindex = 0  # reset the counter
        lines = open(self.infile, 'r').readlines()  # read the contents of the saved txt file
for line in lines:
if line:
                self.inqueue.put(line)  # build the queue of urls awaiting detection
        ggv.gaugesize = self.inqueue.qsize()  # denominator for the progress gauge
for i in range(self.threadnum):
            if ggv.pterminate: break  # the program was force-terminated
            gsit = SiteInfoThread(self.inqueue, self.outqueue, self.flag)  # main worker for language detection and site-info collection
cgv.threadlist.append(gsit)
# gsit.setDaemon(True)
            gsit.start()  # start the thread
        self.inqueue.join()  # wait for the input queue to drain before doing anything else
if os.path.isfile(self.infile):os.remove(self.infile)
self.window.gauge.Hide()
def saveresults(self):
'''
        :save the results to a file
'''
if ggv.pterminate:return None
ofile = open(self.outfile,'w+')
while True:
if self.outqueue.empty():break
lk = self.outqueue.get()
if lk:
ofile.write(lk+'\r\n')
ofile.close() | gpl-3.0 | 3,439,359,229,629,306,400 | 35.173333 | 102 | 0.534661 | false | 2.304163 | false | false | false |
patwat/python-unitex | unitex/utils/fsa.py | 1 | 15087 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from io import open
from unitex import *
_LOGGER = logging.getLogger(__name__)
class FSAConstants:
EPSILON = "<E>"
DEPTH_FIRST_SEARCH = "dfs"
BREADTH_FIRST_SEARCH = "bfs"
class Edge(object):
def __init__(self, label, targets=None, source=None):
self.__label = label
self.__source = source
self.__targets = targets
if self.__targets is not None:
self.__tids = set([target.get_id() for target in targets])
def __len__(self):
return len(self.__targets)
def __str__(self):
label = self.get_label()
label = label.encode(UnitexConstants.DEFAULT_ENCODING)
return label
def __unicode__(self):
return u"%s" % self.get_label()
def __hash__(self):
return hash(self.get_label())
def __cmp__(self, e):
        return cmp(self.get_label(), e.get_label())
def __iter__(self):
for target in self.__targets:
yield target
def __contains__(self, target):
return True if target.get_id() in self.__tids else False
def __getitem__(self, i):
return self.__targets[i]
def get_label(self):
return self.__label
def get_source(self):
return self.__source
def set_source(self, source):
self.__source = source
def get_targets(self):
return self.__targets
def set_targets(self, targets):
self.__targets = targets
self.__tids = set([target.get_id() for target in targets])
def add_target(self, target):
if target.get_id() in self.__tids:
return
self.__targets.append(target)
def del_target(self, target):
if target.get_id() not in self.__tids:
return
self.__tids.remove(target.get_id())
for i in range(len(self.__targets)):
_target = self.__targets[i]
if _target.get_id() == target.get_id():
del self.__targets[i]
break
class Node(object):
def __init__(self, _id, final=False):
self.__id = _id
self.__final = final
self.__edges = {}
self.__depth = 0
self.__visited = False
def __len__(self):
return len(self.__edges)
def __contains__(self, label):
return label in self.__edges
def __getitem__(self, label):
return self.__edges.get(label, None)
def __iter__(self):
for label in self.__edges:
yield label
def __str__(self):
node = self.__unicode__()
node = node.encode(UnitexConstants.DEFAULT_ENCODING)
return node
def __unicode__(self):
s = u"NODE[%s]" % str(self.get_id())
if self.is_final():
s += u" -- FINAL"
for label in self:
targets = u" | ".join([str(target.get_id()) for target in self[label]])
s += u"\n\t%s -> (%s)" % (label, targets)
return s
def get_id(self):
return self.__id
def set_id(self, i):
self.__id = i
def is_deterministic(self):
if FSAConstants.EPSILON in self.__edges:
return False
for label in self.__edges:
if len(self[label]) > 1:
return False
return True
def exists(self, label, node=None):
if not label in self:
return False
if node is not None and node not in self[label]:
return False
return True
def add(self, label, target):
if self.exists(label, target) is True:
return
if self.exists(label) is False:
edge = Edge(label, [target], self)
self.__edges[label] = edge
else:
self[label].add_target(target)
def delete(self, label, node=None):
if not self.exists(label, node):
raise UnitexException("Edge not found: %s" % label)
if node is None:
del self.__edges[label]
else:
self[label].del_target(node)
def set_depth(self, depth):
self.__depth = depth
def get_depth(self):
return self.__depth
def is_visited(self):
return self.__visited
def set_visited(self, visited=True):
self.__visited = visited
def is_final(self):
return self.__final
def set_final(self, final=True):
self.__final = final
class NodeSets(object):
def __init__ (self):
self.__sets = {}
def __getitem__(self, _id):
return self.__sets[_id]
def __contains__(self, s):
return s in self.all()
def __iter__ (self):
return iter(self.all())
def all(self):
return set([tuple(l) for l in self.__sets.values()])
def add(self, s):
_set = tuple(sorted(set(s)))
for _id in s:
self.__sets[_id] = _set
class Automaton(object):
def __init__(self, name="Automaton"):
self.__name = name
self.__nodes = []
self.__initial = 0
self.__finals = []
self.__nodes.append(Node(self.__initial, False))
def __len__(self):
return len(self.__nodes)
def __getitem__(self, _id):
try:
return self.__nodes[_id]
except IndexError:
return None
def __iter__(self):
for node in self.__nodes:
yield node
def __str__(self):
automaton = self.__unicode__()
automaton = automaton.encode(UnitexConstants.DEFAULT_ENCODING)
return automaton
def __unicode__(self):
title = u"# FSA -- %s #" % self.get_name()
s = u"%s\n%s\n%s\n\n" % ("#" * len(title), title, "#" * len(title))
for node in self:
s += u"%s\n\n" % node
return s
def get_name(self):
return self.__name
def set_name(self, name):
self.__name = name
def get_depth(self):
depth = 0
for nid in self.__finals:
final = self.__nodes[nid]
if final.get_depth() > depth:
depth = final.get_depth()
return depth
def get_initial(self):
return self.__initial
def set_initial(self, initial):
self.__initial = initial
def get_finals(self):
return self.__finals
def set_finals(self, finals):
self.__finals = finals
def get_nodes(self):
return self.__nodes
def set_nodes(self, nodes):
self.__nodes = nodes
def add_edge(self, label, sid, tid):
source = self[sid]
target = self[tid]
target.set_depth(source.get_depth() + 1)
source.add(label, target)
def add_node(self, initial=False, final=False):
if initial is True:
return self.__initial
elif final is True:
self.__finals.append(len(self.__nodes))
self.__nodes.append(Node(self.__finals[-1], True))
return self.__finals[-1]
nid = len(self.__nodes)
self.__nodes.append(Node(nid, final))
return nid
def add_path(self, path):
if len(path) == 0:
raise UnitexException("Empty path!")
sid = self.add_node(initial=True, final=False)
for label in path[:-1]:
tid = self.add_node(initial=False, final=False)
self.add_edge(label, sid, tid)
sid = tid
else:
self.add_edge(path[-1], sid, self.add_node(initial=False, final=True))
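    # Sketch (illustrative labels): building a small automaton from label
    # paths and normalising it:
    #   fsa = Automaton()
    #   fsa.add_path(["a", "b"])
    #   fsa.add_path(["a", "c"])
    #   fsa.determinize()
    #   fsa.minimize()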
def get_alphabet(self):
alphabet = set()
for node in self:
for label in node:
alphabet.add(label)
return tuple(alphabet)
def is_deterministic(self):
for node in self:
if not node.is_deterministic():
return False
return True
def __closure(self, nid):
stack = [nid]
result = set(stack)
while len(stack) > 0:
current = stack.pop()
if FSAConstants.EPSILON in self[current]:
edge = self[current][FSAConstants.EPSILON]
if edge not in result:
stack.append(edge)
result.add(edge)
return tuple(result)
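    # __closure() computes the epsilon-closure of a node: every state
    # reachable through <E> (epsilon) edges alone. determinize() below uses it
    # as the seed set of the classic subset construction.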
def determinize(self):
dfa = Automaton("DETERMINIZED(%s)" % self.get_name())
alphabet = self.get_alphabet()
initials = self.__closure(self.get_initial())
hid = dfa.add_node(initial=True, final=False)
visited = {}
visited[initials] = hid
stack = [initials]
while len(stack) > 0:
current = stack.pop()
for label in alphabet:
new = set()
for node in current:
if not label in self[node]:
continue
for next in self[node][label]:
new.update(self.__closure(next.get_id()))
new = tuple(new)
if len(new) == 0:
continue
if new not in visited:
stack.append(new)
final = True in [self[_id].is_final() for _id in new]
nid = dfa.add_node(final=final)
visited[new] = nid
dfa.add_edge(label, visited[current], visited[new])
self.set_name(dfa.get_name())
self.set_initial(dfa.get_initial())
self.set_finals(dfa.get_finals())
self.set_nodes(dfa.get_nodes())
def minimize(self):
min = Automaton("MINIMIZED(%s)" % self.get_name())
alphabet = self.get_alphabet()
nodetoset = {}
settonode = {}
sets = NodeSets()
rest, final = [], []
for node in self:
if node.is_final():
final.append(node.get_id())
else:
rest.append(node.get_id())
sets.add(rest)
sets.add(final)
stack = [s for s in sets if len(s) > 1]
def target_set(_id, label):
edge = self[_id][label]
if edge is None:
return None
else:
return sets[edge[0].get_id()]
while len(stack) > 0:
current = stack.pop()
for label in alphabet:
target = target_set(current[0], label)
one, two = [current[0]], []
for _id in current[1:]:
if target_set(_id, label) == target:
one.append(_id)
else:
two.append(_id)
if len(two) > 0:
sets.add(one)
sets.add(two)
if len(one) > 1:
stack.append(one)
if len(two) > 1:
stack.append(two)
break
for s in sets:
initial = self.get_initial() in s
final = True in [self[_id].is_final() for _id in s]
_id = min.add_node(initial=initial, final=final)
nodetoset[_id] = s
settonode[s] = _id
for node in min:
done = set()
s = nodetoset[node.get_id()]
source = self[s[0]]
for label in source:
edge = source[label]
if label in done:
continue
done.add(label)
for target in edge:
t = sets[target.get_id()]
min.add_edge(label, node.get_id(), settonode[t])
self.set_name(min.get_name())
self.set_initial(min.get_initial())
self.set_finals(min.get_finals())
self.set_nodes(min.get_nodes())
def reset(self):
for node in self:
node.set_visited(False)
def __expand(self, source):
L = []
source.set_visited(True)
for label in source:
edge = source[label]
for target in source[label]:
L.append((edge.get_label(), source.get_id(), target.get_id()))
return L
def iter(self, iter_type=None):
if iter_type is None:
iter_type = FSAConstants.BREADTH_FIRST_SEARCH
if len(self[self.get_initial()]) == 0:
raise UnitexException("Empty FSA")
i = None
if iter_type == FSAConstants.DEPTH_FIRST_SEARCH:
i = -1
elif iter_type == FSAConstants.BREADTH_FIRST_SEARCH:
i = 0
else:
raise UnitexException("Unknown iter type: %s" % iter_type)
root = self[self.get_initial()]
if root.is_visited():
self.reset()
L = self.__expand(root)
while L:
edge, sid, tid = L.pop(i)
yield (edge, sid, tid)
if not self[tid].is_visited():
L += self.__expand(self[tid])
def save(self, file, encoding=None):
if encoding is None:
encoding = UnitexConstants.DEFAULT_ENCODING
with open(file, "w", encoding=encoding) as output:
output.write("digraph Automaton {\n\n")
output.write("\tcenter = 1;\n")
output.write("\tcharset = \"%s\";\n" % encoding)
output.write("\trankdir = LR;\n")
output.write("\tranksep = 1;\n")
output.write("\tedge [arrowhead = vee];\n\n")
nodes = set()
edges = set()
for node in self:
sid = node.get_id()
n1 = "node%s" % sid
if not sid in nodes:
nodes.add(sid)
if node.get_id() == self.get_initial():
output.write("\t%s[shape = circle, label = \"\"];\n" % n1)
elif node.is_final():
output.write("\t%s[shape = doublecircle, label = \"\"];\n" % n1)
else:
output.write("\t%s[shape = point, label = \"\"];\n" % n1)
for label in node:
for target in node[label]:
if (node.get_id(), label, target.get_id()) in edges:
continue
edges.add((node.get_id(), label, target.get_id()))
tid = target.get_id()
n2 = "node%s" % tid
if not tid in nodes:
nodes.add(tid)
if target.get_id() == self.get_initial():
output.write("\t%s[shape = circle, label = \"\"];\n" % n2)
elif target.is_final():
output.write("\t%s[shape = doublecircle, label = \"\"];\n" % n2)
else:
output.write("\t%s[shape = point, label = \"\"];\n" % n2)
output.write("\t%s -> %s [label = \"%s\"];\n" % (n1, n2, label))
output.write("\n")
output.write("}\n")
| gpl-3.0 | -4,207,266,120,021,869,000 | 24.527919 | 96 | 0.485782 | false | 3.964004 | false | false | false |
trilkk/dnload | dnload/assembler.py | 2 | 3594 | import os
from dnload.common import is_listing
from dnload.common import listify
from dnload.common import run_command
########################################
# Assembler ############################
########################################
class Assembler:
"""Class used to generate assembler output."""
def __init__(self, op):
"""Constructor."""
self.__executable = op
self.__comment = "#"
self.__byte = ".byte"
self.__short = ".short"
self.__word = ".long"
self.__quad = ".quad"
self.__string = ".ascii"
self.__assembler_flags_extra = []
op = os.path.basename(op)
if op.startswith("nasm"):
self.__comment = ";"
self.__byte = "db"
self.__short = "dw"
self.__word = "dd"
self.__string = "db"
def addExtraFlags(self, op):
"""Add extra flags to use when assembling."""
if is_listing(op):
for ii in op:
self.addExtraFlags(ii)
return
if not (op in self.__assembler_flags_extra):
self.__assembler_flags_extra += [op]
def assemble(self, src, dst):
"""Assemble a file."""
cmd = [self.__executable, src, "-o", dst] + self.__assembler_flags_extra
(so, se) = run_command(cmd)
if 0 < len(se) and is_verbose():
print(se)
def format_align(self, op):
"""Get alignmen string."""
return (".balign %i\n" % (op))
def format_block_comment(self, desc, length=40):
"""Get a block-formatted comment."""
block_text = ""
for ii in range(length):
block_text += self.__comment
block_text += "\n"
ret = self.__comment
if desc:
ret += " " + desc + " "
for ii in range(len(ret), length):
ret += self.__comment
return block_text + ret + "\n" + block_text
def format_comment(self, op, indent=""):
"""Get comment string."""
ret = ""
for ii in listify(op):
if ii:
ret += indent + self.__comment + " " + ii + "\n"
return ret
def format_data(self, size, value, indent=""):
"""Get data element."""
size = int(size)
value_strings = []
for ii in listify(value):
if isinstance(ii, int):
value_strings += ["0x%x" % (ii)]
else:
value_strings += [str(ii)]
if not value_strings:
raise RuntimeError("unable to format value: '%s'" % (str(value)))
value = ", ".join(value_strings)
if value.startswith("\"") and 1 == size:
return indent + self.__string + " " + value + "\n"
if 1 == size:
return indent + self.__byte + " " + value + "\n"
elif 2 == size:
return indent + self.__short + " " + value + "\n"
elif 4 == size:
return indent + self.__word + " " + value + "\n"
elif 8 == size:
return indent + self.__quad + " " + value + "\n"
else:
raise NotImplementedError("exporting assembler value of size %i", size)
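        # Sketch (illustrative values; output shown for the GNU as dialect):
        #   Assembler("as").format_data(4, [1, 2])   -> ".long 0x1, 0x2\n"
        # and for the nasm dialect the same call would emit "dd 0x1, 0x2\n".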
def format_equ(self, name, value):
return ".globl %s\n.equ %s, %s\n" % (name, name, value)
def format_label(self, op):
"""Generate name labels."""
if not op:
return ""
ret = ""
if is_listing(op):
for ii in op:
                ret += self.format_label(ii)
else:
ret += ".globl %s\n%s:\n" % (op, op)
return ret
| bsd-3-clause | 4,423,154,596,944,982,000 | 31.378378 | 83 | 0.470506 | false | 3.975664 | false | false | false |
mineo/dnf-plugins-core | plugins/copr.py | 1 | 14955 | # supplies the 'copr' command.
#
# Copyright (C) 2014-2015 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from __future__ import print_function
from dnf.pycomp import PY3
from subprocess import call
from dnfpluginscore import _, logger
from dnf.i18n import ucd
import dnfpluginscore.lib
import dnf
import glob
import json
import os
import platform
import shutil
import stat
YES = set([_('yes'), _('y')])
NO = set([_('no'), _('n'), ''])
# compatibility with Py2 and Py3 - rename raw_input() to input() on Py2
try:
input = raw_input
except NameError:
pass
class Copr(dnf.Plugin):
"""DNF plugin supplying the 'copr' command."""
name = 'copr'
def __init__(self, base, cli):
"""Initialize the plugin instance."""
super(Copr, self).__init__(base, cli)
if cli is not None:
cli.register_command(CoprCommand)
class CoprCommand(dnf.cli.Command):
""" Copr plugin for DNF """
copr_url = "https://copr.fedoraproject.org"
aliases = ("copr",)
summary = _("Interact with Copr repositories.")
usage = _("""
enable name/project [chroot]
disable name/project
remove name/project
list name
search project
Examples:
copr enable rhscl/perl516 epel-6-x86_64
copr enable ignatenkobrain/ocltoys
copr disable rhscl/perl516
copr remove rhscl/perl516
copr list ignatenkobrain
copr search tests
""")
def run(self, extcmds):
try:
subcommand = extcmds[0]
except (ValueError, IndexError):
dnf.cli.commands.err_mini_usage(self.cli, self.cli.base.basecmd)
return 0
if subcommand == "help":
dnf.cli.commands.err_mini_usage(self.cli, self.cli.base.basecmd)
return 0
try:
project_name = extcmds[1]
except (ValueError, IndexError):
logger.critical(
_('Error: ') +
_('exactly two additional parameters to '
'copr command are required'))
dnf.cli.commands.err_mini_usage(self.cli, self.cli.base.basecmd)
raise dnf.cli.CliError(
_('exactly two additional parameters to '
'copr command are required'))
try:
chroot = extcmds[2]
except IndexError:
chroot = self._guess_chroot()
# commands without defined copr_username/copr_projectname
if subcommand == "list":
self._list_user_projects(project_name)
return
if subcommand == "search":
self._search(project_name)
return
try:
copr_username, copr_projectname = project_name.split("/")
except ValueError:
logger.critical(
_('Error: ') +
_('use format `copr_username/copr_projectname` '
'to reference copr project'))
raise dnf.cli.CliError(_('bad copr project format'))
repo_filename = "/etc/yum.repos.d/_copr_{}-{}.repo" \
.format(copr_username, copr_projectname)
if subcommand == "enable":
self._need_root()
self._ask_user("""
You are about to enable a Copr repository. Please note that this
repository is not part of the main Fedora distribution, and quality
may vary.
The Fedora Project does not exercise any power over the contents of
this repository beyond the rules outlined in the Copr FAQ at
<https://fedorahosted.org/copr/wiki/UserDocs#WhatIcanbuildinCopr>, and
packages are not held to any quality or security level.
Please do not file bug reports about these packages in Fedora
Bugzilla. In case of problems, contact the owner of this repository.
Do you want to continue? [y/N]: """)
self._download_repo(project_name, repo_filename, chroot)
logger.info(_("Repository successfully enabled."))
elif subcommand == "disable":
self._need_root()
self._disable_repo(copr_username, copr_projectname)
logger.info(_("Repository successfully disabled."))
elif subcommand == "remove":
self._need_root()
self._remove_repo(repo_filename)
logger.info(_("Repository successfully removed."))
else:
raise dnf.exceptions.Error(
_('Unknown subcommand {}.').format(subcommand))
def _list_user_projects(self, user_name):
# http://copr.fedoraproject.org/api/coprs/ignatenkobrain/
api_path = "/api/coprs/{}/".format(user_name)
res = dnfpluginscore.lib.urlopen(self, None, self.copr_url + api_path, 'w+')
try:
json_parse = json.loads(res.read())
except ValueError:
raise dnf.exceptions.Error(
_("Can't parse repositories for username '{}'.")
.format(user_name))
self._check_json_output(json_parse)
section_text = _("List of {} coprs").format(user_name)
self._print_match_section(section_text)
i = 0
while i < len(json_parse["repos"]):
msg = "{0}/{1} : ".format(user_name,
json_parse["repos"][i]["name"])
desc = json_parse["repos"][i]["description"]
if not desc:
desc = _("No description given")
msg = self.base.output.fmtKeyValFill(ucd(msg), desc)
print(msg)
i += 1
def _search(self, query):
# http://copr.fedoraproject.org/api/coprs/search/tests/
api_path = "/api/coprs/search/{}/".format(query)
res = dnfpluginscore.lib.urlopen(self, None, self.copr_url + api_path, 'w+')
try:
json_parse = json.loads(res.read())
except ValueError:
raise dnf.exceptions.Error(_("Can't parse search for '{}'."
).format(query))
self._check_json_output(json_parse)
section_text = _("Matched: {}").format(query)
self._print_match_section(section_text)
i = 0
while i < len(json_parse["repos"]):
msg = "{0}/{1} : ".format(json_parse["repos"][i]["username"],
json_parse["repos"][i]["coprname"])
desc = json_parse["repos"][i]["description"]
if not desc:
desc = _("No description given.")
msg = self.base.output.fmtKeyValFill(ucd(msg), desc)
print(msg)
i += 1
def _print_match_section(self, text):
formatted = self.base.output.fmtSection(text)
print(formatted)
def _ask_user(self, question):
if self.base.conf.assumeyes and not self.base.conf.assumeno:
return
elif self.base.conf.assumeno and not self.base.conf.assumeyes:
raise dnf.exceptions.Error(_('Safe and good answer. Exiting.'))
answer = None
while not ((answer in YES) or (answer in NO)):
answer = ucd(input(question)).lower()
answer = _(answer)
if answer in YES:
return
else:
raise dnf.exceptions.Error(_('Safe and good answer. Exiting.'))
@classmethod
def _need_root(cls):
# FIXME this should do dnf itself (BZ#1062889)
if os.geteuid() != 0:
raise dnf.exceptions.Error(
_('This command has to be run under the root user.'))
@classmethod
def _guess_chroot(cls):
""" Guess which choot is equivalent to this machine """
# FIXME Copr should generate non-specific arch repo
dist = platform.linux_distribution()
if "Fedora" in dist:
# x86_64 because repo-file is same for all arch
# ($basearch is used)
if "Rawhide" in dist:
chroot = ("fedora-rawhide-x86_64")
else:
chroot = ("fedora-{}-x86_64".format(dist[1]))
else:
chroot = ("epel-%s-x86_64" % dist[1].split(".", 1)[0])
return chroot
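        # Illustrative mappings (platform tuples are examples, not exhaustive):
        #   ('Fedora', '23', 'Twenty Three')     -> "fedora-23-x86_64"
        #   ('CentOS Linux', '7.2.1511', 'Core') -> "epel-7-x86_64"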
def _download_repo(self, project_name, repo_filename, chroot=None):
if chroot is None:
chroot = self._guess_chroot()
short_chroot = '-'.join(chroot.split('-')[:2])
#http://copr.fedoraproject.org/coprs/larsks/rcm/repo/epel-7-x86_64/
api_path = "/coprs/{0}/repo/{1}/".format(project_name, short_chroot)
try:
f = dnfpluginscore.lib.urlopen(self, None, self.copr_url + api_path)
except IOError as e:
if os.path.exists(repo_filename):
os.remove(repo_filename)
if '404' in str(e):
if PY3:
import urllib.request
try:
res = urllib.request.urlopen(self.copr_url + "/coprs/" + project_name)
status_code = res.getcode()
except urllib.error.HTTPError as e:
status_code = e.getcode()
else:
import urllib
res = urllib.urlopen(self.copr_url + "/coprs/" + project_name)
status_code = res.getcode()
if str(status_code) != '404':
raise dnf.exceptions.Error(_("This repository does not have"\
" any builds yet so you cannot enable it now."))
else:
raise dnf.exceptions.Error(_("Such repository does not exists."))
raise
shutil.copy2(f.name, repo_filename)
os.chmod(repo_filename, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
@classmethod
def _remove_repo(cls, repo_filename):
# FIXME is it Copr repo ?
try:
os.remove(repo_filename)
except OSError as e:
raise dnf.exceptions.Error(str(e))
@classmethod
def _disable_repo(cls, copr_username, copr_projectname):
exit_code = call(["dnf", "config-manager", "--set-disabled",
"{}-{}".format(copr_username, copr_projectname)])
if exit_code != 0:
raise dnf.exceptions.Error(
_("Failed to disable copr repo {}/{}"
.format(copr_username, copr_projectname)))
@classmethod
def _get_data(cls, f):
""" Wrapper around response from server
check data and print nice error in case of some error (and return None)
otherwise return json object.
"""
try:
output = json.loads(f.read())
except ValueError:
dnf.cli.CliError(_("Unknown response from server."))
return
return output
@classmethod
def _check_json_output(cls, json_obj):
if json_obj["output"] != "ok":
raise dnf.exceptions.Error("{}".format(json_obj["error"]))
class Playground(dnf.Plugin):
"""DNF plugin supplying the 'playground' command."""
name = 'playground'
def __init__(self, base, cli):
"""Initialize the plugin instance."""
super(Playground, self).__init__(base, cli)
if cli is not None:
cli.register_command(PlaygroundCommand)
class PlaygroundCommand(CoprCommand):
""" Playground plugin for DNF """
aliases = ("playground",)
summary = _("Interact with Playground repository.")
usage = " [enable|disable|upgrade]"
def _cmd_enable(self, chroot):
self._need_root()
self._ask_user("""
You are about to enable a Playground repository.
Do you want to continue? [y/N]: """)
api_url = "{0}/api/playground/list/".format(
self.copr_url)
f = dnfpluginscore.lib.urlopen(self, None, api_url)
output = self._get_data(f)
f.close()
if output["output"] != "ok":
raise dnf.cli.CliError(_("Unknown response from server."))
for repo in output["repos"]:
project_name = "{0}/{1}".format(repo["username"],
repo["coprname"])
repo_filename = "/etc/yum.repos.d/_playground_{}.repo" \
.format(project_name.replace("/", "-"))
try:
# check if that repo exist? but that will result in twice
# up calls
api_url = "{0}/api/coprs/{1}/detail/{2}/".format(
self.copr_url, project_name, chroot)
f = dnfpluginscore.lib.urlopen(self, None, api_url)
output2 = self._get_data(f)
f.close()
if (output2 and ("output" in output2)
and (output2["output"] == "ok")):
self._download_repo(project_name, repo_filename, chroot)
except dnf.exceptions.Error:
# likely 404 and that repo does not exist
pass
def _cmd_disable(self):
self._need_root()
for repo_filename in glob.glob('/etc/yum.repos.d/_playground_*.repo'):
self._remove_repo(repo_filename)
def run(self, extcmds):
try:
subcommand = extcmds[0]
except (ValueError, IndexError):
logger.critical(
_('Error: ') +
_('exactly one parameter to '
                  'playground command is required'))
dnf.cli.commands.err_mini_usage(self.cli, self.cli.base.basecmd)
raise dnf.cli.CliError(
_('exactly one parameter to '
                'playground command is required'))
chroot = self._guess_chroot()
if subcommand == "enable":
self._cmd_enable(chroot)
logger.info(_("Playground repositories successfully enabled."))
elif subcommand == "disable":
self._cmd_disable()
logger.info(_("Playground repositories successfully disabled."))
elif subcommand == "upgrade":
self._cmd_disable()
self._cmd_enable(chroot)
logger.info(_("Playground repositories successfully updated."))
else:
raise dnf.exceptions.Error(
_('Unknown subcommand {}.').format(subcommand))
| gpl-2.0 | -8,652,930,053,118,489,000 | 37.053435 | 94 | 0.570043 | false | 4.092775 | false | false | false |
rtavenar/tslearn | tslearn/hdftools.py | 1 | 4495 | import h5py
import os
import numpy as np
import traceback
from warnings import warn
def save_dict(d, filename, group, raise_type_fail=True):
"""
Recursively save a dict to an hdf5 group in a new file.
Parameters
----------
d: dict
dict to save as an hdf5 file
filename : str
Full path to save the file to. File must not already exist.
group : str
group name to save the dict to
raise_type_fail : bool
If True: raise an exception if saving a part of the dict fails.
        If False: print a warning instead and save the
object's __str__() return value.
Returns
-------
None
Raises
------
FileExistsError
If the path specified by the `filename` parameter already exists.
TypeError
If a particular entry within the dict cannot be saved to hdf5 AND
the argument `raise_type_fail` is set to `True`
"""
if os.path.isfile(filename):
raise FileExistsError
with h5py.File(filename, 'w') as h5file:
_dicts_to_group(h5file, "{}/".format(group), d,
raise_meta_fail=raise_type_fail)
def _dicts_to_group(h5file, path, d, raise_meta_fail):
for key, item in d.items():
if isinstance(item, np.ndarray):
if item.dtype == np.dtype('O'):
# see if h5py is ok with it
try:
h5file[path + key] = item
# h5file[path + key].attrs['dtype'] = item.dtype.str
except TypeError:
msg = "numpy dtype 'O' for item:\n{}\n" \
"not supported by HDF5\n{}" \
"".format(item, traceback.format_exc())
if raise_meta_fail:
raise TypeError(msg)
else:
h5file[path + key] = str(item)
warn("{}, storing whatever str(obj) returns"
"".format(msg))
# numpy array of unicode strings
elif item.dtype.str.startswith('<U'):
h5file[path + key] = item.astype(h5py.special_dtype(vlen=str))
# otherwise h5py doesn't restore the right dtype for str types
h5file[path + key].attrs['dtype'] = item.dtype.str
# other types
else:
h5file[path + key] = item
# h5file[path + key].attrs['dtype'] = item.dtype.str
# single pieces of data
elif isinstance(item, (str, np.int, np.int8,
np.int16, np.int32, np.int64, np.float,
np.float16, np.float32, np.float64)):
h5file[path + key] = item
elif isinstance(item, dict):
_dicts_to_group(
h5file, "{}{}/".format(path, key), item, raise_meta_fail
)
# last resort, try to convert this object
# to a dict and save its attributes
elif hasattr(item, '__dict__'):
_dicts_to_group(
h5file,
"{}{}/".format(path, key),
item.__dict__,
raise_meta_fail
)
else:
msg = "{} for item: {} not supported by HDF5" \
"".format(type(item), item)
if raise_meta_fail:
raise TypeError(msg)
else:
h5file[path + key] = str(item)
warn("{}, storing whatever str(obj) returns"
"".format(msg))
def load_dict(filename, group):
"""
Recursively load a dict from an hdf5 group in a file.
Parameters
----------
filename : str
full path to the hdf5 file
group : str
Name of the group that contains the dict to load
Returns
-------
d : dict
dict loaded from the specified hdf5 group.
"""
with h5py.File(filename, 'r') as h5file:
return _dicts_from_group(h5file, "{}/".format(group))
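# Round-trip sketch (file and group names are illustrative):
#   save_dict({"w": np.zeros(3), "meta": {"n": 1}}, "model.hdf5", "data")
#   d = load_dict("model.hdf5", "data")
#   # d["w"] is a numpy array again, d["meta"]["n"] an int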
def _dicts_from_group(h5file, path):
ans = {}
for key, item in h5file[path].items():
if isinstance(item, h5py._hl.dataset.Dataset):
if item.attrs.__contains__('dtype'):
ans[key] = item[()].astype(item.attrs['dtype'])
else:
ans[key] = item[()]
elif isinstance(item, h5py._hl.group.Group):
ans[key] = _dicts_from_group(h5file, path + key + '/')
return ans
| bsd-2-clause | -8,905,089,988,840,970,000 | 28.768212 | 78 | 0.511012 | false | 4.082652 | false | false | false |
marauder37/PokemonGo-Map | pogom/pgoapi/pgoapi.py | 28 | 6203 | """
pgoapi - Pokemon Go API
Copyright (c) 2016 tjado <https://github.com/tejado>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
OR OTHER DEALINGS IN THE SOFTWARE.
Author: tjado <https://github.com/tejado>
"""
import logging
import re
import requests
from utilities import f2i, h2f
from rpc_api import RpcApi
from auth_ptc import AuthPtc
from auth_google import AuthGoogle
from exceptions import AuthException, NotLoggedInException, ServerBusyOrOfflineException
import protos.RpcEnum_pb2 as RpcEnum
logger = logging.getLogger(__name__)
class PGoApi:
API_ENTRY = 'https://pgorelease.nianticlabs.com/plfe/rpc'
def __init__(self):
self.log = logging.getLogger(__name__)
self._auth_provider = None
self._api_endpoint = None
self._position_lat = 0
self._position_lng = 0
self._position_alt = 0
self._req_method_list = []
def call(self):
if not self._req_method_list:
return False
if self._auth_provider is None or not self._auth_provider.is_login():
self.log.info('Not logged in')
return False
player_position = self.get_position()
request = RpcApi(self._auth_provider)
if self._api_endpoint:
api_endpoint = self._api_endpoint
else:
api_endpoint = self.API_ENTRY
self.log.info('Execution of RPC')
response = None
try:
response = request.request(api_endpoint, self._req_method_list, player_position)
except ServerBusyOrOfflineException as e:
self.log.info('Server seems to be busy or offline - try again!')
# cleanup after call execution
self.log.info('Cleanup of request!')
self._req_method_list = []
return response
#def get_player(self):
def list_curr_methods(self):
for i in self._req_method_list:
print("{} ({})".format(RpcEnum.RequestMethod.Name(i),i))
def set_logger(self, logger):
        self.log = logger or logging.getLogger(__name__)
def get_position(self):
return (self._position_lat, self._position_lng, self._position_alt)
def set_position(self, lat, lng, alt):
self.log.debug('Set Position - Lat: %s Long: %s Alt: %s', lat, lng, alt)
self._position_lat = f2i(lat)
self._position_lng = f2i(lng)
self._position_alt = f2i(alt)
def __getattr__(self, func):
def function(**kwargs):
if not self._req_method_list:
self.log.info('Create new request...')
name = func.upper()
if kwargs:
self._req_method_list.append( { RpcEnum.RequestMethod.Value(name): kwargs } )
self.log.info("Adding '%s' to RPC request including arguments", name)
self.log.debug("Arguments of '%s': \n\r%s", name, kwargs)
else:
self._req_method_list.append( RpcEnum.RequestMethod.Value(name) )
self.log.info("Adding '%s' to RPC request", name)
return self
if func.upper() in RpcEnum.RequestMethod.keys():
return function
else:
raise AttributeError
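    # Dispatch sketch: any request name defined in RpcEnum becomes a chainable
    # call thanks to __getattr__ above, e.g. (illustrative):
    #   api.get_player()          # queues GET_PLAYER
    #   api.get_inventory()       # queues GET_INVENTORY
    #   response = api.call()     # executes the batched RPC request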
def login(self, provider, username, password):
if not isinstance(username, basestring) or not isinstance(password, basestring):
raise AuthException("Username/password not correctly specified")
if provider == 'ptc':
self._auth_provider = AuthPtc()
elif provider == 'google':
self._auth_provider = AuthGoogle()
else:
raise AuthException("Invalid authentication provider - only ptc/google available.")
self.log.debug('Auth provider: %s', provider)
if not self._auth_provider.login(username, password):
self.log.info('Login process failed')
return False
self.log.info('Starting RPC login sequence (app simulation)')
# making a standard call, like it is also done by the client
self.get_player()
self.get_hatched_eggs()
self.get_inventory()
self.check_awarded_badges()
self.download_settings(hash="4a2e9bc330dae60e7b74fc85b98868ab4700802e")
response = self.call()
if not response:
self.log.info('Login failed!')
return False
if 'api_url' in response:
self._api_endpoint = ('https://{}/rpc'.format(response['api_url']))
self.log.debug('Setting API endpoint to: %s', self._api_endpoint)
elif 'auth_ticket' in response:
auth_ticket = response['auth_ticket']
self._auth_provider.set_ticket([auth_ticket['expire_timestamp_ms'], auth_ticket['start'], auth_ticket['end']])
else:
self.log.error('Login failed - unexpected server response!')
return False
self.log.info('Finished RPC login sequence (app simulation)')
self.log.info('Login process completed')
return True
| agpl-3.0 | -3,666,100,056,309,610,000 | 33.653631 | 122 | 0.612929 | false | 4.219728 | false | false | false |
vtexier/duniter-python-api | duniterpy/documents/document.py | 1 | 2367 | import base64
import re
import logging
import hashlib
from .constants import signature_regex
class MalformedDocumentError(Exception):
"""
Malformed document exception
"""
def __init__(self, field_name):
super().__init__("Could not parse field {0}".format(field_name))
class Document:
re_version = re.compile("Version: ([0-9]+)\n")
re_currency = re.compile("Currency: ([^\n]+)\n")
re_signature = re.compile("({signature_regex})\n".format(signature_regex=signature_regex))
fields_parsers = {
"Version": re_version,
"Currency": re_currency,
"Signature": re_signature
}
@classmethod
def parse_field(cls, field_name, line):
"""
:param field_name:
:param line:
:return:
"""
try:
value = cls.fields_parsers[field_name].match(line).group(1)
except AttributeError:
raise MalformedDocumentError(field_name)
return value
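    # Sketch: Document.parse_field("Version", "Version: 10\n") returns "10";
    # a line that does not match the field's regex raises MalformedDocumentError.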
def __init__(self, version, currency, signatures):
if version < 2:
raise MalformedDocumentError("Version 1 documents are not handled by duniterpy>0.2")
self.version = version
self.currency = currency
if signatures:
self.signatures = [s for s in signatures if s is not None]
else:
self.signatures = []
def sign(self, keys):
"""
Sign the current document.
Warning : current signatures will be replaced with the new ones.
"""
self.signatures = []
for key in keys:
signing = base64.b64encode(key.signature(bytes(self.raw(), 'ascii')))
logging.debug("Signature : \n{0}".format(signing.decode("ascii")))
self.signatures.append(signing.decode("ascii"))
def raw(self):
"""
Returns the raw document in string format
"""
raise NotImplementedError()
def signed_raw(self):
"""
If keys are None, returns the raw + current signatures
If keys are present, returns the raw signed by these keys
"""
raw = self.raw()
signed = "\n".join(self.signatures)
signed_raw = raw + signed + "\n"
return signed_raw
@property
def sha_hash(self):
return hashlib.sha256(self.signed_raw().encode("ascii")).hexdigest().upper()
| gpl-3.0 | -2,618,608,365,082,055,700 | 28.5875 | 96 | 0.588931 | false | 4.264865 | false | false | false |
AHAAAAAAA/HackCU2016 | param_util.py | 5 | 1642 | """
Utility for model parameter
"""
import os
try:
from cPickle import load
except ImportError:
from pickle import load
class Params(object):
pass
def load_dcnn_model_params(path, param_str = None):
"""
>>> p = load_dcnn_model_params("models/filter_widths=8,6,,batch_size=10,,ks=20,8,,fold=1,1,,conv_layer_n=2,,ebd_dm=48,,l2_regs=1e-06,1e-06,1e-06,0.0001,,dr=0.5,0.5,,nkerns=7,12.pkl")
>>> p.ks
(20, 8)
>>> len(p.W)
2
>>> type(p.logreg_W)
<type 'numpy.ndarray'>
"""
if param_str is None:
param_str = os.path.basename(path).split('.')[0]
p = parse_param_string(param_str)
stuff = load(open(path, "r"))
for name, value in stuff:
if not hasattr(p, name):
setattr(p, name, value)
else:
# if appear multiple times,
# make it a list
setattr(p, name, [getattr(p, name), value])
return p
def parse_param_string(s, desired_fields = {"ks", "fold", "conv_layer_n"}):
"""
>>> p = parse_param_string("twitter4,,filter_widths=8,6,,batch_size=10,,ks=20,8,,fold=1,1,,conv_layer_n=2,,ebd_dm=48,,l2_regs=1e-06,1e-06,1e-06,0.0001,,dr=0.5,0.5,,nkerns=7,12")
>>> p.ks
(20, 8)
>>> p.fold
(1, 1)
>>> p.conv_layer_n
2
"""
p = Params()
segs = s.split(',,')
for s in segs:
if "=" in s:
key, value = s.split('=')
if key in desired_fields:
if not ',' in value:
setattr(p, key, int(value))
else:
setattr(p, key, tuple(map(int, value.split(','))))
return p
| mit | 684,665,900,532,379,400 | 26.366667 | 186 | 0.523752 | false | 2.921708 | false | false | false |
alexign/botty_clicker | botty_clicker.py | 1 | 121096 | # MIT License
#
# Copyright (c) 2017 Alex Ignatov
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# -*- coding: utf-8 -*-
import ctypes
import json
import multiprocessing
import os
import os.path
import re
import shutil
import tempfile
import time
import random
from collections import OrderedDict
from ctypes import *
from ctypes.wintypes import RECT, DWORD, LONG, WORD, LPVOID, LPCWSTR, HWND, POINT, UINT, INT
from multiprocessing import Process
from operator import itemgetter
import cv2
import itertools
import numpy as np
DEBUG = False
STATS_DIR = 'stats'
PATTERNS_DIR = 'patterns'
FARMING_INTERVAL = 30
PROGRESSION_INTERVAL = 30
FISHING_INTERVAL = 30
ASCENSION_INTERVAL = 10800
TOP_HEROES_UPGRADE_INTERVAL = 120
ALL_HEROES_UPGRADE_INTERVAL = 10
ALL_HEROES_UPGRADE_MAX_TIMER = 3600
MAX_NUMBER_OF_VISIBLE_HEROES = 5
WM_MOUSEWHEEL = 0x020A
WHEEL_DOWN = -1
WHEEL_UP = 1
WHEEL_DELTA = 120
WM_LBUTTONDOWN = 0x0201
WM_LBUTTONUP = 0x0202
WM_MOUSEMOVE = 0x0200
WM_MOUSELEAVE = 0x02A3
WM_MOUSEHOVER = 0x02A1
HTCLIENT = 1
WM_SETCURSOR = 0x0020
WM_CHAR = 0x0102
WM_KEYDOWN = 0x0100
WM_KEYUP = 0x0101
HWND_TOP = 0x0
SWP_NOMOVE = 0x0002
VK_CONTROL = 0x11
VK_SHIFT = 0x10
BI_RGB = 0x0000
DIB_RGB_COLORS = 0x00
SRCCOPY = 0xCC0020
SW_SHOWMINIMIZED = 0x2
SW_SHOWNORMAL = 0x1
SW_SHOWMAXIMIZED = 0x3
SW_RESTORE = 0x9
SW_MINIMIZE = 6
SW_SHOWMINNOACTIVE = 7
SW_SHOWNOACTIVATE = 4
SW_HIDE = 0x0
GWL_EXSTYLE = -20
GWL_STYLE = -16
WS_EX_LAYERED = 0x00080000
LWA_ALPHA = 0x00000002
SPI_GETANIMATION = 0x0048
SPI_SETANIMATION = 0x0049
SPIF_SENDCHANGE = 2
WS_MINIMIZEBOX = 0x00020000
WS_MAXIMIZEBOX = 0x00010000
WS_VSCROLL = 0x00200000
WS_HSCROLL = 0x00100000
WS_SIZEBOX = 0x00040000
WS_CAPTION = 0x00C00000
WS_SYSMENU = 0x00080000
SendMessage = ctypes.windll.user32.SendMessageW
FindWindow = ctypes.windll.user32.FindWindowW
FindWindow.argtypes = [LPCWSTR, LPCWSTR]
FindWindow.restype = HWND
SetForegroundWindow = ctypes.windll.user32.SetForegroundWindow
SetWindowPos = ctypes.windll.user32.SetWindowPos
GetWindowRect = ctypes.windll.user32.GetWindowRect
AdjustWindowRect = ctypes.windll.user32.AdjustWindowRect
GetwDesktopWindow = ctypes.windll.user32.GetDesktopWindow
GetWindowRect = ctypes.windll.user32.GetWindowRect
ClientToScreen = ctypes.windll.user32.ClientToScreen
GetClientRect = ctypes.windll.user32.GetClientRect
GetWindowDC = ctypes.windll.user32.GetWindowDC
GetDC = ctypes.windll.user32.GetDC
GetDIBits = ctypes.windll.gdi32.GetDIBits
GetObject = ctypes.windll.gdi32.GetObjectW
CreateBitmap = ctypes.windll.Gdi32.CreateBitmap
CreateCompatibleDC = ctypes.windll.Gdi32.CreateCompatibleDC
CreateCompatibleBitmap = ctypes.windll.Gdi32.CreateCompatibleBitmap
EnumWindows = ctypes.windll.user32.EnumWindows
BitBlt = ctypes.windll.Gdi32.BitBlt
SelectObject = ctypes.windll.Gdi32.SelectObject
GetWindowPlacement = ctypes.windll.user32.GetWindowPlacement
ShowWindow = ctypes.windll.user32.ShowWindow
PrintWindow = ctypes.windll.user32.PrintWindow
GetWindowLong = ctypes.windll.user32.GetWindowLongW
SetWindowLong = ctypes.windll.user32.SetWindowLongW
SetLayeredWindowAttributes = ctypes.windll.user32.SetLayeredWindowAttributes
SystemParametersInfo = ctypes.windll.user32.SystemParametersInfoW
IsIconic = ctypes.windll.user32.IsIconic
ReleaseDC = ctypes.windll.user32.ReleaseDC
DeleteObject = ctypes.windll.gdi32.DeleteObject
DeleteDC = ctypes.windll.Gdi32.DeleteDC
def charToKeyCode(char):
if char in ('1', '2', '3', '4', '5', '6', '7', '8', '9', '0'):
return 0x30 + (ord(char) - ord('0'))
if char == 'ctrl':
return VK_CONTROL
if char == 'shift':
return VK_SHIFT
if 'a' <= char <= 'z':
return 0x41 + (ord(char) - ord('a'))
return None
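# Mapping sketch: charToKeyCode('a') == 0x41, charToKeyCode('1') == 0x31,
# charToKeyCode('ctrl') == VK_CONTROL; unsupported characters return None.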
class BITMAPINFOHEADER(Structure):
_fields_ = [("biSize", DWORD),
("biWidth", LONG),
("biHeight", LONG),
("biPlanes", WORD),
("biBitCount", WORD),
("biCompression", DWORD),
("biSizeImage", DWORD),
("biXPelsPerMeter", LONG),
("biYPelsPerMeter", LONG),
("biClrUsed", DWORD),
("biClrImportant", DWORD)]
class BITMAP(Structure):
_fields_ = [("bmType", LONG),
("bmWidth", LONG),
("bmHeight", LONG),
("bmWidthBytes", LONG),
("bmPlanes", WORD),
("bmBitsPixel", WORD),
("bmBits", LPVOID)]
class WINDOWPLACEMENT(Structure):
_fields_ = [("length", UINT),
("flags", UINT),
("showCmd", UINT),
("ptMinPosition", POINT),
("ptMaxPosition", POINT),
("rcNormalPosition", RECT)]
class ANIMATIONINFO(Structure):
_fields_ = [("cbSize", UINT),
("iMinAnimate", INT)]
def find_single_grey_old(image, pattern, method=cv2.TM_CCOEFF_NORMED, threshold=0.8):
height_pattern, width_pattern = pattern.getSize()
height_image, width_image = image.getSize()
if height_pattern > height_image or width_pattern > width_image:
if DEBUG:
            print('find_single_grey_old: pattern size is greater than image size')
return None
pattern_array = pattern.get_grey_array()
image_array = image.get_grey_array()
try:
res = cv2.matchTemplate(image_array, pattern_array, method)
except cv2.error as e:
        print('find_single_grey_old: caught cv2 exception: %s' % str(e))
# cv2.imshow('image', image)
# cv2.imshow('pimage', pimage)
# cv2.waitKey()
return None
min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)
if method in [cv2.TM_SQDIFF, cv2.TM_SQDIFF_NORMED] and min_val <= 1 - threshold:
top_left = min_loc
elif max_val >= threshold:
top_left = max_loc
else:
# if image.name == '123456':
# cv2.imshow('image', image_array)
# cv2.imshow('pimage', pattern_array)
# cv2.waitKey(50)
return None
# cv2.rectangle(image.get_array(), top_left,
# (top_left[0] + width, top_left[1] + height),
# (0, 0, 255),
# 1)
return [Region(top_left[0], top_left[1], width_pattern, height_pattern)]
def find_lvlup(image, pattern, all=False):
t_min = 128
t_max = 255
reg = find_single_grey(image, pattern)
if not reg:
return None
reg = reg[0]
pat_max = pattern.get_threshold(t_min, t_max).get_array().max()
tcc = image.crop_copy(reg).get_threshold(t_min, t_max).get_array().max()
if tcc != pat_max:
return None
return [reg]
def find_progress_button(image, pattern, all=False):
    return find_single_grey(image, pattern, threshold=0.9, all=all)
def find_single_grey_90(image, pattern, all=False):
return find_single_grey(image, pattern, threshold=0.9, all=all)
def find_single_grey_95(image, pattern, all=False):
return find_single_grey(image, pattern, threshold=0.95, all=all)
def find_single_grey_97(image, pattern, all=False):
return find_single_grey(image, pattern, threshold=0.97, all=all)
def find_level(image, pattern, all=False):
image = image.get_threshold(235, 255)
pattern = pattern.get_threshold(235, 255)
return find_single_grey(image, pattern, threshold=0.96, all=all)
def find_checked_skills(image, pattern, all=False, parts=4):
# image = image.get_threshold(128, 255)
# pattern = pattern.get_threshold(128, 255)
topLeft = None
if parts == 1:
return find_single_grey(image, pattern)
cv2.imshow("find_checked_skills:image", image.get_array())
for sect in np.array_split(pattern.get_array(), parts, axis=1):
sect_img = Image.fromArray(sect)
sect_reg = find_single_grey(image, sect_img)
cv2.imshow("find_checked_skills:pattern", sect)
cv2.waitKey(50)
if not sect_reg:
return None
sect_reg = sect_reg[0]
if topLeft is None and sect_reg:
topLeft = sect_reg.getTopLeft()
bottomRight = sect_reg.getBottomRight()
return [Region.from2Location(topLeft, bottomRight)]
# return find_single_grey(image, pattern, threshold=0.90)
def find_single_grey(image, pattern, method=cv2.TM_CCOEFF_NORMED, threshold=0.8, all=False):
pattern_height, pattern_width = pattern.getSize()
height_image, width_image = image.getSize()
if pattern_height > height_image or pattern_width > width_image:
if DEBUG:
            print('find_single_grey: pattern size is greater than image size')
return None
pattern_grey = pattern.get_grey_array()
image_grey = image.get_grey_array()
if all:
image_grey = image_grey.copy()
regions = []
while 1:
try:
res = cv2.matchTemplate(image_grey, pattern_grey, method)
except cv2.error as e:
            print('find_single_grey: caught cv2 exception: %s' % str(e))
# cv2.imshow('image', image)
# cv2.imshow('pimage', pimage)
# cv2.waitKey()
return None
min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)
if method in [cv2.TM_SQDIFF, cv2.TM_SQDIFF_NORMED] and min_val <= 1 - threshold:
top_left = min_loc
elif max_val >= threshold:
top_left = max_loc
else:
# if image.name == '123456':
# cv2.imshow('image', image_array)
# cv2.imshow('pimage', pattern_array)
# cv2.waitKey(50)
break
regions.append(Region(top_left[0], top_left[1], pattern_width, pattern_height))
if not all:
break
        # Mask out the matched area so the next iteration finds a new hit
        cv2.rectangle(image_grey, top_left,
                      (top_left[0] + pattern_width, top_left[1] + pattern_height),
                      (0, 0, 0),
                      cv2.FILLED)
# return [Region(top_left[0], top_left[1], pattern_width, pattern_height)]
return regions
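# Convenience sketch (not used elsewhere in this file): return the center
# Location of the best match, or None when the pattern is not found. Relies
# on the Region.center() helper used elsewhere in this script.
def find_first_center(image, pattern, threshold=0.8):
    regions = find_single_grey(image, pattern, threshold=threshold)
    if not regions:
        return None
    return regions[0].center()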
def find_all_grey_old(image, pattern, method=cv2.TM_CCOEFF_NORMED, threshold=0.8):
height, width = pattern.getSize()
# pimage = pattern.get_canny_array()
# image = image.get_canny_array()
pimage = pattern.get_grey_array()
image = image.get_grey_array()
res = cv2.matchTemplate(image, pimage, method)
# res = ((res - res.min()) / (res.max() - res.min()))
cv2.imshow('find_all_grey', res)
cv2.waitKey(500)
min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)
if method in [cv2.TM_SQDIFF, cv2.TM_SQDIFF_NORMED]:
loc = np.where(res <= 1 - threshold)
else:
loc = np.where(res >= threshold)
regArr = []
for pt in zip(*loc[::-1]):
val = res.item(pt[1], pt[0])
print("find_all_grey: val %s, location %s " % (val, pt))
reg = Region(pt[0], pt[1], width, height)
regArr.append((reg, val))
regRet = []
valRet = []
while regArr:
# Get first region from regArr
cr = regArr[0][0]
# Create array of regions which intersect with cr
intersect_reg = [r for r in regArr if cr.is_intersect(r[0])]
# Sort it by res value in descending order
intersect_reg.sort(key=itemgetter(1), reverse=True)
# Append to returned array region with highest res value
reg = intersect_reg[0][0]
val = intersect_reg[0][1]
regRet.append(reg)
valRet.append(val)
# cv2.rectangle(image.img, (reg.x, reg.y),
# (reg.getRight(), reg.getBottom()),
# (0, 0, 255),
# 1)
        # Keep only the regions in regArr that do not intersect with cr
# reg = Region(pt[0], pt[1], width, height)
# cv2.rectangle(image.img, (reg.x, reg.y),
# (reg.getRight(), reg.getBottom()),
# color,
# thickness)
regArr = [r for r in regArr if r not in intersect_reg]
return regRet
def find_pattern_hist(image, pattern, method=cv2.TM_CCOEFF_NORMED, threshold=0.8, corr_coeff=0.9, all=False):
pattern_grey = pattern.get_grey()
image_grey = image.get_grey()
reg_list = find_single_grey(image_grey, pattern_grey, method=method, threshold=threshold, all=all)
print('find_pattern_hist: reg_list %s' % (reg_list))
pattern_region = None
if reg_list:
img1 = None
img2 = None
corr_prev = None
for reg in reg_list:
img1 = pattern
img2 = image.crop(reg)
# hist_img1 = cv2.calcHist([img1.get_grey_array()], [0], None, [32], [0, 256])
# hist_img2 = cv2.calcHist([img2.get_grey_array()], [0], None, [32], [0, 256])
# corr_color = []
# corr_grey=[]
# for i in range(1):
hist_img1 = cv2.calcHist([img1.get_hsv_array()], [0, 1], None, [180, 256], [0, 180, 0, 256])
# # hist_img1 = cv2.calcHist([img1.get_array()], [i], None, [256], [0, 256])
# # hist_img2 = cv2.calcHist([img2.get_array()], [i], None, [256], [0, 256])
# corr_color.append(cv2.compareHist(hist_img1, hist_img2, cv2.HISTCMP_CORREL))
#
hist_img2 = cv2.calcHist([img2.get_hsv_array()], [0, 1], None, [180, 256], [0, 180, 0, 256])
# # hist_img1 = cv2.calcHist([img1.get_array()], [0], None, [8], [0, 256])
# # hist_img2 = cv2.calcHist([img2.get_array()], [0], None, [8], [0, 256])
# # hist_img1 = cv2.calcHist([(cv2.medianBlur(img1.get_grey_array(), 3))], [0], None, [256],
# # [0, 256])
# # hist_img2 = cv2.calcHist([(cv2.medianBlur(img2.get_grey_array(), 3))], [0], None, [256],
# # [0, 256])
# # hist_img1 = cv2.calcHist([img1.get_grey_array()], [0], None, [256],
# # [0, 256])
# # hist_img2 = cv2.calcHist([img2.get_grey_array()], [0], None, [256],
# # [0, 256])
corr_grey = cv2.compareHist(hist_img1, hist_img2, cv2.HISTCMP_CORREL)
# print('find_pattern_hist: %s to %s corr_color is B %s G %s R %s corr_grey =%s' % (
print('find_pattern_hist: %s to %s corr_grey =%s' % (
# img1.get_name(), img2.get_name(), corr_color[0], corr_color[1], corr_color[2], corr_grey))
img1.get_name(), img2.get_name(), corr_grey))
print('find_pattern_hist: img1.getSize() %s to img2.getSize() %s' % (img1.getSize(), img2.getSize()))
# if pattern.get_name()=='.\\patterns\\main\\fish_1.png':
#
# x_size = 300
#
# for img in (img1, img2):
# y_size = int(x_size / img.get_array().shape[1] * img.get_array().shape[0])
# cv2.namedWindow(img.get_name(), cv2.WINDOW_NORMAL)
# cv2.resizeWindow(img.get_name(), x_size, y_size)
# cv2.imshow(img.get_name(),cv2.medianBlur(img.get_array(),3))
# cv2.waitKey(500)
# pass
# if min(corr_color) >= corr_coeff or corr_grey >= corr_coeff:
# if corr_grey >= corr_coeff:
if corr_grey >= corr_coeff:
pattern_region = reg_list
# corr_prev = corr_color
# # print('find_pattern_hist: %s to %s corr is %s' % (img1.get_name(), img2.get_name(), corr_prev))
# # If pattern_region is not already create do it
# if not pattern_region:
# pattern_region = []
#
# if not pattern_region or not pattern_region[-1].is_intersect(reg):
# pattern_region.append(reg)
# # cv2.rectangle(self.img, (reg.getLeft(), reg.getTop()),
# # (reg.getRight(), reg.getBottom()),
# # (0, 0, 255),
# # 1)
# # if corr_prev and corr_prev <= corr:
# # reg_ret = reg
        print('find_pattern_hist: %s to %s corr_prev=%s, pattern_region=%s' % (img1.get_name(), img2.get_name(), corr_prev, pattern_region))
return pattern_region
def find_all_grey_multi(image, pattern, method=cv2.TM_CCOEFF_NORMED, threshold=0.8):
height, width = pattern.getSize()
# pimage = cv2.medianBlur(pattern.get_canny_array(), 3)
# image = cv2.medianBlur(image.get_canny_array(),3)
# pimage = cv2.blur(pattern.get_array(), (3, 3))
# image = cv2.blur(image.get_array(), (3, 3))
# pimage=cv2.cvtColor(pimage, cv2.COLOR_BGR2GRAY)
# image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
# pimage = cv2.Canny(pimage, 100, 200)
# image = cv2.Canny(image, 100, 200)
# pimage = pattern.get_grey_array()
# image = image.get_grey_array()
pimage = pattern.get_canny_array()
image = image.get_canny_array()
set_list = []
for method in [cv2.TM_SQDIFF, cv2.TM_CCOEFF]:
res = cv2.matchTemplate(image, pimage, method)
# cv2.normalize(res,res,0,1,norm_type=cv2.NORM_MINMAX)
        # Normalize res values into the 0..1 range
res = ((res - res.min()) / (res.max() - res.min()))
# if pattern.get_name() == '.\\patterns\\main\\heroes_menu_active.PNG':
# cv2.imshow(pattern.get_name(), pattern.get_array())
# cv2.imshow(image.get_name(), image.get_array())
# cv2.waitKey()
# cv2.imshow('res', res)
# cv2.imshow('template', pimage)
# cv2.waitKey(300)
if method in [cv2.TM_SQDIFF, cv2.TM_SQDIFF_NORMED]:
            # Take the ten smallest values and keep those under the threshold
            sort = np.sort(res, axis=None)[0:10]
            sort = sort[sort < 1 - threshold]
else:
# Find the maximum values in res
sort = np.sort(res, axis=None)[:-10:-1]
        # Map the selected values back to their positions in res
ix = np.in1d(res.ravel(), sort).reshape(res.shape)
loc = np.where(ix)
regArr = []
if method == cv2.TM_SQDIFF_NORMED:
color = (0, 0, 255)
thickness = 3
elif method == cv2.TM_CCOEFF_NORMED:
color = (0, 255, 0)
thickness = 2
elif method == cv2.TM_CCORR_NORMED:
color = (255, 0, 0)
thickness = 1
# return regArr
for pt in zip(*loc[::-1]):
val = res.item(pt[1], pt[0])
print("res %s %s " % (val, pt))
reg = Region(pt[0], pt[1], width, height)
regArr.append((reg, res.item(pt[1], pt[0])))
# return regArr
regRet = []
while regArr:
# Get first region from regArr
cr = regArr[0][0]
# Create array of regions which intersect with cr
intersect_reg = [r for r in regArr if cr.is_intersect(r[0])]
# Sort it by res value in descending order
intersect_reg.sort(key=itemgetter(1), reverse=True)
# Append to returned array region with highest res value
reg = intersect_reg[0][0]
regRet.append(reg)
# cv2.rectangle(image.img, (reg.x, reg.y),
# (reg.getRight(), reg.getBottom()),
# (0, 0, 255),
# 1)
            # Keep only the regions in regArr that do not intersect with cr
# reg = Region(pt[0], pt[1], width, height)
# cv2.rectangle(image.img, (reg.x, reg.y),
# (reg.getRight(), reg.getBottom()),
# color,
# thickness)
regArr = [r for r in regArr if r not in intersect_reg]
#
set_list.append(set(regRet))
    # Find the intersection of the region sets produced by the different
    # methods; a location reported by both is taken as a real match
regRet = set.intersection(*set_list)
for reg in regRet:
        cv2.rectangle(image, (reg.x, reg.y),
                      (reg.getRight(), reg.getBottom()),
                      (255, 255, 255),
                      4)
if not regRet:
return None
return regRet
class Image:
def __init__(self, name=None, find_in_func=find_single_grey):
self.img = None
self.img_buffer = None
self.name = name
self.pattern_finder = find_in_func
# print("Image:__init__: name %s pattern_finder %s", name, self.pattern_finder)
pass
@classmethod
def fromFile(cls, path, name=None, find_in_func=find_single_grey, method=cv2.IMREAD_COLOR):
image = Image(name=(name if name is not None else path), find_in_func=find_in_func)
image.img = cv2.imread(path, method)
return image
@classmethod
def fromArray(cls, arr):
image = Image('from array %s' % (id(arr)))
image.img = arr
return image
def set_pattern_finder(self, find_in_func):
self.pattern_finder = find_in_func
def crop(self, region):
arr = self.get_array()[region.getTop():region.getBottom(), region.getLeft():region.getRight()]
img = Image.fromArray(arr)
        img.set_name('cropped at top %s bottom %s left %s right %s of %s' % (
region.getTop(), region.getBottom(), region.getLeft(), region.getRight(), self.get_name()))
return img
def crop_copy(self, region):
arr = self.get_array()[region.getTop():region.getBottom(), region.getLeft():region.getRight()].copy()
img = Image.fromArray(arr)
        img.set_name('cropped at top %s bottom %s left %s right %s of %s' % (
region.getTop(), region.getBottom(), region.getLeft(), region.getRight(), self.get_name()))
return img
def show(self, time=0):
cv2.imshow(self.get_name(), self.img)
cv2.waitKey(time)
def get_array(self):
return self.img
def get_threshold(self, low, high, method=cv2.THRESH_BINARY):
ret, thresh1 = cv2.threshold(self.get_grey_array(), low, high, method)
return self.fromArray(thresh1)
def get_grey(self):
if self.is_grey():
return self
return self.fromArray(cv2.cvtColor(self.img, cv2.COLOR_BGR2GRAY))
def get_grey_array(self):
if self.is_grey():
return self.img
return cv2.cvtColor(self.img, cv2.COLOR_BGR2GRAY)
def get_canny_array(self):
return cv2.Canny(self.img, 100, 200)
def get_hsv_array(self):
return cv2.cvtColor(self.img, cv2.COLOR_BGR2HSV)
def get_width(self):
return self.img.shape[:2][1]
def get_height(self):
return self.img.shape[:2][0]
def getSize(self):
return self.img.shape[:2]
def is_grey(self):
return len(self.img.shape) == 2
def resize(self, w, h):
if (self.get_height() == h and self.get_width() == w):
return
w_c = float(w) / self.get_width()
h_c = float(h) / self.get_height()
if w_c < 1 or h_c < 1:
method = cv2.INTER_AREA
else:
method = cv2.INTER_LINEAR
self.img = cv2.resize(self.img, None, fx=w_c, fy=h_c, interpolation=method)
def get_resized_copy(self, w=None, h=None):
if w is None and h is None:
            raise AttributeError("Image:get_resized_copy: width and height can't both be None")
        if self.get_height() == h and self.get_width() == w:
            # Same size: return an unchanged copy
            return Image.fromArray(self.img.copy())
if w:
w_c = float(w) / self.get_width()
else:
w_c = float(h) / self.get_height()
if h:
h_c = float(h) / self.get_height()
else:
h_c = float(w) / self.get_width()
if w_c < 1 or h_c < 1:
method = cv2.INTER_AREA
else:
method = cv2.INTER_LINEAR
return Image.fromArray(cv2.resize(self.img, None, fx=w_c, fy=h_c, interpolation=method))
    # NOTE: this definition shadows resize(w, h) above; only this scale-based
    # version is active at runtime.
    def resize(self, scale):
if scale == 1:
return
elif scale < 1:
method = cv2.INTER_AREA
elif scale > 1:
method = cv2.INTER_LINEAR
self.img = cv2.resize(self.img, None, fx=scale, fy=scale, interpolation=method)
def get_name(self):
return self.name
def set_name(self, name=None):
if not name:
            self.name = 'image id:%s' % id(self)
return
self.name = name
def cvtColor(self, method=cv2.COLOR_BGR2GRAY):
self.img = cv2.cvtColor(self.img, method)
def find_pattern_from_list(self, pat_list, cache=False):
reg = None
for pat in pat_list:
reg = self.find_pattern(pat)
if reg:
break
return reg
    def find_pattern(self, pattern, all=False):
        # Search for occurrences of the pattern in this image; with all=True
        # every match is returned, otherwise only the best one
        return pattern.pattern_finder(self, pattern, all=all)
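# Example usage of the Image wrapper (illustrative only; file names are
# placeholders):
#
#   screen = Image.fromFile('screen.png')
#   fish = Image.fromFile('patterns/main/fish.png', name='fish',
#                         find_in_func=find_single_grey)
#   regions = screen.find_pattern(fish)   # list of Region hits, or None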
class App(object):
def __init__(self, name='Clicker Heroes', width: int = None):
print("init App")
FindWindow.argtypes = [LPCWSTR, LPCWSTR]
FindWindow.restype = HWND
self.name = name
self.window = Window(FindWindow(None, name))
if width:
self.window.resizeCliPropW(width)
# FindWindow.argtypes = [ctypes.c_wchar_p,ctypes.c_wchar_p]
# FindWindow.restype = ctypes.c_void_p
def getWindow(self):
return self.window
class SingletonMetaClass(type):
def __init__(cls, name, bases, dict):
super(SingletonMetaClass, cls) \
.__init__(name, bases, dict)
original_new = cls.__new__
def my_new(cls, *args, **kwds):
            if cls.instance is None:
cls.instance = \
original_new(cls, *args, **kwds)
return cls.instance
cls.instance = None
cls.__new__ = staticmethod(my_new)
class Singleton(type):
instance = None
def __call__(cls, *args, **kw):
if not cls.instance:
cls.instance = super(Singleton, cls).__call__(*args, **kw)
return cls.instance
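# Demonstration of the Singleton metaclass (illustrative only): repeated
# construction yields the same object.
#
#   class Config(metaclass=Singleton):
#       pass
#
#   assert Config() is Config()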
class MouseClick:
def __init__(self, window, x, y):
self.hwnd = window.hwnd
self.x = x
self.y = y
def apply(self):
self.click(self.x, self.y)
def click(self, x, y, park=True, cps=30):
x = int(x)
y = int(y)
self.last_click_location = (x, y)
tmp = (y << 16) | x
delay = 1 / cps
if park:
delay /= 2
err = 0
err += SendMessage(self.hwnd, WM_LBUTTONDOWN, 0, tmp)
time.sleep(delay)
err += SendMessage(self.hwnd, WM_LBUTTONUP, 0, tmp)
if park:
x = 1
y = 1
tmp = (y << 16) | x
time.sleep(delay)
err += SendMessage(self.hwnd, WM_MOUSEMOVE, 0, tmp)
if err > 0:
return None
return True
    def move(self, x, y, park=True, cps=30):
        # Unlike click(), this only posts WM_MOUSEMOVE messages
        x = int(x)
        y = int(y)
        tmp = (y << 16) | x
        delay = 1 / cps
        if park:
            delay /= 2
        err = 0
        err += SendMessage(self.hwnd, WM_MOUSEMOVE, 0, tmp)
        if park:
            x = 1
            y = 1
            tmp = (y << 16) | x
            time.sleep(delay)
            err += SendMessage(self.hwnd, WM_MOUSEMOVE, 0, tmp)
        if err > 0:
            return None
        return True
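# Helper sketch: Win32 mouse messages pack client coordinates into lParam as
# MAKELPARAM(x, y) -- low word x, high word y -- which is the (y << 16) | x
# expression used throughout MouseClick and MouseScroll above.
def make_lparam(x, y):
    return ((int(y) & 0xFFFF) << 16) | (int(x) & 0xFFFF)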
class MouseScroll:
def __init__(self, window, direction):
self.hwnd = window.hwnd
self.direction = direction
def apply(self):
self.scroll(direction=self.direction)
def scroll(self, direction, x=1, y=1, park=True, cps=30):
tmp = (y << 16) | x
delay = 1 / cps
if park:
delay /= 2
err = 0
err += SendMessage(self.hwnd, WM_MOUSEWHEEL,
(WHEEL_DELTA * direction) << 16, tmp)
time.sleep(delay)
if park:
x = 1
y = 1
tmp = (y << 16) | x
time.sleep(delay)
err += SendMessage(self.hwnd, WM_MOUSEMOVE, 0, tmp)
if err > 0:
return None
return True
class ClickerHeroes(metaclass=Singleton):
# class ClickerHeroes(App):
# __metaclass__ = Singleton
def __init__(self, lock, width: int = None) -> None:
if DEBUG:
print("init ClickerHeroes")
name = 'Clicker Heroes'
self.name = name
self.window = Window(FindWindow(None, name), lock)
if width:
self.window.resizeCliPropW(width)
self.lock = lock
self.fish_time = -1000000
self.newyear = -1000000
self.farm_mode_start_time = -1000000
self.ascend_time = 0
self.ascend_checker_time = 0
self.got_heroes_souls = False
self.relic_ooze_collected = False
self.reindex_heroes_list_time = 0
self.patterns = {}
self.menus = {}
self.hero_patterns_location_cache = {}
self.patterns_location_cache = {}
self.patterns_cache = {}
self.click_monster_location = None
self.starter_clicks = True
self.lvlup_all_heroes_time = 0
self.boss_time = None
self.boss_check_time = 0
self.levels_region = None
self.levels_region_scrshot = None
self.progress_button_time = -1000000
self.farm_mode_start_time = -1000000
self.cache_state = False
self.reindex_heroes_list_time = -1000000
self.skills_upgrades_time = 0
width, height = self.window.get_size()
scale = 1
if width > height * 16.0 / 9:
scale = height / (1600.0 * 9 / 16)
if height > width * 9.0 / 16:
scale = width / 1600.0
self.script_path = os.path.realpath(__file__)
self.script_dir = os.path.dirname(self.script_path)
self.stats_dir = os.path.join(self.script_dir, STATS_DIR)
self.patterns_path = os.path.join(self.script_dir, PATTERNS_DIR)
self.load_patterns(self.patterns_path, self.patterns, scale)
self.hero_patterns_location_cache = {}
for menu_name in ('heroes', 'ancients'):
self.menus[menu_name] = {}
# self.menus[menu_name]['sorted_heroes_list'] = self.load_sorted_heroes_list(menu_name)
self.menus[menu_name]['sorted_heroes_list'] = self.load_container(menu_name, "sorted_heroes_list", [])
self.menus[menu_name]['sb_min_position'] = None
self.menus[menu_name]['sb_max_position'] = None
self.menus[menu_name]['sb_position'] = 0
self.menus[menu_name]['last_available_hero'] = None
self.menus[menu_name]['max_seen_hero'] = None
self.menus[menu_name]['heroes_list'] = None
self.menus[menu_name]['visible_heroes_cache'] = None
# self.menus[menu_name]['hero_level'] = self.load_heroes_levels(menu_name)
self.menus[menu_name]['hero_level'] = self.load_container(menu_name, "hero_level", {})
self.menus[menu_name]['heroes_upgraded_list'] = self.load_container(menu_name,
"heroes_upgraded_list",
[])
self.menus[menu_name]['last_ascend_seen_heroes'] = set()
self.window.makeScreenshotClientAreaRegion()
# self.set_monster_click_location()
def do(self):
self.screenShot = self.window.getScreenshot()
        # lvlup_top_heroes requires a menu name; the heroes tab is assumed here
        self.lvlup_top_heroes('heroes')
# self.buyHeroesUpgrade()
self.lvl_progress()
# if self.ascensionNeed():
# self.ascend()
# self.lvlUpAncient()
# if self.transcendNeed():
# self.trascend()
# self.lvlUpOutsiders()
    def get_sorted_heroes_list(self, menu_name):
        return self.menus[menu_name]['sorted_heroes_list']
    # Load the image pattern tree from disk into the given patterns dict,
    # choosing a specialized finder function per pattern name
    def load_patterns(self, path, patterns, scale):
for root, dirs, files in os.walk(path):
for fn in files:
name, ext = os.path.splitext(os.path.basename(fn))
nm = root[root.find(path) + len(path):]
                # Normalize the OS path separator to '/'
nm = re.sub(re.escape(os.sep), '/', os.path.join(nm, name))
if 'lvlup_' in nm:
find_in_func = find_lvlup
elif 'button_progression' in nm:
find_in_func = find_single_grey_90
# elif '_c' in nm and 'heroes_skills' in nm:
elif all(x in nm for x in ['_c', 'heroes_skills']):
find_in_func = find_checked_skills
# find_in_func = find_pattern_hist
else:
find_in_func = find_single_grey
img = Image.fromFile(path=os.path.join(root, fn), name=name, find_in_func=find_in_func)
img.resize(scale)
                patterns[nm] = img
def find_pattern(self, pat):
return self.find_pattern_cached(pat)
def find_pattern_from_list(self, pat_list, cache=True, all=False):
regions = []
for pat in pat_list:
if cache:
reg = self.find_pattern_cached(pat, all=all)
else:
reg = self.window.getScreenshot().find_pattern(pat, all=all)
if reg:
regions.extend(reg)
if not all:
break
return regions
def find_pattern_reg_name(self, pat_list):
reg = None
reg_name = []
for pat in pat_list:
regs = self.find_pattern(pat)
if regs:
for r in regs:
reg_name.append((r, pat.get_name()))
return reg_name
def find_pattern_reg_name_single(self, reg, pat_list):
reg_name = None
for pat in pat_list:
regs = self.window.getScreenshot(reg).find_pattern(pat)
if regs:
regs = regs[0]
reg_name = (regs, pat.get_name())
break
# if not reg_name:
# return None
return reg_name
def find_pattern_cached(self, pat, all=False):
# pat_id = id(pat)
pat_id = pat.get_name()
if pat_id not in self.patterns_location_cache.keys():
self.patterns_location_cache[pat_id] = {}
# pattern location cache
plc = self.patterns_location_cache[pat_id]
regions = []
if plc:
# print("find_pattern_cached: Pattern %s has %s entries in cache location" % (pat.get_name(), len(plc)))
# Quickly scan pattern location cache
cnt = 0
for cached_location in plc.keys():
reg = self.window.getScreenshot(cached_location).find_pattern(pat, all=False)
cnt += 1
                # If the cached location still shows the pattern, return it
if reg:
# print("find_pattern_cached: Cache hit!! Pattern %s" % (pat.get_name()))
plc[cached_location] += 1
if DEBUG and cnt > 1:
print("find_pattern_cached: Pattern %s : Cache hit on %s cache entry" % (pat_id, cnt))
regions.append(cached_location)
break
        # If no cached location matched, scan the whole screen and cache the result
if not regions:
# Scan the whole screen
# print("find_pattern_cached: Cache missed!! Searching for %s " % (pat.get_name()))
reg = self.window.getScreenshot().find_pattern(pat, all=all)
# print("find_pattern_cached: Found reg %s " % (reg))
            # If found, add the new locations to the cache with a hit count of 1
if reg:
# hit_count = [1] * len(reg)
# cache_entry = zip(reg, hit_count)
plc = self.patterns_location_cache[pat_id]
plc.update(dict.fromkeys(reg, 1))
regions.extend(reg)
else:
# Nothing found in cache and on screen
return None
if plc:
if len(plc) != 1:
# Sort by cache hit count
plc = OrderedDict(sorted(plc.items(), key=lambda t: t[1], reverse=True))
self.patterns_location_cache[pat_id] = plc
# print(plc)
return regions
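    # Cache behaviour sketch: the first find_pattern_cached(pat) call scans
    # the whole screenshot and stores every hit region with a hit counter;
    # later calls re-check only those cached regions, and the OrderedDict
    # re-sort above keeps the most frequently hit region first, so the quick
    # scan usually succeeds on its first iteration.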
def scroll_to_last_available_hero(self, menu_name):
self.scroll_to_position(menu_name, self.menus[menu_name]['sorted_heroes_list'])
return
def get_prev_hero_name(self, menu_name, hero_name):
if hero_name is None:
return None
unsorted_heroes_list = self.get_unsorted_hero_list(menu_name)
sorted_heroes_list = self.get_sorted_heroes_list(menu_name)
# Previous hero index
try:
if sorted_heroes_list:
hindex = sorted_heroes_list.index(hero_name) - 1
if hindex >= 0:
                    # The previous hero name can be determined unambiguously
ret_hlist = [sorted_heroes_list[hindex]]
else:
                    # sorted_heroes_list does not contain the previous hero, so
                    # return every candidate that could precede hero_name, i.e.
                    # all heroes except hero_name itself
# ret_hlist = [name for name in unsorted_heroes_list if name not in sorted_heroes_list and name != hero_name]
ret_hlist = [name for name in unsorted_heroes_list if name != hero_name]
else:
                # The sorted list is empty, so return all heroes except hero_name
ret_hlist = [name for name in unsorted_heroes_list if name != hero_name]
except ValueError as e:
ret_hlist = None
return ret_hlist
def get_next_hero_name(self, menu_name, hero_name):
if hero_name is None:
return None
unsorted_heroes_list = self.get_unsorted_hero_list(menu_name)
sorted_heroes_list = self.get_sorted_heroes_list(menu_name)
ret_hlist = None
# Next hero index
try:
if sorted_heroes_list:
hindex = sorted_heroes_list.index(hero_name) + 1
if hindex >= len(sorted_heroes_list):
                    # sorted_heroes_list does not yet contain the next hero, so
                    # return every candidate that could follow hero_name, i.e.
                    # all heroes except hero_name itself
# ret_hlist = [name for name in unsorted_heroes_list if name not in sorted_heroes_list and name != hero_name]
ret_hlist = [name for name in unsorted_heroes_list if name != hero_name]
else:
                    # The next hero name can be determined unambiguously
ret_hlist = [sorted_heroes_list[hindex]]
else:
                # The sorted list is empty, so return all heroes except hero_name
ret_hlist = [name for name in unsorted_heroes_list if name != hero_name]
except ValueError as e:
ret_hlist = None
return ret_hlist
def get_max_seen_hero(self, menu_name):
return self.menus[menu_name]['max_seen_hero']
def set_max_seen_hero(self, menu_name, hero_name):
self.menus[menu_name]['max_seen_hero'] = hero_name
def lvlup_all_heroes(self, menu_name, max_level=200, timer=180):
self.window.makeScreenshotClientAreaRegion()
curr_time = time.clock()
if curr_time - self.lvlup_all_heroes_time < timer:
return None
self.lvlup_all_heroes_time = curr_time
sorted_hero_list = self.get_sorted_heroes_list(menu_name)
if sorted_hero_list is None:
return None
last_available_hero = self.get_last_available_hero(menu_name)
if last_available_hero:
last_available_hero_index = sorted_hero_list.index(last_available_hero)
else:
return None
heroes_upgraded_list = self.menus[menu_name]['heroes_upgraded_list']
heroes_to_lvlup = [hero_name for hero_name in sorted_hero_list if
self.get_hero_level(menu_name, hero_name) < max_level
and sorted_hero_list.index(hero_name) <= last_available_hero_index
and hero_name not in heroes_upgraded_list]
for hero_name in heroes_to_lvlup:
self.lvlup_hero(menu_name, hero_name, max_level=max_level)
return True
    ### Buy hero skills except the ascension skill (commented-out draft below)
# hero_reg = self.scroll_to_hero(menu_name, hero_name)
# hero_reg_scr = self.window.makeScreenshotClientAreaRegion(hero_reg)
# skills_reg = hero_reg_scr.find_pattern_from_list(self.get_pattern('heroes_skills', '%s_c' % hero_name))
# if skills_reg:
# continue
#
# if hero_name == 'amenhotep':
# ascend_skill_reg = hero_reg_scr.find_pattern_from_list(
# self.get_pattern('heroes_skills', 'amenhotep_ascend'),
# cache=False)
# if ascend_skill_reg:
# ascend_skill_reg = ascend_skill_reg[0]
# else:
# continue
# else:
# ascend_skill_reg = None
# button_edge_reg = hero_reg_scr.find_pattern_from_list(self.get_pattern('heroes_button', 'edge_'),
# cache=False)
# if button_edge_reg is None:
# continue
# button_edge_reg = button_edge_reg[0]
# hero_name_reg = hero_reg_scr.find_pattern_from_list(self.get_pattern(menu_name, hero_name))
# if hero_name_reg is None:
# continue
# hero_name_reg = hero_name_reg[0]
# skills_reg_left_x, skills_reg_left_y = button_edge_reg.center().get_xy()
# skills_reg_right_x = hero_name_reg.getRight()
# y = hero_reg.getTop() + skills_reg_left_y
# for i in range(100):
# x = hero_reg.getLeft() + skills_reg_left_x + int(
# random.random() * (skills_reg_right_x - skills_reg_left_x))
# if ascend_skill_reg and ascend_skill_reg.contains((x - hero_reg.getLeft(), y - hero_reg.getTop())):
# continue
# hero_reg_scr = self.window.makeScreenshotClientAreaRegion(hero_reg)
# cv2.imshow("hero_reg_scr", hero_reg_scr.get_array())
# cv2.waitKey(50)
# # skills_reg = hero_reg_scr.find_pattern_from_list(self.get_pattern('heroes_skills', '%s_c' % hero_name))
# # if skills_reg:
# # break
# self.window.click(x, y, cps=5)
def lvlup_top_heroes(self, menu_name, dist=0):
self.window.makeScreenshotClientAreaRegion()
img = self.window.getScreenshot().get_resized_copy(w=300).get_array()
cv2.imshow('lvlup_top_heroes:img', img)
cv2.waitKey(50)
hero_name = self.get_last_available_hero(menu_name)
if hero_name is None:
return None
i = 0
while i <= dist:
if hero_name:
res = self.lvlup_hero(menu_name, hero_name)
hero_lst = self.get_prev_hero_name(menu_name, hero_name)
if hero_lst:
hero_name = hero_lst[0]
else:
break
i += 1
def set_last_available_hero(self, menu_name, hero_name):
self.menus[menu_name]['last_available_hero'] = hero_name
def get_last_available_hero(self, menu_name):
        hol = self.get_sorted_heroes_list(menu_name)
        if hol is None:
            return None
lah = None
# pah = None
lahp = self.menus[menu_name]['last_available_hero']
if lahp:
# Check that lahp is the last hero in the list
if lahp == hol[-1]:
return lahp
else:
next_lah_index = hol.index(lahp) + 1
# pah = self.get_next_hero_name(menu_name, lahp)
else:
next_lah_index = 0
max_seen_hero = self.get_max_seen_hero(menu_name)
if not max_seen_hero:
return None
max_seen_hero_index = hol.index(max_seen_hero) + 1
if max_seen_hero_index >= next_lah_index:
to_check_heroes = hol[next_lah_index:max_seen_hero_index]
for h in reversed(to_check_heroes):
reg = self.scroll_to_hero(menu_name, h)
if reg is None:
return None
if self.is_hero_lvlup_button_active(reg):
lah = h
cv2.imshow("get_last_available_hero:lah_reg", self.window.getScreenshot(reg).get_array())
cv2.waitKey(50)
break
if lah:
lah_index = hol.index(lah)
else:
lah_index = 0
if lah_index >= next_lah_index:
self.set_last_available_hero(menu_name, lah)
return self.menus[menu_name]['last_available_hero']
def find_hero_lvlup_button(self, hero_reg):
if hero_reg is None:
return None
cv2.imshow("find_hero_lvlup_button:hero_reg", self.window.getScreenshot(hero_reg).get_array())
cv2.waitKey(50)
return self.find_lvlup_button(hero_reg)
def find_hero_level_reg(self, reg):
if reg is None:
return None
level_mark_patterns = self.get_pattern('heroes_button', 'level_mark')
reg_name = self.find_pattern_reg_name_single(reg, level_mark_patterns)
if reg_name is None:
return None
# Absolute region of level mark
level_mark_reg = reg_name[0]
loc1 = Location(reg.getLeft() + level_mark_reg.getLeft(), reg.getTop() + level_mark_reg.getTop())
loc2 = Location(reg.getRight(), reg.getTop() + level_mark_reg.getBottom())
level_mark_reg = Region.from2Location(loc1, loc2)
return level_mark_reg
###################################
# def find_hero_region(self, menu_name, hero_name):
# reg = self.scroll_to_hero(menu_name, hero_name)
# return reg
###################################
def find_lvlup_button(self, reg):
button_patterns = self.get_pattern('heroes_button', 'lvlup_')
reg_name = self.find_pattern_reg_name_single(reg, button_patterns)
if reg_name is None:
return None
# Absolute region
reg = reg_name[0] + (reg.x, reg.y)
cv2.imshow("find_lvlup_button:reg_name[0]", self.window.getScreenshot(reg).get_array())
cv2.waitKey(50)
butt_reg_name = (reg, reg_name[1])
if 'hire_inactive' in butt_reg_name[1]:
# if all(x in butt_reg_name[1] for x in ['inactive', 'hire']):
status = False
else:
status = True
return (butt_reg_name[0], status)
def find_level_reg(self, reg):
return
def is_hero_lvlup_button_active(self, hero_reg):
self.window.makeScreenshotClientAreaRegion()
lvl_button = self.find_hero_lvlup_button(hero_reg)
if lvl_button is None:
return None
status = lvl_button[1]
return status
def get_hero_level(self, menu_name, hero_name):
hero_level_dict = self.menus[menu_name]['hero_level']
if hero_name not in hero_level_dict.keys():
hero_level_dict[hero_name] = 0
return hero_level_dict[hero_name]
def set_hero_level(self, menu_name, hero_name, level):
hero_level_dict = self.menus[menu_name]['hero_level']
if hero_name not in hero_level_dict.keys():
hero_level_dict[hero_name] = 0
hero_level_dict[hero_name] = level
def add_hero_level(self, menu_name, hero_name, level):
hero_level_dict = self.menus[menu_name]['hero_level']
if hero_name not in hero_level_dict.keys():
hero_level_dict[hero_name] = 0
hero_level_dict[hero_name] += level
def get_heroes_level_dict(self, menu_name):
return self.menus[menu_name]['hero_level']
# def save_heroes_levels(self, menu_name, hld):
# try:
# hld_filename = STATS_DIR + '/%s_heroes_levels.dat' % menu_name
# with tempfile.NamedTemporaryFile(mode='w+t', delete=False, dir=STATS_DIR) as temp_file:
# json.dump(hld, temp_file)
# if os.path.isfile(hld_filename):
# shutil.copy(hld_filename, hld_filename + '.bck')
#
# os.replace(temp_file.name, hld_filename)
# except OSError:
# raise
def save_sorted_heroes_list(self, menu_name, shl):
try:
shl_filename = os.path.join(self.stats_dir, '%s_sorted_heroes_list.dat' % menu_name)
            with tempfile.NamedTemporaryFile(mode='w+t', delete=False, dir=self.stats_dir) as temp_file:
json.dump(shl, temp_file)
if os.path.isfile(shl_filename):
shutil.copy(shl_filename, shl_filename + '.bck')
os.replace(temp_file.name, shl_filename)
except OSError:
raise
def load_sorted_heroes_list(self, menu_name):
try:
fn = STATS_DIR + '/%s_sorted_heroes_list.dat' % menu_name
with open(fn, 'r') as f:
return json.load(f)
except FileNotFoundError:
return []
def save_container(self, menu_name, container_name, container):
try:
shl_filename = os.path.join(self.stats_dir, '%s_%s' % (menu_name, container_name))
with tempfile.NamedTemporaryFile(mode='w+t', delete=False, dir=self.stats_dir) as temp_file:
json.dump(container, temp_file)
if os.path.isfile(shl_filename):
shutil.copy(shl_filename, shl_filename + '.bck')
os.replace(temp_file.name, shl_filename)
except OSError:
raise
def load_container(self, menu_name, container_name, default_container):
try:
fn = os.path.join(self.stats_dir, '%s_%s' % (menu_name, container_name))
# fn = STATS_DIR + '/%s_%s' % (menu_name, container_name)
with open(fn, 'r') as f:
return json.load(f)
except FileNotFoundError:
return default_container
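    # Illustrative round trip (not called by the bot itself; the hero name
    # and level are placeholders):
    #
    #   self.save_container('heroes', 'hero_level', {'treebeast': 120})
    #   levels = self.load_container('heroes', 'hero_level', {})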
def save_heroes_levels(self, menu_name, hld):
try:
hld_filename = STATS_DIR + '/%s_heroes_levels.dat' % menu_name
with tempfile.NamedTemporaryFile(mode='w+t', delete=False, dir=STATS_DIR) as temp_file:
json.dump(hld, temp_file)
if os.path.isfile(hld_filename):
shutil.copy(hld_filename, hld_filename + '.bck')
os.replace(temp_file.name, hld_filename)
except OSError:
raise
def load_heroes_levels(self, menu_name):
try:
fn = STATS_DIR + '/%s_heroes_levels.dat' % menu_name
with open(fn, 'r') as f:
return json.load(f)
except FileNotFoundError:
return {}
def lvlup_hero(self, menu_name, hero_name, lvl_count=None, max_level=None):
self.open_menu(menu_name)
hero_reg = self.scroll_to_hero(menu_name, hero_name=hero_name)
# hero_reg_scr= self.window.makeScreenshotClientAreaRegion(hero_reg)
# button_edge_reg=hero_reg_scr.find_pattern(self.get_pattern('heroes_button','edge_'))
# skills_reg_left_x,skills_reg_left_y=button_edge_reg.center().get_xy()
# hero_name_reg=hero_reg_scr.find_pattern(self.get_pattern(menu_name,hero_name))
# skills_reg_right_x=hero_name_reg.getRight()
# for i in range(100):
# x=skills_reg_left_x+int(random.random()*(skills_reg_right_x-skills_reg_left_x))
# y=skills_reg_left_y
# self.window.click(hero_reg.getRight()+x,hero_reg.getTop()+y)
if hero_reg is None:
return None
button = self.find_hero_lvlup_button(hero_reg)
if button is None:
return None
hero_level = self.get_hero_level(menu_name, hero_name)
levelup_button = button[0]
if lvl_count is None:
lvl_count = 1000 * 1000 * 1000
if max_level:
lvl_count = max_level - hero_level
time_1 = time.clock()
start_time = time.clock()
cnt = 0
# hold_key = 'shift'
# if max_level is None:
# hold_key = 'q'
# self.window.pressAndHoldKey(hold_key)
while True:
# time.sleep(0.2)
# if menu_name == 'heroes':
# For speed make screenshot of lvlup button area
time_chk = time.clock()
# delay = 0.01
# max = 0
# while max == 0 and delay<=1: # time.clock() - time_chk < 0.3:
# scrshot = self.window.makeScreenshotClientAreaRegion(reg)
# # Quick and dirty check for active button
# max = scrshot.get_threshold(128, 255).get_array().max()
#
# time.sleep(delay)
# delay *= 2
# reg, status = self.find_hero_lvlup_button(menu_name, hero_name)
self.window.makeScreenshotClientAreaRegion()
scr_levelup = self.window.makeScreenshotClientAreaRegion(levelup_button)
max = scr_levelup.get_threshold(128, 255).get_array().max()
if max == 0:
break
level_reg = self.find_hero_level_reg(hero_reg)
if level_reg is None:
check_reg = levelup_button
pattern_finder = find_lvlup
else:
check_reg = level_reg
pattern_finder = find_level
scr_level_before = self.window.makeScreenshotClientAreaRegion(check_reg) # .get_threshold(235,255)
self.click_region(levelup_button)
delay = 0.01
total_delay = 0
# Wait a little after click applied
while total_delay <= 1: # time.clock() - time_chk < 0.3:
time.sleep(delay)
total_delay += delay
scr_level_after = self.window.makeScreenshotClientAreaRegion(check_reg) # .get_threshold(235,255)
scr_level_after.set_pattern_finder(pattern_finder)
cv2.imshow('scr_level_before', scr_level_before.get_array())
cv2.imshow('scr_level_after', scr_level_after.get_array())
cv2.waitKey(25)
# comp=cv2.compare(scr_level_before.get_array(),scr_level_after.get_array(),cv2.CMP_EQ)
# comp = cv2.bitwise_xor(scr_level_before.get_array(), scr_level_after.get_array())
# if comp.min()==0:
if not scr_level_before.find_pattern(scr_level_after):
break
delay *= 2
if total_delay > 1 and check_reg == level_reg:
break
# cnt += 10
cnt += 1
if time.clock() - start_time > 120 or cnt >= lvl_count:
break
# self.window.releaseKey(hold_key)
# self.window.move_mouse(y=10)
time.sleep(0.5)
time_2 = time.clock()
# self.menus[menu_name][hero_name]['lvl'] += 1
self.add_hero_level(menu_name, hero_name, cnt)
if cnt == 0:
return None
if DEBUG:
print("lvlup_hero:lvl/sec=%s" % (cnt / (time_2 - time_1)))
# self.save_heroes_levels(menu_name, self.get_heroes_level_dict(menu_name))
self.save_container(menu_name, 'hero_level', self.menus[menu_name]['hero_level'])
return cnt
def click_location(self, loc, refresh=False):
# x, y = loc.get_xy()
# mouse_event = MouseClick(self.window, x, y)
# # self.mouse_event_queue.put((mouse_event, self.mp_event))
# l=list()
# # self.mouse_event_queue.put(mouse_event)
# self.mouse_event_queue.put(l)
# # self.mouse_event_queue.put('123123123')
# self.mp_event.wait()
return self.window.click_location(loc, refresh=refresh)
def click_region(self, reg, refresh=False):
# x, y = reg.center().get_xy()
# mouse_event = MouseClick(self.window, x, y)
# self.mouse_event_queue.put((mouse_event, self.mp_event))
# self.mp_event.wait()
ret = self.window.click_region(reg, refresh=refresh)
return ret
def scrollDownMenu(self, name):
self.menus[name]['scrollPosition'] += 1
scrPos = self.menus[name]['scrollPosition']
if scrPos > self.menus[name]['maxScrollPosition']:
self.menus[name]['maxScrollPosition'] = scrPos
def get_pattern_old(self, menu_name, pattern_name):
if pattern_name not in self.patterns[menu_name].keys():
return None
return self.patterns[menu_name][pattern_name]
def get_pattern(self, menu_name, pattern_name):
path = '/%s/%s' % (menu_name, pattern_name)
if path in self.patterns_cache.keys():
patterns_list = self.patterns_cache[path]
else:
patterns_list = [self.patterns[key] for key in self.patterns.keys() if key.startswith(path)]
self.patterns_cache[path] = patterns_list
return patterns_list
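    # Pattern keys are '/'-joined paths relative to the patterns directory, so
    # get_pattern('heroes_button', 'lvlup_') returns every loaded image whose
    # key starts with '/heroes_button/lvlup_' (e.g. an active and an inactive
    # button variant, assuming both files exist on disk).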
def find_hero_location_old(self, menu_name, hero_name):
if hero_name not in self.hero_patterns_location_cache[menu_name]:
self.hero_patterns_location_cache[menu_name][hero_name] = []
# hero pattern location cache
hplc = self.hero_patterns_location_cache[menu_name][hero_name]
for cached_location in hplc:
if cached_location is None:
break
pat = self.get_pattern(menu_name, hero_name)
for img in pat:
if self.window.getScreenshot(cached_location).find_pattern(img) is not None:
return cached_location
location = None
for pat in self.get_pattern(menu_name, hero_name):
location = self.window.getScreenshot().find_pattern(pat)
if location not in hplc and location is not None:
hplc.append(location)
break
return location
def find_hero_region(self, menu_name, hero_name):
pat = self.get_pattern(menu_name, hero_name)
return self.find_pattern_from_list(pat)
def get_last_hero(self, menu_name):
return self.get_sorted_heroes_list(menu_name)[-1]
def get_hero_list(self, menu_name):
path = '/%s/' % (menu_name)
#
hl = self.menus[menu_name]['heroes_list']
if hl is None:
hl = set(
[key.rpartition('/')[2].rpartition('_')[0] for key in self.patterns.keys() if key.startswith(path)])
self.menus[menu_name]['heroes_list'] = hl
return hl
def get_unsorted_hero_list(self, menu_name):
hero_list = self.get_hero_list(menu_name)
sorted_heroes_list = self.get_sorted_heroes_list(menu_name)
return [name for name in hero_list if name not in sorted_heroes_list]
def get_visible_heroes_cached_old(self, menu_name):
vhc = self.menus[menu_name]['visible_heroes_cache']
sp = self.get_scroll_pos(menu_name)
if sp not in vhc.keys():
return None
return vhc[sp]
def cache_visible_heroes_old(self, menu_name: str, hero_name_list):
vhc = self.menus[menu_name]['visible_heroes_cache']
sp = self.get_scroll_pos(menu_name)
if sp not in vhc.keys():
vhc[sp] = set()
# vhc[sp].update(hero_name_list)
vhc[sp] = hero_name_list
def get_visible_heroes_cached(self, menu_name):
vhc = self.menus[menu_name]['visible_heroes_cache']
return vhc
def cache_visible_heroes(self, menu_name: str, hero_name_list):
self.menus[menu_name]['visible_heroes_cache'] = hero_name_list
self.validate_cache_state()
def get_visible_heroes_old(self, menu_name):
self.open_menu(menu_name)
visible_heroes = []
hl = self.get_visible_heroes_cached(menu_name)
loop = 1
while not visible_heroes:
if not hl or loop > 1:
hl = self.get_hero_list(menu_name)
if DEBUG:
print("get_visible_heroes: visible_heroes_cache missed")
for name in hl:
reg = self.find_hero_region(menu_name, name)
if reg:
visible_heroes.append((name, reg[0]))
loop += 1
visible_heroes_names = list(zip(*visible_heroes))[0]
self.cache_visible_heroes(menu_name, visible_heroes_names)
# Sort visible heroes list by y position
return sorted(visible_heroes, key=lambda x: x[1].y)
def get_last_ascend_seen_heroes(self, menu_name):
#
#
# shl=self.get_sorted_heroes_list(menu_name)
# if not shl:
# return None
#
# hl=self.get_hero_list(menu_name)
# if not hl:
# return None
# msh=self.get_max_seen_hero(menu_name)
# if not msh:
# return None
#
        # mshi = hl.index(msh)
# return shl[:hl.index(self.get_max_seen_hero(menu_name))]
return self.menus[menu_name]['last_ascend_seen_heroes']
def add_last_ascend_seen_heroes(self, menu_name, hero_name):
self.menus[menu_name]['last_ascend_seen_heroes'].update(hero_name)
def get_visible_heroes(self, menu_name, number_of_vh=MAX_NUMBER_OF_VISIBLE_HEROES):
self.open_menu(menu_name)
visible_heroes = []
hl = self.get_hero_list(menu_name)
hlc = self.get_visible_heroes_cached(menu_name)
hol = self.get_sorted_heroes_list(menu_name)
check_remain_heroes = True
cache_state = self.get_cache_state()
if hlc:
# if self.cache_state_is_valid():
# return hlc
# Get hero name list from cache
hero_name_cached = list(zip(*hlc))[0]
for name in hero_name_cached:
reg = self.find_hero_region(menu_name, name)
if reg:
visible_heroes.append((name, reg[0]))
if len(visible_heroes) >= number_of_vh:
check_remain_heroes = False
break
visible_heroes = sorted(visible_heroes, key=lambda x: x[1].y)
if visible_heroes and check_remain_heroes:
top_hero_name = visible_heroes[0][0]
bottom_hero_name = visible_heroes[-1][0]
for dir in [(top_hero_name, self.get_prev_hero_name), (bottom_hero_name, self.get_next_hero_name)]:
name = dir[0]
func = dir[1]
can_change_edge = False
if check_remain_heroes:
while 1:
                        # name_list = self.get_prev_hero_name(menu_name, name)
                        name = func(menu_name, name)
                        if not name:
                            break
for n in name:
reg = self.find_hero_region(menu_name, n)
if reg:
visible_heroes.append((n, reg[0]))
else:
can_change_edge = True
if len(visible_heroes) >= number_of_vh:
check_remain_heroes = False
break
if len(name) > 1 or not check_remain_heroes:
break
                        if len(name) == 1 and can_change_edge:
break
if len(name) == 1:
name = name[0]
# name_list = self.get_prev_hero_name(menu_name, name)
# if name_list and check_remain_heroes:
# for name in name_list:
# reg = self.find_hero_region(menu_name, name)
# if reg:
# visible_heroes.append((name, reg[0]))
# if len(visible_heroes) >= number_of_vh:
# check_remain_heroes = False
# break
# name = bhn
# while 1:
# name_list = self.get_next_hero_name(menu_name, name)
# for name in name_list:
# reg = self.find_hero_region(menu_name, name)
# if reg:
# visible_heroes.append((name, reg[0]))
# if len(visible_heroes) >= number_of_vh:
# check_remain_heroes = False
# break
# if len(name_list)>1:
# break
if not visible_heroes:
name_list = hl
if name_list and check_remain_heroes:
for name in name_list:
reg = self.find_hero_region(menu_name, name)
if reg:
visible_heroes.append((name, reg[0]))
if len(visible_heroes) >= number_of_vh:
check_remain_heroes = False
break
visible_heroes = set(visible_heroes)
visible_heroes = sorted(visible_heroes, key=lambda x: x[1].y)
if visible_heroes:
visible_heroes_names = list(zip(*visible_heroes))[0]
self.add_last_ascend_seen_heroes(menu_name, visible_heroes_names)
self.cache_visible_heroes(menu_name, visible_heroes)
# Sort visible heroes list by y position
# self.add_last_ascend_seen_heroes( menu_name, visible_heroes_names)
return visible_heroes
def set_max_scroll_position(self, menu_name, pos):
self.menus[menu_name]['sb_max_position'] = pos
def set_min_scroll_position(self, menu_name, pos):
self.menus[menu_name]['sb_min_position'] = pos
def get_scroll_pos(self, menu_name):
sp = self.menus[menu_name]['sb_position']
# spx = self.get_scroll_max_pos(menu_name)
# spm = self.get_scroll_min_pos(menu_name)
# if spm and sp < spm:
# return spm
# if spx and sp > spx:
# return spx
return sp
def set_scroll_pos(self, menu_name, sp):
self.menus[menu_name]['sb_position'] = sp
def reindex_heroes_list(self, menu_name, reindex_timer=30):
self.window.makeScreenshotClientAreaRegion()
img = self.window.getScreenshot().get_resized_copy(w=300).get_array()
cv2.imshow('reindex_heroes_list:img', img)
cv2.waitKey(50)
curr_time = time.clock()
if curr_time - self.reindex_heroes_list_time < reindex_timer:
return False
self.reindex_heroes_list_time = curr_time
self.open_menu(menu_name)
if self.get_sorted_heroes_list(menu_name) is None:
dir_list = [WHEEL_UP, WHEEL_DOWN]
else:
dir_list = [WHEEL_DOWN]
# self.scroll_to_last(menu_name)
# dir_list = [WHEEL_UP, WHEEL_DOWN]
# Start scrolling to find location of heroes
for direction in dir_list:
visible_heroes = None
bug_scroll_heroes = None
while True:
# if direction == WHEEL_UP:
# op_dir=WHEEL_DOWN
# else:
# op_dir=WHEEL_UP
#
# self.scroll_menu(menu_name, op_dir)
# self.scroll_menu(menu_name, direction)
prev_vis_heroes = visible_heroes
visible_heroes = self.get_visible_heroes(menu_name)
if not visible_heroes:
return None
# if (visible_heroes and prev_vis_heroes):
# print("reindex_heroes_list: set==set %s" % (set(visible_heroes) == set(prev_vis_heroes)))
if (visible_heroes and prev_vis_heroes) and set(visible_heroes) == set(prev_vis_heroes):
if direction == WHEEL_DOWN:
self.scroll_menu(menu_name, WHEEL_UP)
self.scroll_menu(menu_name, WHEEL_DOWN)
bug_scroll_heroes = self.get_visible_heroes(menu_name)
                    if bug_scroll_heroes is None:
return None
if set(visible_heroes) != set(bug_scroll_heroes):
continue
if direction == WHEEL_DOWN:
self.set_scroll_pos(menu_name, self.get_scroll_pos(menu_name) - 1)
self.set_max_scroll_position(menu_name, self.get_scroll_pos(menu_name))
else:
self.set_scroll_pos(menu_name, self.get_scroll_pos(menu_name) + 1)
self.set_min_scroll_position(menu_name, self.get_scroll_pos(menu_name))
break
hol = self.menus[menu_name]['sorted_heroes_list']
visible_heroes_names = list(zip(*visible_heroes))[0]
if direction == WHEEL_UP:
                # Add heroes at the front of sorted_heroes_list
hol[0:0] = [item for item in visible_heroes_names if item not in hol]
else:
                # Add heroes at the end of sorted_heroes_list
hol.extend([item for item in visible_heroes_names if item not in hol])
local_max_seen_hero = visible_heroes_names[-1]
global_max_seen_hero = self.get_max_seen_hero(menu_name)
if global_max_seen_hero:
if hol.index(local_max_seen_hero) > hol.index(global_max_seen_hero):
self.set_max_seen_hero(menu_name, local_max_seen_hero)
else:
self.set_max_seen_hero(menu_name, local_max_seen_hero)
# Check if we need to scroll
# if self.find_pattern_from_list(self.get_pattern('main', 'scroll_up')):
self.scroll_menu(menu_name, direction)
# else:
# # Just make screenshot
# self.window.makeScreenshotClientAreaRegion()
self.invalidate_cache_state()
self.validate_cache_state()
# self.save_sorted_heroes_list(menu_name, shl=self.menus[menu_name]['sorted_heroes_list'])
self.save_container(menu_name, 'sorted_heroes_list', self.menus[menu_name]['sorted_heroes_list'])
return True
def get_hero_scroll_position(self, menu_name, hero_name):
return self.menus[menu_name]['sb_position'][hero_name]
def set_hero_scroll_position(self, menu_name, hero_name):
sbp = self.get_scroll_pos(menu_name)
        hsbp = self.get_hero_scroll_position(menu_name, hero_name)
if hsbp is None:
self.init_hero_scroll_position(menu_name, hero_name)
self.menus[menu_name][hero_name]['sb_position']
self.get_scroll_pos(menu_name)
def scroll_to_hero(self, menu_name, hero_name):
if hero_name is None:
return None
self.open_menu(menu_name)
sorted_heroes_list = self.get_sorted_heroes_list(menu_name)
if sorted_heroes_list is None:
return None
direction = None
while True:
visible_heroes = self.get_visible_heroes(menu_name)
if not visible_heroes:
return None
pass
hero_reg_dict = dict(visible_heroes)
visible_heroes_names = list(zip(*visible_heroes))[0]
top_vh = visible_heroes_names[0]
bottom_vh = visible_heroes_names[-1]
if direction == WHEEL_DOWN or direction is None:
                # Add heroes at the end of sorted_heroes_list
lst = [name for name in visible_heroes_names if name not in sorted_heroes_list]
sorted_heroes_list.extend(lst)
next_hero_name_list = self.get_next_hero_name(menu_name, hero_name)
if not next_hero_name_list:
return None
if set(next_hero_name_list).issubset(sorted_heroes_list) and hero_name != bottom_vh:
next_hero_name = next_hero_name_list[0]
                    # Both hero_name and next_hero_name must be visible so
                    # that the lvlup button sits between them
if sorted_heroes_list.index(next_hero_name) > sorted_heroes_list.index(bottom_vh):
direction = WHEEL_DOWN
elif sorted_heroes_list.index(hero_name) < sorted_heroes_list.index(top_vh):
direction = WHEEL_UP
if all(h in visible_heroes_names for h in (hero_name, next_hero_name)):
hero_name_reg = hero_reg_dict[hero_name]
next_hero_name_reg = hero_reg_dict[next_hero_name]
hero_reg_height = next_hero_name_reg.y - hero_name_reg.y
hero_reg = Region(0, hero_name_reg.y, hero_name_reg.getRight(), hero_reg_height)
break
else:
                # We may be at the end of the heroes list, so require the
                # lvlup button to be visible below the hero name
direction = WHEEL_DOWN
if hero_name in visible_heroes_names:
# button_patterns = self.get_pattern('heroes_button', 'lvlup_')
bottom_patterns = self.get_pattern('heroes_button', 'edge_')
hero_name_reg = hero_reg_dict[hero_name]
hero_reg_height = self.window.getClientRegion().getHeight() - hero_name_reg.y
hero_reg = Region(0, hero_name_reg.y, hero_name_reg.getRight(), hero_reg_height)
# Check that we have lvlup button in butt_reg
if self.find_pattern_reg_name_single(hero_reg, bottom_patterns):
break
# if direction == WHEEL_UP:
# # Adding heroes in front of sorted_heroes_list
# hol[0:0] = [item for item in visible_heroes_names if item not in hol]
# elif direction == WHEEL_UP:
# # Adding heroes in the end of of sorted_heroes_list
# hol.extend([item for item in visible_heroes_names if item not in hol])
if direction:
if direction == WHEEL_DOWN:
self.scroll_menu(menu_name, WHEEL_UP)
self.scroll_menu(menu_name, WHEEL_DOWN)
self.scroll_menu(menu_name, direction)
img = self.window.getScreenshot(hero_reg).get_array()
cv2.imshow('scroll_to_hero:hero_reg', img)
cv2.waitKey(50)
return hero_reg
def get_lvlup_toggle(self):
return 'z'
def needToUpgrade(self):
numberOfSkill = [len(v) for v in self.menus['mainMenu']['skills'].values()]
if numberOfSkill == SKILL_NUMBER:
return False
return True
def get_scroll_min_pos(self, menu_name):
return self.menus[menu_name]['sb_min_position']
def get_scroll_max_pos(self, menu_name):
return self.menus[menu_name]['sb_max_position']
    def scroll_pos_inc(self, menu_name, count=1):
        self.set_scroll_pos(menu_name, self.get_scroll_pos(menu_name) + count)
def scroll_menu(self, menu_name, direction, count=1):
self.open_menu(menu_name)
# if not self.find_pattern_from_list(self.get_pattern('main', 'scroll_')):
# return None
for i in range(count):
# mouse_event = MouseScroll(self.window, direction)
# self.mouse_event_queue.put((mouse_event, self.mp_event))
# self.mp_event.wait()
self.window.scroll(direction)
self.menus[menu_name]['sb_position'] -= direction
time.sleep(0.3)
self.window.makeScreenshotClientAreaRegion()
self.invalidate_cache_state()
def scroll_to_position(self, menu_name, position):
if position is None:
return
cur_pos = self.get_scroll_pos(menu_name)
if position < cur_pos:
direction = WHEEL_UP
elif position > cur_pos:
direction = WHEEL_DOWN
else:
return
self.scroll_menu(menu_name, direction, abs(position - cur_pos))
def scroll_to_start(self, menu_name):
self.scroll_to_position(menu_name, self.get_scroll_min_pos(menu_name))
def scroll_to_last(self, menu_name):
self.scroll_to_position(menu_name, self.get_scroll_max_pos(menu_name))
def findItem(self, item):
if item['pattern']['location'] is None:
self.window.getScreenshot('').find_pattern(item['pattern'])
def nextLvl(self):
self.open_menu('mainMenu')
self.clickItem(self.findItem(self.menus['mainMenu']['nextLvlButton']))
def prevLvl(self):
self.open_menu('mainMenu')
self.clickItem(self.findItem(self.menus['mainMenu']['prevLvlButton']))
def upgradeTopHero(self, offset=0):
self.open_menu('heroesTab')
h = self.findTopHero()
self.lvlUpHero(h)
def get_current_menu(self):
name = None
for menu_name in ['news', 'ancients_summon', 'settings', 'shop', 'heroes', 'ancients', 'relics', 'clan',
'merceneries', 'transcendence']:
            # Check whether this menu's "active" marker is on screen
reg = self.find_pattern_from_list(self.get_pattern('main', menu_name + '_menu_active'))
if reg:
name = menu_name
break
return name
def open_menu(self, menu_name):
cur_menu = self.get_current_menu()
if DEBUG:
print('open_menu: menu name is %s ' % (cur_menu))
if cur_menu == menu_name:
return
self.close_menu(wait=None)
pat_list = self.get_pattern('main', menu_name + '_menu')
reg = self.find_pattern_from_list(pat_list)
if not reg:
return None
self.click_location(reg[0].center())
def getCurrentMenu(self):
return self.currentmenu_name
def close_menu(self, menu_name=None, wait=1):
# self.wait_for_pattern_name(menu_name, 'close_menu')
while 1:
self.wait_for_pattern_list(self.get_pattern('buttons', 'button_close_menu'), wait=wait)
if not self.click_pattern('buttons', 'button_close_menu', all=True):
break
# self.click_pattern(menu_name, 'close_menu', all=False)
def close_popups(self, menu_name):
self.wait_for_pattern_name(menu_name, 'close_menu')
self.click_pattern(menu_name, 'close_menu', all=False)
def wait_for_pattern_name(self, menu_name, pat_name):
pat_list = self.get_pattern(menu_name, pat_name)
return self.wait_for_pattern_list(pat_list)
def wait_for_pattern_list(self, pat_list, wait=1):
delay = 0.05
wait_start = time.clock()
total_delay = 0
while wait is None or wait == -1 or total_delay <= wait:
self.window.makeScreenshotClientAreaRegion()
reg = self.find_pattern_from_list(pat_list)
if reg:
return reg
if wait is None:
return None
time.sleep(delay)
# if time.clock() - wait_start >= wait:
# return None
total_delay += delay
delay *= 2
return None
def click_pattern(self, menu_name, pattern_name, all=False, refresh=True):
if refresh:
self.window.makeScreenshotClientAreaRegion()
patt_list = self.get_pattern(menu_name, pattern_name)
if patt_list:
regs = self.find_pattern_from_list(patt_list, all=all)
if regs:
for reg in regs:
self.click_region(reg)
if not all:
break
if refresh:
self.window.makeScreenshotClientAreaRegion()
return True
return None
def get_monster_click_location(self):
if not self.click_monster_location:
next_lvl_button = self.find_pattern_from_list(self.get_pattern('main', 'levelnext'))
if next_lvl_button:
next_lvl_button = next_lvl_button[0]
else:
return None
prev_lvl_button = self.find_pattern_from_list(self.get_pattern('main', 'levelprev'))
if prev_lvl_button:
prev_lvl_button = prev_lvl_button[0]
else:
return None
skull_hp = self.find_pattern_from_list(self.get_pattern('main', 'skull_hp'))
if skull_hp:
skull_hp = skull_hp[0]
else:
return None
x_n, y_n = next_lvl_button.center().get_xy()
x_p, y_p = prev_lvl_button.center().get_xy()
shop_y = skull_hp.center().get_y()
# click_x is halfway between next and previous level button
click_x = (x_p + x_n) // 2
# click_y is halfway between current level rect and shop button
click_y = (shop_y + y_n) // 2
self.click_monster_location = Location(click_x, click_y)
return self.click_monster_location
def click_monster(self, cps=10):
mcl = self.get_monster_click_location()
if not mcl:
return None
return self.click_location(mcl)
def collect_fish(self, timer=15):
curr_time = time.clock()
if curr_time - self.fish_time >= timer:
self.window.makeScreenshotClientAreaRegion()
self.fish_time = curr_time
return self.click_pattern('main', 'fish')
def collect_newyear(self, timer=30):
curr_time = time.clock()
if curr_time - self.newyear >= timer:
self.newyear = curr_time
return self.click_pattern('main', 'new_year')
return None
def collect_relic_ooze(self):
# if not self.relic_ooze_collected:
self.window.makeScreenshotClientAreaRegion()
if self.find_pattern_from_list(self.get_pattern('main', 'relic_ooze')):
with self.lock:
if self.click_location(self.window.getClientRegion().center(), refresh=True):
self.close_menu('relic_ooze')
self.relic_ooze_collected = True
def lvlup(self):
self.click_pattern('heroes_button', 'lvlup_active')
def ascend(self, ascension_life=3600, check_timer=60, check_progress=True, check_hero_souls=True):
self.window.makeScreenshotClientAreaRegion()
self.click_location(Location(1, 1), refresh=True)
curr_time = time.clock()
if curr_time - self.ascend_checker_time < check_timer:
return None
self.ascend_checker_time = curr_time
if curr_time - self.ascend_time < ascension_life:
return None
        if not self.got_heroes_souls and check_hero_souls:
got_heroes_souls = self.find_pattern_from_list(self.get_pattern('main', 'got_heroes_souls'))
if got_heroes_souls:
self.got_heroes_souls = True
else:
return None
progress_on = self.find_pattern_from_list(self.get_pattern('main', 'button_progression_on'))
if progress_on and check_progress:
return None
# if not self.find_pattern_from_list(self.get_pattern('main', 'button_ascend')):
# if self.get_hero_level('heroes', 'amenhotep') < 200:
# if not self.lvlup_hero('heroes', 'amenhotep', max_level=200):
# return None
cnt = 0
while not self.wait_for_pattern_list(self.get_pattern('main', 'button_ascend'), wait=1) and cnt < 10:
self.lvlup_hero('heroes', 'amenhotep', lvl_count=100)
cnt += 1
destroy_relics_pat = self.get_pattern('main', 'destroy_relics')
wish_to_ascend_pat = self.get_pattern('main', 'wish_to_ascend')
# Refresh screenshot
self.window.makeScreenshotClientAreaRegion()
# if self.find_pattern_from_list(self.get_pattern('main', 'button_ascend')):
if self.wait_for_pattern_list(self.get_pattern('main', 'button_ascend')):
with self.lock:
if self.click_pattern('main', 'button_ascend'):
if self.wait_for_pattern_list(destroy_relics_pat):
self.click_pattern('main', 'button_yes')
if self.wait_for_pattern_list(wish_to_ascend_pat):
if self.click_pattern('main', 'button_yes'):
time.sleep(5)
curr_time = time.clock()
self.menus['heroes']['last_available_hero'] = None
self.menus['heroes']['max_seen_hero'] = None
self.menus['heroes']['visible_heroes_cache'] = None
self.menus['heroes']['hero_level'] = {}
# self.save_heroes_levels('heroes', self.get_heroes_level_dict('heroes'))
self.save_container('heroes', 'hero_level', self.menus['heroes']['hero_level'])
self.starter_clicks = True
self.got_heroes_souls = False
self.ascend_time = curr_time
self.lvlup_all_heroes_time = curr_time
self.click_pattern('main', 'button_progression_off')
self.buy_quick_ascension()
def monster_clicker(self, count=100, cps=30):
for i in range(count):
if not self.click_monster(cps):
break
def collect_gilds(self):
self.window.makeScreenshotClientAreaRegion()
present_reg = self.find_pattern_from_list(self.get_pattern('main', 'transcension_highest_zone_gift'))
if present_reg:
with self.lock:
if self.click_pattern('main', 'transcension_highest_zone_gift'):
transcension_highest_zone_menu = self.get_pattern('main', 'transcension_highest_zone_menu')
if self.wait_for_pattern_list(transcension_highest_zone_menu):
if self.click_location(self.window.getClientRegion().center(), refresh=True):
self.close_menu('main')
def get_np_level(self):
next_lvl_button = self.find_pattern_from_list(self.get_pattern('main', 'levelnext'))
if next_lvl_button:
next_lvl_button = next_lvl_button[0]
else:
return None
prev_lvl_button = self.find_pattern_from_list(self.get_pattern('main', 'levelprev'))
if prev_lvl_button:
prev_lvl_button = prev_lvl_button[0]
else:
return None
x_n, y_n = next_lvl_button.center().get_xy()
x_p, y_p = prev_lvl_button.center().get_xy()
x_curr_level, y_curr_level = ((x_n + x_p) / 2, (y_n + y_p) / 2)
x_next_level = x_curr_level + 0.4 * (x_n - x_curr_level)
y_next_level = y_curr_level
x_prev_level = x_curr_level - 0.4 * (x_curr_level - x_p)
y_prev_level = y_curr_level
return (Location(x_prev_level, y_prev_level), Location(x_next_level, y_next_level))
def next_level(self):
skull_farm = self.find_pattern_from_list(self.get_pattern('main', 'skull_farm'))
if skull_farm:
return
np_level = self.get_np_level()
if np_level:
            next_level = np_level[1]
else:
return None
if next_level:
self.click_location(next_level)
def prev_level(self):
np_level = self.get_np_level()
if np_level:
prev_level = np_level[0]
else:
return None
self.click_location(prev_level)
def progress_auto(self, farm_mode_timer=300, boss_timer=5):
curr_time = time.clock()
progress_off = self.find_pattern_from_list(self.get_pattern('main', 'button_progression_off'))
progress_on = self.find_pattern_from_list(self.get_pattern('main', 'button_progression_on'))
if progress_on and self.stuck_on_boss(boss_time=boss_timer, check_interval=1):
if self.try_skill_combos('869', '123457', '123'):
time.sleep(30)
self.prev_level()
self.farm_mode_start_time = curr_time
self.click_pattern('main', 'button_progression_off')
return True
if not progress_off:
return False
if progress_off and self.farm_mode_start_time is None:
self.farm_mode_start_time = curr_time
return True
if curr_time - self.farm_mode_start_time >= farm_mode_timer:
self.farm_mode_start_time = None
self.click_pattern('main', 'button_progression_off')
return True
def progress_manual(self, farm_mode_timer=300, boss_timer=10):
curr_time = time.clock()
if self.farm_mode_start_time and curr_time - self.farm_mode_start_time > farm_mode_timer:
self.farm_mode_start_time = None
# return False
if not self.farm_mode_start_time:
self.next_level()
if not self.farm_mode_start_time and self.stuck_on_boss(boss_time=boss_timer, check_interval=1):
if self.try_skill_combos('869', '123457', '123'):
time.sleep(30)
self.prev_level()
self.farm_mode_start_time = curr_time
return True
return False
def stuck_on_boss(self, boss_time, check_interval=5):
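        # Heuristic: snapshot the level-number strip once a boss fight is
        # detected; if an identical snapshot is still found after boss_time
        # seconds, the level has not advanced and we report "stuck".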
curr_time = time.clock()
if self.boss_time and curr_time - self.boss_check_time <= check_interval:
return False
self.boss_check_time = curr_time
boss_clock = self.find_pattern_from_list(self.get_pattern('main', 'boss_clock'))
if not boss_clock:
return False
skull_farm = self.find_pattern_from_list(self.get_pattern('main', 'skull_farm'))
if not skull_farm:
return False
if not self.levels_region:
next_lvl_button = self.find_pattern_from_list(self.get_pattern('main', 'levelnext'))[0]
prev_lvl_button = self.find_pattern_from_list(self.get_pattern('main', 'levelprev'))[0]
x_n, y_n = next_lvl_button.getBottomRight().get_xy()
x_p, y_p = prev_lvl_button.getTopLeft().get_xy()
x_c, y_c = (x_n + x_p) / 2, (y_n + y_p) / 2
h = (y_c - y_p) * 2
y_p = int(y_c - h)
y_n = int(y_c + h)
self.levels_region = Region.from2POINT(x_p, y_p, x_n, y_n)
time.sleep(1.1)
if self.boss_time is None:
self.boss_time = curr_time
self.levels_region_scrshot = self.window.makeScreenshotClientAreaRegion(self.levels_region)
# cv2.imshow('self.levels_region_scrshot', self.levels_region_scrshot.get_array())
# cv2.waitKey(50)
return False
self.boss_check_time = curr_time
if curr_time - self.boss_time >= boss_time:
levels_region_scrshot = self.window.makeScreenshotClientAreaRegion(self.levels_region)
levels_region_scrshot.set_name('123456')
# cv2.imshow('levels_region_scrshot',levels_region_scrshot.get_array())
# cv2.imshow('self.levels_region_scrshot', self.levels_region_scrshot.get_array())
# cv2.waitKey(50)
self.boss_time = None
if levels_region_scrshot.find_pattern(self.levels_region_scrshot):
# cv2.imshow('levels_region_scrshot',levels_region_scrshot.get_array())
# cv2.imshow('self.levels_region_scrshot', self.levels_region_scrshot.get_array())
# cv2.waitKey(50)
return True
return False
def progress_level(self, farm_mode_timer=300, boss_timer=30, progress_button_timer=30):
self.window.makeScreenshotClientAreaRegion()
curr_time = time.clock()
progress_button = None
if self.progress_button_time and curr_time - self.progress_button_time >= progress_button_timer:
progress_button = self.find_pattern_from_list(self.get_pattern('main', 'button_progression'))
if progress_button:
self.progress_button_time = None
else:
self.progress_button_time = curr_time
if progress_button or self.progress_button_time is None:
return self.progress_auto(farm_mode_timer=farm_mode_timer, boss_timer=boss_timer)
return self.progress_manual(farm_mode_timer=farm_mode_timer, boss_timer=boss_timer)
def get_cache_state(self):
return self.cache_state
def invalidate_cache_state(self):
self.cache_state = False
def validate_cache_state(self):
self.cache_state = True
def cache_state_is_invalid(self):
return not self.get_cache_state()
def cache_state_is_valid(self):
return self.get_cache_state()
# def buy_available_upgrades_old(self):
# self.window.makeScreenshotClientAreaRegion()
#
# menu_name = 'heroes'
# max_seen_hero = self.get_max_seen_hero(menu_name)
# if max_seen_hero is None:
# return None
# self.scroll_to_hero(menu_name, max_seen_hero)
# while not self.click_pattern('main', 'buy_available_upgrades_old'):
# self.scroll_menu(menu_name, WHEEL_DOWN)
# self.scroll_menu(menu_name, WHEEL_UP)
# self.scroll_menu(menu_name, WHEEL_DOWN)
def buy_available_upgrades(self, upgrades_timer=300):
curr_time = time.clock()
if curr_time - self.skills_upgrades_time < upgrades_timer:
return None
self.window.makeScreenshotClientAreaRegion()
menu_name = 'heroes'
max_seen_hero = self.get_max_seen_hero(menu_name)
if max_seen_hero is None:
return None
self.scroll_to_hero(menu_name, max_seen_hero)
cnt = 0
MAX_RETRY = 3
while cnt <= MAX_RETRY:
if not self.click_pattern('main', 'buy_available_upgrades'):
self.scroll_menu(menu_name, WHEEL_DOWN)
self.scroll_menu(menu_name, WHEEL_UP)
self.scroll_menu(menu_name, WHEEL_DOWN)
else:
self.skills_upgrades_time = time.clock()
return True
cnt += 1
self.window.makeScreenshotClientAreaRegion()
sorted_hero_list = self.get_sorted_heroes_list(menu_name)
if sorted_hero_list is None:
return None
heroes_upgraded_list = self.menus[menu_name]['heroes_upgraded_list']
# if heroes_upgraded_list is None:
# return None
# heroes_to_lvlup = [hero_name for hero_name in last_ascend_seen_heroes if hero_name not in heroes_upgraded_list]
# Make list from sorted heroes list up to max_seen_hero included.
# heroes_to_lvlup = list(itertools.takewhile(lambda x: x != max_seen_hero, sorted_hero_list))+[max_seen_hero]
heroes_to_lvlup = list(
itertools.takewhile(lambda x: x not in self.get_next_hero_name(menu_name, max_seen_hero), sorted_hero_list))
# Exclude from this list upgraded heroes
heroes_to_lvlup = [hero_name for hero_name in heroes_to_lvlup if hero_name not in heroes_upgraded_list]
for hero_name in heroes_to_lvlup:
###Buy heroes skill except ascension
hero_reg = self.scroll_to_hero(menu_name, hero_name)
hero_reg_scr = self.window.makeScreenshotClientAreaRegion(hero_reg)
ascend_skill_reg = None
if hero_name == 'amenhotep':
ascend_skill_reg = hero_reg_scr.find_pattern_from_list(
self.get_pattern('heroes_skills', 'amenhotep_ascend'),
cache=False)
if ascend_skill_reg:
ascend_skill_reg = ascend_skill_reg[0]
button_edge_reg = hero_reg_scr.find_pattern_from_list(self.get_pattern('heroes_button', 'edge_'),
cache=False)
if not button_edge_reg:
continue
button_edge_reg = button_edge_reg[0]
hero_name_reg = hero_reg_scr.find_pattern_from_list(self.get_pattern(menu_name, hero_name))
if hero_name_reg is None:
continue
hero_name_reg = hero_name_reg[0]
# skills_reg_left_x, skills_reg_left_y = button_edge_reg.center().get_xy()
skills_reg_left_x, skills_reg_left_y = button_edge_reg.getRight(), button_edge_reg.center().get_y()
skills_reg_right_x = hero_name_reg.getRight()
y = hero_reg.getTop() + skills_reg_left_y
for i in range(100):
x = hero_reg.getLeft() + skills_reg_left_x + int(
random.random() * (skills_reg_right_x - skills_reg_left_x))
if ascend_skill_reg and ascend_skill_reg.contains((x - hero_reg.getLeft(), y - hero_reg.getTop())):
continue
# hero_reg_scr = self.window.makeScreenshotClientAreaRegion(hero_reg)
# cv2.imshow("hero_reg_scr", hero_reg_scr.get_array())
# cv2.waitKey(10)
# skills_reg = hero_reg_scr.find_pattern_from_list(self.get_pattern('heroes_skills', '%s_c' % hero_name))
# if skills_reg:
# break
self.window.click(x, y, cps=30)
hero_reg_scr = self.window.makeScreenshotClientAreaRegion(hero_reg)
skills_reg = hero_reg_scr.find_pattern_from_list(self.get_pattern('heroes_skills', '%s_c' % hero_name))
if skills_reg:
# heroes_upgraded_list.remove(hero_name)
heroes_upgraded_list = self.menus[menu_name]['heroes_upgraded_list']
heroes_upgraded_list.append(hero_name)
self.save_container(menu_name, 'heroes_upgraded_list', heroes_upgraded_list)
self.skills_upgrades_time = time.clock()
return True
def buy_quick_ascension(self):
self.window.makeScreenshotClientAreaRegion()
self.close_menu()
with self.window.lock:
if self.click_pattern('main', 'button_shop'):
if self.wait_for_pattern_list(self.get_pattern('shop', 'shop_title')):
self.click_pattern('shop', 'button_buy_quick_ascension')
if self.wait_for_pattern_list(self.get_pattern('shop', 'buy_confirm')):
self.click_pattern('shop', 'button_yes')
if self.wait_for_pattern_list(self.get_pattern('shop', 'title_thank_you')):
# Close all shop submenu
self.click_pattern('shop', 'button_close_menu', all=True)
# self.click_pattern('shop', 'button_okey')
# if self.wait_for_pattern_list(self.get_pattern('shop', 'shop_title')):
else:
if self.wait_for_pattern_list(self.get_pattern('shop', 'title_you_need_more_rubies')):
self.click_pattern('shop', 'button_close_menu', all=True)
# self.click_pattern('shop', 'button_no')
return False
return True
def try_skill_combos(self, *args):
def is_skill_combo_available(skill_combo):
for sn in skill_combo:
if not self.find_pattern_from_list(self.get_pattern('skills', 'skill_%s' % sn)):
if DEBUG:
print("try_skill_combos: skill %s is not ready yet. Try another combo" % sn)
return False
return True
self.window.makeScreenshotClientAreaRegion()
for combo in args:
if is_skill_combo_available(combo):
if DEBUG:
print("try_skill_combos: Combo %s is ready to activate" % combo)
self.window.pressKeyList(combo)
return True
return False
def start_play(self):
if self.click_pattern('buttons', 'button_play'):
if self.click_pattern('buttons', 'button_close_menu', all=True):
return True
return None
class Window:
def __init__(self, hwnd, lock):
self.hwnd = hwnd
self.lock = lock
self.screenshot = None
self.last_click_location = (None, None)
if DEBUG:
print("Window:_init_:hwnd=%s" % (hwnd))
winLong = GetWindowLong(self.hwnd, GWL_STYLE)
# SetWindowLong(hwnd, GWL_STYLE, winLong & ~WS_SIZEBOX)
# # SetWindowLong(self.hwnd, GWL_STYLE, winLong |WS_SYSMENU|WS_CAPTION| WS_MAXIMIZEBOX | WS_MINIMIZEBOX)
# SetWindowLong(self.hwnd, GWL_STYLE, winLong |WS_SYSMENU|WS_CAPTION| ~WS_MAXIMIZEBOX | ~WS_MINIMIZEBOX)
pass
def move(self, x, y):
reg = self.getWindowRegion()
SetWindowPos(self.hwnd,
HWND_TOP,
x,
y,
reg.w,
reg.h,
0)
def resize(self, width, height):
reg = self.getWindowRegion()
SetWindowPos(self.hwnd,
HWND_TOP,
reg.x,
reg.y,
width,
height,
0)
def resizeRel(self, dwidth, dheight):
reg = self.getWindowRegion()
SetWindowPos(self.hwnd,
HWND_TOP,
reg.x,
reg.y,
reg.w + dwidth,
reg.h + dheight,
0)
def resizeCliPropW(self, width):
self.resizeClientArea(width, int(width * 9.0 / 16))
def resizeCliPropH(self, height):
self.resizeClientArea(int(round(height * 16.0 / 9)), height)
def resizeClientArea(self, width, height):
cliReg = self.getClientRegion()
dx = width - cliReg.getWidth()
dy = height - cliReg.getHeight()
self.resizeRel(dx, dy)
def getClientRegion(self):
cliRect = RECT()
GetClientRect(self.hwnd, ctypes.byref(cliRect))
return Region(cliRect.left, cliRect.top, cliRect.right - cliRect.left, cliRect.bottom - cliRect.top)
def getWidth(self):
return self.getClientRegion().getWidth()
def getHeight(self):
return self.getClientRegion().getHeight()
def get_size(self):
return self.getClientRegion().get_size()
def getWindowRegion(self):
winRect = RECT()
GetWindowRect(self.hwnd, ctypes.byref(winRect))
return Region(winRect.left, winRect.top, winRect.right - winRect.left, winRect.bottom - winRect.top)
    def getRegionScreenShot(self, region):
        # Unimplemented stub in the original source (it referenced the Region
        # and Image classes directly); made an explicit placeholder here.
        raise NotImplementedError
def pressKey(self, char):
with self.lock:
SendMessage(self.hwnd, WM_KEYDOWN, charToKeyCode(char), 1)
# time.sleep(0.1)
SendMessage(self.hwnd, WM_KEYUP, charToKeyCode(char), 1)
def pressAndHoldKey(self, char):
with self.lock:
SendMessage(self.hwnd, WM_KEYDOWN, charToKeyCode(char), 1)
def releaseKey(self, char):
with self.lock:
SendMessage(self.hwnd, WM_KEYUP, charToKeyCode(char), 1)
def pressKeyList(self, chars):
with self.lock:
for c in chars:
self.pressKey(c)
return
def getScreenshot(self, region=None):
if region:
return self.screenshot.crop(region)
return self.screenshot
    def getScreenshotCliRegion(self, name):
        # The original called Region.getScreenshot(), which does not exist;
        # assumed intent is to return the cached client-area screenshot.
        return self.getScreenshot()
def makeScreenshotClientAreaRegion(self, region=None):
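        # Captures the client area with Win32 GDI (BitBlt + GetDIBits into a
        # numpy array). A minimized window is temporarily made fully
        # transparent (WS_EX_LAYERED + zero alpha), shown without activation,
        # captured, then re-minimized with its attributes restored.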
with self.lock:
isIconic = IsIconic(self.hwnd)
winLong = None
# ShowWindow(self.hwnd, SW_HIDE)
if isIconic:
animationInfo = ANIMATIONINFO()
animationInfo.iMinAnimate = 0
animationInfo.cbSize = ctypes.sizeof(ANIMATIONINFO)
winLong = GetWindowLong(self.hwnd, GWL_EXSTYLE)
SetWindowLong(self.hwnd, GWL_EXSTYLE, winLong | WS_EX_LAYERED)
SetLayeredWindowAttributes(self.hwnd, 0, 0, LWA_ALPHA)
# SystemParametersInfo(SPI_GETANIMATION, animationInfo.cbSize,ctypes.byref(animationInfo), 0)
SystemParametersInfo(SPI_SETANIMATION, animationInfo.cbSize, ctypes.byref(animationInfo),
SPIF_SENDCHANGE)
ShowWindow(self.hwnd, SW_SHOWNOACTIVATE)
wr = RECT()
cliRect = RECT()
GetClientRect(self.hwnd, ctypes.byref(cliRect))
if region is None:
x = 0
y = 0
w = cliRect.right
h = cliRect.bottom
else:
ir = region.intersection(Region.fromRECT(cliRect))
if ir is None:
raise Exception(
'Region ' + str(region) + ' is not intersect with client area rectangle' + str(cliRect))
x = ir.x
y = ir.y
w = ir.w
h = ir.h
# w = cliRect.right
# h = cliRect.bottom
# x = region.get_x()
# y = region.get_y()
# w = region.getWidth()
# h = region.getHeight()
hDC = GetDC(self.hwnd)
myDC = CreateCompatibleDC(hDC)
myBitMap = CreateCompatibleBitmap(hDC, w, h)
SelectObject(myDC, myBitMap)
BitBlt(myDC, 0, 0, w, h, hDC, x, y, SRCCOPY)
if isIconic:
ShowWindow(self.hwnd, SW_SHOWMINNOACTIVE)
SetWindowLong(self.hwnd, GWL_EXSTYLE, winLong)
# SystemParametersInfo(SPI_GETANIMATION, animationInfo.cbSize,ctypes.byref(animationInfo), 0)
animationInfo = ANIMATIONINFO()
animationInfo.iMinAnimate = 1
animationInfo.cbSize = ctypes.sizeof(ANIMATIONINFO)
SystemParametersInfo(SPI_SETANIMATION, animationInfo.cbSize, ctypes.byref(animationInfo),
SPIF_SENDCHANGE)
bmpScreen = BITMAP()
GetObject(myBitMap, ctypes.sizeof(BITMAP), ctypes.byref(bmpScreen))
bi = BITMAPINFOHEADER()
bi.biSize = ctypes.sizeof(BITMAPINFOHEADER)
bi.biWidth = bmpScreen.bmWidth
bi.biHeight = bmpScreen.bmHeight
bi.biPlanes = 1
bi.biBitCount = bmpScreen.bmBitsPixel
bi.biCompression = BI_RGB
bi.biSizeImage = 0
bi.biXPelsPerMeter = 0
bi.biYPelsPerMeter = 0
bi.biClrUsed = 0
bi.biClrImportant = 0
img = np.empty((h, w, int(bmpScreen.bmBitsPixel / 8)), np.uint8)
winplace = WINDOWPLACEMENT()
GetWindowPlacement(self.hwnd, ctypes.byref(winplace))
wr = winplace.rcNormalPosition
            if (GetDIBits(hDC, myBitMap, 0,
                          bmpScreen.bmHeight,
                          ctypes.c_void_p(img.ctypes.data),
                          ctypes.byref(bi), DIB_RGB_COLORS) == 0):
                print("makeScreenshotClientAreaRegion: GetDIBits = 0 ")
                # Release GDI resources on the failure path as well.
                DeleteDC(myDC)
                DeleteObject(myBitMap)
                ReleaseDC(self.hwnd, hDC)
                return None
DeleteDC(myDC)
DeleteObject(myBitMap)
ReleaseDC(self.hwnd, hDC)
screenshot = Image.fromArray(cv2.flip(img, 0))
screenshot.set_name('Screenshot of %s %s' % (self.hwnd, id(screenshot)))
if region is None:
self.screenshot = screenshot
return screenshot
def scroll(self, direction, x=1, y=1):
with self.lock:
tmp = (y << 16) | x
time1 = time.clock()
err = 0
err += SendMessage(self.hwnd, WM_MOUSEWHEEL,
(WHEEL_DELTA * direction) << 16, tmp)
time.sleep(0.02)
x = 1
y = 1
tmp = (y << 16) | x
err += SendMessage(self.hwnd, WM_MOUSEMOVE, 0, tmp)
time2 = time.clock()
if time2 - time1 > 1 or err > 0:
print("scroll: got delay > 1 sec %s err %s" % (time2 - time1, err))
    # The original defined zero-argument scrollDown/scrollUp wrappers that were
    # immediately shadowed by the two-argument versions below; merged here
    # using default arguments.
    def scrollDown(self, x=1, y=1):
        tmp = (y << 16) | x
        SendMessage(self.hwnd, WM_MOUSEWHEEL,
                    (WHEEL_DELTA * -1) << 16, tmp)
    def scrollUp(self, x=1, y=1):
        tmp = (y << 16) | x
        SendMessage(self.hwnd, WM_MOUSEWHEEL,
                    (WHEEL_DELTA * 1) << 16, tmp)
def click(self, x, y, refresh=False, park=True, cps=30):
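        # Synthesizes a click via SendMessage: lParam packs the client-area
        # coordinates as (y << 16) | x per the WM_LBUTTON* convention, and the
        # cursor is optionally "parked" at (1, 1) afterwards.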
x = int(x)
y = int(y)
self.last_click_location = (x, y)
tmp = (y << 16) | x
        delay = 1.0 / cps  # float division (guards against Python 2 int division)
# if park:
# delay /= 2
with self.lock:
err = 0
time1 = time.clock()
err += SendMessage(self.hwnd, WM_LBUTTONDOWN, 0, tmp)
err += SendMessage(self.hwnd, WM_LBUTTONUP, 0, tmp)
time2 = time.clock()
if time2 - time1 > 1 or err > 0:
print("scroll: got delay > 1 sec %s err %s" % (time2 - time1, err))
time.sleep(delay)
if park:
x = 1
y = 1
tmp = (y << 16) | x
err += SendMessage(self.hwnd, WM_MOUSEMOVE, 0, tmp)
# time.sleep(delay / 2)
if refresh:
self.makeScreenshotClientAreaRegion()
if err > 0:
return None
return True
def move_mouse(self, x=None, y=None, refresh=False, park=True, cps=30):
        l_x, l_y = self.last_click_location
        if l_x is None or l_y is None:
            # No prior click to interpolate from; start from the parked corner.
            l_x, l_y = 1, 1
        xc, yc = l_x, l_y
steps = 30
if x:
dx = (x - l_x) / steps
else:
dx = 0
if y:
dy = (y - l_y) / steps
else:
dy = 0
for i in range(steps):
xc += dx
yc += dy
xi, yi = int(xc), int(yc)
tmp = (yi << 16) | xi
            delay = 1.0 / cps  # float division (guards against Python 2 int division)
err = 0
with self.lock:
err += SendMessage(self.hwnd, WM_MOUSEMOVE, 0, tmp)
time.sleep(delay)
if err > 0:
return None
return True
def click_location(self, loc, refresh=False, park=True, cps=50):
return self.click(loc.get_x(), loc.get_y(), refresh=refresh, park=park, cps=cps)
def click_region(self, reg, refresh=False, park=True, cps=30):
x, y = reg.center().get_xy()
return self.click(x, y, refresh=refresh, park=park, cps=cps)
class Location:
def __init__(self, x, y):
self.x = x
self.y = y
def get_x(self):
return self.x
def get_y(self):
return self.y
def set(self, x, y):
self.x = x
self.y = y
def get_xy(self):
return (self.x, self.y)
class Region:
def __init__(self, x, y, w, h):
self.x = x
self.y = y
self.w = w
self.h = h
# def get_xy(self):
# self.hwnd = hwnd
# r = RECT()
# GetWindowRect(hwnd, ctypes.byref(r))
# (self.x, self.y, self.w, self.h) = (r.left, r.top, r.right - r.left, r.bottom - r.top)
@classmethod
def fromRECT(cls, rect):
return cls(rect.left, rect.top, rect.right - rect.left, rect.bottom - rect.top)
@classmethod
def from2POINT(cls, left, top, right, bottom):
return cls(left, top, right - left, bottom - top)
@classmethod
def from2Location(cls, l1, l2):
x1, y1 = l1.get_xy()
x2, y2 = l2.get_xy()
w = x2 - x1
h = y2 - y1
return cls(x1, y1, w, h)
def getTopLeft(self):
return Location(self.getLeft(), self.getTop())
def getTopRight(self):
return Location(self.getRight(), self.getTop())
# def __eq__(self, other):
# if isinstance(other, Region):
# return self.is_intersect(other)
# return NotImplemented
#
# def __hash__(self):
# # return hash((self.x,self.y,self.w,self.h))
# return 1
    def getBottomLeft(self):
        # Fixed: was decorated with a stray @property (unlike its siblings)
        # and called the nonexistent self.getbottom().
        return Location(self.getLeft(), self.getBottom())
def getBottomRight(self):
return Location(self.getRight(), self.getBottom())
def resize(self, x, y, w, h):
return 1
def getX(self):
return self.x
def getY(self):
return self.y
def getWidth(self):
return self.w
def getHeight(self):
return self.h
def get_size(self):
return self.w, self.h
def getLeft(self):
return self.x
def getRight(self):
return self.x + self.w
def getTop(self):
return self.y
def getBottom(self):
return self.y + self.h
def setLeft(self, left):
self.x = left
def setRight(self, right):
self.w = right - self.x
def setTop(self, top):
self.y = top
def setBottom(self, bottom):
self.h = bottom - self.y
def center(self):
return Location(int(self.x + self.w / 2), int(self.y + self.h / 2))
###################################
#
# t1
# |----------------|
# l1| |r1
# | |
# | maxt | t2
# | |-------|------|
# | maxl|#######|minr |
# |--------|-------| |
# b1 | minb |
# | |
# l2| |r2
# | |
# |--------------|
# b2
#
###################################
def intersection(self, region):
t1 = self.getTop()
b1 = self.getBottom()
l1 = self.getLeft()
r1 = self.getRight()
t2 = region.getTop()
b2 = region.getBottom()
l2 = region.getLeft()
r2 = region.getRight()
maxt = max(t1, t2)
minb = min(b1, b2)
maxl = max(l1, l2)
minr = min(r1, r2)
if not (maxt < minb and maxl < minr):
return None
return Region(maxl, maxt, minr - maxl, minb - maxt)
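    # Example (illustrative values): Region(0, 0, 10, 10).intersection(
    # Region(5, 5, 10, 10)) yields Region(5, 5, 5, 5); disjoint or merely
    # touching regions yield None.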
def is_intersect(self, region):
t1 = self.getTop()
b1 = self.getBottom()
l1 = self.getLeft()
r1 = self.getRight()
t2 = region.getTop()
b2 = region.getBottom()
l2 = region.getLeft()
r2 = region.getRight()
maxt = max(t1, t2)
minb = min(b1, b2)
maxl = max(l1, l2)
minr = min(r1, r2)
if (maxt > minb or maxl > minr):
return False
return True
def contains(self, loc):
return self.getLeft() <= loc[0] <= self.getRight() and self.getTop() <= loc[1] <= self.getBottom()
def __add__(self, x):
return Region(self.getX() + x[0], self.getY() + x[1], self.getWidth(), self.getHeight())
def get_collectables(click_lock, start_barrier):
ch = ClickerHeroes(click_lock)
print("get_collectables: Started")
ch.start_play()
start_barrier.wait()
while True:
# try:
ch.collect_fish()
ch.collect_gilds()
if ch.collect_newyear():
ch.monster_clicker(count=750)
ch.collect_relic_ooze()
ch.monster_clicker()
# except Exception as e:
# print("get_collectables:Exception:%s" % repr(e))
# continue
def levelup_heroes(click_lock, start_barrier):
start_barrier.wait()
print("levelup_heroes: Started")
ch = ClickerHeroes(click_lock)
    i = 0
while True:
# try:
time.sleep(10)
        i += 1
        # time1=time.clock()
        ch.window.makeScreenshotClientAreaRegion()
        cv2.imwrite('D:\\tmp\\scr\\scr_%d.png' % i, ch.window.getScreenshot().get_array(), [cv2.IMWRITE_PNG_COMPRESSION, 9])
# cv2.imshow('Test screenshot', ch.window.getScreenshot().get_array())
# cv2.waitKey(10)
# time.sleep(10)
# time2 = time.clock()
# print("%d time2-time1= %s" % (i,time2-time1))
# ch.buy_quick_ascension()
ch.reindex_heroes_list('heroes')
# if ch.lvlup_all_heroes('heroes', max_level=150, timer=600):
# continue
ch.lvlup_top_heroes('heroes')
# ch.buy_quick_ascension()
# ch.lvlup_all_heroes('heroes', timer=1800)
# ch.buy_available_upgrades(upgrades_timer=1800)
# ch.ascend(ascension_life=7200, check_timer=30, check_progress=False)
# except Exception as e:
# print("levelup_heroes:Exception:%s" % repr(e))
# continue
def progress_levels(click_lock, start_barrier):
start_barrier.wait()
print("progress_levels: Started")
ch = ClickerHeroes(click_lock)
while True:
# try:
# img = ch.window.getScreenshot().get_canny_array()
# cv2.imshow('Clicker Heroes', img)
ch.progress_level(farm_mode_timer=180, boss_timer=30)
# ch.try_skill_combos('12345')
# ch.try_skill_combos('869', '123457','123')
# except Exception as e:
# print("progress_levels:Exception:%s" % repr(e))
# continue
if __name__ == '__main__':
c_lock = multiprocessing.RLock()
start_condition = multiprocessing.Condition()
mp_target = [progress_levels, get_collectables, levelup_heroes]
# mp_target = [progress_levels]
# mp_target = [levelup_heroes]
start_barrier = multiprocessing.Barrier(len(mp_target))
proc = []
for target in mp_target:
proc.append(Process(target=target, args=(c_lock, start_barrier,)))
for p in proc:
p.start()
ch = ClickerHeroes(c_lock)
while True:
time.sleep(1)
if DEBUG:
print("Bot is running")
| mit | -3,060,479,858,056,614,000 | 37.725935 | 129 | 0.55233 | false | 3.495238 | false | false | false |
robwilkerson/BitBucket-api | bitbucket/repository.py | 1 | 6817 | # -*- coding: utf-8 -*-
import json
from tempfile import NamedTemporaryFile
from zipfile import ZipFile
from pprint import pprint
URLS = {
'CREATE_REPO': 'repositories/',
'CREATE_REPO_V2': 'repositories/%(username)s/%(repo_slug)s/',
'GET_REPO': 'repositories/%(username)s/%(repo_slug)s/',
'UPDATE_REPO': 'repositories/%(username)s/%(repo_slug)s/',
'DELETE_REPO': 'repositories/%(username)s/%(repo_slug)s/',
'GET_USER_REPOS': 'user/repositories/',
# Get archive
'GET_ARCHIVE': 'repositories/%(username)s/%(repo_slug)s/%(format)s/master/',
}
class Repository(object):
""" This class provide repository-related methods to Bitbucket objects."""
def __init__(self, bitbucket):
self.bitbucket = bitbucket
self.bitbucket.URLS.update(URLS)
def _get_files_in_dir(self, repo_slug=None, owner=None, dir='/'):
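        # Recursively walks the repository listing returned by the 'src' API
        # and downloads each file's raw contents into self.bitbucket.repo_tree.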
repo_slug = repo_slug or self.bitbucket.repo_slug or ''
owner = owner or self.bitbucket.username
dir = dir.lstrip('/')
url = self.bitbucket.url(
'GET_ARCHIVE',
username=owner,
repo_slug=repo_slug,
format='src')
dir_url = url + dir
response = self.bitbucket.dispatch('GET', dir_url, auth=self.bitbucket.auth)
if response[0] and isinstance(response[1], dict):
repo_tree = response[1]
url = self.bitbucket.url(
'GET_ARCHIVE',
username=owner,
repo_slug=repo_slug,
format='raw')
# Download all files in dir
for file in repo_tree['files']:
file_url = url + '/'.join((file['path'],))
response = self.bitbucket.dispatch('GET', file_url, auth=self.bitbucket.auth)
self.bitbucket.repo_tree[file['path']] = response[1]
# recursively download in dirs
for directory in repo_tree['directories']:
dir_path = '/'.join((dir, directory))
self._get_files_in_dir(repo_slug=repo_slug, owner=owner, dir=dir_path)
def public(self, username=None):
""" Returns all public repositories from an user.
If username is not defined, tries to return own public repos.
"""
username = username or self.bitbucket.username or ''
url = self.bitbucket.url('GET_USER', username=username)
response = self.bitbucket.dispatch('GET', url)
try:
return (response[0], response[1]['repositories'])
except TypeError:
pass
return response
def all(self, owner=None):
""" Return all repositories owned by a given owner """
owner = owner or self.bitbucket.username
url = self.bitbucket.url('GET_USER', username=owner)
response = self.bitbucket.dispatch('GET', url, auth=self.bitbucket.auth)
try:
return (response[0], response[1]['repositories'])
except TypeError:
pass
return response
def team(self, include_owned=True):
"""Return all repositories for which the authenticated user is part of
the team.
If include_owned is True (default), repos owned by the user are
included (and therefore is a superset of the repos returned by
all().
If include_owned is False, repositories only repositories
owned by other users are returned.
"""
url = self.bitbucket.url('GET_USER_REPOS')
status, repos = self.bitbucket.dispatch('GET', url, auth=self.bitbucket.auth)
if status and not include_owned:
            return status, [r for r in repos if r['owner'] != self.bitbucket.username]
return status, repos
def get(self, repo_slug=None, owner=None):
""" Get a single repository on Bitbucket and return it."""
repo_slug = repo_slug or self.bitbucket.repo_slug or ''
owner = owner or self.bitbucket.username
url = self.bitbucket.url('GET_REPO', username=owner, repo_slug=repo_slug)
return self.bitbucket.dispatch('GET', url, auth=self.bitbucket.auth)
def create(self, repo_name=None, repo_slug=None, owner=None, scm='git', private=True, **kwargs):
""" Creates a new repository on a Bitbucket account and return it."""
repo_slug = repo_slug or self.bitbucket.repo_slug or ''
if owner:
url = self.bitbucket.url_v2('CREATE_REPO_V2', username=owner, repo_slug=repo_slug)
else:
owner = self.bitbucket.username
url = self.bitbucket.url('CREATE_REPO')
return self.bitbucket.dispatch('POST', url, auth=self.bitbucket.auth, name=repo_name, scm=scm, is_private=private, **kwargs)
def update(self, repo_slug=None, owner=None, **kwargs):
""" Updates repository on a Bitbucket account and return it."""
repo_slug = repo_slug or self.bitbucket.repo_slug or ''
owner = owner or self.bitbucket.username
url = self.bitbucket.url('UPDATE_REPO', username=owner, repo_slug=repo_slug)
return self.bitbucket.dispatch('PUT', url, auth=self.bitbucket.auth, **kwargs)
def delete(self, repo_slug=None, owner=None):
""" Delete a repository on own Bitbucket account.
            Please use with caution as there is NO confirmation and NO undo.
"""
repo_slug = repo_slug or self.bitbucket.repo_slug or ''
owner = owner or self.bitbucket.username
url = self.bitbucket.url_v2('DELETE_REPO', username=owner, repo_slug=repo_slug)
return self.bitbucket.dispatch('DELETE', url, auth=self.bitbucket.auth)
def archive(self, repo_slug=None, owner=None, format='zip', prefix=''):
""" Get one of your repositories and compress it as an archive.
Return the path of the archive.
            The format parameter is currently not supported.
"""
owner = owner or self.bitbucket.username
        # Strip any leading slash from prefix; the original expression
        # "'%s'.lstrip('/') % prefix" was a no-op.
        prefix = ('%s' % prefix).lstrip('/')
self._get_files_in_dir(repo_slug=repo_slug, owner=owner, dir='/')
if self.bitbucket.repo_tree:
with NamedTemporaryFile(delete=False) as archive:
with ZipFile(archive, 'w') as zip_archive:
for name, f in self.bitbucket.repo_tree.items():
with NamedTemporaryFile(delete=False) as temp_file:
if isinstance(f, dict):
f = json.dumps(f)
try:
temp_file.write(f.encode('utf-8'))
except UnicodeDecodeError:
temp_file.write(f)
zip_archive.write(temp_file.name, prefix + name)
return (True, archive.name)
return (False, 'Could not archive your project.')
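# Minimal usage sketch (hypothetical client setup; the Bitbucket constructor
# arguments below are illustrative, not defined in this module):
#   bb = Bitbucket('username', 'password', repo_name_or_slug='myrepo')
#   ok, repos = bb.repository.all()
#   ok, archive_path = bb.repository.archive()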
| isc | 1,174,813,250,221,191,000 | 42.698718 | 132 | 0.598944 | false | 4.177083 | false | false | false |
ping/youtube-dl | youtube_dl/extractor/unity.py | 30 | 1228 | from __future__ import unicode_literals
from .common import InfoExtractor
from .youtube import YoutubeIE
class UnityIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?unity3d\.com/learn/tutorials/(?:[^/]+/)*(?P<id>[^/?#&]+)'
_TESTS = [{
'url': 'https://unity3d.com/learn/tutorials/topics/animation/animate-anything-mecanim',
'info_dict': {
'id': 'jWuNtik0C8E',
'ext': 'mp4',
'title': 'Live Training 22nd September 2014 - Animate Anything',
'description': 'md5:e54913114bd45a554c56cdde7669636e',
'duration': 2893,
'uploader': 'Unity',
'uploader_id': 'Unity3D',
'upload_date': '20140926',
}
}, {
'url': 'https://unity3d.com/learn/tutorials/projects/2d-ufo-tutorial/following-player-camera?playlist=25844',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
youtube_id = self._search_regex(
r'data-video-id="([_0-9a-zA-Z-]+)"',
webpage, 'youtube ID')
return self.url_result(youtube_id, ie=YoutubeIE.ie_key(), video_id=video_id)
| unlicense | -41,882,844,988,352,350 | 37.375 | 117 | 0.576547 | false | 3.257294 | false | false | false |
sparkslabs/kamaelia_ | Sketches/DK/Kamaelia-Paint/App/ToolBox.py | 3 | 5397 | # -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pygame
import Axon
from Axon.Ipc import producerFinished, WaitComplete
from Kamaelia.UI.Pygame.Display import PygameDisplay
from Kamaelia.UI.Pygame.Button import Button
from ColourSelector import ColourSelector
from Slider import Slider
class ToolBox(Axon.Component.component):
Inboxes = {"inbox" : "Receive events from Pygame Display",
"control" : "For shutdown messages",
"callback" : "Receive callbacks from Pygame Display",
"buttons" : "Recieve interrupts from the buttons"
}
Outboxes = {"outbox" : "XY positions emitted here",
"signal" : "For shutdown messages",
"display_signal" : "Outbox used for communicating to the display surface"
}
def __init__(self, position=None, size=(500,500)):
"""x.__init__(...) initializes x; see x.__class__.__doc__ for signature"""
super(ToolBox,self).__init__()
self.size = size
self.dispRequest = { "DISPLAYREQUEST" : True,
"callback" : (self,"callback"),
"events" : (self, "inbox"),
"size": self.size,
"transparency" : None }
if not position is None:
self.dispRequest["position"] = position
def waitBox(self,boxname):
"""Generator. yields 1 until data ready on the named inbox."""
waiting = True
while waiting:
if self.dataReady(boxname): return
else: yield 1
def main(self):
"""Main loop."""
displayservice = PygameDisplay.getDisplayService()
self.link((self,"display_signal"), displayservice)
self.send( self.dispRequest,
"display_signal")
for _ in self.waitBox("callback"): yield 1
self.display = self.recv("callback")
# tool buttons
circleb = Button(caption="Circle",position=(10,10), msg = (("Tool", "Circle"),)).activate()
eraseb = Button(caption="Eraser",position=(100,10), msg = (("Tool", "Eraser"),)).activate()
lineb = Button(caption="Line",position=(10,50), msg = (("Tool", "Line"),)).activate()
bucketb = Button(caption="Bucket",position=(10,90), msg = (("Tool", "Bucket"),)).activate()
eyeb = Button(caption="Eyedropper",position=(10,130), msg = (("Tool", "Eyedropper"),)).activate()
addlayerb = Button(caption="Add Layer",position=(10,540), msg = (("Layer", "Add"),)).activate()
prevlayerb = Button(caption="<-",position=(80,540), msg = (("Layer", "Prev"),)).activate()
nextlayerb = Button(caption="->",position=(110,540), msg = (("Layer", "Next"),)).activate()
dellayerb = Button(caption="Delete",position=(140,540), msg = (("Layer", "Delete"),)).activate()
self.link( (circleb,"outbox"), (self,"outbox"), passthrough = 2 )
self.link( (eraseb,"outbox"), (self,"outbox"), passthrough = 2 )
self.link( (lineb,"outbox"), (self,"outbox"), passthrough = 2 )
self.link( (bucketb,"outbox"), (self,"outbox"), passthrough = 2 )
self.link( (eyeb,"outbox"), (self,"outbox"), passthrough = 2 )
self.link( (addlayerb,"outbox"), (self,"outbox"), passthrough = 2 )
self.link( (prevlayerb,"outbox"), (self,"outbox"), passthrough = 2 )
self.link( (nextlayerb,"outbox"), (self,"outbox"), passthrough = 2 )
self.link( (dellayerb,"outbox"), (self,"outbox"), passthrough = 2 )
colSel = ColourSelector(position = (10,170), size = (255,255)).activate()
self.link( (colSel,"outbox"), (self,"outbox"), passthrough = 2 )
SizeSlider = Slider(size=(255, 50), messagePrefix = "Size", position = (10, 460), default = 9).activate()
self.link( (SizeSlider,"outbox"), (self,"outbox"), passthrough = 2 )
AlphaSlider = Slider(size=(255, 10), messagePrefix = "Alpha", position = (10, 515), default = 255).activate()
self.link( (AlphaSlider,"outbox"), (self,"outbox"), passthrough = 2 )
self.drawBG()
done = False
while not done:
if not self.anyReady():
self.pause()
yield 1
def drawBG(self):
self.display.fill( (255,255,255) )
if __name__ == "__main__":
from Kamaelia.Chassis.Pipeline import Pipeline
from Kamaelia.Util.Console import ConsoleEchoer
Pipeline(ToolBox(size = (275,600)), ConsoleEchoer()).run()
# ToolBox(size = (275,600)).activate()
Axon.Scheduler.scheduler.run.runThreads()
| apache-2.0 | 1,796,602,243,262,130,200 | 48.972222 | 117 | 0.602001 | false | 3.868817 | false | false | false |
Ohmnivore/HeadRush | modexample/hasher.py | 1 | 1213 | #!/usr/bin/python
import sys
import os
import hashlib
import json
path = ""
towrite = {}
def PrintHelp():
print """\nHasher v0.1 is an utility that produces a .txt of MD5 checksums in a folder.
Simply run in a terminal and specify the folder's path as an argument. The .txt will be created
or updated if already existent in the above-mentioned folder."""
def hashfile(afile, hasher, blocksize=65536):
buf = afile.read(blocksize)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(blocksize)
print " ->" + hasher.hexdigest()
print ""
return str(hasher.hexdigest())
def Hash(p):
print ""
	for root, dirs, files in os.walk(p):
		for file in files:
			if file != "hashes.txt":
				print file
				# Use the directory being walked (root, not p) so files in
				# subfolders open correctly; read in binary mode and hash with
				# a fresh MD5 object -- the original reused one global hasher,
				# so every digest after the first accumulated earlier files.
				with open(os.path.join(root, file), 'rb') as filex:
					towrite[file] = hashfile(filex, hashlib.md5())
json.dump(towrite, open(os.path.join(p, 'hashes.txt'), 'w'))
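# Example invocation (illustrative path):
#   python hasher.py /path/to/folder
# creates or updates /path/to/folder/hashes.txt mapping each filename to its
# MD5 hex digest.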
if len(sys.argv) <= 1:
print "\nTry -h/help arguments for help."
else:
if sys.argv[1] == "-h" or sys.argv[1] == "help":
PrintHelp()
else:
path = sys.argv[1]
Hash(path) | gpl-3.0 | -8,781,004,111,173,241,000 | 24.829787 | 95 | 0.591921 | false | 3.350829 | false | false | false |
Ezeer/VegaStrike_win32FR | vegastrike/boost/1_28/src/gen_function.py | 2 | 8743 | r"""
>>> template = ''' template <class T%(, class A%+%)>
... static PyObject* call( %1(T::*pmf)(%(A%+%:, %))%2, PyObject* args, PyObject* ) {
... PyObject* self;
... %( PyObject* a%+;
... %) if (!PyArg_ParseTuple(args, const_cast<char*>("O%(O%)"), &self%(, &a%+%)))
... return 0;
... T& target = from_python(self, type<T&>());
... %3to_python((target.*pmf)(%(
... from_python(a%+, type<A%+>())%:,%)
... ));%4
... }'''
>>> print gen_function(template, 0, 'R ', '', 'return ', '')
template <class T>
static PyObject* call( R (T::*pmf)(), PyObject* args, PyObject* ) {
PyObject* self;
if (!PyArg_ParseTuple(args, const_cast<char*>("O"), &self))
return 0;
T& target = from_python(self, type<T&>());
return to_python((target.*pmf)(
));
}
>>> print gen_function(template, 2, 'R ', '', 'return ', '')
template <class T, class A1, class A2>
static PyObject* call( R (T::*pmf)(A1, A2), PyObject* args, PyObject* ) {
PyObject* self;
PyObject* a1;
PyObject* a2;
if (!PyArg_ParseTuple(args, const_cast<char*>("OOO"), &self, &a1, &a2))
return 0;
T& target = from_python(self, type<T&>());
return to_python((target.*pmf)(
from_python(a1, type<A1>()),
from_python(a2, type<A2>())
));
}
>>> print gen_function(template, 3, 'void ', ' const', '', '\n'+8*' ' + 'return none();')
template <class T, class A1, class A2, class A3>
static PyObject* call( void (T::*pmf)(A1, A2, A3) const, PyObject* args, PyObject* ) {
PyObject* self;
PyObject* a1;
PyObject* a2;
PyObject* a3;
if (!PyArg_ParseTuple(args, const_cast<char*>("OOOO"), &self, &a1, &a2, &a3))
return 0;
T& target = from_python(self, type<T&>());
to_python((target.*pmf)(
from_python(a1, type<A1>()),
from_python(a2, type<A2>()),
from_python(a3, type<A3>())
));
return none();
}
"""
import string
def _find(s, sub, start=0, end=None):
"""Just like string.find, except it returns end or len(s) when not found.
"""
if end == None:
end = len(s)
pos = string.find(s, sub, start, end)
if pos < 0:
return end
else:
return pos
def _raise_no_argument(key, n, args):
raise IndexError(str(key) + " extra arg(s) not passed to gen_function")
def _gen_common_key(key, n, args, fill = _raise_no_argument):
# import sys
# print >> sys.stderr, "_gen_common_key(", repr(key), ",", repr(n), ',', repr(args), ',', fill, ')'
# sys.stderr.flush()
if len(key) > 0 and key in '123456789':
index = int(key) - 1;
if index >= len(args):
return fill(key, n, args)
arg = args[index]
if callable(arg):
return str(arg(key, n, args))
else:
return str(arg)
elif key in ('x','n','-','+'):
return str(n + {'-':-1,'+':+1,'x':0,'n':0}[key])
else:
return key
def _gen_arg(template, n, args, fill = _raise_no_argument):
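    # Expands a flat template fragment: each '%<key>' is substituted via
    # _gen_common_key; grouping and repetition are handled by gen_function.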
result = ''
i = 0
while i < len(template): # until the template is consumed
# consume everything up to the first '%'
delimiter_pos = _find(template, '%', i)
result = result + template[i:delimiter_pos]
# The start position of whatever comes after the '%'+key
start = delimiter_pos + 2
key = template[start - 1 : start] # the key character. If there were no
# '%'s left, key will be empty
if 0 and key == 'n':
result = result + `n`
else:
result = result + _gen_common_key(key, n, args, fill)
i = start
return result
def gen_function(template, n, *args, **keywords):
r"""gen_function(template, n, [args...] ) -> string
Generate a function declaration based on the given template.
Sections of the template between '%(', '%)' pairs are repeated n times. If '%:'
appears in the middle, it denotes the beginning of a '%'.
Sections of the template between '%{', '%}' pairs are ommitted if n == 0.
%n is transformed into the string representation of 1..n for each
repetition within %(...%). Elsewhere, %n is transformed into the
string representation of n
%- is transformed into the string representation of 0..n-1 for
each repetition within %(...%). Elsewhere, %- is transformed into the
string representation of n-1.
%+ is transformed into the string representation of 2..n+1 for
each repetition within %(...%). Elsewhere, %- is transformed into the
string representation of n+1.
%x is always transformed into the string representation of n
%z, where z is a digit, selects the corresponding additional
argument. If that argument is callable, it is called with three
arguments:
key - the string representation of 'z'
n - the iteration number
args - a tuple consisting of all the additional arguments to
this function
otherwise, the selected argument is converted to a string representation
for example,
>>> gen_function('%1 abc%x(%(int a%n%:, %));%{ // all args are ints%}', 2, 'void')
'void abc2(int a0, int a1); // all args are ints'
>>> gen_function('%1 abc(%(int a%n%:, %));%{ // all args are ints%}', 0, 'x')
'x abc();'
>>> gen_function('%1 abc(%(int a%n%:, %));%{ // all args are ints%}', 0, lambda key, n, args: 'abcd'[n])
'a abc();'
>>> gen_function('%2 %1 abc(%(int a%n%:, %));%{ // all args are ints%}', 0, 'x', fill = lambda key, n, args: 'const')
'const x abc();'
>>> gen_function('abc%[k%:v%]', 0, fill = lambda key, n, args, value = None: '<' + key + ',' + value + '>')
'abc<k,v>'
"""
expand = (lambda s, n = n:
apply(gen_function, (s, n) + args, keywords))
fill = keywords.get('fill', _raise_no_argument);
result = ''
i = 0
while i < len(template): # until the template is consumed
# consume everything up to the first '%'
delimiter_pos = _find(template, '%', i)
result = result + template[i:delimiter_pos]
# The start position of whatever comes after the '%'+key
start = delimiter_pos + 2
key = template[start - 1 : start] # the key character. If there were no
# '%'s left, key will be empty
pairs = { '(':')', '{':'}', '[':']' }
if key in pairs.keys():
end = string.find(template, '%' + pairs[key], start)
assert end >= 0, "Matching '" + '%' + pairs[key] +"' not found!"
delimiter_pos = end
if key == '{':
if n > 0:
result = result + expand(template[start:end])
else:
separator_pos = _find(template, '%:', start, end)
remainder = template[separator_pos+2 : end]
if key == '(':
for x in range(n):
iteration = expand(
template[start:separator_pos], x)
result = result + expand(iteration, x)
if x != n - 1:
result = result + expand(remainder, x)
else:
function_result = fill(
template[start:separator_pos], n, args, value = remainder)
result = result + expand(function_result)
else:
result = result + expand(_gen_common_key(key, n, args, fill))
i = delimiter_pos + 2
return result
def gen_functions(template, n, *args, **keywords):
r"""gen_functions(template, n, [args...]) -> string
Call gen_function repeatedly with from 0..n and the given optional
arguments.
>>> print gen_functions('%1 abc(%(int a%n%:, %));%{ // all args are ints%}\n', 2, 'void'),
void abc();
void abc(int a0); // all args are ints
void abc(int a0, int a1); // all args are ints
"""
fill = keywords.get('fill', _raise_no_argument);
result = ''
for x in range(n + 1):
result = result + apply(gen_function, (template, x) + args, keywords)
return result
if __name__ == '__main__':
import doctest
import sys
doctest.testmod(sys.modules.get(__name__))
| mit | 355,258,400,327,322,940 | 34.979424 | 121 | 0.510008 | false | 3.707803 | false | false | false |
40huo/cobra | cobra/cli.py | 2 | 4483 | # -*- coding: utf-8 -*-
"""
cli
~~~
Implements CLI mode
:author: Feei <[email protected]>
:homepage: https://github.com/WhaleShark-Team/cobra
:license: MIT, see LICENSE for more details.
:copyright: Copyright (c) 2018 Feei. All rights reserved
"""
import re
import os
from .detection import Detection
from .engine import scan, Running
from .exceptions import PickupException
from .export import write_to_file
from .log import logger
from .pickup import Directory
from .send_mail import send_mail
from .utils import ParseArgs
from .utils import md5, random_generator, clean_dir
from .push_to_api import PushToThird
def get_sid(target, is_a_sid=False):
target = target
if isinstance(target, list):
target = ';'.join(target)
sid = md5(target)[:5]
if is_a_sid:
pre = 'a'
else:
pre = 's'
sid = '{p}{sid}{r}'.format(p=pre, sid=sid, r=random_generator())
return sid.lower()
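# Illustrative example (actual values depend on md5/random_generator): a scan
# of '/tmp/project' yields an id of the form 's' + the first five md5 hex
# chars of the target + a random suffix.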
def start(target, formatter, output, special_rules, a_sid=None, is_del=False):
"""
Start CLI
:param target: File, FOLDER, GIT
:param formatter:
:param output:
:param special_rules:
:param a_sid: all scan id
:param is_del: del target directory
:return:
"""
# generate single scan id
s_sid = get_sid(target)
r = Running(a_sid)
data = (s_sid, target)
r.init_list(data=target)
r.list(data)
report = '?sid={a_sid}'.format(a_sid=a_sid)
d = r.status()
d['report'] = report
r.status(d)
logger.info('[REPORT] Report URL: {u}'.format(u=report))
# parse target mode and output mode
pa = ParseArgs(target, formatter, output, special_rules, a_sid=None)
target_mode = pa.target_mode
output_mode = pa.output_mode
# target directory
try:
target_directory = pa.target_directory(target_mode)
target_directory = target_directory.rstrip("/")
logger.info('[CLI] Target directory: {d}'.format(d=target_directory))
# static analyse files info
files, file_count, time_consume = Directory(target_directory).collect_files()
# detection main language and framework
dt = Detection(target_directory, files)
main_language = dt.language
main_framework = dt.framework
logger.info('[CLI] [STATISTIC] Language: {l} Framework: {f}'.format(l=main_language, f=main_framework))
logger.info('[CLI] [STATISTIC] Files: {fc}, Extensions:{ec}, Consume: {tc}'.format(fc=file_count,
ec=len(files),
tc=time_consume))
if pa.special_rules is not None:
logger.info('[CLI] [SPECIAL-RULE] only scan used by {r}'.format(r=','.join(pa.special_rules)))
# scan
scan(target_directory=target_directory, a_sid=a_sid, s_sid=s_sid, special_rules=pa.special_rules,
language=main_language, framework=main_framework, file_count=file_count, extension_count=len(files))
if target_mode == 'git' and '/tmp/cobra/git/' in target_directory and is_del is True:
res = clean_dir(target_directory)
if res is True:
logger.info('[CLI] Target directory remove success')
else:
logger.info('[CLI] Target directory remove fail')
except PickupException:
result = {
'code': 1002,
'msg': 'Repository not exist!'
}
Running(s_sid).data(result)
logger.critical('Repository or branch not exist!')
exit()
except Exception:
result = {
'code': 1002,
'msg': 'Exception'
}
Running(s_sid).data(result)
raise
    # Match an email address
if re.match(r'^[A-Za-z\d]+([-_.][A-Za-z\d]+)*@([A-Za-z\d]+[-.])+[A-Za-z\d]{2,4}$', output):
        # Generate the mail attachment
attachment_name = s_sid + '.' + formatter
write_to_file(target=target, sid=s_sid, output_format=formatter, filename=attachment_name)
        # Send the mail
send_mail(target=target, filename=attachment_name, receiver=output)
elif output.startswith('http'):
# HTTP API URL
pusher = PushToThird(url=output)
pusher.add_data(target=target, sid=s_sid)
pusher.push()
else:
write_to_file(target=target, sid=s_sid, output_format=formatter, filename=output)
| mit | 4,558,426,633,579,418,600 | 32.719697 | 113 | 0.590429 | false | 3.5608 | false | false | false |
memsharded/conan | conans/client/recorder/upload_recoder.py | 1 | 1783 | from collections import OrderedDict, namedtuple
from datetime import datetime
from conans.model.ref import ConanFileReference
class _UploadElement(namedtuple("UploadElement", "reference, remote_name, remote_url, time")):
def __new__(cls, reference, remote_name, remote_url):
the_time = datetime.utcnow()
return super(cls, _UploadElement).__new__(cls, reference, remote_name, remote_url, the_time)
def to_dict(self):
ret = {"remote_name": self.remote_name,
"remote_url": self.remote_url, "time": self.time}
ret.update(_id_dict(self.reference))
return ret
def _id_dict(ref):
if isinstance(ref, ConanFileReference):
ret = {"id": str(ref)}
else:
ret = {"id": ref.id}
    # FIXME: When the revisions feature is completely released this field
    # should always be present, with None if needed
if ref.revision:
ret["revision"] = ref.revision
return ret
class UploadRecorder(object):
def __init__(self):
self.error = False
self._info = OrderedDict()
def add_recipe(self, ref, remote_name, remote_url):
self._info[str(ref)] = {"recipe": _UploadElement(ref, remote_name, remote_url),
"packages": []}
def add_package(self, pref, remote_name, remote_url):
self._info[str(pref.ref)]["packages"].append(_UploadElement(pref, remote_name, remote_url))
def get_info(self):
info = {"error": self.error, "uploaded": []}
for item in self._info.values():
recipe_info = item["recipe"].to_dict()
packages_info = [package.to_dict() for package in item["packages"]]
info["uploaded"].append({"recipe": recipe_info, "packages": packages_info})
return info
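# Minimal usage sketch (ref/pref are assumed ConanFileReference /
# PackageReference instances supplied by the surrounding client code):
#   recorder = UploadRecorder()
#   recorder.add_recipe(ref, "my-remote", "https://example.com/conan")
#   recorder.add_package(pref, "my-remote", "https://example.com/conan")
#   info = recorder.get_info()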
| mit | -8,984,704,860,235,695,000 | 31.418182 | 100 | 0.618059 | false | 3.901532 | false | false | false |
robert-budde/smarthome | tools/build_requirements.py | 4 | 3021 | #!/usr/bin/env python3
# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab
#########################################################################
# Copyright 2018- Martin Sinn [email protected]
#########################################################################
# This file is part of SmartHomeNG
#
# SmartHomeNG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SmartHomeNG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SmartHomeNG If not, see <http://www.gnu.org/licenses/>.
#########################################################################
"""
This script assembles a complete list of requirements for the SmartHomeNG core and all plugins.
The list is not tested for correctness nor checked for contrary
requirements.
The procedure is as follows:
1) walk the plugins subdirectory and collect all files with requirements
2) read the requirements for the core
3) read all files with requirements and add them, with the source of each requirement, to a dict
4) write it all to a file all.txt in the requirements directory
"""
import os
import sys
sh_basedir = os.sep.join(os.path.realpath(__file__).split(os.sep)[:-2])
sys.path.insert(0, sh_basedir)
program_name = sys.argv[0]
arguments = sys.argv[1:]
if "-debug_tox" in arguments:
import logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('build_requirements')
logger.setLevel(logging.DEBUG)
logger.debug("sys.path = {}".format(sys.path))
import lib.shpypi as shpypi
# ==========================================================================
selection = 'all'
if not os.path.exists(os.path.join(sh_basedir, 'modules')):
print ("Directory <shng-root>/modules not found!")
exit(1)
if not os.path.exists(os.path.join(sh_basedir, 'plugins')):
print ("Directory <shng-root>/plugins not found!")
exit(1)
if not os.path.exists(os.path.join(sh_basedir, 'requirements')):
print ("Directory <shng-root>/requirements not found!")
exit(1)
req_files = shpypi.Requirements_files()
# req_files.create_requirementsfile('core')
# print("File 'requirements" + os.sep + "core.txt' created.")
# req_files.create_requirementsfile('modules')
# print("File 'requirements" + os.sep + "modules.txt' created.")
fn = req_files.create_requirementsfile('base')
print("File {} created.".format(fn))
# req_files.create_requirementsfile('plugins')
# print("File 'requirements" + os.sep + "plugins.txt' created.")
fn = req_files.create_requirementsfile('all')
print("File {} created.".format(fn))
| gpl-3.0 | -4,602,444,489,486,859,000 | 35.841463 | 95 | 0.652102 | false | 3.853316 | false | false | false |
tengqm/senlin | senlin/profiles/os/heat/resource.py | 1 | 1387 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#from senlin.drivers import heat_v1 as heat
from senlin.profiles import base
__type_name__ = 'os.heat.resource'
class ResourceProfile(base.Profile):
'''Profile for an OpenStack Heat resource.
    When this profile is used, the whole cluster is a Heat stack, composed
    of resources initialized from this profile.
'''
spec_schema = {}
def __init__(self, ctx, name, type_name=__type_name__, **kwargs):
super(ResourceProfile, self).__init__(ctx, name, type_name, **kwargs)
def do_create(self):
return {}
def do_delete(self, id):
return True
def do_update(self, ):
self.status = self.UPDATING
# TODO(anyone): do update
self.status = self.ACTIVE
return {}
def do_check(self, id):
#TODO(liuh): add actual checking logic
return True
| apache-2.0 | 4,180,324,875,907,093,500 | 30.522727 | 77 | 0.677722 | false | 3.974212 | false | false | false |
PXL-CF2016/pxl-master-server | pxl_master/settings.py | 1 | 3529 | """
Django settings for pxl_master project.
Generated by 'django-admin startproject' using Django 1.9.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '&6_#1&951zc&9*$9br_#jezu2%7s&7cx5^%w5@z8_@aq#5!epe'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'pxl',
'rest_framework.authtoken',
]
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
# 'rest_framework.authentication.BasicAuthentication',
'rest_framework.authentication.TokenAuthentication',
),
'PAGE_SIZE': 10
}
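# With TokenAuthentication enabled, API clients authenticate by sending an
# "Authorization: Token <key>" header; tokens are issued via the
# rest_framework.authtoken app listed in INSTALLED_APPS above.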
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'pxl_master.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'pxl_master.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'pxldb',
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
| mit | 8,296,968,059,707,074,000 | 25.533835 | 91 | 0.684046 | false | 3.568251 | false | false | false |
linsalrob/PyFBA | PyFBA/filters/reactions_and_proteins.py | 2 | 1568 | import sys
def reactions_with_no_proteins(reactions, verbose=False):
"""
Figure out which reactions in our set have no proteins associated with them.
:param reactions: The reactions dictionary
:type reactions: dict
:param verbose: prints out how many reactions have no proteins out of the total
:type verbose: bool
:return: a set of reaction ids that have no proteins associated with them.
:rtype: set
"""
nopegs = set()
for r in reactions:
if reactions[r].number_of_enzymes() == 0:
nopegs.add(r)
if verbose:
sys.stderr.write("REACTIONS WITH NO PROTEINS: {} reactions have no pegs associated ".format(len(nopegs)) +
"with them (out of {} reactions)\n".format(len(reactions)))
return nopegs
def reactions_with_proteins(reactions, verbose=False):
"""
Figure out which reactions in our set have proteins associated with them.
:param reactions: The reactions dictionary
:type reactions: dict
:param verbose: prints out how many reactions have no proteins out of the total
:type verbose: bool
:return: a set of reaction ids that have proteins associated with them.
:rtype: set
"""
pegs = set()
for r in reactions:
if reactions[r].number_of_enzymes() != 0:
pegs.add(r)
if verbose:
sys.stderr.write("REACTIONS WITH PROTEINS: {} reactions have pegs associated ".format(len(pegs)) +
"with them (out of {} reactions)\n".format(len(reactions)))
return pegs
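# --- Usage sketch (illustrative only) ------------------------------------------
# Real reaction objects come from PyFBA's model parsers; the stub below merely
# mimics the number_of_enzymes() interface so the example is self-contained.
if __name__ == "__main__":
    class _StubReaction(object):
        def __init__(self, n_enzymes):
            self.n_enzymes = n_enzymes
        def number_of_enzymes(self):
            return self.n_enzymes
    reactions = {"rxn00001": _StubReaction(0), "rxn00002": _StubReaction(2)}
    print(reactions_with_no_proteins(reactions))  # {'rxn00001'}
    print(reactions_with_proteins(reactions))     # {'rxn00002'}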
| mit | 1,700,619,016,538,963,500 | 31 | 114 | 0.649872 | false | 3.742243 | false | false | false |
yipenggao/moose | python/TestHarness/testers/ImageDiff.py | 3 | 2914 | from FileTester import FileTester
import os
import sys
from mooseutils.ImageDiffer import ImageDiffer
class ImageDiff(FileTester):
@staticmethod
def validParams():
params = FileTester.validParams()
params.addRequiredParam('imagediff', [], 'A list of files to compare against the gold.')
        params.addParam('allowed', 0.98, "Absolute zero cutoff used in image diff comparisons.")
        params.addParam('allowed_linux', "Absolute zero cutoff used for Linux machines; if not provided 'allowed' is used.")
        params.addParam('allowed_darwin', "Absolute zero cutoff used for Mac OS (Darwin) machines; if not provided 'allowed' is used.")
# We don't want to check for any errors on the screen with this. If there are any real errors then the image test will fail.
params['errors'] = []
params['display_required'] = True
return params
def __init__(self, name, params):
FileTester.__init__(self, name, params)
def getOutputFiles(self):
return self.specs['imagediff']
def processResults(self, moose_dir, options, output):
"""
Perform image diff
"""
# Call base class processResults
FileTester.processResults(self, moose_dir, options, output)
if self.getStatus() == self.bucket_fail:
return output
# Loop through files
specs = self.specs
for filename in specs['imagediff']:
# Error if gold file does not exist
if not os.path.exists(os.path.join(specs['test_dir'], specs['gold_dir'], filename)):
output += "File Not Found: " + os.path.join(specs['test_dir'], specs['gold_dir'], filename)
self.setStatus('MISSING GOLD FILE', self.bucket_fail)
break
# Perform diff
else:
                output += 'Running ImageDiffer.py'
gold = os.path.join(specs['test_dir'], specs['gold_dir'], filename)
test = os.path.join(specs['test_dir'], filename)
if sys.platform in ['linux', 'linux2']:
name = 'allowed_linux'
elif sys.platform == 'darwin':
name = 'allowed_darwin'
allowed = specs[name] if specs.isValid(name) else specs['allowed']
differ = ImageDiffer(gold, test, allowed=allowed)
# Update golds (e.g., uncomment this to re-gold for new system or new defaults)
#import shutil; shutil.copy(test, gold)
output += differ.message()
if differ.fail():
self.setStatus('IMAGEDIFF', self.bucket_diff)
break
# If status is still pending, then it is a passing test
if self.getStatus() == self.bucket_pending:
self.setStatus(self.success_message, self.bucket_success)
return output
| lgpl-2.1 | -514,916,517,787,486,200 | 40.042254 | 136 | 0.595402 | false | 4.317037 | true | false | false |
Kyria/LazyBlacksmith | lazyblacksmith/models/sde/item.py | 1 | 3146 | # -*- encoding: utf-8 -*-
from . import db
from .activity import Activity
from flask import url_for
class Item(db.Model):
id = db.Column(db.Integer, primary_key=True, autoincrement=False)
name = db.Column(db.String(100), nullable=True)
max_production_limit = db.Column(db.Integer, nullable=True)
market_group_id = db.Column(db.Integer)
group_id = db.Column(db.Integer)
category_id = db.Column(db.Integer)
volume = db.Column(db.Numeric(precision=16, scale=4, decimal_return_scale=4, asdecimal=False), nullable=True)
# calculated field on import
is_from_manufacturing = db.Column(db.Boolean(), default=True)
is_from_reaction = db.Column(db.Boolean(), default=True)
base_cost = db.Column(
db.Numeric(
precision=17,
scale=2,
decimal_return_scale=2,
asdecimal=False
),
nullable=True,
)
# foreign keys
activities = db.relationship(
'Activity',
backref='blueprint',
lazy='dynamic'
)
activity_products = db.relationship(
'ActivityProduct',
backref='blueprint',
lazy='dynamic',
foreign_keys='ActivityProduct.item_id'
)
activity_skills = db.relationship(
'ActivitySkill',
backref='blueprint',
lazy='dynamic',
foreign_keys='ActivitySkill.item_id'
)
activity_materials = db.relationship(
'ActivityMaterial',
backref='blueprint',
lazy='dynamic',
foreign_keys='ActivityMaterial.item_id'
)
product_for_activities = db.relationship(
'ActivityProduct',
backref='product',
lazy='dynamic',
foreign_keys='ActivityProduct.product_id'
)
skill_for_activities = db.relationship(
'ActivitySkill',
backref='skill',
lazy='dynamic',
foreign_keys='ActivitySkill.skill_id'
)
material_for_activities = db.relationship(
'ActivityMaterial',
backref='material',
lazy='dynamic',
foreign_keys='ActivityMaterial.material_id'
)
    # relationship defined only for performance reasons
# ------------------------------------------------
activity_products__eager = db.relationship(
'ActivityProduct',
lazy='joined',
foreign_keys='ActivityProduct.item_id'
)
def icon_32(self):
static_url = "ccp/Types/%d_32.png" % self.id
return url_for('static', filename=static_url)
def icon_64(self):
static_url = "ccp/Types/%d_64.png" % self.id
return url_for('static', filename=static_url)
def is_moon_goo(self):
return self.market_group_id == 499
def is_pi(self):
return self.category_id == 43
def is_mineral_salvage(self):
return self.market_group_id in [1857, 1033, 1863]
def is_ancient_relic(self):
return self.category_id == 34
def is_cap_part(self):
""" Return if the item is a cap part / blueprint of cap part.
914 / 915 are Blueprints
913 / 873 are their respective items """
return self.group_id in [914, 915, 913, 873]
| bsd-3-clause | -4,950,217,859,485,991,000 | 28.12963 | 113 | 0.601081 | false | 3.813333 | false | false | false |
d5c5ceb0/currency.workflow | src/currency.py | 1 | 3812 | #!/usr/bin/python
# encoding: utf-8
import sys
from workflow import Workflow, web
import datetime
ICON_DEFAULT = 'icon.png'
def get_yunbi_tickers(query):
url = 'https://yunbi.com//api/v2/tickers.json'
r = web.get(url)
r.raise_for_status()
tickers = r.json()
for name in tickers:
if name[:-3] not in query:
continue
last = tickers[name]['ticker']['last']
sell = tickers[name]['ticker']['sell']
high = tickers[name]['ticker']['high']
buy = tickers[name]['ticker']['buy']
low = tickers[name]['ticker']['low']
vol = tickers[name]['ticker']['vol']
yunb_title = (name[:-3]).upper() + u":\t云币\t最新价 " + str(last)
yunb_arg = "https://yunbi.com/markets/" + name
yunb_subtitle = u"量:" + vol + u" 买:" + str(buy) + u" 卖:" + \
str(sell) + u" 高:" + high + u" 低:" + low
wf.add_item(title = yunb_title,
subtitle = yunb_subtitle,
arg = yunb_arg,
valid = True,
icon = ICON_DEFAULT)
def get_jubi_tickers(query):
url = 'https://www.jubi.com/api/v1/allticker'
r = web.get(url)
r.raise_for_status()
tickers = r.json()
for name in tickers:
if name not in query:
continue
last = tickers[name]['last']
buy = tickers[name]['buy']
sell = tickers[name]['sell']
high = tickers[name]['high']
low = tickers[name]['low']
vol = tickers[name]['vol']
jub_title = name.upper() + u":\t聚币\t最新价 " + str(last)
jub_arg = "https://www.jubi.com/coin/" + name
jub_subtitle = u"量:" + str(vol) + u" 买:" + str(buy) + u" 卖:" + \
str(sell) + u" 高:" + str(high) + u" 低:" + str(low)
wf.add_item(title = jub_title,
subtitle = jub_subtitle,
arg = jub_arg,
valid = True,
icon = ICON_DEFAULT)
def get_yuanbao_tickers(query):
names = ['ans', 'btc', 'eth', 'etc', 'ltc', 'zec', 'qtum']
for id in query:
if id not in names:
continue
url = 'https://www.yuanbao.com/api_market/getInfo_cny/coin/' + id
r = web.get(url)
r.raise_for_status()
ticker = r.json()
name = ticker['name']
last = ticker['price']
buy = ticker['buy']
sell = ticker['sale']
high = ticker['max']
low = ticker['min']
vol = ticker['volume_24h']
url = ticker['Markets']
yuanb_title = name + u":\t元宝\t最新价 " + last
yuanb_subtitle = u"量:" + vol + u" 买:" + str(buy) + u" 卖:" + \
str(sell) + u" 高:" + high + u" 低:" + low
wf.add_item(title = yuanb_title,
subtitle = yuanb_subtitle,
arg = url,
valid = True,
icon = ICON_DEFAULT)
def main(wf):
query = wf.args[0].strip().replace("\\", "")
if not isinstance(query, unicode):
query = query.decode('utf8')
names = ['ans', 'btc', 'eth', 'etc', 'ltc', 'zec', 'qtum', 'bts', 'eos', 'sc']
if query == 'yun':
get_yunbi_tickers(names)
elif query == 'ju':
get_jubi_tickers(names)
elif query == 'bao':
get_yuanbao_tickers(names)
else:
get_yuanbao_tickers([query])
get_yunbi_tickers([query])
get_jubi_tickers([query])
wf.send_feedback()
if __name__ == '__main__':
wf = Workflow()
logger = wf.logger
sys.exit(wf.run(main))
| apache-2.0 | -9,168,712,520,551,374,000 | 28.3125 | 82 | 0.466951 | false | 3.131886 | false | false | false |
Peanhua/diamond-girl | blender-py/treasure-models.py | 1 | 1477 | import bpy
import sys
scene = bpy.context.scene
scene.frame_set(1)
output_filename = None
parsing = False
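# Blender ignores everything after "--" on its own command line and leaves it
# in sys.argv for the script; the first token after "--" is taken below as the
# target .obj filename (or "*" to export every exportable object).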
for i in sys.argv:
if parsing == True:
output_filename = i
elif i == '--':
parsing = True
for obj in bpy.data.objects:
obj.select = False
for obj in bpy.data.objects:
if obj.name != 'Lamp' and obj.name != 'Lamp2' and obj.name != 'Camera':
filename = obj.name
if output_filename != None:
if output_filename == '*' or output_filename == filename + '.obj':
obj.hide = False
obj.hide_render = False
obj.select = True
#bpy.ops.export_scene.autodesk_3ds(filepath=bpy.path.abspath(filename))
bpy.ops.export_scene.obj(filepath=bpy.path.abspath(filename) + '.obj',
check_existing=False,
use_selection=True,
use_normals=True,
use_uvs=True,
use_materials=True,
use_triangles=True,
group_by_material=True,
path_mode='RELATIVE')
obj.select = False
obj.hide = True
obj.hide_render = True
else:
print('Filename: ' + filename)
| gpl-2.0 | 4,880,896,058,564,181,000 | 32.568182 | 87 | 0.450237 | false | 4.630094 | false | false | false |
odicraig/kodi2odi | addons/plugin.video.salts/scrapers/tvshow_scraper.py | 1 | 4494 | """
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urlparse
import kodi
import log_utils # @UnusedImport
import dom_parser
from salts_lib import scraper_utils
from salts_lib.constants import FORCE_NO_MATCH
from salts_lib.constants import VIDEO_TYPES
from salts_lib.constants import QUALITIES
from salts_lib.constants import Q_ORDER
from salts_lib.utils2 import i18n
import scraper
BASE_URL = 'http://www.tvshow.me'
class Scraper(scraper.Scraper):
base_url = BASE_URL
def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
self.timeout = timeout
self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))
@classmethod
def provides(cls):
return frozenset([VIDEO_TYPES.EPISODE])
@classmethod
def get_name(cls):
return 'TVShow.me'
def get_sources(self, video):
source_url = self.get_url(video)
hosters = []
if source_url and source_url != FORCE_NO_MATCH:
url = urlparse.urljoin(self.base_url, source_url)
html = self._http_get(url, require_debrid=True, cache_limit=.5)
title = dom_parser.parse_dom(html, 'title')
if title:
title = re.sub('^\[ST\]\s*–\s*', '', title[0])
meta = scraper_utils.parse_episode_link(title)
page_quality = scraper_utils.height_get_quality(meta['height'])
else:
page_quality = QUALITIES.HIGH
fragment = dom_parser.parse_dom(html, 'section', {'class': '[^"]*entry-content[^"]*'})
if fragment:
for section in dom_parser.parse_dom(fragment[0], 'p'):
match = re.search('([^<]*)', section)
meta = scraper_utils.parse_episode_link(match.group(1))
if meta['episode'] != '-1' or meta['airdate']:
section_quality = scraper_utils.height_get_quality(meta['height'])
else:
section_quality = page_quality
if Q_ORDER[section_quality] < Q_ORDER[page_quality]:
quality = section_quality
else:
quality = page_quality
for stream_url in dom_parser.parse_dom(section, 'a', ret='href'):
host = urlparse.urlparse(stream_url).hostname
hoster = {'multi-part': False, 'host': host, 'class': self, 'views': None, 'url': stream_url, 'rating': None, 'quality': quality, 'direct': False}
hosters.append(hoster)
return hosters
def get_url(self, video):
return self._blog_get_url(video, delim=' ')
@classmethod
def get_settings(cls):
settings = super(cls, cls).get_settings()
settings = scraper_utils.disable_sub_check(settings)
name = cls.get_name()
settings.append(' <setting id="%s-filter" type="slider" range="0,180" option="int" label=" %s" default="60" visible="eq(-3,true)"/>' % (name, i18n('filter_results_days')))
settings.append(' <setting id="%s-select" type="enum" label=" %s" lvalues="30636|30637" default="0" visible="eq(-4,true)"/>' % (name, i18n('auto_select')))
return settings
def search(self, video_type, title, year, season=''): # @UnusedVariable
html = self._http_get(self.base_url, params={'s': title}, require_debrid=True, cache_limit=1)
post_pattern = '<h\d+[^>]+class="entry-title[^>]*>\s*<[^>]+href="(?P<url>[^"]*/(?P<date>\d{4}/\d{1,2}/\d{1,2})[^"]*)[^>]+>(?:\[ST\]\s+–\s*)?(?P<post_title>[^<]+)'
date_format = '%Y/%m/%d'
return self._blog_proc_results(html, post_pattern, date_format, video_type, title, year)
| gpl-3.0 | -5,407,110,154,330,804,000 | 43.94 | 191 | 0.58656 | false | 3.735661 | false | false | false |
VigTech/Vigtech-Services | Logica/ConexionBD/adminBD.py | 1 | 10033 | import psycopg2
from psycopg2 import extras
from principal.parameters import *
# Create your views here.
import sys
reload(sys) # to re-enable sys.setdefaultencoding()
sys.setdefaultencoding('utf-8')
class AdminBD:
#conn_string = ""
def __init__(self):
#host='127.0.0.1' dbname='docker' user='docker' password='docker' port='49153'"
try:
#self.conn = psycopg2.connect(database="docker", user="docker", password="docker", host="localhost", port="49153")
self.conn = psycopg2.connect(database=DATABASE, user=USER, password=PASSWORD, host=HOST, port=PORT)
# get a connection, if a connect cannot be made an exception will be raised here
# conn.cursor will return a cursor object, you can use this cursor to perform queries
self.cursor = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
#self.conn.set_character_encoding('utf8')
self.conn.set_client_encoding('UTF-8')
#self.cursor.execute("SET CLIENT_ENCODING TO 'LATIN-1';")
#cursor_factory=psycopg2.extras.DictCursor
except:
raise Exception('No se pudo conectar a la DB!')
def get_eid_papers_proyecto(self, proyecto):
consulta = "SELECT eid from paper_proyecto JOIN paper ON id_paper=id WHERE id_proyecto = %s;" %(str(proyecto))
try:
self.cursor.execute(consulta)
filas = self.cursor.fetchall()
eids=[]
for row in filas:
eids.append( row['eid'])
return eids
except psycopg2.DatabaseError, e:
raise Exception('No se pudo get_eid_papers_proyecto()')
def get_autores(self, proyecto):
consulta= "Select au.nombre_autor from paper_proyecto pp JOIN paper_autor pa ON pa.paper_id = pp.id_paper JOIN autor au ON au.id = autor_id WHERE pp.id_proyecto = %s;" %(str(proyecto))
try:
self.cursor.execute(consulta)
filas = self.cursor.fetchall()
return filas
except psycopg2.DatabaseError, e:
raise Exception('No se pudo get_autores()')
#Select au.nombre_autor from paper_proyecto pp JOIN paper_autor pa ON pa.paper_id = pp.id_paper JOIN autor au ON au.id = autor_id WHERE pp.id_proyecto = 1;
def get_dois_proyecto(self, proyecto):
consulta= "SELECT doi from paper_proyecto pp JOIN paper p ON pp.id_paper=p.id WHERE pp.id_proyecto =%s AND p.descargo=false AND NOT doi='00000';" %str(proyecto)
try:
self.cursor.execute(consulta)
filas = self.cursor.fetchall()
doi=[]
for row in filas:
doi.append( row['doi'])
return doi
except psycopg2.DatabaseError, e:
raise Exception('No se pudo get_dois_proyecto()')
def insertar_papers(self, proyecto,papers):
for paper in papers:
consulta = "INSERT INTO paper (doi,fecha,titulo_paper, total_citaciones,eid,abstract,descargo,link_source) VALUES (\'%s\',\'%s\',\'%s\',\'%s\',\'%s\',\'%s\',\'%s\',\'%s\') RETURNING id"%(paper['doi'], paper['fecha'], paper['titulo'],str(0), '00000', paper['abstract'], 'FALSE', paper['link'])
try:
self.cursor.execute(consulta)
self.conn.commit()
data = self.cursor.fetchall()
id_paper=data[0][0]
self.insertar_autores(paper['autores'], id_paper)
self.insertar_paper_proyecto(proyecto,id_paper)
except psycopg2.DatabaseError, e:
if self.conn:
self.conn.rollback()
raise Exception('No se pudo insertar_papers()')
sys.exit(1)
def insertar_autor(self,autor):
autor = autor.replace("'","''")
consulta = "INSERT INTO autor (nombre_autor) VALUES('%s') RETURNING id;"%(autor)
try:
self.cursor.execute(consulta)
self.conn.commit()
data = self.cursor.fetchall()
return data[0][0]
except psycopg2.DatabaseError, e:
if self.conn:
self.conn.rollback()
raise Exception('No se pudo insertar_autor()')
sys.exit(1)
def insertar_paper_autor(self,id_autor,id_paper):
consulta = "INSERT INTO paper_autor (paper_id,autor_id) VALUES(\'%s\',\'%s\');"%(str(id_paper), str(id_autor))
try:
self.cursor.execute(consulta)
self.conn.commit()
except psycopg2.DatabaseError, e:
if self.conn:
self.conn.rollback()
raise Exception('No se pudo insertar_paper_autor()')
sys.exit(1)
def insertar_autores(self,autores,id_paper):
for autor in autores:
id_autor=self.insertar_autor(autor)
self.insertar_paper_autor(id_autor,id_paper)
def insertar_paper_proyecto(self,id_proyecto,id_paper):
consulta = "INSERT INTO paper_proyecto (id_proyecto,id_paper) VALUES(\'%s\',\'%s\');"%(str(id_proyecto), str(id_paper))
try:
self.cursor.execute(consulta)
self.conn.commit()
except psycopg2.DatabaseError, e:
if self.conn:
self.conn.rollback()
raise Exception('No se pudo insertar_paper_proyecto()')
sys.exit(1)
def get_papers_eid(self, eids):
consulta = 'SELECT titulo_paper, link_source FROM paper WHERE '
count = 0
for eid in eids:
if count == 0:
concat = ' eid = \'%s\'' %(str(eid))
consulta += concat
else:
concat = ' OR eid = \'%s\'' %(str(eid))
consulta += concat
count +=1
try:
self.cursor.execute(consulta)
filas = self.cursor.fetchall()
#filas=[]
papers=[]
for row in filas:
#papers.append({"titulo": row['titulo_paper'], "link": row['link'])
papers.append({"titulo": row['titulo_paper'], "link_source": row['link_source']})
#eids.append( row['eid'])
return papers
except psycopg2.DatabaseError, e:
raise Exception('No se pudo get_papers_eid()')
sys.exit(1)
def get_papers_proyecto(self, proyecto):
consulta="SELECT id_paper, titulo_paper, fecha, total_citaciones, revista_issn, eid, abstract from paper_proyecto pp JOIN paper p ON p.id=pp.id_paper WHERE pp.id_proyecto=%s" %(str(proyecto))
try:
self.cursor.execute(consulta)
filas = self.cursor.fetchall()
            papers = []
            for row in filas:
                papers.append({"id": row['id_paper'],
                               "titulo": row['titulo_paper'],
                               "fecha": row['fecha'],
                               "total_citaciones": row['total_citaciones'],
                               "revista_issn": row['revista_issn'],
                               "eid": row['eid'],
                               "abstract": row['abstract']})
            return papers
except psycopg2.DatabaseError, e:
if self.conn:
self.conn.rollback()
raise Exception('No se pudo get_papers_proyecto()')
sys.exit(1)
def getAuthors(self, paper_id):
#cur = self.cursor(cursor_factory=psycopg2.extras.DictCursor)
query = """
SELECT
id_scopus AS authid,
nombre_autor AS authname,
id_afiliacion_scopus AS afid
FROM
paper_autor pau, autor au
WHERE
pau.paper_id = {} AND pau.autor_id = au.id;
"""
query = query.format(paper_id)
self.cursor.execute(query)
#cur.execute(query)
data = self.cursor.fetchall()
authors = []
for data_tuple in data:
authors.append(dict(data_tuple))
return authors
def getAffiliation(self, paper_id):
#cur = self.cursor(cursor_factory=psycopg2.extras.DictCursor)
query = """
SELECT
scopus_id AS afid,
nombre AS affilname,
pais AS affiliation__country,
ciudad AS affiliation__city,
variante_nombre AS name__variant
FROM
paper_afiliacion pa, afiliacion a
WHERE
pa.paper_id = {} AND pa.afiliacion_id = a.id
"""
query = query.format(paper_id)
#cur.execute(query)
self.cursor.execute(query)
data = self.cursor.fetchone()
return dict(data) if data else {}
def getKeywords(self, paper_id):
#cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
query = """
SELECT
pk.paper_id,
string_agg(k.keyword, '|') as authkeywords
FROM
paper_keyword pk, keyword k
WHERE
pk.paper_id = {} AND pk.keyword_id = k.id
GROUP BY pk.paper_id
"""
query = query.format(paper_id)
self.cursor.execute(query)
#cur.execute(query)
data = self.cursor.fetchone()
return data['authkeywords'] if data else ''
def getPapers(self, project_id):
#cur = self.cursor(cursor_factory=psycopg2.extras.DictCursor)
query = """
SELECT
id,
p.link_source AS prism_url,
eid, titulo_paper AS dc_title,
doi AS prism_doi,
abstract AS dc_description,
fecha AS prism_coverDate,
total_citaciones AS citedby__count
FROM
paper p, paper_proyecto pp
WHERE
pp.id_proyecto = {} AND pp.id_paper = p.id;
"""
query = query.format(project_id)
self.cursor.execute(query)
data = self.cursor.fetchall()
papers = []
for data_tuple in data:
paper_id = data_tuple[0]
paper = dict(data_tuple)
paper['authors'] = self.getAuthors(paper_id)
paper['affiliation'] = self.getAffiliation(paper_id)
paper['authkeywords'] = self.getKeywords(paper_id)
papers.append(paper)
return papers
"""
data = {"Hola": "hola", "mundo": [1,2,3] }
import json
with open('data.txt', 'w') as outfile:
json.dump(data, outfile)
"""
| lgpl-3.0 | -7,890,586,123,452,067,000 | 38.038911 | 304 | 0.570517 | false | 3.486101 | false | false | false |
John-Hart/autorest | src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/BodyComplex/autorestcomplextestservice/operations/polymorphism.py | 2 | 7536 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from .. import models
class Polymorphism(object):
"""Polymorphism operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.config = config
def get_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get complex types that are polymorphic.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`Fish
<fixtures.acceptancetestsbodycomplex.models.Fish>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/complex/polymorphism/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Fish', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_valid(
self, complex_body, custom_headers=None, raw=False, **operation_config):
"""Put complex types that are polymorphic.
:param complex_body: Please put a salmon that looks like this:
{
'fishtype':'Salmon',
'location':'alaska',
'iswild':true,
'species':'king',
'length':1.0,
'siblings':[
{
'fishtype':'Shark',
'age':6,
'birthday': '2012-01-05T01:00:00Z',
'length':20.0,
'species':'predator',
},
{
'fishtype':'Sawshark',
'age':105,
'birthday': '1900-01-05T01:00:00Z',
'length':10.0,
'picture': new Buffer([255, 255, 255, 255,
254]).toString('base64'),
'species':'dangerous',
},
{
'fishtype': 'goblin',
'age': 1,
'birthday': '2015-08-08T00:00:00Z',
'length': 30.0,
'species': 'scary',
'jawsize': 5
}
]
};
:type complex_body: :class:`Fish
<fixtures.acceptancetestsbodycomplex.models.Fish>`
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/complex/polymorphism/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(complex_body, 'Fish')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def put_valid_missing_required(
self, complex_body, custom_headers=None, raw=False, **operation_config):
"""Put complex types that are polymorphic, attempting to omit required
'birthday' field - the request should not be allowed from the client.
:param complex_body: Please attempt put a sawshark that looks like
this, the client should not allow this data to be sent:
{
"fishtype": "sawshark",
"species": "snaggle toothed",
"length": 18.5,
"age": 2,
"birthday": "2013-06-01T01:00:00Z",
"location": "alaska",
"picture": base64(FF FF FF FF FE),
"siblings": [
{
"fishtype": "shark",
"species": "predator",
"birthday": "2012-01-05T01:00:00Z",
"length": 20,
"age": 6
},
{
"fishtype": "sawshark",
"species": "dangerous",
"picture": base64(FF FF FF FF FE),
"length": 10,
"age": 105
}
]
}
:type complex_body: :class:`Fish
<fixtures.acceptancetestsbodycomplex.models.Fish>`
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/complex/polymorphism/missingrequired/invalid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(complex_body, 'Fish')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
| mit | -2,923,434,051,144,003,600 | 32.945946 | 84 | 0.589039 | false | 4.438163 | true | false | false |
kinow/rosalind-exercises | src/cons.py | 1 | 10762 | from fasta.Fasta import read_fasta
import sys
if __name__ == '__main__':
data = '''
>Rosalind_4592
>Rosalind_9829
TTAGACCATGCTGTTGTACTCCCCCCGTCATGGCAAAAATGACTCATTCGAGTCTTTCGC
ATGCGTCCACCCGGCTGTGGACTTGTCTGTTCGGCCTAGGCGTGACAAAGGTTAAAGTCA
TGTATACAGGATGCCGACCAAATGTAGAGCTACTCATTCCGTATAGCTTCTAAGCCACTA
ACGAGGATACGAAGTGTATAGATATCCTTAAAGCTGGCGTTTGTAGCCTAGTTCGGCTAG
CCGGCATTATAATAATGAGATAAATTACAGGTGATCGCCAAGGCTCATTTAGTTGTGTTC
TTCGCAACTTTCCGGACCTTCGCCCTAATAATTAAAGCGCAGAGCCGCCTCGTATAGGAG
ATTAGTTTTATGCGACCGGATCTCGTAAGAAAATTAGATCGGGAAATTGTTAAACTACAG
TACTTAATTCAGGTAATGACATTTCCGTCGTTTGTATCTTTATAATAGTATATGATCGCC
TATAGTTATAAGAGCGCGAACGACAGCTTACGCGTGGTTGCGATTATCGGGTTAGTCTTA
ACTCGGCCCAAATGAGAAGAATTCACGTGGTAAAGTGTCGGACTGATAGCGCACGCATTC
GGTGACTTCTGATAGGCGACCCAGTATCTACGTCTGCCTATTCTGCACAGCGTATATCCT
ATTCAACCTAGGGTTACGTCGCGAAAGCTATATGCCCACACGGCAAGGGTAGCTGCAAGG
TGATAAGGTTGTTTGATTTGCCTCATAGCTGAGAAGACCGACAGAGGCTGCCTACTGGCA
CGTACAAACCAAGGTAACGTATCTGTGGGACACTTCGATGGTTGTGTCTAACGCCTTGAG
ACGTACGATACAAAACGTAGCGGGATTCACATGCAGGTAGTTGAAGTGTGTTCGGAGCTA
>Rosalind_8886
ACCAGCTATAACGTCCTAGCGTACCCTGCTTTGGTTAAAACACGAAATTGTTACAGTTGA
CGTCCGCCACTACTACGTGGAATATACAACTAGCCGAGTAGCTAAGCACTGTTCGTATCA
TCGGAGGATCCTAGTCTTGTCCTCGCTCTACAAAGGCCAAAAAATGTGTGGCTAAATGGC
GTAGGGTGGCTTATTAGAACTCTATTCATTCTCTATACTAAACTGCACGTGCTACATGGT
CTCCACGCGGGGTTCCGATAGTCGCCGCGAGTCTACGATTGGTACGTCCAAAAACCGTGG
GAGCGGGGAATAAATTTGCGGAACAGCCGACCACTGCCTCAAACACGGCGGGTTGAAGCC
GGGGCGTAGCCCTGAGGCAGTCTTGGGGGCGAGTTCCGCGCCGAGACGTACTGTCTAAGT
CGGACTGGGTACATCAGCGTGCTAGACAATAGCAATCGTCCTATTGATTACCCAAGACCG
CTAAGATTTGGAATCAACACCCTATAGAGGCGTATTTGACGAACCAGGAGTAATAGTTGA
GCATCTATGGATACAATAACCGAGGCCTACTACAGAAAAGTGTCCTAGTTGAGTAATACG
GCGTAACTACTAAAGCCGTTCGTGCAGAAGAGGTCAAGTGTAACGGCTCACGAGCCCAGC
GATCAGCCTTAACTTCTACAATGTGCGAGATATGTTTCGAAGGCACATCGCTTGGCGGCC
TCGCATTTTCTTTCTCGTTTTGGACAAGGCAAACTCAGGATGATGTAGTCGGCTTAGCAA
AAACCATACACAGGGATCTAGCCGAATCGGTCCGGGTCTCAAACACTAGGCTACCAGAAT
CCCTTCGACTGAAGACTCGGTGCAAGCCTCTATGGTACGCATACTCGAAAGCAGATAATA
>Rosalind_8552
GATTCCCGGACACCCGTGCTACCGCACGCCTCTTGTAAGTCAGTTTTCCGCTGAGTGTGG
ATCCGCACGTGATCAGCTGCAGTCACGCCGCACAGTATGGTGTATCTAGGTTCAGACACT
ACAGGCTGCGGGGTGACCGGATTTAGCGTTATCCATAAACAGATAGCGGGTTATGATGCA
TGTTACCACTATGTATATCGGGCCGGGCAGCGATCTGTTAACGGTCGGTAAACTGATCGG
CCGCTAGACGTAGTCTTTAGTAGCGGATCCTGTTCACCCACAATACGACAGGTAGTGCTT
CTGAAGTTATATAGGGCAAATATTGTATGTGGCCGACCGGCACAGCATTGACACGACGTC
TTGTCTCCTGACATCAACCGATAGAAGCATGAGAAGATAGTTACGTTATGGCGAATAGGA
AGGTACGCACCGAAACCTTCCATGAGATGGATGCCCATGCTTCTCTATGGGCGTTCCGGG
GAGCTATGGACTACCGGGATTCAACAGGCACCAGACGTTGTGGCGGGACTGTCGTCCATT
GCCGTGAATCTGGACTTTTAGTTCTAAGTATGAAGCCGTCGGGTCTGTATATGGAATCTG
AAAATCCATAGAGATGGATCACTGTGTATTGTTACGGAGGACTGATTTTCCAAAGTTTAC
CTGGTTACAGACCGCCGGTGGCAATTTTGATTAAAGTGGGGTCTTGATCCTGGCTGTATA
CGTGCTAGCGTCTCTCGCGTACCCCGCTTGAGTCGCAACAGCCGCTACGTCAAGAGACGT
GGTCCTACTAAACGGGCATGGGTCCGATGGTTCGACTCTCGATTGCTGTTCGAACCGGAG
ATTTATAGGGACTGAACCGCCCATCCACCTACTTGACTTCAGAGTGCTTCGCCATAACCC
>Rosalind_3431
TCCCCCGATCCTGCATTTTAGACCGTCATTTCTGAGCGCAGCCGTTACTCTGTGTTCTAG
TCAATACGTGAGCGACGCGTGGTCAAGGATTAGCTTTGTTCTAGCATCATAGGTGGAACT
GTTTCCGAAACCTAGAGCTTGCAAGTAGCTCACCCTGTTTCACTGCATAACGAATTAAGT
GATACGAGCCTTAGGTAAACTATGGAATAGCATCCCCCAACGCTGCCCTTTGCTAGTTTT
CCAAGCATGCCTGGTTTTAGATCAGTTTACTTTTAAGTGAGTGCGCGTGATGCGAATCTC
TCAGCGATTTATTGCTGACCTACCAGTGAAACTTATGCAAGGCTATGTGCGCCTGGCCGT
ACCTGAAGCCGGGACCAGTTCTATGAGTGGCAATATACCTTCTTTTGGTTCCCTCGTATG
AACGTTAACATGGGGATTGAAAACTTGTTGATGTTTTATTTCAATTGTTCCCATGATGTG
TCGATGGGTGGAACGCATCGCAGTTCGCACAGATAGCGTCCAGGAATGTTCACGCGGGAT
TGCGGGAGCGTAGCTTTCGGGAAAAGGACGACGTTCGCATACCGATCAGTCGCCATGCCA
CATATACGGAGTTGGTATCAGTCTTTTGCATGGTCAGAGCGTACCAGGCCAACCGGACCC
ACATATCGTGGTTACCGCGACAGCAGTATGCACCGGGTGCACAATCCTGCTAAACCCCGG
ATCTTGGGCCATCAAGAGGTTACTTCGAAAGGCTCAATGGCCGTAGTGTGGTGCCGATCC
GGGCATGATCTCCTCGTTTGAATGTTCTGCCGCACCTCAACGGTTAAGTGAACTTACACT
GGAAGGTAGATCGTCACGGCTAAGTTCGGCCAAAACCTCGCCCGCAGTTGGAGCCAATCC
>Rosalind_2026
CTCATCATTTCCCAGCAATGGAGTTAAAGTTGGTCCTCCTCTCGTGAGTGAGCGTTGAAT
TTATAAGTAACCTCGTAGGTCCGAAGGAGAGTAAGGGAATAAGAAACGGCTCCGTTCCTA
ATGACTAGTTAGGAGGTTTGGGATGACGTGAGAAGGGTGTCCCTTTGGTACTCGAATCGG
AATATGTCGCTCGCATCCATGTGCTATACATCCTTACTTGCAAGTCATATGCGGGGTCAG
GGTTAGGTAGCCAGTGGCCTCTGAACTATCGGGATGACCTGTACTAACCGGTTTACAACC
AGACGGACCAGGGCACGGGAGTCCCTACGGTGCCCAGTACTACTGCGGGAAAATACACCC
TCACTGCAATAACGCGAAGACTAAACTCTGCCATAATATCGTAGGTATGCTCGCTCGCGC
GAATCGGTATCCTAGCCTTGGTATTCTTGGCCGGGGCCAATCTGCCCTCCTTAAGCGGAC
CATAATACCGGACCTCGTAATAAGCCGAAATAGATATTCCCATCCAAACGAGTTACCCTA
GGCGAGACGGCAGAGCTTCCATATGGTAAACCTACCTGATGCGGGTATGGCTCCAAAGCG
TTGGCATTCCACCATCCCGGATAAATTAAGGAACCTCATGAGTGGGTTTGCAACTGGGAG
CGTTTGCGCCGACCTTCGCTTTCCCACGTCTTAACTCGATCCGATATCCTGGTCGGGGGT
GAGGGCAGCACGGTACCTGGTTTGCTGACAGGTTCTCTCCGGCCGCCAGCCCCAGGGCGT
GTTAAGAGAAACCGCAACGGAGACCACCTAGTATTTTAGGCCGCCGGGGTGTTAGGTGAA
TAAACACAGACAACTTCCACAACACTCCAATTCTCATACGAACGGGTTAATAAGGATTTT
>Rosalind_5007
GGGAAGCTAAATCCCCCCCCAAGTGGGGCCGAGAAAAATAAGTACGCAGTCCTGTCAACC
CCGGATCTACCTACTCCCATCTTGCGCGGCTTGAACAGATGTGAGGGTCAGCGGCCTCTG
GATGTTCTCTGGCTTGGGTACTAGGAGACGAACCCAATTGTTAGTTCGAATTATTTGCCC
CAGGCGCCGATCCTTCTTCTCGAACGCCTCTTCCTAGCCCTGCCGGCTCCCTCCGAAATG
GACTCAGGCGCTTTCGTAGTCCCGACACGCGCGTTCTCATCTTGGTAATCAACGGTTCTT
GTGAGTGCGAATTGGTCGCACTCGTGCCCTGACACTTGCGGGCGGTGTGGACTTTACAAC
ATATGACCTGCGCACTTTGGGGCATAATCATGAAAATGAACCGCTTGTGACGTAGCGGGT
AGAGTTTGGGTAACTGAAGTGAAGTGCACGCGGGGGACGACAGTCGAGGCGGGTTAGAAT
ATTCGGGAAGCACACACCTACCTTCAGGGTCAGCCGGTGGAGAGGGGGGCTTCCGTGGCA
TTACACATACTGGACCAGATCAACTTACTGCGGTTTATCATAGTTTTCATCAGATTAATT
TTTCACGGTCTGCGAAGCGGTCTCTCAGCACAATAACCCTTTACCTTTCCGCAGATGACT
GTTTGGAATAGATTGGGTAGACACCCCGTCGCCCGCTTATCATGTAAATTACCCACTAAG
GAAGTTCGTACTAAGTAGACGTTTCTGGAAGGACGTCAAGAGAGTGTACTAGACACTATT
AATCTCACCACGATTTGTTGACACTATGCAGAAACTCAGGTTAGATATCCTCCTGTGGCC
TTCCACTTGCACTTTCCATTATCGTGCGCTAACAAAGCACACGACTGGGTCTACAACGTA
>Rosalind_2348
AAAACCCGCGCAGCTCTACGGCCCATCAATCTGAGCTAATAAGTCGTTCGCTTAAAGGGA
CTTCGCACCCCATCATCTGAACAAACCGTCAGACGTCTCTTGTGGACTCTACTGGTACGG
TTCTCGACGAAATTGCGCCATCAACGCAGCACGTAGGTCCACCGTAGCCACCTGAGGTAC
GGCTGGGCACAGTTTGCTCTGTATGCTACTGGGCAGAGAGTGTCTACTACTGCCGGTGCC
TGGACGCGCTCCTGCTAGACCACAATCTCCAAGGAGATTGCCTTGAAAGCTGCATATGTA
GAGTTCATCAATCCATAACTTCTCCGGACGCACTGTAAATCAATTATAGCATCTGTTCAC
TGTGAGATGTGTTTCAGGATGATTCCTTTCTAGAGATACCTTTTGATTGGCAGAGTCCTC
TGGAATCTCGGTGGACCATGGTTCTCATAACTCAGGGATCTCCATTCTATCGCACCGGTT
AACGTGGACACTCTTGTTCTGCGACGCCTGTCTTGTCGCGATGACGAGATACCGTGGGCT
TCGAGTACTATCAAGGACTACGCCGTACCCAAATCAATTGAACGATCAGCATATTGGCGC
GAGGCTTAAACGGCGTCTGCATGAGTATAGTTGCTTGGTCCAGGGCCTTATCATTGCATG
ACTTTATGGTTAGTATATGGTGTGGTAAGTTGTCGGTGGAGCTTTACTGGCCCGTTTATG
ACTGCCCATTACAGACTCCGTGTGCTGAATCGGGTCAGCCACCTCCATCTATGCCGCAGC
TCCCCGCTGGCTACATATCCGCACCGTATACAGGGGAAAGAGTACGCTCTAGACAGACCC
GCTGGCATCTCGTGCCCACCTGGACCGAATAGCGACCACAGTCATTCACGACTTTGCAAT
>Rosalind_3740
GAGCCGTTCTTTTGATATACTTAGGTCCGTGCGGTCTCCACCCAAGCAACTCCTCTTAAC
GCTATTACTCGGATTGATAGTATATAGGTACCTCTGTGAAGTTTTGCAACGAGCATCTTA
CATGTCCCCAGGTGGACTCAAGTATTGACGAGGTGACCGGCGTCCTAGGATTGGGCCAAT
GACTTAAGGGTATTGTGAACTAAGTTATGCTACCTTTCGATCGAGAGATCCACCCTTGCA
TCAGGCCATTTACGGGAGCAGCGTGTAATATTGGCTAGACTATCTCTAGTTCGAAATCGA
CAGATGGGTGCCCTTAGCTACTGTGTCCAGCCAGCCTAATGGTCGAGGGTTGATACATCT
TGATCGTACGTTCCTCTAGCGGCCAAGAACTCGATCGTGTGGAGAACATGAGCAACCCCA
CAGACATGCTATAGTCCTAGAGCTGATCTTTACCCTGGGAGAAGTCTGTTTTCTCGGGGC
ATGCCGTGTTTGGCTGGGCTATTACGGTGCCCCACACGGTACTTACAGTACAAGAGTTAG
CACTGGTTAAGGGATAAATTATCGATATATTGTCCCCCGAGAGCACTTTCAGAGGACCTG
ACCAAGTAACTTATGGGGCCAAGCACGAATCGGTGTCCGTTCGCCTACGCGTGAACCAGC
CGCAGGAGGTTAACGTTACCTATTCTGCTTAGTCCTCAGTCCGATAGTAGCACCTTTGTG
AGCGCAGGGAAAGTAGAGGCTAGGCCTTCATTGGCGCCCCAACAAGACCAACCCAAGGCG
AATAAAACCGCCCCAGTTCAGAGAAATTCGCGGACGAAACACCACTCGTGAACAACCATA
TCATGAGTAGAGTGGCCACATAGACGGGAACAACAAAAACGTAAGTGAATGGCTGGACTT
>Rosalind_0868
CTCTGAGTGGGCACACACTGCGGTGCCACCGCAGCTAGCAAGCGTGATACCACTATTCTT
CAGTGCTCCCTGCAGATGTGGCATACCTTGCTTTAATTTCGTTTAAGGGCGGCATGGCTT
TAACTGTTCTACATGCGTATATTGATCATCCAATGCCGCGCGTGCACAGTTCAAAATTAG
TCAGTTCCCACCCGACAATCTTCCCAGTCACTTCGATAAAAACGGCAGAGAATTTTGCTG
AGCAGAGTGACCATCAATATGGCTTGCGACTTACTTAAGTTTCCTCCCAGGTTATACATT
AATAGCGTCAGCATGCATTCCAGCATGAAGTTCCCAGATTCGCTCTCGCCTCAACTAAAG
CAGAAGCCACCACCGACCACCGCATGTTGTTTTTGGATAGCTACTATTCACACAGAGAAG
CTGTTTCGATTATTTGTGATTTGCACCGATTGAAGATTCGGCTCGATAGGGACTCTCGGA
CAGACTGTACCGGTTAGGGGATCTTTATTTACTATGTTACTATTATGTCTTCCCTAATAC
GCCTCTGCTAGTAGCTAAGGTTCCAGATTAAAACCCGGAGACGTGCGGTCGTACCGATCG
GCGGCCATCACAATGATCTTATTTAATTACACGTAGGCCATTGTCTTCGTCAATTTGCAG
GGCTTTGACTAGGACACACGAACGGCTTGAGGGGAAACCCGGCAACGTGCGCGAATATTC
TTTAGGCATTTTGGAGTGGTCATTTCAGGTCCTACCCCGAACCTGAAAGCGGGTAGGGGC
GTGGAATGCAGCAAACGATGCTTGAGGTCGCTCAAGCGGGCCCAATGTCAAGGGTTACCT
GCGAGAGGCGGAAGTGCAAAGAACCAGCGAAGGATATTGGCTATTCCCTAGTCATGAGGT
>Rosalind_6517
AGCTATTTGGGGTTTCACAATAGAGTTTCGAGGCTTAAGATAGACACCAGGCATAGACGT
CGGCAATCCTTTTACTTCAATATAGATATTATCCAAATTTTAAGCCACTCTTTCCGGTCA
GTTCCGCATCGGCCACCTCTCCTGGCCGCCACATTAAACGACCCTTTCTGTGGTCTTGGA
CTACCTCGCCTGCCATAGCCTACATACAATTGACAGATCTCGCTATTCCGCAAGTGTTGG
GCTAAACAAGGCAAGGATACTCATCTTCGTGCGCGATGGAAGTTATTCCTCTGTCGATGT
CCCAAGTCTGAATTGGAATGCATCAGACTAGTGCTGTCAGACCGCAGCTGGCTCATATGT
GAATCCATTCTTGAACGAGCGCGTCTATGTCTTCGGACTCCTGGGACTATTTACCCGCCA
AATGAGTACGGTATTGTTGCCGCATCACGCGAACACGTAGTGGGGCAAGTTAGGACATAT
GGGTTCCATCATACGTTTGCGAGGCAGCGGTATGGTATAACTCCAGCTAAGGAAGTCGCC
ACGGTTGCTTCGTCAACGAAGGCTGTGATGGACGCAGTCGTGTAGCAAATACTGACAAAA
CACTGAGTTGGCCACAGAAGCGGCTAAAATTAATCATCGTCTTGAAAATGTCGCCTTGAA
ATTGGTACAGTATGTTATGAGCTCGCACGGGGTTGGAGGATAACGAGTTTAAGTTACCTG
CCACGCAAAACATTGAACTCGAAACTTCGTTTTGAGGAGTATCTTTATCAATCGCGTTGG
GTGATTTATGCTGAGGGTATGAGATAATAATGCGATGAACTAGGAAAGCGGAGTTTCTAT
TGGCAGTATGGTCGCTTTATCGTCCATGTCTAAAATCCTTAGTTAGTGAGTTAAATGCAA
'''
seqs = read_fasta(data)
# matrix
m = []
length = 0
for s in seqs:
r = []
length = len(s.sequence)
for c in s.sequence:
r.append(c)
m.append(r)
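    # Per-column nucleotide counts across all sequences (the profile matrix
    # of the CONS problem).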
a = [0] * length
c = [0] * length
g = [0] * length
t = [0] * length
for row in m:
col = 0
for e in row:
if (e == 'A'):
a[col] += 1
elif (e == 'C'):
c[col] += 1
elif (e == 'G'):
g[col] += 1
else:
t[col] += 1
col += 1
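    # Consensus string: the most frequent nucleotide in each column; ties are
    # resolved A > C > G > T by the order of the elif chain below.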
profile = list()
for i in range(length):
if (a[i] >= c[i] and a[i] >= g[i] and a[i] >= t[i]):
profile.append('A')
elif (c[i] >= a[i] and c[i] >= g[i] and c[i] >= t[i]):
profile.append('C')
elif (g[i] >= c[i] and g[i] >= a[i] and g[i] >= t[i]):
profile.append('G')
        elif (t[i] >= c[i] and t[i] >= a[i] and t[i] >= g[i]):
profile.append('T')
for e in profile:
sys.stdout.write(e)
print
print 'A:',
for e in a:
print e,
print
print 'C:',
for e in c:
print e,
print
print 'G:',
for e in g:
print e,
print
print 'T:',
for e in t:
print e,
| unlicense | 3,255,450,500,082,523,600 | 45.593074 | 62 | 0.898253 | false | 2.47573 | false | false | false |
thiswind/nn_practice | tensorflow/knn.py | 1 | 1534 | from tensorflow.contrib.learn.python.learn.datasets.mnist import read_data_sets
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
mnist = read_data_sets('data/', one_hot=True)
train_pixels, train_list_values = mnist.train.next_batch(100)
test_pixels, test_list_values = mnist.train.next_batch(10)
train_pixel_tensor = tf.placeholder(dtype=tf.float32, shape=[None, 784])
test_pixels_tensor = tf.placeholder(dtype=tf.float32, shape=[784])
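# L1 (Manhattan) distance between the single test image and every training
# image: |train - test| summed over the 784 pixels of each row.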
distance = tf.reduce_sum(
tf.abs(
tf.add(
train_pixel_tensor,
tf.negative(test_pixels_tensor)
)
),
reduction_indices=1
)
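# Index of the training image with the smallest distance (1-nearest neighbour).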
pred = tf.arg_min(distance, 0)
accuracy = 0
init = tf.global_variables_initializer()
with tf.Session() as session:
session.run(init)
for i in range(len(test_list_values)):
print(test_pixels)
nn_index = session.run(
pred,
feed_dict={
train_pixel_tensor: train_pixels,
test_pixels_tensor: test_pixels[i, :]
}
)
trained_value = train_list_values[nn_index]
true_value = train_list_values[i]
trained_value_number = np.argmax(trained_value)
true_value_number = np.argmax(true_value)
print('test N %s Predicted Class: %s, True Class: %s, %s'
% (i,
trained_value_number,
true_value_number,
trained_value_number == true_value_number
)
)
if trained_value_number == true_value_number:
accuracy += 1.0 / len(test_pixels)
image = np.reshape(train_pixels[nn_index], [28, 28])
plt.imshow(image)
plt.show()
print('Accuracy = %s' % accuracy)
| gpl-3.0 | -165,997,286,173,574,100 | 22.242424 | 79 | 0.67927 | false | 2.878049 | true | false | false |
deepmind/acme | acme/environment_loop_test.py | 1 | 3599 | # python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the environment loop."""
from typing import Optional
from absl.testing import absltest
from absl.testing import parameterized
from acme import environment_loop
from acme import specs
from acme import types
from acme.testing import fakes
import numpy as np
EPISODE_LENGTH = 10
# Discount specs
F32_2_MIN_0_MAX_1 = specs.BoundedArray(
dtype=np.float32, shape=(2,), minimum=0.0, maximum=1.0)
F32_2x1_MIN_0_MAX_1 = specs.BoundedArray(
dtype=np.float32, shape=(2, 1), minimum=0.0, maximum=1.0)
TREE_MIN_0_MAX_1 = {'a': F32_2_MIN_0_MAX_1, 'b': F32_2x1_MIN_0_MAX_1}
# Reward specs
F32 = specs.Array(dtype=np.float32, shape=())
F32_1x3 = specs.Array(dtype=np.float32, shape=(1, 3))
TREE = {'a': F32, 'b': F32_1x3}
TEST_CASES = (
('scalar_discount_scalar_reward', None, None),
('vector_discount_scalar_reward', F32_2_MIN_0_MAX_1, F32),
('matrix_discount_matrix_reward', F32_2x1_MIN_0_MAX_1, F32_1x3),
('tree_discount_tree_reward', TREE_MIN_0_MAX_1, TREE),
)
class EnvironmentLoopTest(parameterized.TestCase):
@parameterized.named_parameters(*TEST_CASES)
def test_one_episode(self, discount_spec, reward_spec):
_, loop = _parameterized_setup(discount_spec, reward_spec)
result = loop.run_episode()
self.assertIn('episode_length', result)
self.assertEqual(EPISODE_LENGTH, result['episode_length'])
self.assertIn('episode_return', result)
self.assertIn('steps_per_second', result)
@parameterized.named_parameters(*TEST_CASES)
def test_run_episodes(self, discount_spec, reward_spec):
actor, loop = _parameterized_setup(discount_spec, reward_spec)
# Run the loop. There should be EPISODE_LENGTH update calls per episode.
loop.run(num_episodes=10)
self.assertEqual(actor.num_updates, 10 * EPISODE_LENGTH)
@parameterized.named_parameters(*TEST_CASES)
def test_run_steps(self, discount_spec, reward_spec):
actor, loop = _parameterized_setup(discount_spec, reward_spec)
# Run the loop. This will run 2 episodes so that total number of steps is
# at least 15.
loop.run(num_steps=EPISODE_LENGTH + 5)
self.assertEqual(actor.num_updates, 2 * EPISODE_LENGTH)
def _parameterized_setup(discount_spec: Optional[types.NestedSpec] = None,
reward_spec: Optional[types.NestedSpec] = None):
"""Common setup code that, unlike self.setUp, takes arguments.
Args:
discount_spec: None, or a (nested) specs.BoundedArray.
reward_spec: None, or a (nested) specs.Array.
Returns:
    actor, loop
"""
env_kwargs = {'episode_length': EPISODE_LENGTH}
if discount_spec:
env_kwargs['discount_spec'] = discount_spec
if reward_spec:
env_kwargs['reward_spec'] = reward_spec
environment = fakes.DiscreteEnvironment(**env_kwargs)
actor = fakes.Actor(specs.make_environment_spec(environment))
loop = environment_loop.EnvironmentLoop(environment, actor)
return actor, loop
if __name__ == '__main__':
absltest.main()
| apache-2.0 | 339,868,367,020,457,540 | 34.284314 | 77 | 0.714365 | false | 3.323176 | true | false | false |
alfredodeza/pacha | pacha/database.py | 1 | 3105 | import sqlite3
import os
from pacha.util import get_db_file, get_db_dir
REPOS_TABLE = """CREATE TABLE IF NOT EXISTS repos(
id integer primary key,
path TEXT,
permissions TEXT,
type TEXT,
timestamp TEXT
)"""
METADATA_TABLE = """CREATE TABLE IF NOT EXISTS metadata(
id integer primary key,
path TEXT,
owner TEXT,
grp TEXT,
permissions INT,
ftype TEXT
)"""
DB_FILE = get_db_file()
DB_DIR = get_db_dir()
class Worker(object):
"""CRUD Database operations"""
def __init__(self, db = DB_FILE):
self.db = db
self.conn = sqlite3.connect(self.db)
self.c = self.conn.cursor()
self.c.execute(REPOS_TABLE)
self.c.execute(METADATA_TABLE)
def is_tracked(self):
repo = [i for i in self.get_repo(DB_DIR)]
if repo:
return True
return False
def closedb(self):
"""Make sure the db is closed"""
self.conn.close()
def insert(self, path=None, permissions=None, type=None, timestamp=None):
"""Puts a new repo in the database and checks if the record
is not already there"""
if not timestamp:
stat = os.lstat(path)
timestamp = int(stat.st_mtime)
values = (path, permissions, type, timestamp, path)
command = 'INSERT INTO repos(path, permissions, type, timestamp) select ?,?,?,? WHERE NOT EXISTS(SELECT 1 FROM repos WHERE path=?)'
self.c.execute(command, values)
self.conn.commit()
def insert_meta(self, path, owner, grp, permissions, ftype):
"""Gets the metadata into the corresponding table"""
values = (path, owner, grp, permissions, ftype, path)
command = 'INSERT INTO metadata(path, owner, grp, permissions, ftype) select ?,?,?,?,? WHERE NOT EXISTS(SELECT 1 FROM metadata WHERE path=?)'
self.c.execute(command, values)
self.conn.commit()
def get_meta(self, path):
"""Gets metadata for a specific file"""
values = (path,)
command = "SELECT * FROM metadata WHERE path = (?)"
return self.c.execute(command, values)
def update_timestamp(self, path, timestamp):
"""Updates the timestamp for a repo that got modified"""
values = (timestamp, path)
command = 'UPDATE repos SET timestamp=? WHERE path=?'
self.c.execute(command, values)
self.conn.commit()
def remove(self, path):
"""Removes a repo from the database"""
values = (path,)
command = "DELETE FROM repos WHERE path = (?)"
self.c.execute(command, values)
self.conn.commit()
def get_repos(self):
"""Gets all the hosts"""
command = "SELECT * FROM repos"
return self.c.execute(command)
def get_repo(self, host):
"""Gets attributes for a specific repo"""
values = (host,)
command = "SELECT * FROM repos WHERE path = (?)"
return self.c.execute(command, values)
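# --- Usage sketch (not part of the original module) ------------------------------
# Exercises the CRUD helpers against a throwaway SQLite file; the path and
# metadata values are made up for illustration.
if __name__ == '__main__':
    w = Worker(db='/tmp/pacha_example.db')
    w.insert(path='/etc/hosts', permissions='0644', type='git', timestamp=1)
    for row in w.get_repo('/etc/hosts'):
        print(row)
    w.remove('/etc/hosts')
    w.closedb()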
| mit | 1,075,039,406,109,860,200 | 28.018692 | 149 | 0.577778 | false | 4.074803 | false | false | false |
Fale/ansible | test/support/network-integration/collections/ansible_collections/vyos/vyos/plugins/module_utils/network/vyos/config/lldp_interfaces/lldp_interfaces.py | 47 | 15951 | #
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""
The vyos_lldp_interfaces class
It is in this file where the current configuration (as dict)
is compared to the provided configuration (as dict) and the command set
necessary to bring the current configuration to its desired end-state is
created
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.cfg.base import (
ConfigBase,
)
from ansible_collections.vyos.vyos.plugins.module_utils.network.vyos.facts.facts import (
Facts,
)
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.utils import (
to_list,
dict_diff,
)
from ansible.module_utils.six import iteritems
from ansible_collections.vyos.vyos.plugins.module_utils.network.vyos.utils.utils import (
search_obj_in_list,
search_dict_tv_in_list,
key_value_in_dict,
is_dict_element_present,
)
class Lldp_interfaces(ConfigBase):
"""
The vyos_lldp_interfaces class
"""
gather_subset = [
"!all",
"!min",
]
gather_network_resources = [
"lldp_interfaces",
]
params = ["enable", "location", "name"]
def __init__(self, module):
super(Lldp_interfaces, self).__init__(module)
def get_lldp_interfaces_facts(self):
""" Get the 'facts' (the current configuration)
:rtype: A dictionary
:returns: The current configuration as a dictionary
"""
facts, _warnings = Facts(self._module).get_facts(
self.gather_subset, self.gather_network_resources
)
lldp_interfaces_facts = facts["ansible_network_resources"].get(
"lldp_interfaces"
)
if not lldp_interfaces_facts:
return []
return lldp_interfaces_facts
def execute_module(self):
""" Execute the module
:rtype: A dictionary
:returns: The result from module execution
"""
result = {"changed": False}
commands = list()
warnings = list()
existing_lldp_interfaces_facts = self.get_lldp_interfaces_facts()
commands.extend(self.set_config(existing_lldp_interfaces_facts))
if commands:
if self._module.check_mode:
resp = self._connection.edit_config(commands, commit=False)
else:
resp = self._connection.edit_config(commands)
result["changed"] = True
result["commands"] = commands
if self._module._diff:
result["diff"] = resp["diff"] if result["changed"] else None
changed_lldp_interfaces_facts = self.get_lldp_interfaces_facts()
result["before"] = existing_lldp_interfaces_facts
if result["changed"]:
result["after"] = changed_lldp_interfaces_facts
result["warnings"] = warnings
return result
def set_config(self, existing_lldp_interfaces_facts):
""" Collect the configuration from the args passed to the module,
collect the current configuration (as a dict from facts)
:rtype: A list
:returns: the commands necessary to migrate the current configuration
to the desired configuration
"""
want = self._module.params["config"]
have = existing_lldp_interfaces_facts
resp = self.set_state(want, have)
return to_list(resp)
def set_state(self, want, have):
""" Select the appropriate function based on the state provided
:param want: the desired configuration as a dictionary
:param have: the current configuration as a dictionary
:rtype: A list
:returns: the commands necessary to migrate the current configuration
to the desired configuration
"""
commands = []
state = self._module.params["state"]
if state in ("merged", "replaced", "overridden") and not want:
self._module.fail_json(
msg="value of config parameter must not be empty for state {0}".format(
state
)
)
if state == "overridden":
commands.extend(self._state_overridden(want=want, have=have))
elif state == "deleted":
if want:
for item in want:
name = item["name"]
have_item = search_obj_in_list(name, have)
commands.extend(
self._state_deleted(want=None, have=have_item)
)
else:
for have_item in have:
commands.extend(
self._state_deleted(want=None, have=have_item)
)
else:
for want_item in want:
name = want_item["name"]
have_item = search_obj_in_list(name, have)
if state == "merged":
commands.extend(
self._state_merged(want=want_item, have=have_item)
)
else:
commands.extend(
self._state_replaced(want=want_item, have=have_item)
)
return commands
def _state_replaced(self, want, have):
""" The command generator when state is replaced
:rtype: A list
:returns: the commands necessary to migrate the current configuration
to the desired configuration
"""
commands = []
if have:
commands.extend(self._state_deleted(want, have))
commands.extend(self._state_merged(want, have))
return commands
def _state_overridden(self, want, have):
""" The command generator when state is overridden
:rtype: A list
:returns: the commands necessary to migrate the current configuration
to the desired configuration
"""
commands = []
for have_item in have:
lldp_name = have_item["name"]
lldp_in_want = search_obj_in_list(lldp_name, want)
if not lldp_in_want:
commands.append(
self._compute_command(have_item["name"], remove=True)
)
for want_item in want:
name = want_item["name"]
lldp_in_have = search_obj_in_list(name, have)
commands.extend(self._state_replaced(want_item, lldp_in_have))
return commands
def _state_merged(self, want, have):
""" The command generator when state is merged
:rtype: A list
        :returns: the commands necessary to merge the provided configuration into
the current configuration
"""
commands = []
if have:
commands.extend(self._render_updates(want, have))
else:
commands.extend(self._render_set_commands(want))
return commands
def _state_deleted(self, want, have):
""" The command generator when state is deleted
:rtype: A list
:returns: the commands necessary to remove the current configuration
of the provided objects
"""
commands = []
if want:
params = Lldp_interfaces.params
for attrib in params:
if attrib == "location":
commands.extend(
self._update_location(have["name"], want, have)
)
elif have:
commands.append(self._compute_command(have["name"], remove=True))
return commands
def _render_updates(self, want, have):
commands = []
lldp_name = have["name"]
commands.extend(self._configure_status(lldp_name, want, have))
commands.extend(self._add_location(lldp_name, want, have))
return commands
def _render_set_commands(self, want):
commands = []
have = {}
lldp_name = want["name"]
params = Lldp_interfaces.params
commands.extend(self._add_location(lldp_name, want, have))
for attrib in params:
value = want[attrib]
if value:
if attrib == "location":
commands.extend(self._add_location(lldp_name, want, have))
elif attrib == "enable":
if not value:
commands.append(
self._compute_command(lldp_name, value="disable")
)
else:
commands.append(self._compute_command(lldp_name))
return commands
def _configure_status(self, name, want_item, have_item):
commands = []
if is_dict_element_present(have_item, "enable"):
temp_have_item = False
else:
temp_have_item = True
if want_item["enable"] != temp_have_item:
if want_item["enable"]:
commands.append(
self._compute_command(name, value="disable", remove=True)
)
else:
commands.append(self._compute_command(name, value="disable"))
return commands
def _add_location(self, name, want_item, have_item):
commands = []
have_dict = {}
have_ca = {}
set_cmd = name + " location "
want_location_type = want_item.get("location") or {}
have_location_type = have_item.get("location") or {}
if want_location_type["coordinate_based"]:
want_dict = want_location_type.get("coordinate_based") or {}
if is_dict_element_present(have_location_type, "coordinate_based"):
have_dict = have_location_type.get("coordinate_based") or {}
location_type = "coordinate-based"
updates = dict_diff(have_dict, want_dict)
for key, value in iteritems(updates):
if value:
commands.append(
self._compute_command(
set_cmd + location_type, key, str(value)
)
)
elif want_location_type["civic_based"]:
location_type = "civic-based"
want_dict = want_location_type.get("civic_based") or {}
want_ca = want_dict.get("ca_info") or []
if is_dict_element_present(have_location_type, "civic_based"):
have_dict = have_location_type.get("civic_based") or {}
have_ca = have_dict.get("ca_info") or []
if want_dict["country_code"] != have_dict["country_code"]:
commands.append(
self._compute_command(
set_cmd + location_type,
"country-code",
str(want_dict["country_code"]),
)
)
else:
commands.append(
self._compute_command(
set_cmd + location_type,
"country-code",
str(want_dict["country_code"]),
)
)
commands.extend(self._add_civic_address(name, want_ca, have_ca))
elif want_location_type["elin"]:
location_type = "elin"
if is_dict_element_present(have_location_type, "elin"):
if want_location_type.get("elin") != have_location_type.get(
"elin"
):
commands.append(
self._compute_command(
set_cmd + location_type,
value=str(want_location_type["elin"]),
)
)
else:
commands.append(
self._compute_command(
set_cmd + location_type,
value=str(want_location_type["elin"]),
)
)
return commands
def _update_location(self, name, want_item, have_item):
commands = []
del_cmd = name + " location"
want_location_type = want_item.get("location") or {}
have_location_type = have_item.get("location") or {}
if want_location_type["coordinate_based"]:
want_dict = want_location_type.get("coordinate_based") or {}
if is_dict_element_present(have_location_type, "coordinate_based"):
have_dict = have_location_type.get("coordinate_based") or {}
location_type = "coordinate-based"
for key, value in iteritems(have_dict):
only_in_have = key_value_in_dict(key, value, want_dict)
if not only_in_have:
commands.append(
self._compute_command(
del_cmd + location_type, key, str(value), True
)
)
else:
commands.append(self._compute_command(del_cmd, remove=True))
elif want_location_type["civic_based"]:
want_dict = want_location_type.get("civic_based") or {}
want_ca = want_dict.get("ca_info") or []
if is_dict_element_present(have_location_type, "civic_based"):
have_dict = have_location_type.get("civic_based") or {}
have_ca = have_dict.get("ca_info")
commands.extend(
self._update_civic_address(name, want_ca, have_ca)
)
else:
commands.append(self._compute_command(del_cmd, remove=True))
else:
if is_dict_element_present(have_location_type, "elin"):
if want_location_type.get("elin") != have_location_type.get(
"elin"
):
commands.append(
self._compute_command(del_cmd, remove=True)
)
else:
commands.append(self._compute_command(del_cmd, remove=True))
return commands
def _add_civic_address(self, name, want, have):
commands = []
for item in want:
ca_type = item["ca_type"]
ca_value = item["ca_value"]
obj_in_have = search_dict_tv_in_list(
ca_type, ca_value, have, "ca_type", "ca_value"
)
if not obj_in_have:
commands.append(
self._compute_command(
key=name + " location civic-based ca-type",
attrib=str(ca_type) + " ca-value",
value=ca_value,
)
)
return commands
def _update_civic_address(self, name, want, have):
commands = []
for item in have:
ca_type = item["ca_type"]
ca_value = item["ca_value"]
in_want = search_dict_tv_in_list(
ca_type, ca_value, want, "ca_type", "ca_value"
)
if not in_want:
commands.append(
self._compute_command(
name,
"location civic-based ca-type",
str(ca_type),
remove=True,
)
)
return commands
def _compute_command(self, key, attrib=None, value=None, remove=False):
if remove:
cmd = "delete service lldp interface "
else:
cmd = "set service lldp interface "
cmd += key
if attrib:
cmd += " " + attrib
if value:
cmd += " '" + value + "'"
return cmd
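        # Illustrative examples of the composed commands (hypothetical
        # interface names, derived from the composition rules above):
        #   _compute_command("eth0")
        #       -> "set service lldp interface eth0"
        #   _compute_command("eth0", value="disable", remove=True)
        #       -> "delete service lldp interface eth0 'disable'"
        #   _compute_command("eth1 location elin", value="0000000911")
        #       -> "set service lldp interface eth1 location elin '0000000911'"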
| gpl-3.0 | -6,106,193,386,870,510,000 | 35.417808 | 96 | 0.519967 | false | 4.470572 | true | false | false |
mfherbst/spack | var/spack/repos/builtin/packages/votca-tools/package.py | 2 | 2195 | ##############################################################################
# Copyright (c) 2017-2018, The VOTCA Development Team (http://www.votca.org)
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class VotcaTools(CMakePackage):
"""Versatile Object-oriented Toolkit for Coarse-graining
Applications (VOTCA) is a package intended to reduce the amount of
routine work when doing systematic coarse-graining of various
systems. The core is written in C++.
This package contains the basic tools library of VOTCA.
"""
homepage = "http://www.votca.org"
url = "https://github.com/votca/tools/tarball/v1.4"
git = "https://github.com/votca/tools.git"
version('develop', branch='master')
version('1.4', 'cd47868e9f28e2c7b9d01f95aa0185ca')
version('1.4.1', '3176b72f8a41ec053cc740a5398e7dc4')
depends_on("[email protected]:", type='build')
depends_on("expat")
depends_on("fftw")
depends_on("gsl", when="@:1.4.9999")
depends_on("[email protected]:", when="@1.5:")
depends_on("boost")
depends_on("sqlite")
def cmake_args(self):
args = [
'-DWITH_RC_FILES=OFF'
]
return args
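    # A minimal usage sketch (standard Spack CLI; the spec below is an
    # assumption, adjust to your environment):
    #   spack install votca-tools@1.4.1
    #   spack load votca-tools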
| lgpl-2.1 | 4,780,819,878,848,649,000 | 38.196429 | 78 | 0.64738 | false | 3.658333 | false | false | false |
HoldYourBreath/Library | tests/books/example_books.py | 1 | 1761 | import copy
book1 = {'isbn': 1234,
'title': 'The book',
'authors': ['Bob Author', 'Helio Author'],
'pages': 500,
'format': 'Slippery back',
'publisher': 'Crazy dude publishing',
'publication_date': '1820 01 02',
'description': 'a book',
'thumbnail': 'a thumbnail'}
book2 = {'isbn': 1235,
'title': 'Great book',
'authors': ['Jane Author'],
'pages': 123,
'room_id': 2,
'format': 'Sturdy thing',
'publisher': 'Sane gal publishing',
'publication_date': '2016 12 31',
'description': 'Another book',
'thumbnail': 'another thumbnail'}
book3 = {'isbn': 1236,
'title': 'Great Songs',
'authors': ['Jane Author'],
'pages': 100,
'format': 'Sturdy thing',
'publisher': 'Sane gal publishing',
'publication_date': '2000 01 01',
'description':
'A very nice book about songs! All the best artists',
'thumbnail': 'another thumbnail'}
book4 = {'isbn': 1237,
'title': 'Great Poems',
'authors': ['Jane Author'],
'pages': 3,
'format': 'Sturdy thing',
'publisher': 'Sane gal publishing',
'publication_date': '1999 12 31',
'description':
'A very nice book about poems! All the best poets',
'thumbnail': 'another thumbnail'}
def get_book(book, book_id=1, room_id=1):
temp_book = copy.copy(book)
temp_book['book_id'] = book_id
temp_book['room_id'] = room_id
temp_book['loaned'] = False
return temp_book
def get_descriptor(book, num_copies=1):
temp_book = copy.copy(book)
temp_book['number_of_copies'] = num_copies
return temp_book
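# A minimal usage sketch (assumed call sites; book1..book4 are the fixtures
# defined above):
#   payload = get_book(book1, book_id=7, room_id=2)    # adds ids and 'loaned': False
#   listing = get_descriptor(book2, num_copies=3)      # adds 'number_of_copies': 3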
| mit | -1,254,717,766,290,441,200 | 28.847458 | 62 | 0.53833 | false | 3.500994 | false | false | false |
playpauseandstop/rororo | examples/hobotnica/src/hobotnica/views.py | 1 | 2868 | import uuid
from aiohttp import web
from rororo import openapi_context, OperationTableDef
from rororo.openapi.exceptions import ObjectDoesNotExist
from .data import ENVIRONMENT_VARS, GITHUB_REPOSITORIES
from .decorators import login_required
operations = OperationTableDef()
@operations.register
@login_required
async def create_repository(request: web.Request) -> web.Response:
with openapi_context(request) as context:
return web.json_response(
{
**context.data,
"uid": str(uuid.uuid4()),
"jobs": ["test", "deploy"],
"status": "cloning",
},
status=201,
)
@operations.register
async def list_all_references(request: web.Request) -> web.Response:
return web.json_response({"default_env": {"CI": "1", "HOBOTNICA": "1"}})
@operations.register
@login_required
async def list_favorites_repositories(request: web.Request) -> web.Response:
with openapi_context(request) as context:
return web.json_response(
status=204, headers={"X-Order": context.parameters.query["order"]}
)
@operations.register
@login_required
async def list_owner_repositories(request: web.Request) -> web.Response:
with openapi_context(request) as context:
username = context.security["basic"].login
return web.json_response(
list((GITHUB_REPOSITORIES.get(username) or {}).values())
)
@operations.register
@login_required
async def list_repositories(request: web.Request) -> web.Response:
with openapi_context(request) as context:
username = context.parameters.header["X-GitHub-Username"]
return web.json_response(
list((GITHUB_REPOSITORIES.get(username) or {}).values())
)
@operations.register
@login_required
async def retrieve_owner_env(request: web.Request) -> web.Response:
with openapi_context(request) as context:
owner = context.parameters.path["owner"]
return web.json_response(ENVIRONMENT_VARS.get(owner) or {})
@operations.register
@login_required
async def retrieve_repository(request: web.Request) -> web.Response:
with openapi_context(request) as context:
owner = context.parameters.path["owner"]
repository = (GITHUB_REPOSITORIES.get(owner) or {}).get(
context.parameters.path["name"]
)
if not repository:
raise ObjectDoesNotExist("Repository")
return web.json_response(repository)
@operations.register
@login_required
async def retrieve_repository_env(request: web.Request) -> web.Response:
with openapi_context(request) as context:
owner = context.parameters.path["owner"]
name = context.parameters.path["name"]
env_key = f"{owner}/{name}"
return web.json_response(ENVIRONMENT_VARS.get(env_key) or {})
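# A minimal wiring sketch for these handlers (assumptions: the schema path
# "openapi.yaml" and the setup_openapi(...) call shape; check the rororo docs
# for the exact signature):
#   from aiohttp import web
#   from rororo import setup_openapi
#   app = setup_openapi(web.Application(), "openapi.yaml", operations)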
| bsd-3-clause | 3,272,009,733,374,445,000 | 29.510638 | 78 | 0.670153 | false | 3.896739 | false | false | false |
dscrobonia/sawyer | analyzers/response_size_centroidmedian.py | 1 | 3343 | import json
import logging
import matplotlib.pyplot as plt
import numpy as np
import scipy.cluster.hierarchy as hac
log = logging.getLogger(__name__)
def analyze(data):
# Convert this to python data for us to be able to run ML algorithms
json_to_python = json.loads(data)
per_size = dict() # IP-Response size
hostlist = dict()
# Data pre-processing here:
for y in json_to_python:
hostlist[y['HOST']] = 1
if y['HOST'] in per_size:
per_size[y['HOST']].append(int(y['SIZE']))
else:
per_size[y['HOST']] = [int(y['SIZE'])]
##Data pre-processing ends here
log.debug(
"*** Printing Input to analysis - 4 (1): K-means on IP and average response size ****"
)
#####*****SIZE******####
#### Analysis #4 (1): IP address - Size of response received feature
X = np.array([[0.00, 0.00]])
for x in hostlist:
avg_size = mean(per_size[x])
log.debug(x + ": " + str(avg_size))
y = x.split(".")
ip = ""
for z in range(4):
l = len(y[z])
l = 3 - l
if (l > 0):
zero = ""
for t in range(3 - len(y[z])):
zero = zero + "0"
y[z] = zero + y[z]
ip = ip + y[z]
# log.debug( str(float(float(ip)/1000)) + ": " + str(avg_size))
le = [float(float(ip) / 1000), avg_size]
X = np.vstack([X, le])
log.info(
"******** Printing Analysis #4: IP-Address and Response Size received: Centroid and Median Hierarchical Clustering ********\nCheck 'test-centroid-median.png' for more info!"
)
# print kmeans.labels_
### Analysis 4 (9): ###### CENTROID AND MEDIAN HAC*****#########
fig, axes23 = plt.subplots(2, 3)
for method, axes in zip(['centroid', 'median'], axes23):
z = hac.linkage(X, method=method)
# Plotting
axes[0].plot(range(1, len(z) + 1), z[::-1, 2])
knee = np.diff(z[::-1, 2], 2)
axes[0].plot(range(2, len(z)), knee)
num_clust1 = knee.argmax() + 2
knee[knee.argmax()] = 0
num_clust2 = knee.argmax() + 2
axes[0].text(num_clust1, z[::-1, 2][num_clust1 - 1],
'possible\n<- knee point')
part1 = hac.fcluster(z, num_clust1, 'maxclust')
part2 = hac.fcluster(z, num_clust2, 'maxclust')
clr = [
'#2200CC', '#D9007E', '#FF6600', '#FFCC00', '#ACE600', '#0099CC',
'#8900CC', '#FF0000', '#FF9900', '#FFFF00', '#00CC01', '#0055CC'
]
for part, ax in zip([part1, part2], axes[1:]):
for cluster in set(part):
ax.scatter(
X[part == cluster, 0],
X[part == cluster, 1],
color=clr[cluster % 10])
m = '\n(method: {})'.format(method)
plt.setp(
axes[0],
title='Screeplot{}'.format(m),
xlabel='partition',
ylabel='{}\ncluster distance'.format(m))
plt.setp(axes[1], title='{} Clusters'.format(num_clust1))
plt.setp(axes[2], title='{} Clusters'.format(num_clust2))
plt.tight_layout()
##plt.show()
plt.savefig('test-centroid-median.png')
def mean(numbers):
return float(sum(numbers)) / max(len(numbers), 1)
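# Worked example of the IP encoding used above: each octet is zero-padded to
# three digits and concatenated, then divided by 1000, so
#   "10.2.30.4" -> "010" + "002" + "030" + "004" -> 10002030004 -> 10002030.004
# which keeps numerically close addresses close together for clustering.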
| mit | 6,647,522,791,595,655,000 | 27.57265 | 185 | 0.504038 | false | 3.296844 | false | false | false |
JoseSalgado1024/ckanext-needupdate | ckanext/needupdate/plugin.py | 1 | 2104 | # encoding: utf-8
"""
Implementation of the NeedUpdate plugin.
"""
import ckan.plugins as plugins
import ckan.plugins.toolkit as toolkit
import logging
from controller import NeedupdateController
# logs
logger = logging.getLogger(__name__)
def get_plugins_list():
"""
    Returns the list of plugins installed on the platform.
Args:
- None.
Returns:
- list()
"""
c = NeedupdateController()
return c.get_list_of_repos()
class NeedupdatePlugin(plugins.SingletonPlugin):
plugins.implements(plugins.IConfigurer)
plugins.implements(plugins.interfaces.IRoutes, inherit=True)
plugins.implements(plugins.IResourceView, inherit=True)
plugins.implements(plugins.ITemplateHelpers)
def info(self):
return {'name': 'NeedUpdate',
'title': 'NU',
'icon': 'file-text',
'default_title': 'NU',
}
def update_config(self, config_):
toolkit.add_ckan_admin_tab(config_, 'ext_status_dashboard', 'My Plugins')
toolkit.add_template_directory(config_, 'templates')
toolkit.add_public_directory(config_, 'public')
toolkit.add_resource('fanstatic', 'needupdate')
def before_map(self, m):
return m
def after_map(self, m):
m.connect('ext_status_api',
'/ext_status.json',
controller='ckanext.needupdate.plugin:NeedupdateController',
action='ext_status')
m.connect('ext_status_dashboard',
'/my_extensions',
controller='ckanext.needupdate.plugin:NeedupdateController',
action='dashboard_ext')
return m
def get_helpers(self):
"""
        Register the "get_plugins_list()" helper.
Returns:
            - list(). The list of installed plugins.
"""
# Template helper function names should begin with the name of the
# extension they belong to, to avoid clashing with functions from
# other extensions.
return {'needupdate_get_plugins_list': get_plugins_list}
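        # In a Jinja2 template the helper is then available via the CKAN
        # helper namespace, e.g.:
        #   {{ h.needupdate_get_plugins_list() }}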
| mit | -2,441,623,942,506,500,000 | 28.208333 | 81 | 0.61436 | false | 4.099415 | true | false | false |
jamesstott/layers_by_field | layersbyfielddialogbase.py | 1 | 3849 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'layersbyfielddialogbase.ui'
#
# Created: Fri Mar 28 11:23:31 2014
# by: PyQt4 UI code generator 4.8.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_LayersByFieldDialog(object):
def setupUi(self, LayersByFieldDialog):
LayersByFieldDialog.setObjectName(_fromUtf8("LayersByFieldDialog"))
LayersByFieldDialog.resize(307, 232)
LayersByFieldDialog.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.verticalLayout = QtGui.QVBoxLayout(LayersByFieldDialog)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.label = QtGui.QLabel(LayersByFieldDialog)
self.label.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.label.setObjectName(_fromUtf8("label"))
self.verticalLayout.addWidget(self.label)
self.inputLayerCombo = QtGui.QComboBox(LayersByFieldDialog)
self.inputLayerCombo.setObjectName(_fromUtf8("inputLayerCombo"))
self.verticalLayout.addWidget(self.inputLayerCombo)
self.label_2 = QtGui.QLabel(LayersByFieldDialog)
self.label_2.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.label_2.setObjectName(_fromUtf8("label_2"))
self.verticalLayout.addWidget(self.label_2)
self.splitFieldCombo = QtGui.QComboBox(LayersByFieldDialog)
self.splitFieldCombo.setObjectName(_fromUtf8("splitFieldCombo"))
self.verticalLayout.addWidget(self.splitFieldCombo)
self.label_3 = QtGui.QLabel(LayersByFieldDialog)
self.label_3.setEnabled(False)
self.label_3.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.label_3.setObjectName(_fromUtf8("label_3"))
self.label_3.setVisible(False)
self.verticalLayout.addWidget(self.label_3)
self.progressBar = QtGui.QProgressBar(LayersByFieldDialog)
self.progressBar.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.progressBar.setProperty(_fromUtf8("value"), 0)
self.progressBar.setObjectName(_fromUtf8("progressBar"))
self.verticalLayout.addWidget(self.progressBar)
self.buttonBox = QtGui.QDialogButtonBox(LayersByFieldDialog)
self.buttonBox.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedKingdom))
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Close|QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
self.verticalLayout.addWidget(self.buttonBox)
self.retranslateUi(LayersByFieldDialog)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), LayersByFieldDialog.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), LayersByFieldDialog.reject)
QtCore.QMetaObject.connectSlotsByName(LayersByFieldDialog)
def retranslateUi(self, LayersByFieldDialog):
LayersByFieldDialog.setWindowTitle(QtGui.QApplication.translate("LayersByFieldDialog", "Layers from field", None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("LayersByFieldDialog", "Input layer", None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setText(QtGui.QApplication.translate("LayersByFieldDialog", "Split by field", None, QtGui.QApplication.UnicodeUTF8))
self.label_3.setText(QtGui.QApplication.translate("LayersByFieldDialog", "Save to", None, QtGui.QApplication.UnicodeUTF8))
| gpl-2.0 | -1,596,834,043,230,696,200 | 57.318182 | 154 | 0.747986 | false | 3.758789 | false | false | false |
regnart-tech-club/programming-concepts | course-2:combining-building-blocks/subject-2:functions/topic-3:Functions calling themselves/lesson-3.1:Fibonacci solution.py | 1 | 1098 | # The Fibonacci sequence starts with 0 and 1. Subsequent terms are then gotten by adding the previous two
# such that the first seven terms are: 0, 1, 1, 2, 3, 5, 8.
# Using recursion, write a function that, given an integer n, returns the nth Fibonacci number.
# For example:
# given n = 0, the function should return 0
# if n = 1, it should return 1
# if n = 2, it should return 1
# if n = 4, it should return 3
# if n = 8, it should return 21
# Be sure to write automated tests for your solution.
# Hint:
# fibonacci(0) = 0
# fibonacci(1) = 1
# fibonacci(n) = fibonacci(n - 1) + fibonacci(n - 2)
def fibonacci(n):
if n == 0 or n == 1:
return n
else:
return fibonacci(n - 2) + fibonacci(n - 1)
def test_fibonacci_equals(arg, expected):
observed = fibonacci(arg)
if observed == expected:
print('Thumbs up.')
else:
print('Thumbs down. Expected %i but got %i' % (expected, observed))
test_fibonacci_equals(0, 0)
test_fibonacci_equals(1, 1)
test_fibonacci_equals(2, 1)
test_fibonacci_equals(3, 2)
test_fibonacci_equals(4, 3)
test_fibonacci_equals(5, 5)
test_fibonacci_equals(6, 8)
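# A possible follow-up (not part of the exercise): the naive recursion above
# recomputes subproblems, so it runs in exponential time. A memoized sketch
# with the same recurrence:
#
#   def fibonacci_memo(n, cache={0: 0, 1: 1}):
#       if n not in cache:
#           cache[n] = fibonacci_memo(n - 2) + fibonacci_memo(n - 1)
#       return cache[n]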
| apache-2.0 | 2,524,405,582,817,933,000 | 28.675676 | 105 | 0.688525 | false | 2.524138 | true | false | false |
CorrosiveKid/django_project_template | project_template/settings/common.py | 1 | 2909 | """
Django settings for project_template project.
Generated by 'django-admin startproject' using Django 1.9.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '<SOME-SECRET-KEY>'
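# In production the key is typically read from the environment instead, e.g.
# (assumed variable name):
#   SECRET_KEY = os.environ['DJANGO_SECRET_KEY']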
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'project_template.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'project_template.wsgi.application'
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.'
'password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.'
'password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.'
'password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.'
'password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
| mit | -2,922,128,141,967,905,000 | 25.688073 | 78 | 0.678927 | false | 3.753548 | false | false | false |
openstack/networking-l2gw | networking_l2gw/extensions/l2gatewayconnection.py | 1 | 3854 | # Copyright 2015 OpenStack Foundation
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from neutron_lib.api import extensions
from neutron.api.v2 import resource_helper
from networking_l2gw.services.l2gateway.common import constants
RESOURCE_ATTRIBUTE_MAP = {
constants.L2_GATEWAYS_CONNECTION: {
'id': {'allow_post': False, 'allow_put': False,
'is_visible': True},
'l2_gateway_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:string': None},
'is_visible': True, 'default': ''},
'network_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:string': None},
'is_visible': True},
'segmentation_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:string': None},
'is_visible': True, 'default': ''},
'tenant_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True}
},
}
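# An illustrative POST body accepted under this attribute map (values are
# placeholders; the singular wrapper key follows Neutron's convention):
#   {"l2_gateway_connection": {"l2_gateway_id": "<gw-uuid>",
#                              "network_id": "<net-uuid>",
#                              "segmentation_id": "100",
#                              "tenant_id": "<tenant-uuid>"}}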
class L2gatewayconnection(extensions.ExtensionDescriptor):
"""API extension for Layer-2 Gateway connection support."""
@classmethod
def get_name(cls):
return "L2 Gateway connection"
@classmethod
def get_alias(cls):
return "l2-gateway-connection"
@classmethod
def get_description(cls):
return "Connects Neutron networks with external networks at layer 2."
@classmethod
def get_updated(cls):
return "2014-01-01T00:00:00-00:00"
@classmethod
def get_resources(cls):
"""Returns Ext Resources."""
mem_actions = {}
plural_mappings = resource_helper.build_plural_mappings(
{}, RESOURCE_ATTRIBUTE_MAP)
resources = resource_helper.build_resource_info(plural_mappings,
RESOURCE_ATTRIBUTE_MAP,
constants.L2GW,
action_map=mem_actions,
register_quota=True,
translate_name=True)
return resources
def get_extended_resources(self, version):
if version == "2.0":
return RESOURCE_ATTRIBUTE_MAP
else:
return {}
class L2GatewayConnectionPluginBase(object):
@abc.abstractmethod
def delete_l2_gateway_connection(self, context, l2_gateway_id,
network_mapping_list):
pass
@abc.abstractmethod
def create_l2_gateway_connection(self, context, l2_gateway_id,
network_mapping_list):
pass
@abc.abstractmethod
def get_l2_gateway_connections(self, context, filters=None,
fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
pass
@abc.abstractmethod
def get_l2_gateway_connection(self, context, id, fields=None):
pass
| apache-2.0 | 4,207,975,088,911,122,000 | 34.036364 | 79 | 0.557602 | false | 4.539458 | false | false | false |
glob3mobile/qgis-g3m | QGIS-Plugin/webappbuilder/ext-libs/geoserver/layergroup.py | 1 | 3499 | from geoserver.support import ResourceInfo, bbox, write_bbox, \
write_string, xml_property, url
def _maybe_text(n):
if n is None:
return None
else:
return n.text
def _layer_list(node, element):
if node is not None:
return [_maybe_text(n.find("name")) for n in node.findall(element)]
def _style_list(node):
if node is not None:
return [_maybe_text(n.find("name")) for n in node.findall("style")]
def _write_layers(builder, layers, parent, element, attributes):
builder.start(parent, dict())
for l in layers:
builder.start(element, attributes or dict())
if l is not None:
builder.start("name", dict())
builder.data(l)
builder.end("name")
builder.end(element)
builder.end(parent)
def _write_styles(builder, styles):
builder.start("styles", dict())
for s in styles:
builder.start("style", dict())
if s is not None:
builder.start("name", dict())
builder.data(s)
builder.end("name")
builder.end("style")
builder.end("styles")
class LayerGroup(ResourceInfo):
"""
Represents a layer group in geoserver
"""
resource_type = "layerGroup"
save_method = "PUT"
def __init__(self, catalog, name):
super(LayerGroup, self).__init__()
assert isinstance(name, basestring)
self.catalog = catalog
self.name = name
# the XML format changed in 2.3.x - the element listing all the layers
# and the entries themselves have changed
if self.catalog.gsversion() == "2.2.x":
parent, element, attributes = "layers", "layer", None
else:
parent, element, attributes = "publishables", "published", {'type':'layer'}
self._layer_parent = parent
self._layer_element = element
self._layer_attributes = attributes
self.writers = dict(
name = write_string("name"),
styles = _write_styles,
layers = lambda b,l: _write_layers(b, l, parent, element, attributes),
bounds = write_bbox("bounds")
)
@property
def href(self):
return url(self.catalog.service_url, ["layergroups", self.name + ".xml"])
styles = xml_property("styles", _style_list)
bounds = xml_property("bounds", bbox)
def _layers_getter(self):
if "layers" in self.dirty:
return self.dirty["layers"]
else:
if self.dom is None:
self.fetch()
node = self.dom.find(self._layer_parent)
return _layer_list(node, self._layer_element) if node is not None else None
def _layers_setter(self, value):
self.dirty["layers"] = value
def _layers_delete(self):
self.dirty["layers"] = None
layers = property(_layers_getter, _layers_setter, _layers_delete)
def __str__(self):
return "<LayerGroup %s>" % self.name
__repr__ = __str__
class UnsavedLayerGroup(LayerGroup):
save_method = "POST"
def __init__(self, catalog, name, layers, styles, bounds):
super(UnsavedLayerGroup, self).__init__(catalog, name)
bounds = bounds if bounds is not None else ("-180","180","-90","90","EPSG:4326")
self.dirty.update(name = name, layers = layers, styles = styles, bounds = bounds)
@property
def href(self):
return "%s/layergroups?name=%s" % (self.catalog.service_url, self.name)
| epl-1.0 | -2,319,838,451,695,568,000 | 30.809091 | 89 | 0.587311 | false | 3.892102 | false | false | false |
maciejkula/glove-python | glove/metrics/accuracy.py | 5 | 3242 | try:
from itertools import izip
except ImportError:
izip = zip
import numpy as np
from .accuracy_cython import compute_rank_violations
def read_analogy_file(filename):
"""
Read the analogy task test set from a file.
"""
section = None
with open(filename, 'r') as questions_file:
for line in questions_file:
if line.startswith(':'):
section = line[2:].replace('\n', '')
continue
else:
words = line.replace('\n', '').split(' ')
yield section, words
def construct_analogy_test_set(test_examples, dictionary, ignore_missing=False):
"""
Construct the analogy test set by mapping the words to their
word vector ids.
Arguments:
- test_examples: iterable of 4-word iterables
    - dictionary: a mapping from words to ids
- boolean ignore_missing: if True, words in the test set
that are not in the dictionary
                              will be dropped.
Returns:
- a N by 4 numpy matrix.
"""
test = []
for example in test_examples:
try:
test.append([dictionary[word] for word in example])
except KeyError:
if ignore_missing:
pass
else:
raise
try:
test = np.array(test, dtype=np.int32)
except ValueError as e:
# This should use raise ... from ... in Python 3.
raise ValueError('Each row of the test set should contain '
'4 integer word ids', e)
return test
def analogy_rank_score(analogies, word_vectors, no_threads=1):
"""
Calculate the analogy rank score for the given set of analogies.
A rank of zero denotes a perfect score; with random word vectors
we would expect a rank of 0.5.
Arguments:
- analogies: a numpy array holding the ids of the words in the analogy tasks,
as constructed by `construct_analogy_test_set`.
- word_vectors: numpy array holding the word vectors to use.
    - no_threads: number of parallel threads to use in the calculation.
Returns:
- ranks: a numpy array holding the normalized rank of the target word
in each analogy task. Rank 0 means that the target words was
returned first; rank 1 means it was returned last.
"""
# The mean of the vectors for the
# second, third, and the negative of
# the first word.
input_vectors = (word_vectors[analogies[:, 1]]
+ word_vectors[analogies[:, 2]]
- word_vectors[analogies[:, 0]])
word_vector_norms = np.linalg.norm(word_vectors,
axis=1)
# Pre-allocate the array storing the rank violations
rank_violations = np.zeros(input_vectors.shape[0], dtype=np.int32)
compute_rank_violations(word_vectors,
word_vector_norms,
input_vectors,
analogies[:, 3],
analogies,
rank_violations,
no_threads)
return rank_violations / float(word_vectors.shape[0])
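# A minimal end-to-end sketch (assumed file name and a hypothetical
# dictionary/word_vectors pair from a trained model):
#   pairs = read_analogy_file("questions-words.txt")      # yields (section, words)
#   test = construct_analogy_test_set(
#       (words for _section, words in pairs), dictionary, ignore_missing=True)
#   ranks = analogy_rank_score(test, word_vectors, no_threads=4)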
| apache-2.0 | -8,236,008,879,430,348,000 | 29.87619 | 81 | 0.570635 | false | 4.459422 | true | false | false |
sean-tan-columbia/aerospike-async-client | aerospike_client.py | 1 | 9857 |
# Copyright (c) 2015 Jinxiong Tan
# GNU General Public License
import aerospike as aero
import multiprocessing
import time
import datetime
import sys
# Usage example:
# records = [Record('key_1', {'bin': 'value_1'}), Record('key_2', {'bin': 'value_2'}), Record('key_3', {'bin': 'value_3'})]
# aerospike_client = aerospike_client.AsyncClient([(host_1:port_1), (host_2:port_2)], 'namespace', 'set', 604800)
# success_count, failure_records = aerospike_client.put(records)
class Record():
def __init__(self, key, bins):
"""
:param key: Aerospike key, should be a string
:param bins: Aerospike bins, should be a dictionary
:return: None
"""
if type(bins) is dict:
self.key = key
self.bins = bins
else:
raise TypeError('Wrong types for bins')
class Client(object):
def __init__(self, cluster, namespace, set_name, ttl, retry_limit, logger):
self._cluster = cluster
self._namespace = namespace
self._set_name = set_name
self._ttl = ttl
self._retry_limit = retry_limit
self._logger = logger
def put(self, records):
raise NotImplementedError
def get(self, key):
raise NotImplementedError
def close(self):
raise NotImplementedError
def _log(self, content):
log = '{timestamp}: {content}'.format(timestamp=datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S'), content=content)
if self._logger is None:
print log
else:
self._logger.logging(log)
class SyncClient(Client):
def __init__(self, cluster, namespace, set_name, ttl, retry_limit=3, logger=None):
"""
:param cluster: Aerospike cluster, should have the following format, [(host_1: port_1), (host_2: port_2), ..., (host_n: port_n)]
:param namespace: Aerospike namespace
:param set_name: Aerospike set
:param ttl: time to live for records
:return: None
"""
super(SyncClient, self).__init__(cluster, namespace, set_name, ttl, retry_limit, logger)
self.aerospike_dao = []
for node in cluster:
self.aerospike_dao.append(_AerospikeDao(node, namespace, set_name, ttl))
def put(self, records):
failure_records = []
total = len(records)
put_count = 0
self._log('Loading records to {0}'.format(self._cluster))
for record in records:
if not isinstance(record, Record):
raise Exception('Wrong type for aerospike object')
if put_count % 1000 == 0 and put_count > 0:
self._log('Finished {0}%'.format(int(float(put_count)/total*100)))
for aerospike_dao in self.aerospike_dao:
for attempt in xrange(1 + self._retry_limit):
try:
aerospike_dao.put(record.key, record.bins)
except Exception as e:
print e
else:
break
else:
failure_records.append(record.key)
put_count += 1
self._log('Finished 100%')
return len(records) - len(failure_records), failure_records
def get(self, key):
try:
bins = self.aerospike_dao[0].get(key)
except Exception as e:
print e
return None
else:
return bins
def close(self):
for aerospike_dao in self.aerospike_dao:
aerospike_dao.close()
class AsyncClient(Client):
def __init__(self, cluster, namespace, set_name, ttl, retry_limit=3, logger=None, pool_size=4, queue_size=256):
"""
:param cluster: Aerospike cluster, should have the following format, [(host_1: port_1), (host_2: port_2), ..., (host_n: port_n)]
:param namespace: Aerospike namespace
:param set_name: Aerospike set
:param ttl: time to live for records
:param retry_limit: limit for retrying times for failure records
:param pool_size: number of processes to load records
:param queue_size: the maximum capacity of blocking queue, by default it is set to 256
:return: None
"""
super(AsyncClient, self).__init__(cluster, namespace, set_name, ttl, retry_limit, logger)
self._pool_size = pool_size
self._queue_size = queue_size
self._task_queue = multiprocessing.JoinableQueue()
self._failure_queue = multiprocessing.Queue()
def put(self, records):
"""
:param records: Record object collection
:return: success record count and collection of failure records (after retries)
"""
processors = [_Processor(self._cluster, self._namespace, self._set_name, self._ttl, self._task_queue, self._failure_queue, self._retry_limit) for i in xrange(self._pool_size)]
for processor in processors:
processor.start()
total = len(records)
put_count = 0
self._log('Loading records to {0}'.format(self._cluster))
for record in records:
while True:
if self._task_queue.qsize() < self._queue_size:
break
time.sleep(0.1)
if not isinstance(record, Record):
raise Exception('Wrong type for aerospike object')
if put_count % 1000 == 0 and put_count > 0:
self._log('Finished {0}%'.format(int(float(put_count)/total*100)))
for node_index in xrange(len(self._cluster)):
self._task_queue.put(_Put(node_index, record))
put_count += 1
self._log('Finished 100%')
self._task_queue.join()
for i in xrange(self._pool_size):
self._task_queue.put(None)
self._failure_queue.put(None)
failure_records = []
while True:
failure_record = self._failure_queue.get()
if failure_record is not None:
failure_records.append(failure_record)
else:
break
for processor in processors:
processor.join()
processor.terminate()
return len(records) * len(self._cluster) - len(failure_records), failure_records
def get(self, key):
pass
def close(self):
pass
class _Processor(multiprocessing.Process):
def __init__(self, cluster, namespace, set_name, ttl, task_queue, failure_queue, retry_limit):
"""
:param task_queue: process-shared queue to contain tasks
:param failure_queue: process-shared queue to contain failure records after retries
:return: None
"""
super(_Processor, self).__init__()
self._task_queue = task_queue
self._failure_queue = failure_queue
self._retry_limit = retry_limit
self.aerospike_dao = []
for node in cluster:
self.aerospike_dao.append(_AerospikeDao(node, namespace, set_name, ttl))
def run(self):
while True:
next_task = self._task_queue.get()
if next_task is None:
self._task_queue.task_done()
break
result = next_task(self)
if (not result) and next_task.retry_counter < self._retry_limit:
next_task.retry()
self._task_queue.put(next_task)
elif not result:
self._failure_queue.put(next_task.record.key)
# task_done() should be called after appending records to failure queue since processes should be blocked until all failure records are captured
self._task_queue.task_done()
return
def close(self):
for dao in self.aerospike_dao:
dao.close()
def __del__(self):
self.close()
class _Put():
def __init__(self, dao_index, record):
"""
:param dao_index: unique index for each node's aerospike-dao
:param record: record to put
:return: None
"""
self.dao_index = dao_index
self.record = record
self.retry_counter = 0
def retry(self):
self.retry_counter += 1
def __call__(self, processor):
return processor.aerospike_dao[self.dao_index].put(self.record.key, self.record.bins)
def __str__(self):
return 'key={key},bins={bins}'.format(key=self.record.key, bins=self.record.bins)
class _AerospikeDao():
def __init__(self, host, namespace, set_name, ttl):
"""
:param host:
:param namespace:
:param set_name:
:param ttl:
:return:
"""
self._namespace = namespace
self._set_name = set_name
self._ttl = ttl
for attempt in xrange(3):
try:
self._aerospike_client = aero.client({'hosts': [host]}).connect()
except Exception as e:
print e
else:
break
else:
raise Exception('[Error] 3 failed attempts for connecting to {host}'.format(host=host))
def put(self, key, bins):
"""
:param key:
:param bins:
:return:
"""
try:
self._aerospike_client.put((self._namespace, self._set_name, key), bins, meta={'ttl': self._ttl})
except Exception as e:
print e
return False
else:
return True
def get(self, key):
"""
:param key:
:return:
"""
try:
(key, meta, bins) = self._aerospike_client.get((self._namespace, self._set_name, key))
except Exception as e:
print e
return None
else:
return bins
def close(self):
self._aerospike_client.close()
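# Failure-handling sketch (hypothetical caller): keys that still fail after
# `retry_limit` attempts per node come back in the second return value.
#   ok_count, failed_keys = client.put(records)
#   for key in failed_keys:
#       pass  # e.g. re-queue or log the key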
| gpl-2.0 | 2,146,844,727,399,308,500 | 31.639073 | 183 | 0.563153 | false | 4.07651 | false | false | false |
emilio/servo | components/style/properties/build.py | 4 | 4779 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
import json
import os.path
import re
import sys
BASE = os.path.dirname(__file__.replace('\\', '/'))
sys.path.insert(0, os.path.join(BASE, "Mako-0.9.1.zip"))
sys.path.insert(0, BASE) # For importing `data.py`
from mako import exceptions
from mako.lookup import TemplateLookup
from mako.template import Template
import data
RE_PYTHON_ADDR = re.compile(r'<.+? object at 0x[0-9a-fA-F]+>')
OUT_DIR = os.environ.get("OUT_DIR", "")
STYLE_STRUCT_LIST = [
"background",
"border",
"box",
"column",
"counters",
"effects",
"font",
"inherited_box",
"inherited_table",
"inherited_text",
"inherited_ui",
"inherited_svg",
"list",
"margin",
"outline",
"padding",
"position",
"table",
"text",
"ui",
"svg",
"xul",
]
def main():
usage = ("Usage: %s [ servo | gecko ] [ style-crate | geckolib <template> | html ]" %
sys.argv[0])
if len(sys.argv) < 3:
abort(usage)
product = sys.argv[1]
output = sys.argv[2]
if product not in ["servo", "gecko"] or output not in ["style-crate", "geckolib", "html"]:
abort(usage)
properties = data.PropertiesData(product=product)
files = {}
for kind in ["longhands", "shorthands"]:
files[kind] = {}
for struct in STYLE_STRUCT_LIST:
file_name = os.path.join(BASE, kind, "{}.mako.rs".format(struct))
if kind == "shorthands" and not os.path.exists(file_name):
files[kind][struct] = ""
continue
files[kind][struct] = render(
file_name,
product=product,
data=properties,
)
properties_template = os.path.join(BASE, "properties.mako.rs")
files["properties"] = render(
properties_template,
product=product,
data=properties,
__file__=properties_template,
OUT_DIR=OUT_DIR,
)
if output == "style-crate":
write(OUT_DIR, "properties.rs", files["properties"])
for kind in ["longhands", "shorthands"]:
for struct in files[kind]:
write(
os.path.join(OUT_DIR, kind),
"{}.rs".format(struct),
files[kind][struct],
)
if product == "gecko":
template = os.path.join(BASE, "gecko.mako.rs")
rust = render(template, data=properties)
write(OUT_DIR, "gecko_properties.rs", rust)
elif output == "geckolib":
if len(sys.argv) < 4:
abort(usage)
template = sys.argv[3]
header = render(template, data=properties)
sys.stdout.write(header)
elif output == "html":
write_html(properties)
def abort(message):
sys.stderr.write(message + b"\n")
sys.exit(1)
def render(filename, **context):
try:
lookup = TemplateLookup(directories=[BASE],
input_encoding="utf8",
strict_undefined=True)
template = Template(open(filename, "rb").read(),
filename=filename,
input_encoding="utf8",
lookup=lookup,
strict_undefined=True)
# Uncomment to debug generated Python code:
# write("/tmp", "mako_%s.py" % os.path.basename(filename), template.code)
return template.render(**context).encode("utf8")
except Exception:
# Uncomment to see a traceback in generated Python code:
# raise
abort(exceptions.text_error_template().render().encode("utf8"))
def write(directory, filename, content):
if not os.path.exists(directory):
os.makedirs(directory)
full_path = os.path.join(directory, filename)
open(full_path, "wb").write(content)
python_addr = RE_PYTHON_ADDR.search(content)
if python_addr:
abort("Found \"{}\" in {} ({})".format(python_addr.group(0), filename, full_path))
def write_html(properties):
properties = dict(
(p.name, {
"flag": p.servo_pref,
"shorthand": hasattr(p, "sub_properties")
})
for p in properties.longhands + properties.shorthands
)
doc_servo = os.path.join(BASE, "..", "..", "..", "target", "doc", "servo")
html = render(os.path.join(BASE, "properties.html.mako"), properties=properties)
write(doc_servo, "css-properties.html", html)
write(doc_servo, "css-properties.json", json.dumps(properties, indent=4))
if __name__ == "__main__":
main()
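# Example invocations (mirroring the usage string in main()):
#   python build.py servo style-crate
#   python build.py gecko geckolib /path/to/template.mako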
| mpl-2.0 | -956,424,572,775,642,800 | 29.43949 | 94 | 0.561833 | false | 3.673328 | false | false | false |
Huyuwei/tvm | nnvm/python/nnvm/compiler/graph_util.py | 2 | 5031 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name
"""Utility function to get information from graph."""
from __future__ import absolute_import as _abs
import tvm
from . import graph_attr
from ..graph import create
from ..symbol import Group, ones_like
def infer_shape(graph, **shape):
"""Infer the shape given the shape of inputs.
Parameters
----------
graph : Graph
The graph to perform shape inference from
shape : dict of str to tuple
The specific input shape.
Returns
-------
in_shape : list of tuple
Shape of inputs
out_shape: list of tuple
Shape of outputs
"""
graph = graph_attr.set_shape_inputs(graph, shape)
graph = graph.apply("InferShape")
shape = graph.json_attr("shape")
index = graph.index
input_shape = [shape[index.entry_id(x)] for x in index.input_names]
output_shape = [shape[index.entry_id(x)] for x in index.output_entries]
return input_shape, output_shape
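# A minimal usage sketch (assumes an existing nnvm symbol `sym` with an input
# named "data"):
#   g = create(sym)
#   in_shapes, out_shapes = infer_shape(g, data=(1, 3, 224, 224))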
def infer_dtype(graph, **dtype):
"""Infer the type given the typeS of inputs.
Parameters
----------
graph : Graph
The graph to perform type inference from
dtype : dict of str to dtype
The specific input data type.
Returns
-------
in_dtype : list of tuple
Dtype of inputs
out_dtype: list of tuple
Dtype of outputs
"""
graph = graph_attr.set_dtype_inputs(graph, dtype)
graph = graph.apply("InferType")
dtype = graph.json_attr("dtype")
index = graph.index
input_dtype = [graph_attr.TCODE_TO_DTYPE[dtype[index.entry_id(x)]]
for x in index.input_names]
output_dtype = [graph_attr.TCODE_TO_DTYPE[dtype[index.entry_id(x)]]
for x in index.output_entries]
return input_dtype, output_dtype
_deep_compare = tvm.get_global_func("nnvm.graph.DeepCompare")
def check_graph_equal(grapha, graphb, compare_variable_attrs=False):
"""Check if two graphs have equal structure.
Parameters
----------
grapha : Graph
The first graph
graphb : Graph
The second graph
compare_variable_attrs : bool, optional
Whether we want to compare attributes(names) on variables.
Usually it is safe to skip it unless we want input name
to exactly match
Raises
------
ValueError
ValueError is raised with error message when graph not equal
"""
err = _deep_compare(grapha, graphb, compare_variable_attrs)
if err:
raise ValueError("Graph compare error: " + err)
def get_gradient_graph(ys, xs, grad_ys=None):
"""Create gradient graph of ys with respect to xs.
Parameters
----------
ys : Symbol or list of Symbol
Symbols from which the gradient is calculated.
xs : Symbol or list of Symbol
Symbols the gradient respect to.
For group symbol, gradients for all outputs will be calculated.
grad_ys : Symbol or list of Symbol
Head gradients for ys.
Returns
-------
ret : Graph
Generated gradient graph.
"""
if isinstance(ys, list):
ys = Group(ys)
g = create(ys)
g._set_symbol_list_attr('grad_ys', ys)
g._set_symbol_list_attr('grad_xs', xs)
ny = len(ys.list_output_names())
if grad_ys is None:
grad_ys = [ones_like(ys[i]) for i in range(ny)]
g._set_symbol_list_attr('grad_ys_out_grad', grad_ys)
return g.apply('Gradient')
def gradients(ys, xs, grad_ys=None):
"""Create gradient symbol of ys respect to xs.
Parameters
----------
ys : Symbol or list of Symbol
Symbols from which the gradient is calculated.
xs : Symbol or list of Symbol
Symbols the gradient respect to.
For group symbol, gradients for all outputs will be calculated.
grad_ys : Symbol or list of Symbol
Head gradients for ys.
Returns
-------
ret : list of Symbol
Generated gradient symbol. For each xs,
all gradients from ys are merged into a single symbol.
"""
grad_g = get_gradient_graph(ys, xs, grad_ys)
nx = len(Group(xs).list_output_names()) \
if isinstance(xs, list) else len(xs.list_output_names())
ret = [grad_g.symbol[i] for i in range(nx)]
return ret
| apache-2.0 | -5,987,048,062,232,325,000 | 29.676829 | 75 | 0.649771 | false | 3.970797 | false | false | false |
thispc/download-manager | module/plugins/container/TXT.py | 7 | 2140 | # -*- coding: utf-8 -*-
import codecs
from ..internal.Container import Container
from ..internal.misc import encode
class TXT(Container):
__name__ = "TXT"
__type__ = "container"
__version__ = "0.21"
__status__ = "testing"
__pattern__ = r'.+\.(txt|text)$'
__config__ = [("activated", "bool", "Activated", True),
("use_premium", "bool", "Use premium account if available", True),
("folder_per_package", "Default;Yes;No",
"Create folder for each package", "Default"),
("flush", "bool", "Flush list after adding", False),
("encoding", "str", "File encoding", "utf-8")]
__description__ = """Read link lists in plain text formats"""
__license__ = "GPLv3"
__authors__ = [("spoob", "[email protected]"),
("jeix", "[email protected]")]
def decrypt(self, pyfile):
try:
encoding = codecs.lookup(self.config.get('encoding')).name
except Exception:
encoding = "utf-8"
fs_filename = encode(pyfile.url)
txt = codecs.open(fs_filename, 'r', encoding)
curPack = "Parsed links from %s" % pyfile.name
packages = {curPack: [], }
for link in txt.readlines():
link = link.strip()
if not link:
continue
if link.startswith(";"):
continue
if link.startswith("[") and link.endswith("]"):
#: New package
curPack = link[1:-1]
packages[curPack] = []
continue
packages[curPack].append(link)
txt.close()
#: Empty packages fix
for key, value in packages.items():
if not value:
packages.pop(key, None)
if self.config.get('flush'):
try:
txt = open(fs_filename, 'wb')
txt.close()
except IOError:
self.log_warning(_("Failed to flush list"))
for name, links in packages.items():
self.packages.append((name, links, name))
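        # The accepted list format, as parsed above (illustrative):
        #   ; a comment line
        #   [My package]
        #   http://example.com/file1
        #   http://example.com/file2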
| gpl-3.0 | -7,854,115,424,311,552,000 | 28.315068 | 84 | 0.499065 | false | 4.17154 | false | false | false |
DarthMaulware/EquationGroupLeaks | Leak #5 - Lost In Translation/windows/Resources/Ops/PyScripts/lib/xops/ip.py | 1 | 6377 |
from __future__ import division
from math import floor
def get_cidr_from_subnet(subnet):
if (not validate_ipv4(subnet)):
raise ValueError, 'Subnet must be valid.'
subnetsplit = subnet.split('.')
cidr = 0
for oct in subnetsplit:
cidr = (cidr + list(bin(int(oct))).count('1'))
return cidr
def get_subnet_from_cidr(cidr):
if (not ((type(cidr) is int) or (type(cidr) is long))):
raise TypeError, 'Value must be an integer or a long.'
num = 0
for i in range(0, cidr):
num = (num | (2 ** (31 - i)))
return get_ip_from_int(num)
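# Worked examples (pure arithmetic, derivable from the functions above):
#   get_cidr_from_subnet('255.255.255.0') -> 24
#   get_subnet_from_cidr(24) -> '255.255.255.0'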
def get_ip_from_int(num):
if (not ((type(num) is int) or (type(num) is long))):
raise TypeError, 'Value must be an integer or a long.'
one = floor((num // (2 ** 24)))
two = floor(((num - (one * (2 ** 24))) // (2 ** 16)))
three = floor((((num - (one * (2 ** 24))) - (two * (2 ** 16))) // (2 ** 8)))
four = (((num - (one * (2 ** 24))) - (two * (2 ** 16))) - (three * (2 ** 8)))
if validate_ipv4(('%d.%d.%d.%d' % (one, two, three, four))):
return ('%d.%d.%d.%d' % (one, two, three, four))
else:
return False
def get_ip_from_hex_str(data):
if (not (isinstance(data, str) or isinstance(data, unicode))):
raise TypeError, 'Must supply a hex string.'
if (len(data) != 8):
raise ValueError, 'Hex string must be in 8 characters in length'
one = ((int(data[0], 16) * 16) + int(data[1], 16))
two = ((int(data[2], 16) * 16) + int(data[3], 16))
three = ((int(data[4], 16) * 16) + int(data[5], 16))
four = ((int(data[6], 16) * 16) + int(data[7], 16))
if validate_ipv4(('%s.%s.%s.%s' % (one, two, three, four))):
return ('%s.%s.%s.%s' % (one, two, three, four))
else:
return False
def get_int_from_ip(ip):
if (not validate_ipv4(ip)):
raise ValueError, 'IP must be valid.'
splitwork = ip.split('.')
if (len(splitwork) != 4):
return ip
return ((((int(splitwork[0]) * (2 ** 24)) + (int(splitwork[1]) * (2 ** 16))) + (int(splitwork[2]) * (2 ** 8))) + int(splitwork[3]))
def expand_ipv6(address):
if (not validate_ipv6(address)):
raise ValueError, 'Address must be a IPv6 notation.'
half = address.split('::')
if (len(half) == 2):
half[0] = half[0].split(':')
half[1] = half[1].split(':')
nodes = ((half[0] + (['0'] * (8 - (len(half[0]) + len(half[1]))))) + half[1])
else:
nodes = half[0].split(':')
return ':'.join((('%04x' % int((i or '0'), 16)) for i in nodes))
def get_broadcast_from_subnet(ip, subnet):
if (not ((type(subnet) is str) or (type(subnet) is unicode))):
raise TypeError, 'Subnet must be a string representation.'
if (not validate_ipv4_subnet(subnet)):
raise TypeError, 'Subnet must be a valid subnet mask.'
if (not ((type(ip) is str) or (type(ip) is unicode))):
raise TypeError, 'IP must be a string representation.'
if (not validate_ipv4(ip)):
raise TypeError, 'IP must be a valid IP address.'
network = get_network_from_subnet(ip, subnet)
net_split = network.split('.')
sub_split = subnet.split('.')
broadcast = []
for i in range(0, 4):
broadcast.append(str((int(net_split[i]) | (int(sub_split[i]) ^ 255))))
return '.'.join(broadcast)
def get_network_from_subnet(ip, subnet):
if (not ((type(subnet) is str) or (type(subnet) is unicode))):
raise TypeError, 'Subnet must be a string representation.'
if (not validate_ipv4_subnet(subnet)):
raise TypeError, 'Subnet must be a valid subnet mask.'
if (not ((type(ip) is str) or (type(ip) is unicode))):
raise TypeError, 'IP must be a string representation.'
if (not validate_ipv4(ip)):
raise TypeError, 'IP must be a valid IP address.'
ip_split = ip.split('.')
sub_split = subnet.split('.')
network = []
for i in range(0, 4):
network.append(str((int(ip_split[i]) & int(sub_split[i]))))
return '.'.join(network)
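# Worked example (assumption: illustrative host/mask pair):
#   get_network_from_subnet('192.168.1.57', '255.255.255.0')   -> '192.168.1.0'
#   get_broadcast_from_subnet('192.168.1.57', '255.255.255.0') -> '192.168.1.255'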
def validate_ipv4_subnet(subnet):
if (not ((type(subnet) is str) or (type(subnet) is unicode))):
raise TypeError, 'Subnet must be a string representation.'
if (not validate_ipv4(subnet)):
return False
found_zero = False
for item in subnet.split('.'):
if ((not found_zero) and (item == '255')):
continue
if (found_zero and (not (item == '0'))):
return False
digit = int(item)
for i in range(0, 8):
if ((digit & (2 ** (7 - i))) == 0):
found_zero = True
elif found_zero:
return False
return True
def validate_ipv4(ip):
if (not ((type(ip) is str) or (type(ip) is unicode))):
raise TypeError, 'IP must be a string representation.'
octets = ip.split('.')
if (len(octets) != 4):
return False
for octet in octets:
try:
i = int(octet)
except ValueError:
return False
if ((i < 0) or (i > 255)):
return False
else:
return True
def validate_ipv6(ip):
if (not ((type(ip) is str) or (type(ip) is unicode))):
raise TypeError, 'IP must be a string representation.'
nodes = ip.split('%')
if (len(nodes) not in [1, 2]):
return False
addr = nodes[0]
if (len(nodes) == 2):
try:
int(nodes[1])
except ValueError:
return False
if (addr.count('::') > 1):
return False
groups = addr.split(':')
if ((len(groups) > 8) or (len(groups) < 3)):
return False
for group in groups:
if (group == ''):
continue
try:
i = int(group, 16)
except ValueError:
return False
if ((i < 0) or (i > 65535)):
return False
else:
return True
def validate(ip):
if (not ((type(ip) is str) or (type(ip) is unicode))):
raise TypeError, 'IP must be a string representation.'
if (':' in ip):
return validate_ipv6(ip)
elif ('.' in ip):
return validate_ipv4(ip)
else:
return False
def validate_port(port):
if (not ((type(port) is int) or (type(port) is long))):
raise TypeError, 'Port must be an int or long representation.'
if ((port >= 0) and (port <= 65535)):
return True
return False | unlicense | -89,215,009,421,749,810 | 34.433333 | 135 | 0.545868 | false | 3.419303 | false | false | false |
andrewyoung1991/supriya | supriya/tools/ugentools/Pan4.py | 1 | 5290 | # -*- encoding: utf-8 -*-
from supriya.tools.ugentools.MultiOutUGen import MultiOutUGen
class Pan4(MultiOutUGen):
r'''A four-channel equal-power panner.
::
>>> source = ugentools.In.ar(bus=0)
>>> pan_4 = ugentools.Pan4.ar(
... gain=1,
... source=source,
... x_position=0,
... y_position=0,
... )
>>> pan_4
UGenArray({4})
'''
### CLASS VARIABLES ###
__documentation_section__ = 'Spatialization UGens'
__slots__ = ()
_ordered_input_names = (
'source',
'x_position',
'y_position',
'gain',
)
_valid_calculation_rates = None
### INITIALIZER ###
def __init__(
self,
calculation_rate=None,
gain=1,
source=None,
x_position=0,
y_position=0,
):
MultiOutUGen.__init__(
self,
calculation_rate=calculation_rate,
channel_count=4,
gain=gain,
source=source,
x_position=x_position,
y_position=y_position,
)
### PUBLIC METHODS ###
@classmethod
def ar(
cls,
gain=1,
source=None,
x_position=0,
y_position=0,
):
r'''Constructs an audio-rate Pan4.
::
>>> source = ugentools.In.ar(bus=0)
>>> pan_4 = ugentools.Pan4.ar(
... gain=1,
... source=source,
... x_position=0,
... y_position=0,
... )
>>> pan_4
UGenArray({4})
Returns ugen graph.
'''
from supriya.tools import synthdeftools
calculation_rate = synthdeftools.CalculationRate.AUDIO
ugen = cls._new_expanded(
calculation_rate=calculation_rate,
gain=gain,
source=source,
x_position=x_position,
y_position=y_position,
)
return ugen
@classmethod
def kr(
cls,
gain=1,
source=None,
x_position=0,
y_position=0,
):
r'''Constructs a control-rate Pan4.
::
>>> source = ugentools.In.ar(bus=0)
>>> pan_4 = ugentools.Pan4.kr(
... gain=1,
... source=source,
... x_position=0,
... y_position=0,
... )
>>> pan_4
UGenArray({4})
Returns ugen graph.
'''
from supriya.tools import synthdeftools
calculation_rate = synthdeftools.CalculationRate.CONTROL
ugen = cls._new_expanded(
calculation_rate=calculation_rate,
gain=gain,
source=source,
x_position=x_position,
y_position=y_position,
)
return ugen
### PUBLIC PROPERTIES ###
@property
def gain(self):
r'''Gets `gain` input of Pan4.
::
>>> source = ugentools.In.ar(bus=0)
>>> pan_4 = ugentools.Pan4.ar(
... gain=1,
... source=source,
... x_position=0,
... y_position=0,
... )
>>> pan_4[0].source.gain
1.0
Returns ugen input.
'''
index = self._ordered_input_names.index('gain')
return self._inputs[index]
@property
def source(self):
r'''Gets `source` input of Pan4.
::
>>> source = ugentools.In.ar(bus=0)
>>> pan_4 = ugentools.Pan4.ar(
... gain=1,
... source=source,
... x_position=0,
... y_position=0,
... )
>>> pan_4[0].source.source
OutputProxy(
source=In(
bus=0.0,
calculation_rate=CalculationRate.AUDIO,
channel_count=1
),
output_index=0
)
Returns ugen input.
'''
index = self._ordered_input_names.index('source')
return self._inputs[index]
@property
def x_position(self):
r'''Gets `x_position` input of Pan4.
::
>>> source = ugentools.In.ar(bus=0)
>>> pan_4 = ugentools.Pan4.ar(
... gain=1,
... source=source,
... x_position=0,
... y_position=0,
... )
>>> pan_4[0].source.x_position
0.0
Returns ugen input.
'''
index = self._ordered_input_names.index('x_position')
return self._inputs[index]
@property
def y_position(self):
r'''Gets `y_position` input of Pan4.
::
>>> source = ugentools.In.ar(bus=0)
>>> pan_4 = ugentools.Pan4.ar(
... gain=1,
... source=source,
... x_position=0,
... y_position=0,
... )
>>> pan_4[0].source.y_position
0.0
Returns ugen input.
'''
index = self._ordered_input_names.index('y_position')
return self._inputs[index] | mit | -7,805,452,371,901,044,000 | 23.05 | 64 | 0.433837 | false | 3.895434 | false | false | false |
eyqs/req | parsers/ubc/req.py | 1 | 5615 | #!/usr/bin/env python3
"""
req v3.1
Copyright (c) 2016, 2017, 2018 Eugene Y. Q. Shen.
req is free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation, either version
3 of the License, or (at your option) any later version.
req is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty
of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see http://www.gnu.org/licenses/.
"""
import os
import sys
from json import dumps
YEAR = '2017'
DUMP = 'req.json'
UBCPATH = 'data/ubc/'
INPATH = '/courses/'
if len(sys.argv) == 1:
COURSES = UBCPATH + YEAR + INPATH
else:
COURSES = sys.argv[1] + INPATH
# Generic class to store course data
class Course():
# Initialize all variables
def __init__(self, code):
self.code = code # course code, x. 'CPSC 121'
self.name = '' # name, x. 'Models of Computation'
self.desc = '' # course description in UBC Calendar
self.prer = '' # raw prereqs, x. 'Either (a) CPSC ...'
self.crer = '' # raw coreqs, x. 'All of CPSC 213 ...'
self.preq = [] # prereq tree, x. ['or', 'CPSC 221', ...]
self.creq = [] # coreq tree, x. ['and', 'CPSC 213', ...]
self.excl = ['or'] # exclusions, x. ['or', 'STAT 200', ...]
self.term = set() # terms offered, x. {'2017S', '2017W'}
self.cred = set() # possible credits, x. {3.0, 6.0}
# Set course parameters
def set_params(self, param, value):
if param == 'name':
self.name = value
elif param == 'desc':
self.desc = value
elif param == 'prer':
self.prer = value
elif param == 'crer':
self.crer = value
elif param == 'preq':
self.preq = get_reqs(value.split())
elif param == 'creq':
self.creq = get_reqs(value.split())
elif param == 'excl':
self.excl.extend(
[''.join(e.strip().split()) for e in value.split(',')])
elif param == 'term':
self.term.update({t.strip() for t in value[:-1].split(',')})
elif param == 'cred':
self.cred.update({float(c.strip()) for c in value.split(',')})
else:
print('Error: parameter not recognized.')
# Get course parameters
def get_params(self, param=''):
params = {'code': self.code, 'name': self.name, 'desc': self.desc,
'prer': self.prer, 'crer': self.crer,
'preq': self.preq, 'creq': self.creq, 'excl': self.excl,
'term': list(self.term), 'cred': list(self.cred)}
if param in params.keys():
return params[param]
else:
return params
# Turn requisites into list format; all entries must be true to satisfy
def get_reqs(value):
reqs = []
course = []
group = []
depth = 0
operator = 'and'
for term in value:
if depth < 0:
print('Error: mismatched parentheses.')
# Outside of parens, only terms are course names, and, or
if depth == 0:
if term.startswith('('):
depth = term.count('(')
group.append(term[1:])
elif term == 'and' or term == 'or':
operator = term
if course:
reqs.append(''.join(course))
course = []
else:
course.append(term)
# Call get_reqs again on anything inside parens
else:
if term.startswith('('):
depth += term.count('(')
elif term.endswith(')'):
depth -= term.count(')')
if depth == 0:
group.append(term[:-1])
reqs.append(get_reqs(group))
group = []
else:
group.append(term)
# Add final course after last operator
if course:
reqs.append(''.join(course))
reqs.insert(0, operator)
return reqs
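# Worked example (assumption: illustrative input, traced against get_reqs above):
#   get_reqs("(CPSC 110 or CPSC 121) and MATH 100".split())
#   -> ['and', ['or', 'CPSC110', 'CPSC121'], 'MATH100']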
if __name__ == '__main__':
# Parse all files in COURSES as Courses
courses = {}
for name in os.listdir(COURSES):
if name.endswith('.txt'):
with open(COURSES + '/' + name, encoding='utf8') as f:
for line in f:
split = line.split(':')
if len(split) > 1:
param = split[0].strip()
value = ':'.join(split[1:]).strip()
if param == 'code':
code = ''.join(value.split())
if code in courses.keys():
course = courses[code]
else:
course = Course(code)
courses[code] = course
else:
course.set_params(param, value)
# Dump courses into JSON file for JavaScript frontend
json = {}
for code, course in courses.items():
params = courses[code].get_params()
# Ignore courses with no name
if not params['name']:
continue
json[code] = params
with open(DUMP, 'w', encoding='utf8') as f:
f.write(dumps(json))
| gpl-3.0 | 5,820,187,232,646,883,000 | 34.537975 | 77 | 0.50472 | false | 4.045389 | false | false | false |
KivApple/mcu-info-util | mcu_info_util/toolchain.py | 1 | 1404 | import os
class Toolchain:
MCU_TYPES = {
"stm32": "arm",
"atsam": "arm",
"at91sam": "arm",
"atmega": "avr",
"attiny": "avr",
"msp430": "msp430"
}
@classmethod
def mcu_type(cls, mcu):
for prefix in cls.MCU_TYPES:
if mcu.startswith(prefix):
return cls.MCU_TYPES[prefix]
return "unknown"
@classmethod
def find_toolchain(cls, mcu):
type = cls.mcu_type(mcu)
if type == "arm":
from mcu_info_util.toolchain_arm import ToolchainARM
return ToolchainARM()
if type == "avr":
from mcu_info_util.toolchain_avr import ToolchainAVR
return ToolchainAVR()
if type == "msp430":
from mcu_info_util.toolchain_msp430 import ToolchainMSP430
return ToolchainMSP430()
return None
def find_compiler(self):
return ""
def find_prefix(self):
compiler = self.find_compiler()
if compiler.endswith("gcc"):
return compiler[0:-3]
elif compiler.endswith("gcc.exe"):
return compiler[0:-7]
return os.path.dirname(compiler) + os.sep
def get_flags(self, mcu):
return []
def generate_header(self, mcu, filename=None):
return False
def generate_linker_script(self, mcu, filename=None):
return False
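# Usage sketch (assumption: illustrative MCU names):
#   Toolchain.mcu_type('stm32f407')          -> 'arm'
#   Toolchain.find_toolchain('atmega328p')   -> a ToolchainAVR instance
#   Toolchain.find_toolchain('unknown_chip') -> None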
| mit | 6,474,986,655,971,244,000 | 25.490566 | 70 | 0.556268 | false | 3.685039 | false | false | false |
pwojt/beer_app_414 | beer_glass_api.py | 1 | 2753 | from auth import requires_auth
from db_helper import IdUrlField, update_model
from flask.ext.restful import Resource, fields, reqparse, marshal, abort
__author__ = 'wojtowpj'
from google.appengine.ext import db
class BeerGlass(db.Model):
name = db.StringProperty(required=True)
description = db.StringProperty(required=False)
capacity = db.FloatProperty(required=False)
glass_fields = {
'name': fields.String,
'description': fields.String,
'capacity': fields.Float,
'uri': IdUrlField('beer_glass', absolute=True),
}
class BeerGlassListApi(Resource):
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument('name', type=str, required=True, help="Beer Glass Name is required")
self.reqparse.add_argument('description', type=str)
self.reqparse.add_argument('capacity', type=float)
super(BeerGlassListApi, self).__init__()
@requires_auth
def get(self):
glass_list = []
for g in db.Query(BeerGlass):
glass_list.append(g)
return {'beer_glasses': map(lambda g: marshal(g, glass_fields), glass_list)}
@requires_auth
def post(self):
args = self.reqparse.parse_args()
g = BeerGlass.all(keys_only=True).filter('name', args['name']).get()
if g:
abort(409, message="Beer glass with name %s already exists" % args['name'])
g = BeerGlass(name=args['name'],
description=args.get('description'),
capacity=args.get('capacity'))
g.put()
return {'beer_glass': marshal(g, glass_fields)}
class BeerGlassApi(Resource):
def __init__(self):
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument('name', type=str)
self.reqparse.add_argument('description')
self.reqparse.add_argument('capacity')
super(BeerGlassApi, self).__init__()
@requires_auth
def get(self, id):
g = BeerGlass.get_by_id(id)
if not g:
abort(404)
return {'beer_glass': marshal(g, glass_fields)}
@requires_auth
def put(self, id):
args = self.reqparse.parse_args()
g = BeerGlass.get_by_id(id)
if not g:
abort(404)
u = dict(filter(lambda (k, v): v is not None, args.items()))
update_model(g, u)
g.put()
return {'beer_glass': marshal(g, glass_fields)}
@requires_auth
def delete(self, id):
g = BeerGlass.get_by_id(id)
if g:
g.delete()
return {"beer_glass": marshal(g, glass_fields), 'action': 'deleted'}
abort(404)
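# Usage sketch (assumption: the two resources are registered elsewhere, e.g. at
# /beer_glasses and /beer_glass/<int:id>; routes and payloads are illustrative):
#   POST /beer_glasses      {"name": "Pint", "capacity": 473.0}   -> creates a glass
#   GET  /beer_glass/<id>                                         -> returns it
#   PUT  /beer_glass/<id>   {"description": "Standard US pint"}   -> partial update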
| apache-2.0 | -7,111,607,220,390,472,000 | 29.284091 | 103 | 0.584453 | false | 3.51148 | false | false | false |
cmorgan/toyplot | toyplot/cairo/png.py | 1 | 3651 | # Copyright 2014, Sandia Corporation. Under the terms of Contract
# DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government retains certain
# rights in this software.
from __future__ import absolute_import
from __future__ import division
import cairo
import toyplot.cairo
import toyplot.svg
try:
import cStringIO as StringIO
except: # pragma: no cover
import StringIO
def render(canvas, fobj=None, width=None, height=None, scale=None):
"""Render the PNG bitmap representation of a canvas using Cairo.
By default, canvas dimensions in CSS pixels are mapped directly to pixels in
the output PNG image. Use one of `width`, `height`, or `scale` to override
this behavior.
Parameters
----------
canvas: :class:`toyplot.canvas.Canvas`
Canvas to be rendered.
fobj: file-like object or string, optional
The file to write. Use a string filepath to write data directly to disk.
If `None` (the default), the PNG data will be returned to the caller
instead.
width: number, optional
Specify the width of the output image in pixels.
height: number, optional
Specify the height of the output image in pixels.
scale: number, optional
Ratio of output image pixels to `canvas` pixels.
Returns
-------
png: PNG image data, or `None`
PNG representation of `canvas`, or `None` if the caller specifies the
`fobj` parameter.
"""
svg = toyplot.svg.render(canvas)
scale = canvas._pixel_scale(width=width, height=height, scale=scale)
surface = cairo.ImageSurface(
cairo.FORMAT_ARGB32, int(scale * canvas._width), int(scale * canvas._height))
context = cairo.Context(surface)
context.scale(scale, scale)
toyplot.cairo.render(svg, context)
if fobj is None:
buffer = StringIO.StringIO()
surface.write_to_png(buffer)
return buffer.getvalue()
else:
surface.write_to_png(fobj)
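# Usage sketch (assumption: `canvas` is a toyplot.canvas.Canvas built elsewhere):
#   png_data = render(canvas)              # PNG bytes returned to the caller
#   render(canvas, "figure.png", scale=2)  # or write a 2x-resolution file to disk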
def render_frames(canvas, width=None, height=None, scale=None):
"""Render a canvas as a sequence of PNG images using Cairo.
By default, canvas dimensions in CSS pixels are mapped directly to pixels in
the output PNG images. Use one of `width`, `height`, or `scale` to override
this behavior.
Parameters
----------
canvas: :class:`toyplot.canvas.Canvas`
Canvas to be rendered.
width: number, optional
Specify the width of the output image in pixels.
height: number, optional
Specify the height of the output image in pixels.
scale: number, optional
Ratio of output image pixels to `canvas` pixels.
Returns
-------
frames: Python generator expression that returns each PNG image in the sequence.
The caller must iterate over the returned frames and is responsible for all
subsequent processing, including disk I/O, video compression, etc.
Examples
--------
    >>> for frame, png in enumerate(toyplot.cairo.png.render_frames(canvas)):
    ...     open("frame-%s.png" % frame, "wb").write(png)
"""
svg, svg_animation = toyplot.svg.render(canvas, animation=True)
scale = canvas._pixel_scale(width=width, height=height, scale=scale)
for time, changes in sorted(svg_animation.items()):
toyplot.svg.apply_changes(svg, changes)
surface = cairo.ImageSurface(
cairo.FORMAT_ARGB32, int(scale * canvas._width), int(scale * canvas._height))
context = cairo.Context(surface)
context.scale(scale, scale)
toyplot.cairo.render(svg, context)
fobj = StringIO.StringIO()
surface.write_to_png(fobj)
yield fobj.getvalue()
| bsd-3-clause | -3,456,958,551,830,996,500 | 34.794118 | 89 | 0.675705 | false | 4.012088 | false | false | false |
ProfessorX/Config | .PyCharm30/system/python_stubs/-1247972723/ldb.py | 1 | 22146 | # encoding: utf-8
# module ldb
# from /usr/lib/python2.7/dist-packages/ldb.so
# by generator 1.135
""" An interface to LDB, a LDAP-like API that can either to talk an embedded database (TDB-based) or a standards-compliant LDAP server. """
# no imports
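# Usage sketch (assumption: illustrative; mirrors the documented API below):
#   import ldb
#   db = ldb.Ldb()
#   db.connect("tdb:///tmp/example.ldb")
#   for msg in db.search(base=db.get_default_basedn(), scope=ldb.SCOPE_SUBTREE):
#       print(msg.dn)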
# Variables with simple values
CHANGETYPE_ADD = 1
CHANGETYPE_DELETE = 2
CHANGETYPE_MODIFY = 3
CHANGETYPE_NONE = 0
ERR_ADMIN_LIMIT_EXCEEDED = 11
ERR_AFFECTS_MULTIPLE_DSAS = 71
ERR_ALIAS_DEREFERINCING_PROBLEM = 36
ERR_ALIAS_PROBLEM = 33
ERR_ATTRIBUTE_OR_VALUE_EXISTS = 20
ERR_AUTH_METHOD_NOT_SUPPORTED = 7
ERR_BUSY = 51
ERR_COMPARE_FALSE = 5
ERR_COMPARE_TRUE = 6
ERR_CONFIDENTIALITY_REQUIRED = 13
ERR_CONSTRAINT_VIOLATION = 19
ERR_ENTRY_ALREADY_EXISTS = 68
ERR_INAPPROPRIATE_AUTHENTICATION = 48
ERR_INAPPROPRIATE_MATCHING = 18
ERR_INSUFFICIENT_ACCESS_RIGHTS = 50
ERR_INVALID_ATTRIBUTE_SYNTAX = 21
ERR_INVALID_CREDENTIALS = 49
ERR_INVALID_DN_SYNTAX = 34
ERR_LOOP_DETECT = 54
ERR_NAMING_VIOLATION = 64
ERR_NOT_ALLOWED_ON_NON_LEAF = 66
ERR_NOT_ALLOWED_ON_RDN = 67
ERR_NO_SUCH_ATTRIBUTE = 16
ERR_NO_SUCH_OBJECT = 32
ERR_OBJECT_CLASS_MODS_PROHIBITED = 69
ERR_OBJECT_CLASS_VIOLATION = 65
ERR_OPERATIONS_ERROR = 1
ERR_OTHER = 80
ERR_PROTOCOL_ERROR = 2
ERR_REFERRAL = 10
ERR_SASL_BIND_IN_PROGRESS = 14
ERR_SIZE_LIMIT_EXCEEDED = 4
ERR_STRONG_AUTH_REQUIRED = 8
ERR_TIME_LIMIT_EXCEEDED = 3
ERR_UNAVAILABLE = 52
ERR_UNDEFINED_ATTRIBUTE_TYPE = 17
ERR_UNSUPPORTED_CRITICAL_EXTENSION = 12
ERR_UNWILLING_TO_PERFORM = 53
FLAG_MOD_ADD = 1
FLAG_MOD_DELETE = 3
FLAG_MOD_REPLACE = 2
FLG_NOMMAP = 8
FLG_NOSYNC = 2
FLG_RDONLY = 1
FLG_RECONNECT = 4
OID_COMPARATOR_AND = '1.2.840.113556.1.4.803'
OID_COMPARATOR_OR = '1.2.840.113556.1.4.804'
SCOPE_BASE = 0
SCOPE_DEFAULT = -1
SCOPE_ONELEVEL = 1
SCOPE_SUBTREE = 2
SEQ_HIGHEST_SEQ = 0
SEQ_HIGHEST_TIMESTAMP = 1
SEQ_NEXT = 2
SUCCESS = 0
SYNTAX_BOOLEAN = '1.3.6.1.4.1.1466.115.121.1.7'
SYNTAX_DIRECTORY_STRING = '1.3.6.1.4.1.1466.115.121.1.15'
SYNTAX_DN = '1.3.6.1.4.1.1466.115.121.1.12'
SYNTAX_INTEGER = '1.3.6.1.4.1.1466.115.121.1.27'
SYNTAX_OCTET_STRING = '1.3.6.1.4.1.1466.115.121.1.40'
SYNTAX_UTC_TIME = '1.3.6.1.4.1.1466.115.121.1.53'
__docformat__ = 'restructuredText'
__version__ = '1.1.17'
# functions
def binary_decode(string): # real signature unknown; restored from __doc__
"""
S.binary_decode(string) -> string
Perform a RFC2254 binary decode on a string
"""
return ""
def binary_encode(string): # real signature unknown; restored from __doc__
"""
S.binary_encode(string) -> string
Perform a RFC2254 binary encoding on a string
"""
return ""
def open(): # real signature unknown; restored from __doc__
"""
S.open() -> Ldb
Open a new LDB context.
"""
return Ldb
def register_module(module): # real signature unknown; restored from __doc__
"""
S.register_module(module) -> None
Register a LDB module.
"""
pass
def string_to_time(string): # real signature unknown; restored from __doc__
"""
S.string_to_time(string) -> int
Parse a LDAP time string into a UNIX timestamp.
"""
return 0
def timestring(p_int): # real signature unknown; restored from __doc__
"""
S.timestring(int) -> string
Generate a LDAP time string from a UNIX timestamp
"""
return ""
def valid_attr_name(name): # real signature unknown; restored from __doc__
"""
S.valid_attr_name(name) -> bool
    Check whether the supplied name is a valid attribute name.
"""
return False
# classes
from object import object
class Control(object):
""" LDB control. """
def __getattribute__(self, name): # real signature unknown; restored from __doc__
""" x.__getattribute__('name') <==> x.name """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
def __str__(self): # real signature unknown; restored from __doc__
""" x.__str__() <==> str(x) """
pass
critical = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
oid = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
from object import object
class Dn(object):
""" A LDB distinguished name. """
def add_base(self, dn): # real signature unknown; restored from __doc__
"""
S.add_base(dn) -> None
Add a base DN to this DN.
"""
pass
def add_child(self, dn): # real signature unknown; restored from __doc__
"""
S.add_child(dn) -> None
Add a child DN to this DN.
"""
pass
def canonical_ex_str(self): # real signature unknown; restored from __doc__
"""
S.canonical_ex_str() -> string
Canonical version of this DN (like a posix path, with terminating newline).
"""
return ""
def canonical_str(self): # real signature unknown; restored from __doc__
"""
S.canonical_str() -> string
Canonical version of this DN (like a posix path).
"""
return ""
def check_special(self, name): # real signature unknown; restored from __doc__
"""
S.check_special(name) -> bool
Check if name is a special DN name
"""
return False
def extended_str(self, mode=1): # real signature unknown; restored from __doc__
"""
S.extended_str(mode=1) -> string
Extended version of this DN
"""
return ""
def get_casefold(self, *args, **kwargs): # real signature unknown
pass
def get_component_name(self, num): # real signature unknown; restored from __doc__
"""
S.get_component_name(num) -> string
get the attribute name of the specified component
"""
return ""
def get_component_value(self, num): # real signature unknown; restored from __doc__
"""
S.get_component_value(num) -> string
get the attribute value of the specified component as a binary string
"""
return ""
def get_extended_component(self, name): # real signature unknown; restored from __doc__
"""
S.get_extended_component(name) -> string
returns a DN extended component as a binary string
"""
return ""
def get_linearized(self, *args, **kwargs): # real signature unknown
pass
def get_rdn_name(self): # real signature unknown; restored from __doc__
"""
S.get_rdn_name() -> string
get the RDN attribute name
"""
return ""
def get_rdn_value(self): # real signature unknown; restored from __doc__
"""
S.get_rdn_value() -> string
get the RDN attribute value as a binary string
"""
return ""
def is_child_of(self, basedn): # real signature unknown; restored from __doc__
"""
S.is_child_of(basedn) -> int
Returns True if this DN is a child of basedn
"""
return 0
def is_null(self, *args, **kwargs): # real signature unknown
""" Check whether this is a null DN. """
pass
def is_special(self): # real signature unknown; restored from __doc__
"""
S.is_special() -> bool
Check whether this is a special LDB DN.
"""
return False
def is_valid(self): # real signature unknown; restored from __doc__
""" S.is_valid() -> bool """
return False
def parent(self): # real signature unknown; restored from __doc__
"""
S.parent() -> dn
Get the parent for this DN.
"""
pass
def remove_base_components(self, p_int): # real signature unknown; restored from __doc__
"""
S.remove_base_components(int) -> bool
Remove a number of DN components from the base of this DN.
"""
return False
def set_component(self, *args, **kwargs): # real signature unknown
"""
S.get_component_value(num, name, value) -> None
set the attribute name and value of the specified component
"""
pass
def set_extended_component(self, name, value): # real signature unknown; restored from __doc__
"""
S.set_extended_component(name, value) -> None
set a DN extended component as a binary string
"""
pass
def validate(self): # real signature unknown; restored from __doc__
"""
S.validate() -> bool
Validate DN is correct.
"""
return False
def __add__(self, y): # real signature unknown; restored from __doc__
""" x.__add__(y) <==> x+y """
pass
def __cmp__(self, y): # real signature unknown; restored from __doc__
""" x.__cmp__(y) <==> cmp(x,y) """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __len__(self): # real signature unknown; restored from __doc__
""" x.__len__() <==> len(x) """
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
def __repr__(self): # real signature unknown; restored from __doc__
""" x.__repr__() <==> repr(x) """
pass
def __str__(self): # real signature unknown; restored from __doc__
""" x.__str__() <==> str(x) """
pass
from object import object
class Ldb(object):
""" Connection to a LDB database. """
def add(self, message, controls=None): # real signature unknown; restored from __doc__
"""
S.add(message, controls=None) -> None
Add an entry.
"""
pass
def connect(self, url, flags=0, options=None): # real signature unknown; restored from __doc__
"""
S.connect(url, flags=0, options=None) -> None
Connect to a LDB URL.
"""
pass
def delete(self, dn, controls=None): # real signature unknown; restored from __doc__
"""
S.delete(dn, controls=None) -> None
Remove an entry.
"""
pass
def get_config_basedn(self, *args, **kwargs): # real signature unknown
pass
def get_default_basedn(self, *args, **kwargs): # real signature unknown
pass
def get_opaque(self, name): # real signature unknown; restored from __doc__
"""
S.get_opaque(name) -> value
Get an opaque value set on this LDB connection.
:note: The returned value may not be useful in Python.
"""
pass
def get_root_basedn(self, *args, **kwargs): # real signature unknown
pass
def get_schema_basedn(self, *args, **kwargs): # real signature unknown
pass
def modify(self, message, controls=None, validate=False): # real signature unknown; restored from __doc__
"""
S.modify(message, controls=None, validate=False) -> None
Modify an entry.
"""
pass
def modules(self): # real signature unknown; restored from __doc__
"""
S.modules() -> list
Return the list of modules on this LDB connection
"""
return []
def msg_diff(self, Message): # real signature unknown; restored from __doc__
"""
S.msg_diff(Message) -> Message
Return an LDB Message of the difference between two Message objects.
"""
return Message
def parse_ldif(self, ldif): # real signature unknown; restored from __doc__
"""
S.parse_ldif(ldif) -> iter(messages)
Parse a string formatted using LDIF.
"""
pass
def rename(self, old_dn, new_dn, controls=None): # real signature unknown; restored from __doc__
"""
S.rename(old_dn, new_dn, controls=None) -> None
Rename an entry.
"""
pass
def schema_attribute_add(self, *args, **kwargs): # real signature unknown
pass
def schema_attribute_remove(self, *args, **kwargs): # real signature unknown
pass
def schema_format_value(self, *args, **kwargs): # real signature unknown
pass
def search(self, base=None, scope=None, expression=None, attrs=None, controls=None): # real signature unknown; restored from __doc__
"""
S.search(base=None, scope=None, expression=None, attrs=None, controls=None) -> msgs
Search in a database.
:param base: Optional base DN to search
:param scope: Search scope (SCOPE_BASE, SCOPE_ONELEVEL or SCOPE_SUBTREE)
:param expression: Optional search expression
:param attrs: Attributes to return (defaults to all)
:param controls: Optional list of controls
:return: Iterator over Message objects
"""
pass
def sequence_number(self, type): # real signature unknown; restored from __doc__
"""
S.sequence_number(type) -> value
Return the value of the sequence according to the requested type
"""
pass
def setup_wellknown_attributes(self, *args, **kwargs): # real signature unknown
pass
def set_create_perms(self, mode): # real signature unknown; restored from __doc__
"""
S.set_create_perms(mode) -> None
Set mode to use when creating new LDB files.
"""
pass
def set_debug(self, callback): # real signature unknown; restored from __doc__
"""
S.set_debug(callback) -> None
Set callback for LDB debug messages.
The callback should accept a debug level and debug text.
"""
pass
def set_modules_dir(self, path): # real signature unknown; restored from __doc__
"""
S.set_modules_dir(path) -> None
Set path LDB should search for modules
"""
pass
def set_opaque(self, name, value): # real signature unknown; restored from __doc__
"""
S.set_opaque(name, value) -> None
Set an opaque value on this LDB connection.
:note: Passing incorrect values may cause crashes.
"""
pass
def transaction_cancel(self): # real signature unknown; restored from __doc__
"""
S.transaction_cancel() -> None
cancel a new transaction.
"""
pass
def transaction_commit(self): # real signature unknown; restored from __doc__
"""
S.transaction_commit() -> None
commit a new transaction.
"""
pass
def transaction_prepare_commit(self): # real signature unknown; restored from __doc__
"""
S.transaction_prepare_commit() -> None
prepare to commit a new transaction (2-stage commit).
"""
pass
def transaction_start(self): # real signature unknown; restored from __doc__
"""
S.transaction_start() -> None
Start a new transaction.
"""
pass
def write_ldif(self, message, changetype): # real signature unknown; restored from __doc__
"""
S.write_ldif(message, changetype) -> ldif
Print the message as a string formatted using LDIF.
"""
pass
def __contains__(self, y): # real signature unknown; restored from __doc__
""" x.__contains__(y) <==> y in x """
pass
def __getattribute__(self, name): # real signature unknown; restored from __doc__
""" x.__getattribute__('name') <==> x.name """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
def __repr__(self): # real signature unknown; restored from __doc__
""" x.__repr__() <==> repr(x) """
pass
firstmodule = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
from Exception import Exception
class LdbError(Exception):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
from object import object
class Message(object):
""" A LDB Message """
def add(self, *args, **kwargs): # real signature unknown
"""
S.append(element)
Add an element to this message.
"""
pass
def elements(self, *args, **kwargs): # real signature unknown
pass
@classmethod
def from_dict(cls, ldb, dict, mod_flag=None): # real signature unknown; restored from __doc__
"""
Message.from_dict(ldb, dict, mod_flag=FLAG_MOD_REPLACE) -> ldb.Message
Class method to create ldb.Message object from Dictionary.
mod_flag is one of FLAG_MOD_ADD, FLAG_MOD_REPLACE or FLAG_MOD_DELETE.
"""
pass
def get(self, name, default=None, idx=None): # real signature unknown; restored from __doc__
"""
msg.get(name,default=None,idx=None) -> string
idx is the index into the values array
if idx is None, then a list is returned
if idx is not None, then the element with that index is returned
if you pass the special name 'dn' then the DN object is returned
"""
return ""
def items(self, *args, **kwargs): # real signature unknown
pass
def keys(self): # real signature unknown; restored from __doc__
"""
S.keys() -> list
Return sequence of all attribute names.
"""
return []
def remove(self, name): # real signature unknown; restored from __doc__
"""
S.remove(name)
Remove all entries for attributes with the specified name.
"""
pass
def __cmp__(self, y): # real signature unknown; restored from __doc__
""" x.__cmp__(y) <==> cmp(x,y) """
pass
def __delitem__(self, y): # real signature unknown; restored from __doc__
""" x.__delitem__(y) <==> del x[y] """
pass
def __getitem__(self, y): # real signature unknown; restored from __doc__
""" x.__getitem__(y) <==> x[y] """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __iter__(self): # real signature unknown; restored from __doc__
""" x.__iter__() <==> iter(x) """
pass
def __len__(self): # real signature unknown; restored from __doc__
""" x.__len__() <==> len(x) """
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
def __repr__(self): # real signature unknown; restored from __doc__
""" x.__repr__() <==> repr(x) """
pass
def __setitem__(self, i, y): # real signature unknown; restored from __doc__
""" x.__setitem__(i, y) <==> x[i]=y """
pass
dn = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
from object import object
class MessageElement(object):
""" An element of a Message """
def flags(self, *args, **kwargs): # real signature unknown
pass
def get(self, *args, **kwargs): # real signature unknown
pass
def set_flags(self, *args, **kwargs): # real signature unknown
pass
def __cmp__(self, y): # real signature unknown; restored from __doc__
""" x.__cmp__(y) <==> cmp(x,y) """
pass
def __getitem__(self, y): # real signature unknown; restored from __doc__
""" x.__getitem__(y) <==> x[y] """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __iter__(self): # real signature unknown; restored from __doc__
""" x.__iter__() <==> iter(x) """
pass
def __len__(self): # real signature unknown; restored from __doc__
""" x.__len__() <==> len(x) """
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
def __repr__(self): # real signature unknown; restored from __doc__
""" x.__repr__() <==> repr(x) """
pass
def __str__(self): # real signature unknown; restored from __doc__
""" x.__str__() <==> str(x) """
pass
from object import object
class Module(object):
""" LDB module (extension) """
def add(self, *args, **kwargs): # real signature unknown
pass
def delete(self, *args, **kwargs): # real signature unknown
pass
def del_transaction(self, *args, **kwargs): # real signature unknown
pass
def end_transaction(self, *args, **kwargs): # real signature unknown
pass
def modify(self, *args, **kwargs): # real signature unknown
pass
def rename(self, *args, **kwargs): # real signature unknown
pass
def search(self, *args, **kwargs): # real signature unknown
pass
def start_transaction(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __repr__(self): # real signature unknown; restored from __doc__
""" x.__repr__() <==> repr(x) """
pass
def __str__(self): # real signature unknown; restored from __doc__
""" x.__str__() <==> str(x) """
pass
from object import object
class Tree(object):
""" A search tree """
def __init__(self, *args, **kwargs): # real signature unknown
pass
| gpl-2.0 | -6,579,479,681,932,068,000 | 27.068441 | 139 | 0.579563 | false | 3.874388 | false | false | false |
smcgregor/gravity | domain_bridge.py | 1 | 17651 | from FireGirlOptimizer import *
from FireGirlStats import *
# Keep track of file numbers so they don't repeat
server_file_counter = 0
def file_number_str():
global server_file_counter
server_file_counter += 1
return server_file_counter
def initialize():
"""
Return the initialization object for the FireGirl domain.
"""
return {
"reward": [
{"name": "Discount",
"description":"The per-year discount",
"current_value": 1, "max": 1, "min": 0, "units": "~"},
{"name": "Suppression Fixed Cost",
"description":"cost per day of suppression",
"current_value": 500, "max": 999999, "min": 0, "units": "$"},
{"name": "Suppression Variable Cost",
"description":"cost per hectare of suppression",
"current_value": 500, "max": 999999, "min": 0, "units": "$"}
],
"transition": [
{"name": "Years to simulate",
"description": "how far to look into the future",
"current_value": 10, "max": 150, "min": 0, "units": "Y"},
{"name": "Futures to simulate",
"description": "how many stochastic futures to generate",
"current_value": 25, "max": 1000, "min": 0, "units": "#"},
{"name": "Landscape Size",
"description": "how many cells wide and tall should the landscape be. Min:9, Max:129",
"current_value": 21, "max": 129, "min": 9, "units": "#"},
{"name": "Harvest Percent",
"description": "timber harvest rate as a percent of annual increment",
"current_value": 0.95, "max": 1, "min": 0, "units": "%"},
{"name": "Minimum Timber Value",
"description":"the minimum timber value required before harvest is allowed",
"current_value": 50, "max":9999, "min": 0, "units": "$"},
{"name": "Slash Remaning",
"description": "the amount of fuel load (slash) left after a harvest",
"current_value": 10, "max":9999, "min": 0, "units": "#"},
{"name": "Fuel Accumulation",
"description": "the amount of fuel load that accumulates each year",
"current_value": 2, "max":9999, "min": 0, "units": "#"},
{"name": "Suppression Effect",
"description": "the reduction in fire spread rate as the result of suppression",
"current_value": 0.5, "max":1, "min": 0, "units": "%"},
{"name": "Use Original Bugs",
"description": "set to 0 to use original bugs. 1 (or non-zero) to use the patches.",
"current_value": 0, "max":1, "min": 0, "units": "~"},
{"name": "Growth Model",
"description": "set to 1 to use original model; or 2 for updated model.",
"current_value": 1, "max":2, "min": 1, "units": "~"}
],
"policy": [
{"name": "Constant",
"description":"for the intercept",
"current_value": 0, "max": 10, "min":-10, "units": ""},
{"name": "Date",
"description":"for each day of the year",
"current_value": 0, "max": 10, "min":-10, "units": ""},
{"name": "Days Left",
"description":"for each day left in the year",
"current_value": 0, "max": 10, "min":-10, "units": ""},
{"name":"Temperature",
"description":"for air temperature at the time of an ignition",
"current_value": 0, "max": 10, "min":-10, "units": ""},
{"name": "Wind Speed",
"description":"for wind speed at the time of an ignition",
"current_value": 0, "max": 10, "min":-10, "units": ""},
{"name": "Timber Value",
"description":"for the timber value at an ignition location",
"current_value": 0, "max": 10, "min":-10, "units": ""},
{"name": "Timber Value 8",
"description":"for the average timber value in the 8 neighboring stands",
"current_value": 0, "max": 10, "min":-10, "units": ""},
{"name": "Timber Value 24",
"description":"for the average timber value in the 24 neighboring stands",
"current_value": 0, "max": 10, "min":-10, "units": ""},
{"name": "Fuel Load",
"description":"for the fuel load at an ignition location",
"current_value": 0, "max": 10, "min":-10, "units": ""},
{"name": "Fuel Load 8",
"description":"for the average fuel load in the 8 neighboring stands",
"current_value": 0, "max": 10, "min":-10, "units": ""},
{"name": "Fuel Load 24",
"description":"for the average fuel load in the 24 neighboring stands",
"current_value": 0, "max": 10, "min":-10, "units": ""}
]
}
def optimize(query):
"""
Return a newly optimized query.
"""
dict_reward = query["reward"]
dict_transition = query["transition"]
dict_policy = query["policy"]
#some variables
#pathway_count = 5 #how many pathways to use in the optimization
#years = 5 #how many years to simulate for each pathway
pathway_count = dict_transition["Futures to simulate"]
years = dict_transition["Years to simulate"]
#creating optimization objects
opt = FireGirlPolicyOptimizer()
#giving the simulation parameters to opt, so that it can pass
# them on to it's pathways as it creates them
opt.setFireGirlModelParameters(dict_transition, dict_reward)
#setting policy as well
#TODO make this robust to FireWoman policies
pol = FireGirlPolicy()
pol.setParams([dict_policy["Constant"],
dict_policy["Date"],
dict_policy["Days Left"],
dict_policy["Temperature"],
dict_policy["Wind Speed"],
dict_policy["Timber Value"],
dict_policy["Timber Value 8"],
dict_policy["Timber Value 24"],
dict_policy["Fuel Load"],
dict_policy["Fuel Load 8"],
dict_policy["Fuel Load 24"],
])
#assigning the policy to opt, so that it can use it in simulations.
opt.setPolicy(pol)
#creating pathways
opt.createFireGirlPathways(int(pathway_count),int(years))
#set desired objective function
if "Objective Function" in dict_transition.keys():
opt.setObjFn(dict_transition["Objective Function"])
#doing one round of optimization
opt.optimizePolicy()
#pulling the policy variables back out
learned_params = opt.Policy.getParams()
#TODO make this robust to FireWoman policies
dict_new_pol = {}
dict_new_pol["Constant"] = learned_params[0]
dict_new_pol["Date"] = learned_params[1]
dict_new_pol["Days Left"] = learned_params[2]
dict_new_pol["Temperature"] = learned_params[3]
dict_new_pol["Wind Speed"] = learned_params[4]
dict_new_pol["Timber Value"] = learned_params[5]
dict_new_pol["Timber Value 8"] = learned_params[6]
dict_new_pol["Timber Value 24"] = learned_params[7]
dict_new_pol["Fuel Load"] = learned_params[8]
dict_new_pol["Fuel Load 8"] = learned_params[9]
dict_new_pol["Fuel Load 24"] = learned_params[10]
return dict_new_pol
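# Usage sketch (assumption: the client flattens each parameter list returned by
# initialize() into a {name: current_value} map before calling optimize):
#   query = {"reward": {...}, "transition": {...}, "policy": {...}}
#   new_policy = optimize(query)   # -> {"Constant": ..., "Date": ..., ...}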
def rollouts(query):
"""
Return a set of rollouts for the given parameters.
"""
dict_reward = query["reward"]
dict_transition = query["transition"]
dict_policy = query["policy"]
pathway_count = int(dict_transition["Futures to simulate"])
years = int(dict_transition["Years to simulate"])
start_ID = 0
#generate 100 rollouts
opt = FireGirlPolicyOptimizer()
opt.setObjFn("J1")
#opt.setObjFn("J2")
opt.SILENT = True
#setting policy...
#This is brittle, and will not work directly with FireWoman data... or with future versions
# of FireGirl if new features get added...
pol = FireGirlPolicy()
pol.setParams([dict_policy["Constant"],
dict_policy["Date"],
dict_policy["Days Left"],
dict_policy["Temperature"],
dict_policy["Wind Speed"],
dict_policy["Timber Value"],
dict_policy["Timber Value 8"],
dict_policy["Timber Value 24"],
dict_policy["Fuel Load"],
dict_policy["Fuel Load 8"],
dict_policy["Fuel Load 24"],
])
#setting the policy in the optimizer, which will pass it to each created pathway
opt.setPolicy(pol)
#giving the optimizer custom model parameters
opt.setFireGirlModelParameters(dict_transition,dict_reward)
#creating landscapes. The function will enforce the custom model parameters
opt.createFireGirlPathways(pathway_count,years,start_ID)
#outermost list to collect one sub-list for each pathway, etc...
return_list = []
#parse the data needed...
for pw in opt.pathway_set:
#new ignition events list for this pathway
year_values = []
for ign in pw.ignition_events:
#get the dictionary representation of the ignition
features = ign.getDictionary()
#fill the total's dictionary
features["Harvest Value"] = pw.getHarvest(ign.year)
#features["Suppression Cost"] = pw.getSuppressionCost(ign.year) #already reported in ign.getDictionary()
features["Growth"] = pw.getGrowth(ign.year)
#TODO - Fix for Discount Rate
features["Discounted Reward"] = features["Harvest Value"] - features["Suppression Cost"]
features["Event Number"] = ign.year
#NOTE: This will be the same number for all ignitions in this pathway. It's the
# id number that a pathway uses to instantiate its random seed
features["Pathway Number"] = pw.ID_number
#adding cumulative measurements, from the start, up to this year
features["Cumulative Harvest Value"] = pw.getHarvestFrom(0, ign.year)
features["Cumulative Growth"] = pw.getGrowthFrom(0, ign.year)
features["Cumulative Timber Loss"] = pw.getTimberLossFrom(0, ign.year)
features["Cumulative Suppression Cost"] = pw.getSuppressionFrom(0, ign.year)
#add this ignition event + year details to this pathway's list of dictionaries
year_values.append(features)
#the events list for this pathway has been filled, so add it to the return list
return_list.append(year_values)
#done with all pathways
return return_list
def state(query):
"""
Return a series of images up to the requested event number.
"""
event_number = int(query["Event Number"])
pathway_number = int(query["Pathway Number"])
dict_reward = query["reward"]
dict_transition = query["transition"]
dict_policy = query["policy"]
show_count = 50
step = 1
if "Past Events to Show" in query.keys():
show_count = 1 + int(query["Past Events to Show"])
if "Past Events to Step Over" in query.keys():
step = 1 + int(query["Past Events to Step Over"])
#sanitizing
if step < 1: step = 1
if show_count < 1: show_count = 1
#creating optimization objects
opt = FireGirlPolicyOptimizer()
#giving the simulation parameters to opt, so that it can pass
# them on to it's pathways as it creates them
opt.setFireGirlModelParameters(dict_transition, dict_reward)
#setting policy as well
#TODO make this robust to FireWoman policies
pol = FireGirlPolicy()
pol.setParams([dict_policy["Constant"],
dict_policy["Date"],
dict_policy["Days Left"],
dict_policy["Temperature"],
dict_policy["Wind Speed"],
dict_policy["Timber Value"],
dict_policy["Timber Value 8"],
dict_policy["Timber Value 24"],
dict_policy["Fuel Load"],
dict_policy["Fuel Load 8"],
dict_policy["Fuel Load 24"],
])
#assigning the policy to opt, so that it can use it in simulations.
opt.setPolicy(pol)
#Setting opt to tell it's pathway(s) to remember their histories
#un-needed, since we're just re-creating the pathway of interest anyway
#opt.PATHWAYS_RECORD_HISTORIES = True
opt.SILENT = True
#creating image name list
names = [[],[],[],[]]
#creating pathway with no years... this will generate the underlying landscape and set
# all the model parameters that were assigned earlier.
opt.createFireGirlPathways(1, 0, pathway_number)
#now incrementing the years
#because we start with the final year, and then skip backward showing every few landscapes,
#we may have to skip over several of the first landscapes before we start showing any
start = event_number - (step * (show_count -1))
#checking for negative numbers, in case the users has specified too many past landscapes to show
while start < 0:
start += step
#manually telling the pathway to do the first set of years
opt.pathway_set[0].doYears(start)
#get new names
timber_name = "static/timber_" + str(file_number_str()) + ".png"
fuel_name = "static/fuel_" + str(file_number_str()) + ".png"
composite_name = "static/composite_" + str(file_number_str()) + ".png"
burn_name = "static/burn_" + str(file_number_str()) + ".png"
#and save it's images
opt.pathway_set[0].saveImage(timber_name, "timber")
opt.pathway_set[0].saveImage(fuel_name, "fuel")
opt.pathway_set[0].saveImage(composite_name, "composite")
opt.pathway_set[0].saveImage(burn_name, "timber", 10)
#add these names to the lists
names[0].append(timber_name)
names[1].append(fuel_name)
names[2].append(composite_name)
names[3].append(burn_name)
#now loop through the rest of the states
for i in range(start, event_number+1, step):
#do the next set of years
opt.pathway_set[0].doYears(step)
#create a new image filenames
timber_name = "static/timber_" + str(file_number_str()) + ".png"
fuel_name = "static/fuel_" + str(file_number_str()) + ".png"
composite_name = "static/composite_" + str(file_number_str()) + ".png"
burn_name = "static/burn_" + str(file_number_str()) + ".png"
#save the images
opt.pathway_set[0].saveImage(timber_name, "timber")
opt.pathway_set[0].saveImage(fuel_name, "fuel")
opt.pathway_set[0].saveImage(composite_name, "composite")
opt.pathway_set[0].saveImage(burn_name, "timber", 10)
#add these names to the lists
names[0].append(timber_name)
names[1].append(fuel_name)
names[2].append(composite_name)
names[3].append(burn_name)
timber_stats = pathway_summary(opt.pathway_set[0],"timber")
fuel_stats = pathway_summary(opt.pathway_set[0],"fuel")
total_growth = opt.pathway_set[0].getGrowthTotal()
total_suppression = opt.pathway_set[0].getSuppressionTotal()
total_harvest = opt.pathway_set[0].getHarvestTotal()
total_timber_loss = opt.pathway_set[0].getTimberLossTotal()
returnObj = {
"statistics": {
"Event Number": int(query["Event Number"]),
"Pathway Number": int(query["Pathway Number"]),
"Average Timber Value": int(timber_stats[0]),
"Timber Value Std.Dev.": int(timber_stats[1]),
"Average Timber Value - Center": int(timber_stats[2]),
"Timber Value Std.Dev. - Center": int(timber_stats[3]),
"Average Fuel Load": int(fuel_stats[0]),
"Fuel Load Std.Dev.": int(fuel_stats[1]),
"Average Fuel Load - Center": int(fuel_stats[2]),
"Fuel Load Std.Dev. - Center": int(fuel_stats[3]),
"Cumulative Harvest":total_harvest,
"Cumulative Suppression Cost": total_suppression,
"Cumulative Timber Loss":total_timber_loss,
"Cumulative Timber Growth":total_growth,
},
"images": names
}
return returnObj
| mpl-2.0 | -2,363,515,958,502,612,000 | 43.349246 | 116 | 0.543935 | false | 4.045611 | false | false | false |
bitcraze/crazyflie-firmware | tools/usdlog/example.py | 1 | 3994 | # -*- coding: utf-8 -*-
"""
example on how to plot decoded sensor data from crazyflie
@author: jsschell
"""
import cfusdlog
import matplotlib.pyplot as plt
import re
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("filename")
args = parser.parse_args()
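# Run sketch (assumption: illustrative invocation):
#   python3 example.py <decoded-usd-log-file>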
# decode binary log data
logData = cfusdlog.decode(args.filename)
#only focus on regular logging
logData = logData['fixedFrequency']
# set window background to white
plt.rcParams['figure.facecolor'] = 'w'
# number of columns and rows for suplot
plotCols = 1
plotRows = 1
# let's see which keys exists in current data set
keys = ""
for k, v in logData.items():
keys += k
# get plot config from user
plotGyro = 0
if re.search('gyro', keys):
inStr = input("plot gyro data? ([Y]es / [n]o): ")
if ((re.search('^[Yy]', inStr)) or (inStr == '')):
plotGyro = 1
plotRows += 1
plotAccel = 0
if re.search('acc', keys):
inStr = input("plot accel data? ([Y]es / [n]o): ")
if ((re.search('^[Yy]', inStr)) or (inStr == '')):
plotAccel = 1
plotRows += 1
plotBaro = 0
if re.search('baro', keys):
inStr = input("plot barometer data? ([Y]es / [n]o): ")
if ((re.search('^[Yy]', inStr)) or (inStr == '')):
plotBaro = 1
plotRows += 1
plotCtrl = 0
if re.search('ctrltarget', keys):
inStr = input("plot control data? ([Y]es / [n]o): ")
if ((re.search('^[Yy]', inStr)) or (inStr == '')):
plotCtrl = 1
plotRows += 1
plotStab = 0
if re.search('stabilizer', keys):
inStr = input("plot stabilizer data? ([Y]es / [n]o): ")
if ((re.search('^[Yy]', inStr)) or (inStr == '')):
plotStab = 1
plotRows += 1
# current plot for simple subplot usage
plotCurrent = 0
# new figure
plt.figure(0)
if plotGyro:
plotCurrent += 1
plt.subplot(plotRows, plotCols, plotCurrent)
plt.plot(logData['timestamp'], logData['gyro.x'], '-', label='X')
plt.plot(logData['timestamp'], logData['gyro.y'], '-', label='Y')
plt.plot(logData['timestamp'], logData['gyro.z'], '-', label='Z')
plt.xlabel('timestamp [ms]')
plt.ylabel('Gyroscope [°/s]')
plt.legend(loc=9, ncol=3, borderaxespad=0.)
if plotAccel:
plotCurrent += 1
plt.subplot(plotRows, plotCols, plotCurrent)
plt.plot(logData['timestamp'], logData['acc.x'], '-', label='X')
plt.plot(logData['timestamp'], logData['acc.y'], '-', label='Y')
plt.plot(logData['timestamp'], logData['acc.z'], '-', label='Z')
plt.xlabel('timestamp [ms]')
plt.ylabel('Accelerometer [g]')
plt.legend(loc=9, ncol=3, borderaxespad=0.)
if plotBaro:
plotCurrent += 1
plt.subplot(plotRows, plotCols, plotCurrent)
plt.plot(logData['timestamp'], logData['baro.pressure'], '-')
plt.xlabel('timestamp [ms]')
plt.ylabel('Pressure [hPa]')
plotCurrent += 1
plt.subplot(plotRows, plotCols, plotCurrent)
plt.plot(logData['timestamp'], logData['baro.temp'], '-')
plt.xlabel('timestamp [ms]')
plt.ylabel('Temperature [degC]')
if plotCtrl:
plotCurrent += 1
plt.subplot(plotRows, plotCols, plotCurrent)
plt.plot(logData['timestamp'], logData['ctrltarget.roll'], '-', label='roll')
plt.plot(logData['timestamp'], logData['ctrltarget.pitch'], '-', label='pitch')
plt.plot(logData['timestamp'], logData['ctrltarget.yaw'], '-', label='yaw')
plt.xlabel('timestamp [ms]')
plt.ylabel('Control')
plt.legend(loc=9, ncol=3, borderaxespad=0.)
if plotStab:
plotCurrent += 1
plt.subplot(plotRows, plotCols, plotCurrent)
plt.plot(logData['timestamp'], logData['stabilizer.roll'], '-', label='roll')
plt.plot(logData['timestamp'], logData['stabilizer.pitch'], '-', label='pitch')
plt.plot(logData['timestamp'], logData['stabilizer.yaw'], '-', label='yaw')
plt.plot(logData['timestamp'], logData['stabilizer.thrust'], '-', label='thrust')
plt.xlabel('timestamp [ms]')
plt.ylabel('Stabilizer')
plt.legend(loc=9, ncol=4, borderaxespad=0.)
plt.show()
| gpl-3.0 | -5,275,016,593,829,280,000 | 29.953488 | 85 | 0.623842 | false | 3.09775 | false | false | false |
y-usuzumi/survive-the-course | leetcode/23.Merge_k_Sorted_Lists/main.py | 1 | 1817 | from typing import List
# Definition for singly-linked list.
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
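# Strategy note: the lists are merged pairwise, divide-and-conquer style, so each
# node is touched O(log k) times -> O(N log k) total for N nodes across k lists.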
class Solution:
def mergeKLists(self, lists: List[ListNode]) -> ListNode:
l = len(lists)
if l == 0:
return None
if l == 1:
return lists[0]
if l == 2:
l1, l2 = lists[0], lists[1]
start = curr = ListNode(0)
while l1 != None and l2 != None:
if l1.val <= l2.val:
curr.next = l1
l1 = l1.next
else:
curr.next = l2
l2 = l2.next
curr = curr.next
if l1 != None:
curr.next = l1
else:
curr.next = l2
return start.next
split_idx = l // 2
return self.mergeKLists(
[
self.mergeKLists(lists[:split_idx]),
self.mergeKLists(lists[split_idx:])
]
)
def make_link_list(*vals):
if not vals:
return None
start = curr = ListNode(vals[0])
for idx in range(1, len(vals)):
curr.next = ListNode(vals[idx])
curr = curr.next
return start
def display_link_list(node):
from io import StringIO
vals = []
while node is not None:
vals.append(node.val)
node = node.next
return " -> ".join([str(val) for val in vals])
if __name__ == '__main__':
sol = Solution()
l1 = make_link_list(1, 4, 5)
l2 = make_link_list(1, 3, 4)
l3 = make_link_list(2, 6)
print(display_link_list(l1))
print(display_link_list(l2))
print(display_link_list(l3))
merged_list = sol.mergeKLists([l1, l2, l3])
print(display_link_list(merged_list))
| bsd-3-clause | 3,070,693,238,430,207,000 | 25.333333 | 61 | 0.489818 | false | 3.507722 | false | false | false |