Dataset schema: repo_name (string, lengths 6 to 61), path (string, lengths 4 to 230), copies (string, lengths 1 to 3), size (string, lengths 4 to 6), text (string, lengths 1.01k to 850k), license (15 classes), hash (int64, -9,220,477,234,079,998,000 to 9,219,060,020B), line_mean (float64, 11.6 to 96.6), line_max (int64, 32 to 939), alpha_frac (float64, 0.26 to 0.9), autogenerated (bool, 1 class), ratio (float64, 1.62 to 6.1), config_test (bool, 2 classes), has_no_keywords (bool, 2 classes), few_assignments (bool, 1 class).

repo_name | path | copies | size | text | license | hash | line_mean | line_max | alpha_frac | autogenerated | ratio | config_test | has_no_keywords | few_assignments
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
datastore/datastore.pylru | datastore/pylru/__init__.py | 1 | 1507 |
__version__ = '0.1.1'
__author__ = 'Juan Batiz-Benet'
__email__ = '[email protected]'
__doc__ = '''
pylru datastore implementation.
Tested with:
* datastore 0.3.0
* pylru 1.0.5
'''
import pylru
import datastore.core
class LRUCacheDatastore(datastore.Datastore):
'''Represents an LRU cache datastore, backed by pylru.
Hello World:
>>> import datastore.pylru
>>>
>>> ds = datastore.pylru.LRUCacheDatastore(100)
>>>
>>> hello = datastore.Key('hello')
>>> ds.put(hello, 'world')
>>> ds.contains(hello)
True
>>> ds.get(hello)
'world'
>>> ds.delete(hello)
>>> ds.get(hello)
None
'''
def __init__(self, size):
self._cache = pylru.lrucache(size)
def __len__(self):
return len(self._cache)
def clear(self):
self._cache.clear()
def get(self, key):
'''Return the object named by key.'''
try:
return self._cache[key]
        except KeyError:
return None
def put(self, key, value):
'''Stores the object.'''
self._cache[key] = value
def delete(self, key):
'''Removes the object.'''
if key in self._cache:
del self._cache[key]
def contains(self, key):
'''Returns whether the object is in this datastore.'''
return key in self._cache
def query(self, query):
'''Returns a sequence of objects matching criteria expressed in `query`'''
# entire dataset already in memory, so ok to apply query naively
return query(self._cache.values())
| mit | 7,439,266,530,720,450,000 | 20.225352 | 78 | 0.599867 | false | 3.512821 | false | false | false |
mindriot101/bokeh | bokeh/core/property/color.py | 3 | 4350 | #-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2018, Anaconda, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
''' Provide color related properties.
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
# External imports
# Bokeh imports
from ... import colors
from .. import enums
from .bases import Property
from .container import Tuple
from .enum import Enum
from .either import Either
from .numeric import Byte, Percent
from .regex import Regex
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
__all__ = (
'Color',
'RGB',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
class RGB(Property):
''' Accept colors.RGB values.
'''
def validate(self, value, detail=True):
super(RGB, self).validate(value, detail)
if not (value is None or isinstance(value, colors.RGB)):
msg = "" if not detail else "expected RGB value, got %r" % (value,)
raise ValueError(msg)
class Color(Either):
''' Accept color values in a variety of ways.
For colors, because we support named colors and hex values prefaced
with a "#", when we are handed a string value, there is a little
interpretation: if the value is one of the 147 SVG named colors or
it starts with a "#", then it is interpreted as a value.
If a 3-tuple is provided, then it is treated as an RGB (0..255).
If a 4-tuple is provided, then it is treated as an RGBa (0..255), with
alpha as a float between 0 and 1. (This follows the HTML5 Canvas API.)
Example:
.. code-block:: python
>>> class ColorModel(HasProps):
... prop = Color()
...
>>> m = ColorModel()
>>> m.prop = "firebrick"
>>> m.prop = "#a240a2"
>>> m.prop = (100, 100, 255)
>>> m.prop = (100, 100, 255, 0.5)
>>> m.prop = "junk" # ValueError !!
>>> m.prop = (100.2, 57.3, 10.2) # ValueError !!
'''
def __init__(self, default=None, help=None):
types = (Enum(enums.NamedColor),
Regex("^#[0-9a-fA-F]{6}$"),
                 Regex(r"^rgba\(((25[0-5]|2[0-4]\d|1\d{1,2}|\d\d?)\s*,"
                       r"\s*?){2}(25[0-5]|2[0-4]\d|1\d{1,2}|\d\d?)\s*,"
                       r"\s*([01]\.?\d*?)\)"),
                 Regex(r"^rgb\(((25[0-5]|2[0-4]\d|1\d{1,2}|\d\d?)\s*,"
                       r"\s*?){2}(25[0-5]|2[0-4]\d|1\d{1,2}|\d\d?)\s*?\)"),
Tuple(Byte, Byte, Byte),
Tuple(Byte, Byte, Byte, Percent),
RGB)
super(Color, self).__init__(*types, default=default, help=help)
def __str__(self):
return self.__class__.__name__
def transform(self, value):
if isinstance(value, tuple):
value = colors.RGB(*value).to_css()
return value
def _sphinx_type(self):
return self._sphinx_prop_link()
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
| bsd-3-clause | 2,634,407,079,164,814,300 | 31.462687 | 82 | 0.392874 | false | 4.728261 | false | false | false |
ContributeToScience/participant-booking-app | booking/userprofile/forms.py | 1 | 3894 | from django import forms
from django.contrib.auth import authenticate
from django.utils.translation import ugettext_lazy as _
from allauth.account.forms import SignupForm, LoginForm
from allauth.account import app_settings
from userprofile.models import UserProfile
from core.utils import USER_TYPE_CHOICES
from core.utils import USER_TYPE
PAYMENT_TYPE_CHOICES = (
('paypal', _(u'PayPal')),
('amazon', _(u'Amazon')),
)
class CommonForm(forms.Form):
first_name = forms.CharField(max_length=30, required=True, label=_(u'First name'))
middle_name = forms.CharField(max_length=30, required=False, label=_(u'Middle name'))
last_name = forms.CharField(max_length=30, required=True, label=_(u'Last name'))
address = forms.CharField(max_length=255, required=True, label=_(u'Approximate address'))
lng = forms.CharField(required=True, label=_(u'Longitude'), max_length=255)
lat = forms.CharField(required=True, label=_(u'Latitude'), max_length=255)
mobile = forms.CharField(max_length=30, required=False, label=_(u'Mobile number'))
payment_type = forms.ChoiceField(required=False, label=_(u'Payment type'), choices=PAYMENT_TYPE_CHOICES)
payment_account = forms.CharField(max_length=30, required=False, label=_(u'Payment id'))
class Meta:
model = UserProfile
exclude = (
'available_balance',
)
class ParticipantSignupForm(SignupForm):
def create_user(self, commit=True):
user = super(ParticipantSignupForm, self).create_user()
user_profile = UserProfile.objects.get_or_create(user=user)[0]
user_profile.is_participant = True
user_profile.save()
return user
class ScientistSignupForm(SignupForm):
def create_user(self, commit=True):
user = super(ScientistSignupForm, self).create_user()
user_profile = UserProfile.objects.get_or_create(user=user)[0]
user_profile.is_scientist = True
user_profile.is_participant = True
user_profile.save()
return user
class DepartmentSignupForm(SignupForm):
def create_user(self, commit=True):
user = super(DepartmentSignupForm, self).create_user()
user_profile = UserProfile.objects.get_or_create(user=user)[0]
user_profile.is_department = True
user_profile.is_scientist = True
user_profile.is_participant = True
user_profile.save()
return user
class NewLoginForm(LoginForm):
user_type = forms.ChoiceField(choices=USER_TYPE_CHOICES)
def clean(self):
if self._errors:
return
user = authenticate(**self.user_credentials())
if user:
if user.is_active:
self.user = user
user_type = self.data['user_type']
if not user_type or user_type not in USER_TYPE:
raise forms.ValidationError(_("Incorrect user type."))
profile = UserProfile.objects.get_or_create(user=user)[0]
if not profile.has_role(user_type):
                    raise forms.ValidationError(_("The user does not have the user type: %s." % user_type))
else:
raise forms.ValidationError(_("This account is currently"
" inactive."))
else:
if app_settings.AUTHENTICATION_METHOD == 'email':
error = _("The e-mail address and/or password you specified"
" are not correct.")
elif app_settings.AUTHENTICATION_METHOD == 'username':
error = _("The username and/or password you specified are"
" not correct.")
else:
error = _("The login and/or password you specified are not"
" correct.")
raise forms.ValidationError(error)
return self.cleaned_data | gpl-2.0 | -3,017,574,315,264,272,000 | 36.815534 | 108 | 0.627889 | false | 4.090336 | false | false | false |
mozilla/firefox-flicks | vendor-local/lib/python/celery/task/sets.py | 1 | 2587 | # -*- coding: utf-8 -*-
"""
celery.task.sets
~~~~~~~~~~~~~~~~
Old ``group`` implementation, this module should
not be used anymore use :func:`celery.group` instead.
"""
from __future__ import absolute_import
from __future__ import with_statement
from celery._state import get_current_worker_task
from celery.app import app_or_default
from celery.canvas import subtask, maybe_subtask # noqa
from celery.utils import uuid
class TaskSet(list):
"""A task containing several subtasks, making it possible
to track how many, or when all of the tasks have been completed.
:param tasks: A list of :class:`subtask` instances.
Example::
>>> urls = ('http://cnn.com/rss', 'http://bbc.co.uk/rss')
>>> s = TaskSet(refresh_feed.s(url) for url in urls)
>>> taskset_result = s.apply_async()
>>> list_of_return_values = taskset_result.join() # *expensive*
"""
app = None
def __init__(self, tasks=None, app=None, Publisher=None):
super(TaskSet, self).__init__(maybe_subtask(t) for t in tasks or [])
self.app = app_or_default(app or self.app)
self.Publisher = Publisher or self.app.amqp.TaskProducer
self.total = len(self) # XXX compat
def apply_async(self, connection=None, connect_timeout=None,
publisher=None, taskset_id=None):
"""Apply TaskSet."""
app = self.app
if app.conf.CELERY_ALWAYS_EAGER:
return self.apply(taskset_id=taskset_id)
with app.connection_or_acquire(connection, connect_timeout) as conn:
setid = taskset_id or uuid()
pub = publisher or self.Publisher(conn)
results = self._async_results(setid, pub)
result = app.TaskSetResult(setid, results)
parent = get_current_worker_task()
if parent:
parent.request.children.append(result)
return result
def _async_results(self, taskset_id, publisher):
return [task.apply_async(taskset_id=taskset_id, publisher=publisher)
for task in self]
def apply(self, taskset_id=None):
"""Applies the TaskSet locally by blocking until all tasks return."""
setid = taskset_id or uuid()
return self.app.TaskSetResult(setid, self._sync_results(setid))
def _sync_results(self, taskset_id):
return [task.apply(taskset_id=taskset_id) for task in self]
def _get_tasks(self):
return self
def _set_tasks(self, tasks):
self[:] = tasks
tasks = property(_get_tasks, _set_tasks)
| bsd-3-clause | -6,573,385,006,515,336,000 | 32.597403 | 77 | 0.621956 | false | 3.722302 | false | false | false |
google-research/korvapuusti | listening_test_summer_2020/analysis/data_processing/preprocess_extra_data.py | 1 | 5705 | # Copyright 2020 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Preprocesses data from listening tests."""
import json
import os
import random
from typing import Dict, List, Any, Tuple
import data_plotting
import data_helpers
import pprint
import csv
from absl import app
from absl import flags
import numpy as np
FLAGS = flags.FLAGS
flags.DEFINE_string(
"input_file_path",
"extra_data/probes_two_tone_set_extra_data.csv",
"JSON file with answers per annotator.")
flags.DEFINE_string(
"output_directory",
"extra_data",
"Directory to save preprocessed data in.")
flags.DEFINE_integer(
"seed",
2,
"Random seed."
)
flags.DEFINE_integer("min_frequency", 20, "Minimum frequency for a tone.")
flags.DEFINE_integer("max_frequency", 20000, "Maximum frequency for a tone.")
flags.DEFINE_integer(
"unity_decibel_level",
90,
"Which decibel level equals a sine at unity in the wavefiles."
)
def get_data(
input_file: str, save_directory: str,
critical_bands: List[int], unity_db_level: int) -> List[Dict[str, Any]]:
"""Returns data."""
with open(input_file, "r") as infile:
csvreader = csv.reader(infile, delimiter=',')
data = []
for i, raw_example_line in enumerate(csvreader):
if i == 0:
continue
example_specs = raw_example_line[2].split("],[")
masker_frequency, masker_level = example_specs[0].strip("[[").split(",")
probe_frequency, probe_level = example_specs[1].strip("]]").split(",")
wavfile_identifier = raw_example_line[4].split("/")[-1]
example = {
"probe_frequency": float(probe_frequency),
"probe_level": int(probe_level),
"perceived_probe_levels": [],
"worker_ids": [],
"masker_frequency": float(masker_frequency),
"masker_level": int(masker_level),
"wavfile_identifier": wavfile_identifier
}
data.append(example)
return data
def prepare_data_modeling(train_set: List[Dict[str, Any]], curves_file: str,
save_directory: str):
lookup_table = data_helpers.AnswerLookupTable()
for example in train_set:
lookup_table.add(example["masker_frequency"], example["probe_level"],
example["masker_level"], example["probe_frequency"],
example)
preprocessed_train_set = []
with open(curves_file, "r") as infile:
answers_matched = 0
curve_data = json.load(infile)
for i, masker_probe_curves in enumerate(curve_data):
masker_frequency = float(masker_probe_curves["masker_frequency"])
probe_level = int(masker_probe_curves["probe_level"])
curves = masker_probe_curves["curves"]
for j, curve in enumerate(curves):
curve_data[i]["curves"][j]["failed"] = []
masker_level = int(curve["masker_level"])
probe_frequencies = curve["probe_frequencies"]
for k, probe_frequency in enumerate(probe_frequencies):
probe_frequency = float(probe_frequency)
example_answers = lookup_table.extract(masker_frequency, probe_level,
masker_level, probe_frequency)
if example_answers:
answers = example_answers["perceived_probe_levels"]
perceived_levels = np.array(answers)
# Hardcoded removal of failed probes (too high frequency).
if probe_frequency == 17625.0:
curve_data[i]["curves"][j]["failed"].append(k)
else:
masking = probe_level - perceived_levels
masking[masking < 0] = 0
curve_data[i]["curves"][j]["probe_masking"][k] = list(masking)
answers_matched += 1
preprocessed_train_set.append(example_answers)
else:
curve_data[i]["curves"][j]["failed"].append(k)
data_plotting.plot_masking_patterns_grid(curve_data,
save_directory=save_directory)
data_plotting.plot_masking_patterns(curve_data,
save_directory=save_directory)
with open(os.path.join(save_directory, "preprocessed_train_set.json"),
"w") as outfile:
json.dump(preprocessed_train_set, outfile, indent=4)
return answers_matched
def main(argv):
if len(argv) > 1:
raise app.UsageError("Too many command-line arguments.")
if not os.path.exists(FLAGS.input_file_path):
raise ValueError("No data found at %s" % FLAGS.input_file_path)
if not os.path.exists(FLAGS.output_directory):
os.mkdir(FLAGS.output_directory)
critical_bands = [
FLAGS.min_frequency, 100, 200, 300, 400, 505, 630, 770, 915, 1080, 1265,
1475, 1720, 1990, 2310, 2690, 3125, 3675, 4350, 5250, 6350, 7650, 9400,
11750, 15250, FLAGS.max_frequency
]
random.seed(FLAGS.seed)
data = get_data(FLAGS.input_file_path,
FLAGS.output_directory, critical_bands,
FLAGS.unity_decibel_level)
with open(os.path.join(FLAGS.output_directory, "extra_train_set.json"),
"w") as outfile:
json.dump(data, outfile, indent=4)
if __name__ == "__main__":
app.run(main)
| apache-2.0 | -7,068,827,731,667,365,000 | 35.33758 | 79 | 0.63085 | false | 3.685401 | false | false | false |
Voilier/obole | core/urls.py | 1 | 2003 | # -*- coding: utf-8 -*-
"""obole URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from __future__ import absolute_import, unicode_literals
from django.conf.urls import include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.auth.models import User
from rest_framework import routers, serializers, viewsets
# Serializers define the API representation.
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'is_staff')
# ViewSets define the view behavior.
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
# Routers provide an easy way of automatically determining the URL conf.
router = routers.DefaultRouter()
#router.register(r'users', UserViewSet)
# Wire up our API using automatic URL routing.
# Additionally, we include login URLs for the browsable API.
urlpatterns = [
url(r'^', include(router.urls)),
url(r'^admin/', admin.site.urls),
url(r'^', include('users.urls')),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
if settings.DEBUG:
from django.conf.urls.static import static
urlpatterns += static(
settings.STATIC_URL, document_root=settings.STATIC_ROOT
) + static(
settings.MEDIA_URL, document_root=settings.MEDIA_ROOT
)
| mpl-2.0 | -5,145,231,204,531,826,000 | 32.383333 | 82 | 0.71343 | false | 3.743925 | false | false | false |
ssekulic/tikka-postagger | py/get_scores_eng.py | 2 | 2221 | #! /usr/bin/python
import os, sys, re
BROWN_LCURVE=( "learningcurve0008", "learningcurve0016", "learningcurve0032",\
"learningcurve0064", "learningcurve0128", "learningcurve0256", "full" )
WSJ_LCURVE=( "learningcurve0008", "learningcurve0016", "learningcurve0032", \
"learningcurve0064", "learningcurve0128", "learningcurve0256", \
"learningcurve0512", "learningcurve1024", "full" )
score_finder=re.compile(r"^(0\.\d+\s*)+$")
brown_map={}
counter = 1
for c in BROWN_LCURVE:
brown_map[c]="l%d" % counter
counter += 1
wsj_map={}
counter = 1
for c in WSJ_LCURVE:
wsj_map[c] = "l%d" % counter
counter += 1
models=("m1","m2","m3","m4","m6")
inpath=os.path.abspath(os.path.expanduser(sys.argv[1]))
content_states=0
function_states=0
model_id=""
data_id=""
corpus=""
dataline = "model.id,corpus,data.id,function.states,content.states,states,f1to1,fmto1,r1to1,rmto1"
print dataline
for fi in os.listdir(inpath):
fullpath=os.path.join(inpath,fi)
if os.path.isfile(fullpath):
labs = fi.split(".")
corpus=labs[0]
if corpus == "brown":
data_id=brown_map[labs[-2]]
else:
data_id=wsj_map[labs[-2]]
model_id=labs[-3]
function_states=labs[-5]
content_states=labs[-4]
states = "%d" % (int(function_states) + int(content_states))
handle = open(fullpath)
scores = ""
for line in handle:
m = score_finder.search(line)
if m:
scores=line
break
if len(scores) > 0:
scores = scores.split()
f1to1 = scores[0]
fmto1 = scores[1]
r1to1=scores[2]
rmto1=scores[3]
datam = {"model_id":model_id, "data_id":data_id, "corpus":corpus, \
"function_states":function_states, "content_states":content_states, \
"f1to1":f1to1, "fmto1":fmto1, "r1to1":r1to1, "rmto1":rmto1,"states":states}
dataline = "%(model_id)s,%(corpus)s,%(data_id)s,%(function_states)s,%(content_states)s,%(states)s,%(f1to1)s,%(fmto1)s,%(r1to1)s,%(rmto1)s" % datam
print dataline
| lgpl-3.0 | -4,207,469,603,122,749,400 | 30.728571 | 158 | 0.573615 | false | 2.949535 | false | false | false |
ovresko/erpnext | erpnext/manufacturing/report/bom_stock_calculated/bom_stock_calculated.py | 3 | 3037 | # Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
def execute(filters=None):
# if not filters: filters = {}
columns = get_columns()
summ_data = []
data = get_bom_stock(filters)
qty_to_make = filters.get("qty_to_make")
for row in data:
item_map = get_item_details(row.item_code)
reqd_qty = qty_to_make * row.actual_qty
last_pur_price = frappe.db.get_value("Item", row.item_code, "last_purchase_rate")
if row.to_build > 0:
diff_qty = row.to_build - reqd_qty
summ_data.append([row.item_code, row.description, item_map[row.item_code]["manufacturer"], item_map[row.item_code]["manufacturer_part_no"], row.actual_qty, row.to_build, reqd_qty, diff_qty, last_pur_price])
else:
diff_qty = 0 - reqd_qty
summ_data.append([row.item_code, row.description, item_map[row.item_code]["manufacturer"], item_map[row.item_code]["manufacturer_part_no"], row.actual_qty, "0.000", reqd_qty, diff_qty, last_pur_price])
return columns, summ_data
def get_columns():
"""return columns"""
columns = [
_("Item") + ":Link/Item:100",
_("Description") + "::150",
_("Manufacturer") + "::100",
_("Manufacturer Part Number") + "::100",
_("Qty") + ":Float:50",
_("Stock Qty") + ":Float:100",
_("Reqd Qty")+ ":Float:100",
_("Diff Qty")+ ":Float:100",
_("Last Purchase Price")+ ":Float:100",
]
return columns
def get_bom_stock(filters):
conditions = ""
bom = filters.get("bom")
table = "`tabBOM Item`"
qty_field = "qty"
if filters.get("show_exploded_view"):
table = "`tabBOM Explosion Item`"
qty_field = "stock_qty"
if filters.get("warehouse"):
warehouse_details = frappe.db.get_value("Warehouse", filters.get("warehouse"), ["lft", "rgt"], as_dict=1)
if warehouse_details:
conditions += " and exists (select name from `tabWarehouse` wh \
where wh.lft >= %s and wh.rgt <= %s and ledger.warehouse = wh.name)" % (warehouse_details.lft,
warehouse_details.rgt)
else:
conditions += " and ledger.warehouse = '%s'" % frappe.db.escape(filters.get("warehouse"))
else:
conditions += ""
return frappe.db.sql("""
SELECT
bom_item.item_code,
bom_item.description,
bom_item.{qty_field},
ifnull(sum(ledger.actual_qty), 0) as actual_qty,
ifnull(sum(FLOOR(ledger.actual_qty / bom_item.{qty_field})), 0) as to_build
FROM
{table} AS bom_item
LEFT JOIN `tabBin` AS ledger
ON bom_item.item_code = ledger.item_code
{conditions}
WHERE
bom_item.parent = '{bom}' and bom_item.parenttype='BOM'
GROUP BY bom_item.item_code""".format(qty_field=qty_field, table=table, conditions=conditions, bom=bom), as_dict=1)
def get_item_details(item_code):
items = frappe.db.sql("""select it.item_group, it.item_name, it.stock_uom, it.name, it.brand, it.description, it.manufacturer_part_no, it.manufacturer from tabItem it where it.item_code = %s""", item_code, as_dict=1)
return dict((d.name, d) for d in items)
| gpl-3.0 | -5,099,853,073,463,069,000 | 32.373626 | 218 | 0.663484 | false | 2.859699 | false | false | false |
ViDA-NYU/genotet | uploadBatch.py | 1 | 2222 | # This is a python script for uploading batch data to Genotet server.
# The user may write a *.tsv file, with each line as:
# file_path data_name file_type description
# The command line would be:
# python uploadBatch.py username example.tsv
# And then enter your password for Genotet.
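# An illustrative example.tsv line (columns separated by tabs; the values below are
# placeholders, not real Genotet data types):
#   /path/to/data.txt    my_data_name    <file_type>    short description of the data
# Here <file_type> stands for whatever type string the Genotet server expects.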
from requests_toolbelt import MultipartEncoder
import requests
import sys
import getpass
import json
url = 'http://localhost:3000' # Please change it accordingly.
def upload_file(file_path, data_name, file_type, description, cookies):
upload_url = url + '/genotet/upload'
file_path_parts = file_path.split('\/')
file_name = file_path_parts[len(file_path_parts) - 1]
params = MultipartEncoder(
fields={'type': file_type,
'name': data_name,
'description': description,
'username': 'anonymous',
'file': (file_name, open(file_path, 'rb'), 'text/plain')})
headers = {'Content-Type': params.content_type}
cookie = {'genotet-session': cookies['genotet-session']}
response = requests.post(upload_url, data=params, headers=headers, cookies=cookie)
print response.status_code
return True
def auth(username, password):
auth_url = url + '/genotet/user'
params = {
'type': 'sign-in',
'username': username,
'password': password
}
params = {'data': json.dumps(params)}
response = requests.get(auth_url, params=params)
if response.status_code != 200:
return False
return response.cookies, True
def main(argv):
if len(argv) < 3:
print 'input not enough'
return
username = argv[1]
password = getpass.getpass('Password:')
cookies, auth_result = auth(username, password)
if not auth_result:
print 'username/password not correct'
return
else:
print 'sign in success'
file_path = argv[2]
tsv_file = open(file_path, 'r')
for line in tsv_file:
parts = line.split('\t')
result = upload_file(parts[0], parts[1], parts[2], parts[3], cookies)
if not result:
print 'failed to upload ' + parts[0]
return
if __name__ == '__main__':
main(sys.argv)
| apache-2.0 | 8,550,396,749,831,677,000 | 31.202899 | 86 | 0.623312 | false | 3.715719 | false | false | false |
JasonDeving/python_game | game.py | 1 | 2342 | import sys
from character import Character
from monster import Dragon, Goblin, Troll
class Game:
def setup(self):
self.player = Character()
        self.monsters = [
Goblin(),
Troll(),
Dragon()
]
self.monster = self.get_next_monster()
def get_next_monster(self):
try:
            return self.monsters.pop(0)
except IndexError:
return None
def monster_turn(self):
if self.monster.attack():
print("{} is attacking!".format(self.monster))
if input("Dodge ? Y/N").lower() == 'y':
if self.player.dodge():
                    print("you dodged the attack!")
else:
print("you got hit anyway!")
self.player.hit_points -= 1
else:
                print("you got hit anyway!")
self.player.hit_points -= 1
else:
print("{} isn't attacking this turn.".format(self.monster))
def player_turn(self):
player_choice = input("[A]ttack, [R]est, [Q]uit? ").lower()
if player_choice == 'a':
print("you're attacking {}!".format(self.monster))
if self.player.attack():
if self.monster.dodge():
                    print("{} dodged your attack!".format(self.monster))
else:
if self.player.leveled_up():
self.monster.hit_points -= 2
else:
self.monster.hit_points -= 1
                    print("you hit {} with your {}!".format(
self.monster, self.player.weapon))
else:
                print("you missed")
elif player_choice == 'r':
self.player.rest()
elif player_choice == 'q':
sys.exit()
else:
self.player_turn()
def cleanup(self):
if self.monster.hit_points <= 0:
self.player.experience += self.monster.experience
            print("you killed {}".format(self.monster))
self.monster = self.get_next_monster()
def __init__(self):
self.setup()
while self.player.hit_points and (self.monster or self.monsters):
print('\n'+'='*20)
print(self.player)
self.monster_turn()
print('-'*20)
self.player_turn()
self.cleanup()
print('\n'+'='*20)
if self.player.hit_points:
            print("you win")
elif self.monsters or self.monster:
print("you lose")
sys.exit()
Game()
| mit | -2,062,134,807,052,411,600 | 24.193548 | 69 | 0.543126 | false | 3.586524 | false | false | false |
xiandiancloud/edx-platform | common/lib/calc/calc/calc.py | 61 | 13558 | """
Parser and evaluator for FormulaResponse and NumericalResponse
Uses pyparsing to parse. Main function as of now is evaluator().
"""
import math
import operator
import numbers
import numpy
import scipy.constants
import functions
from pyparsing import (
Word, Literal, CaselessLiteral, ZeroOrMore, MatchFirst, Optional, Forward,
Group, ParseResults, stringEnd, Suppress, Combine, alphas, nums, alphanums
)
DEFAULT_FUNCTIONS = {
'sin': numpy.sin,
'cos': numpy.cos,
'tan': numpy.tan,
'sec': functions.sec,
'csc': functions.csc,
'cot': functions.cot,
'sqrt': numpy.sqrt,
'log10': numpy.log10,
'log2': numpy.log2,
'ln': numpy.log,
'exp': numpy.exp,
'arccos': numpy.arccos,
'arcsin': numpy.arcsin,
'arctan': numpy.arctan,
'arcsec': functions.arcsec,
'arccsc': functions.arccsc,
'arccot': functions.arccot,
'abs': numpy.abs,
'fact': math.factorial,
'factorial': math.factorial,
'sinh': numpy.sinh,
'cosh': numpy.cosh,
'tanh': numpy.tanh,
'sech': functions.sech,
'csch': functions.csch,
'coth': functions.coth,
'arcsinh': numpy.arcsinh,
'arccosh': numpy.arccosh,
'arctanh': numpy.arctanh,
'arcsech': functions.arcsech,
'arccsch': functions.arccsch,
'arccoth': functions.arccoth
}
DEFAULT_VARIABLES = {
'i': numpy.complex(0, 1),
'j': numpy.complex(0, 1),
'e': numpy.e,
'pi': numpy.pi,
'k': scipy.constants.k, # Boltzmann: 1.3806488e-23 (Joules/Kelvin)
'c': scipy.constants.c, # Light Speed: 2.998e8 (m/s)
'T': 298.15, # Typical room temperature: 298.15 (Kelvin), same as 25C/77F
'q': scipy.constants.e # Fund. Charge: 1.602176565e-19 (Coulombs)
}
# We eliminated the following extreme suffixes:
# P (1e15), E (1e18), Z (1e21), Y (1e24),
# f (1e-15), a (1e-18), z (1e-21), y (1e-24)
# since they're rarely used, and potentially confusing.
# They may also conflict with variables if we ever allow e.g.
# 5R instead of 5*R
SUFFIXES = {
'%': 0.01, 'k': 1e3, 'M': 1e6, 'G': 1e9, 'T': 1e12,
'c': 1e-2, 'm': 1e-3, 'u': 1e-6, 'n': 1e-9, 'p': 1e-12
}
class UndefinedVariable(Exception):
"""
Indicate when a student inputs a variable which was not expected.
"""
pass
def lower_dict(input_dict):
"""
Convert all keys in a dictionary to lowercase; keep their original values.
Keep in mind that it is possible (but not useful?) to define different
variables that have the same lowercase representation. It would be hard to
tell which is used in the final dict and which isn't.
"""
return {k.lower(): v for k, v in input_dict.iteritems()}
# The following few functions define evaluation actions, which are run on lists
# of results from each parse component. They convert the strings and (previously
# calculated) numbers into the number that component represents.
def super_float(text):
"""
Like float, but with SI extensions. 1k goes to 1000.
"""
if text[-1] in SUFFIXES:
return float(text[:-1]) * SUFFIXES[text[-1]]
else:
return float(text)
def eval_number(parse_result):
"""
Create a float out of its string parts.
e.g. [ '7.13', 'e', '3' ] -> 7130
Calls super_float above.
"""
return super_float("".join(parse_result))
def eval_atom(parse_result):
"""
Return the value wrapped by the atom.
In the case of parenthesis, ignore them.
"""
# Find first number in the list
result = next(k for k in parse_result if isinstance(k, numbers.Number))
return result
def eval_power(parse_result):
"""
Take a list of numbers and exponentiate them, right to left.
e.g. [ 2, 3, 2 ] -> 2^3^2 = 2^(3^2) -> 512
(not to be interpreted (2^3)^2 = 64)
"""
# `reduce` will go from left to right; reverse the list.
parse_result = reversed(
[k for k in parse_result
if isinstance(k, numbers.Number)] # Ignore the '^' marks.
)
# Having reversed it, raise `b` to the power of `a`.
power = reduce(lambda a, b: b ** a, parse_result)
return power
def eval_parallel(parse_result):
"""
Compute numbers according to the parallel resistors operator.
BTW it is commutative. Its formula is given by
out = 1 / (1/in1 + 1/in2 + ...)
e.g. [ 1, 2 ] -> 2/3
Return NaN if there is a zero among the inputs.
"""
if len(parse_result) == 1:
return parse_result[0]
if 0 in parse_result:
return float('nan')
reciprocals = [1. / e for e in parse_result
if isinstance(e, numbers.Number)]
return 1. / sum(reciprocals)
def eval_sum(parse_result):
"""
Add the inputs, keeping in mind their sign.
[ 1, '+', 2, '-', 3 ] -> 0
Allow a leading + or -.
"""
total = 0.0
current_op = operator.add
for token in parse_result:
if token == '+':
current_op = operator.add
elif token == '-':
current_op = operator.sub
else:
total = current_op(total, token)
return total
def eval_product(parse_result):
"""
Multiply the inputs.
[ 1, '*', 2, '/', 3 ] -> 0.66
"""
prod = 1.0
current_op = operator.mul
for token in parse_result:
if token == '*':
current_op = operator.mul
elif token == '/':
current_op = operator.truediv
else:
prod = current_op(prod, token)
return prod
def add_defaults(variables, functions, case_sensitive):
"""
Create dictionaries with both the default and user-defined variables.
"""
all_variables = dict(DEFAULT_VARIABLES)
all_functions = dict(DEFAULT_FUNCTIONS)
all_variables.update(variables)
all_functions.update(functions)
if not case_sensitive:
all_variables = lower_dict(all_variables)
all_functions = lower_dict(all_functions)
return (all_variables, all_functions)
def evaluator(variables, functions, math_expr, case_sensitive=False):
"""
Evaluate an expression; that is, take a string of math and return a float.
-Variables are passed as a dictionary from string to value. They must be
python numbers.
-Unary functions are passed as a dictionary from string to function.
"""
# No need to go further.
if math_expr.strip() == "":
return float('nan')
# Parse the tree.
math_interpreter = ParseAugmenter(math_expr, case_sensitive)
math_interpreter.parse_algebra()
# Get our variables together.
all_variables, all_functions = add_defaults(variables, functions, case_sensitive)
# ...and check them
math_interpreter.check_variables(all_variables, all_functions)
# Create a recursion to evaluate the tree.
if case_sensitive:
casify = lambda x: x
else:
casify = lambda x: x.lower() # Lowercase for case insens.
evaluate_actions = {
'number': eval_number,
'variable': lambda x: all_variables[casify(x[0])],
'function': lambda x: all_functions[casify(x[0])](x[1]),
'atom': eval_atom,
'power': eval_power,
'parallel': eval_parallel,
'product': eval_product,
'sum': eval_sum
}
return math_interpreter.reduce_tree(evaluate_actions)
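# Illustrative usage (not part of the original module): with one extra variable and the
# default functions, e.g.
#   evaluator({'R': 2.0}, {}, "3*R + 5k||5k")
# the parser resolves R from the variables dict, expands the SI suffix 'k' and the
# parallel-resistor operator '||', and returns 3*2.0 + 2500.0 = 2506.0.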
class ParseAugmenter(object):
"""
Holds the data for a particular parse.
Retains the `math_expr` and `case_sensitive` so they needn't be passed
around method to method.
Eventually holds the parse tree and sets of variables as well.
"""
def __init__(self, math_expr, case_sensitive=False):
"""
Create the ParseAugmenter for a given math expression string.
Do the parsing later, when called like `OBJ.parse_algebra()`.
"""
self.case_sensitive = case_sensitive
self.math_expr = math_expr
self.tree = None
self.variables_used = set()
self.functions_used = set()
def vpa(tokens):
"""
When a variable is recognized, store it in `variables_used`.
"""
varname = tokens[0][0]
self.variables_used.add(varname)
def fpa(tokens):
"""
When a function is recognized, store it in `functions_used`.
"""
varname = tokens[0][0]
self.functions_used.add(varname)
self.variable_parse_action = vpa
self.function_parse_action = fpa
def parse_algebra(self):
"""
Parse an algebraic expression into a tree.
Store a `pyparsing.ParseResult` in `self.tree` with proper groupings to
reflect parenthesis and order of operations. Leave all operators in the
tree and do not parse any strings of numbers into their float versions.
Adding the groups and result names makes the `repr()` of the result
really gross. For debugging, use something like
print OBJ.tree.asXML()
"""
# 0.33 or 7 or .34 or 16.
number_part = Word(nums)
inner_number = (number_part + Optional("." + Optional(number_part))) | ("." + number_part)
# pyparsing allows spaces between tokens--`Combine` prevents that.
inner_number = Combine(inner_number)
# SI suffixes and percent.
number_suffix = MatchFirst(Literal(k) for k in SUFFIXES.keys())
# 0.33k or 17
plus_minus = Literal('+') | Literal('-')
number = Group(
Optional(plus_minus) +
inner_number +
Optional(CaselessLiteral("E") + Optional(plus_minus) + number_part) +
Optional(number_suffix)
)
number = number("number")
# Predefine recursive variables.
expr = Forward()
# Handle variables passed in. They must start with letters/underscores
# and may contain numbers afterward.
inner_varname = Word(alphas + "_", alphanums + "_")
varname = Group(inner_varname)("variable")
varname.setParseAction(self.variable_parse_action)
# Same thing for functions.
function = Group(inner_varname + Suppress("(") + expr + Suppress(")"))("function")
function.setParseAction(self.function_parse_action)
atom = number | function | varname | "(" + expr + ")"
atom = Group(atom)("atom")
# Do the following in the correct order to preserve order of operation.
pow_term = atom + ZeroOrMore("^" + atom)
pow_term = Group(pow_term)("power")
par_term = pow_term + ZeroOrMore('||' + pow_term) # 5k || 4k
par_term = Group(par_term)("parallel")
prod_term = par_term + ZeroOrMore((Literal('*') | Literal('/')) + par_term) # 7 * 5 / 4
prod_term = Group(prod_term)("product")
sum_term = Optional(plus_minus) + prod_term + ZeroOrMore(plus_minus + prod_term) # -5 + 4 - 3
sum_term = Group(sum_term)("sum")
# Finish the recursion.
expr << sum_term # pylint: disable=W0104
self.tree = (expr + stringEnd).parseString(self.math_expr)[0]
def reduce_tree(self, handle_actions, terminal_converter=None):
"""
Call `handle_actions` recursively on `self.tree` and return result.
`handle_actions` is a dictionary of node names (e.g. 'product', 'sum',
        etc.) to functions. These functions are of the following form:
-input: a list of processed child nodes. If it includes any terminal
nodes in the list, they will be given as their processed forms also.
-output: whatever to be passed to the level higher, and what to
return for the final node.
`terminal_converter` is a function that takes in a token and returns a
processed form. The default of `None` just leaves them as strings.
"""
def handle_node(node):
"""
Return the result representing the node, using recursion.
Call the appropriate `handle_action` for this node. As its inputs,
feed it the output of `handle_node` for each child node.
"""
if not isinstance(node, ParseResults):
# Then treat it as a terminal node.
if terminal_converter is None:
return node
else:
return terminal_converter(node)
node_name = node.getName()
if node_name not in handle_actions: # pragma: no cover
raise Exception(u"Unknown branch name '{}'".format(node_name))
action = handle_actions[node_name]
handled_kids = [handle_node(k) for k in node]
return action(handled_kids)
# Find the value of the entire tree.
return handle_node(self.tree)
def check_variables(self, valid_variables, valid_functions):
"""
Confirm that all the variables used in the tree are valid/defined.
Otherwise, raise an UndefinedVariable containing all bad variables.
"""
if self.case_sensitive:
casify = lambda x: x
else:
casify = lambda x: x.lower() # Lowercase for case insens.
# Test if casify(X) is valid, but return the actual bad input (i.e. X)
bad_vars = set(var for var in self.variables_used
if casify(var) not in valid_variables)
bad_vars.update(func for func in self.functions_used
if casify(func) not in valid_functions)
if bad_vars:
raise UndefinedVariable(' '.join(sorted(bad_vars)))
| agpl-3.0 | 5,806,268,756,468,296,000 | 31.435407 | 102 | 0.607464 | false | 3.76402 | false | false | false |
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_04_01/operations/_express_route_ports_locations_operations.py | 1 | 7987 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ExpressRoutePortsLocationsOperations(object):
"""ExpressRoutePortsLocationsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_04_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ExpressRoutePortsLocationListResult"]
"""Retrieves all ExpressRoutePort peering locations. Does not return available bandwidths for each
location. Available bandwidths can only be obtained when retrieving a specific peering
location.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRoutePortsLocationListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_04_01.models.ExpressRoutePortsLocationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRoutePortsLocationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ExpressRoutePortsLocationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/ExpressRoutePortsLocations'} # type: ignore
def get(
self,
location_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.ExpressRoutePortsLocation"
"""Retrieves a single ExpressRoutePort peering location, including the list of available
bandwidths available at said peering location.
:param location_name: Name of the requested ExpressRoutePort peering location.
:type location_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRoutePortsLocation, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_04_01.models.ExpressRoutePortsLocation
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRoutePortsLocation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'locationName': self._serialize.url("location_name", location_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRoutePortsLocation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/ExpressRoutePortsLocations/{locationName}'} # type: ignore
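# Rough usage sketch (an assumption about the surrounding SDK, not part of this file):
# this operations class is normally reached through the generated management client,
# along the lines of
#   client = NetworkManagementClient(credential, subscription_id)
#   for location in client.express_route_ports_locations.list():
#       print(location.name)
#   details = client.express_route_ports_locations.get("peering-location-name")
# NetworkManagementClient, the credential object and the attribute name are assumptions
# based on the usual azure-mgmt-network client layout.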
| mit | -1,426,520,225,900,759,600 | 45.707602 | 147 | 0.651684 | false | 4.527778 | true | false | false |
mhozza/covest | tools/experiment_table.py | 1 | 4557 | #! /usr/bin/env python
import argparse
from collections import defaultdict
from tools import templates
from tools.experiment_parser import parse_all
from tools.table_generator import format_table
SEPARATE_EF = True
def kmer_to_read_coverage(c, k, read_length=100):
if c is not None:
return c * read_length / (read_length - k + 1)
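# Illustrative check (not in the original): with the default read_length=100 and k=21,
# a k-mer coverage of c=40 corresponds to 40 * 100 / (100 - 21 + 1) = 50.0 read coverage.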
def compute_average(table_lines, std_key_suffix='_std'):
table_cnt = defaultdict(lambda: defaultdict(int))
table_sum = defaultdict(lambda: defaultdict(float))
table_avg = defaultdict(lambda: defaultdict(float))
table_std_sum = defaultdict(lambda: defaultdict(float))
for key, val in table_lines.items():
for k, v in val.items():
try:
table_sum[key[1:]][k] += v
table_cnt[key[1:]][k] += 1.0
except TypeError:
pass
for key, val in table_sum.items():
for k, v in val.items():
if table_cnt[key][k] == 0:
table_avg[key][k] = None
else:
table_avg[key][k] = v / table_cnt[key][k]
for key, val in table_lines.items():
for k, v in val.items():
try:
table_std_sum[key[1:]][k] += (v - table_avg[key[1:]][k]) ** 2
except TypeError:
pass
for key, val in table_std_sum.items():
for k, v in val.items():
if table_cnt[key][k] <= 1:
table_avg[key][k + std_key_suffix] = 0
else:
table_avg[key][k + std_key_suffix] = (v / (table_cnt[key][k] - 1)) ** 0.5
return table_avg
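# Worked example (not in the original): two table_lines entries that differ only in the
# first key component (the sequence name) and report coverage 10 and 12 are grouped
# together; the averaged row then carries coverage 11.0 and coverage_std sqrt(2) ~= 1.41
# (sample standard deviation, since the divisor is count - 1).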
def main(args):
table_lines = parse_all(args.path, args.filter, not args.no_error, legacy=args.legacy)
header = [
'seq_name',
'provided_coverage', 'provided_error_rate', 'provided_k',
'coverage', 'error_rate',
'genome_size',
'q1', 'q2', 'q',
'guessed_coverage', 'guessed_error_rate',
'provided_loglikelihood', 'loglikelihood', 'guessed_loglikelihood',
]
header_avg = [
'provided_coverage',
'provided_error_rate',
'provided_k',
'coverage', 'coverage_std',
'error_rate', 'error_rate_std',
'genome_size', 'genome_size_std',
'q1', 'q1_std',
'q2', 'q2_std',
'q', 'q_std',
'guessed_coverage', 'guessed_coverage_std',
'guessed_error_rate', 'guessed_error_rate_std',
'provided_loglikelihood', 'provided_loglikelihood_std',
'loglikelihood', 'loglikelihood_std',
'guessed_loglikelihood', 'guessed_loglikelihood_std',
]
# header = [
# 'provided_coverage', 'provided_error_rate',
# 'coverage', 'error_rate',
# ]
format_templates = {
'html': templates.html,
'csv': templates.csv,
'tex': templates.tex,
}
format_escape = {
'tex': lambda x: x.replace('_', '\\_'),
}
titles = {
'provided_coverage': 'Coverage',
'provided_error_rate': 'Error Rate',
'coverage': 'Est. Coverage',
'coverage_std': 'Est. Coverage Std',
'error_rate': 'Est. Error Rate',
'error_rate_std': 'Est. Error Rate Std',
'genome_size': 'Est. Genome Size',
'genome_size_std': 'Est. Genome Size Std',
}
if args.average:
table_lines = compute_average(table_lines)
header = header_avg
print(format_table(
header,
titles,
sorted(
list(table_lines.values()),
key=lambda x: (
x['provided_coverage'],
x['provided_error_rate'],
x['provided_k'],
x.get('repeats', False),
x['seq_name'],
)
),
template_file=format_templates[args.format],
escape=format_escape.get(args.format, None),
))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Parse experiment output and generate table')
parser.add_argument('path', help='Experiment')
parser.add_argument('-f', '--format', default='html', help='Table format')
parser.add_argument('-i', '--filter', default='*.out', help='Filter files')
parser.add_argument('-a', '--average', action='store_true',
help='Compute average from all sequences')
parser.add_argument('-ne', '--no-error', action='store_true', help='Error is unknown')
parser.add_argument('--legacy', action='store_true', help='Run in legacy mode')
args = parser.parse_args()
main(args)
| gpl-3.0 | 365,333,557,470,087,900 | 31.091549 | 94 | 0.549923 | false | 3.605222 | false | false | false |
6809/dragonlib | dragonlib/api.py | 1 | 7826 | #!/usr/bin/env python
# encoding:utf8
"""
DragonPy - Dragon 32 emulator in Python
=======================================
:created: 2014 by Jens Diemer - www.jensdiemer.de
:copyleft: 2014 by the DragonLib team, see AUTHORS for more details.
:license: GNU GPL v3 or above, see LICENSE for more details.
"""
from __future__ import absolute_import, division, print_function
import six
import logging
from dragonlib.CoCo.basic_tokens import COCO_BASIC_TOKENS
from dragonlib.core.basic import BasicListing, RenumTool, BasicTokenUtil,\
BasicLine
from dragonlib.core.basic_parser import BASICParser
from dragonlib.core.binary_files import BinaryFile
from dragonlib.dragon32.basic_tokens import DRAGON32_BASIC_TOKENS
from dragonlib.utils.logging_utils import log_bytes
log=logging.getLogger(__name__)
DRAGON32 = "Dragon32"
COCO2B = "CoCo"
class BaseAPI(object):
RENUM_REGEX = r"""
(?P<statement> GOTO|GOSUB|THEN|ELSE ) (?P<space>\s*) (?P<no>[\d*,\s*]+)
"""
def __init__(self):
self.listing = BasicListing(self.BASIC_TOKENS)
self.renum_tool = RenumTool(self.RENUM_REGEX)
self.token_util = BasicTokenUtil(self.BASIC_TOKENS)
def program_dump2ascii_lines(self, dump, program_start=None):
"""
        convert a memory dump of a tokenized BASIC listing into
        a list of ASCII listing lines.
"""
dump = bytearray(dump)
# assert isinstance(dump, bytearray)
if program_start is None:
program_start = self.DEFAULT_PROGRAM_START
return self.listing.program_dump2ascii_lines(dump, program_start)
def parse_ascii_listing(self, basic_program_ascii):
parser = BASICParser()
parsed_lines = parser.parse(basic_program_ascii)
if not parsed_lines:
log.critical("No parsed lines %s from %s ?!?" % (
repr(parsed_lines), repr(basic_program_ascii)
))
log.debug("Parsed BASIC: %s", repr(parsed_lines))
return parsed_lines
def ascii_listing2basic_lines(self, basic_program_ascii, program_start):
parsed_lines = self.parse_ascii_listing(basic_program_ascii)
basic_lines = []
for line_no, code_objects in sorted(parsed_lines.items()):
basic_line = BasicLine(self.token_util)
basic_line.code_objects_load(line_no,code_objects)
basic_lines.append(basic_line)
return basic_lines
def ascii_listing2program_dump(self, basic_program_ascii, program_start=None):
"""
        convert an ASCII BASIC program listing into tokens.
        This token list can be inserted into the Emulator RAM.
"""
if program_start is None:
program_start = self.DEFAULT_PROGRAM_START
basic_lines = self.ascii_listing2basic_lines(basic_program_ascii, program_start)
program_dump=self.listing.basic_lines2program_dump(basic_lines, program_start)
assert isinstance(program_dump, bytearray), (
"is type: %s and not bytearray: %s" % (type(program_dump), repr(program_dump))
)
return program_dump
def pformat_tokens(self, tokens):
"""
format a tokenized BASIC program line. Useful for debugging.
returns a list of formated string lines.
"""
return self.listing.token_util.pformat_tokens(tokens)
def pformat_program_dump(self, program_dump, program_start=None):
"""
format a BASIC program dump. Useful for debugging.
returns a list of formated string lines.
"""
assert isinstance(program_dump, bytearray)
if program_start is None:
program_start = self.DEFAULT_PROGRAM_START
return self.listing.pformat_program_dump(program_dump, program_start)
def renum_ascii_listing(self, content):
return self.renum_tool.renum(content)
def reformat_ascii_listing(self, basic_program_ascii):
parsed_lines = self.parse_ascii_listing(basic_program_ascii)
ascii_lines = []
for line_no, code_objects in sorted(parsed_lines.items()):
print()
print(line_no, code_objects)
basic_line = BasicLine(self.token_util)
basic_line.code_objects_load(line_no,code_objects)
print(basic_line)
basic_line.reformat()
new_line = basic_line.get_content()
print(new_line)
ascii_lines.append(new_line)
return "\n".join(ascii_lines)
def bas2bin(self, basic_program_ascii, load_address=None, exec_address=None):
# FIXME: load_address/exec_address == program_start ?!?!
if load_address is None:
load_address = self.DEFAULT_PROGRAM_START
if exec_address is None:
exec_address = self.DEFAULT_PROGRAM_START
tokenised_dump = self.ascii_listing2program_dump(basic_program_ascii, load_address)
log.debug(type(tokenised_dump))
log.debug(repr(tokenised_dump))
log_bytes(tokenised_dump, msg="tokenised: %s")
binary_file = BinaryFile()
binary_file.load_tokenised_dump(tokenised_dump,
load_address=load_address,
exec_address=exec_address,
)
binary_file.debug2log(level=logging.CRITICAL)
data = binary_file.dump_DragonDosBinary()
return data
def bin2bas(self, data):
"""
        convert binary files to an ASCII BASIC string.
Supported are:
* Dragon DOS Binary Format
* TODO: CoCo DECB (Disk Extended Color BASIC) Format
see:
http://archive.worldofdragon.org/phpBB3/viewtopic.php?f=8&t=348&p=10139#p10139
"""
data = bytearray(data)
binary_file = BinaryFile()
binary_file.load_from_bin(data)
if binary_file.file_type != 0x01:
log.error("ERROR: file type $%02X is not $01 (tokenised BASIC)!", binary_file.file_type)
ascii_lines = self.program_dump2ascii_lines(dump=binary_file.data,
# FIXME:
#program_start=bin.exec_address
program_start=binary_file.load_address
)
return "\n".join(ascii_lines)
class Dragon32API(BaseAPI):
CONFIG_NAME = DRAGON32
MACHINE_NAME = "Dragon 32"
BASIC_TOKENS = DRAGON32_BASIC_TOKENS
PROGRAM_START_ADDR = 0x0019
VARIABLES_START_ADDR = 0x001B
ARRAY_START_ADDR = 0x001D
FREE_SPACE_START_ADDR = 0x001F
# Default memory location of BASIC listing start
DEFAULT_PROGRAM_START = 0x1E01
class CoCoAPI(Dragon32API):
"""
http://sourceforge.net/p/toolshed/code/ci/default/tree/cocoroms/dragon_equivs.asm
"""
CONFIG_NAME = COCO2B
MACHINE_NAME = "CoCo"
BASIC_TOKENS = COCO_BASIC_TOKENS
def example_renum_ascii_listing():
api = Dragon32API()
ascii_listing="\n".join([
'1 PRINT "LINE 10"',
'2 PRINT "LINE 20"',
'3 GOTO 1',
])
print(
api.renum_ascii_listing(ascii_listing)
)
def test_bin2bas():
api = Dragon32API()
with open(os.path.expanduser("~/DragonEnvPy3/DwRoot/AUTOLOAD.DWL"), "rb") as f:
data1=f.read()
ascii_listing=api.bin2bas(data1)
print(ascii_listing)
data2 = api.bas2bin(ascii_listing, load_address=0x1e01, exec_address=0x1e01)
log_bytes(data1, "data1: %s", level=logging.CRITICAL)
log_bytes(data2, "data2: %s", level=logging.CRITICAL)
if __name__ == '__main__':
import os
from dragonlib.utils.logging_utils import setup_logging
setup_logging(
# level=1 # hardcore debug ;)
# level=10 # DEBUG
# level=20 # INFO
level=30 # WARNING
# level=40 # ERROR
# level=50 # CRITICAL/FATAL
# level=99
)
# example_renum_ascii_listing()
test_bin2bas()
| gpl-3.0 | 7,832,205,112,748,475,000 | 30.055556 | 100 | 0.629951 | false | 3.522052 | false | false | false |
leotrubach/sourceforge-allura | Allura/setup.py | 1 | 4521 | # -*- coding: utf-8 -*-
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
exec open('allura/version.py').read()
PROJECT_DESCRIPTION='''
Allura is an open source implementation of a software "forge", a web site
that manages source code repositories, bug reports, discussions, mailing
lists, wiki pages, blogs and more for any number of individual projects.
'''
setup(
name='Allura',
version=__version__,
description='Base distribution of the Allura development platform',
long_description=PROJECT_DESCRIPTION,
author='SourceForge Team',
author_email='[email protected]',
url='http://sourceforge.net/p/allura',
keywords='sourceforge allura turbogears pylons jinja2 mongodb rabbitmq',
license='Apache License, http://www.apache.org/licenses/LICENSE-2.0',
platforms=[
'Linux',
'MacOS X',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Pylons',
'Framework :: TurboGears',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2.6',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'License :: OSI Approved :: Apache Software License',
],
install_requires=[
"TurboGears2",
"pypeline",
"datadiff",
"BeautifulSoup",
"PasteScript",
"Babel >= 0.9.4",
"jinja2",
"pysolr",
"Markdown >= 2.0.3",
"Pygments >= 1.1.1",
"python-openid >= 2.2.4",
"EasyWidgets >= 0.1.1",
"PIL >= 1.1.7",
"iso8601",
"chardet >= 1.0.1",
"feedparser >= 5.0.1",
"oauth2 >= 1.2.0",
"Ming >= 0.2.2dev-20110930",
],
setup_requires=["PasteScript >= 1.7"],
paster_plugins=['PasteScript', 'Pylons', 'TurboGears2', 'Ming'],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
test_suite='nose.collector',
tests_require=['WebTest >= 1.2', 'BeautifulSoup', 'poster', 'nose'],
package_data={'allura': ['i18n/*/LC_MESSAGES/*.mo',
'templates/**.html',
'templates/**.py',
'templates/**.xml',
'templates/**.txt',
'public/*/*/*/*/*',
]},
message_extractors={'allura': [
('**.py', 'python', None),
('templates/**.mako', 'mako', None),
('templates/**.html', 'genshi', None),
('public/**', 'ignore', None)]},
entry_points="""
[paste.app_factory]
main = allura.config.middleware:make_app
task = allura.config.middleware:make_task_app
tool_test = allura.config.middleware:make_tool_test_app
[paste.app_install]
main = pylons.util:PylonsInstaller
tool_test = pylons.util:PylonsInstaller
[allura]
profile = allura.ext.user_profile:UserProfileApp
admin = allura.ext.admin:AdminApp
search = allura.ext.search:SearchApp
home = allura.ext.project_home:ProjectHomeApp
[allura.auth]
local = allura.lib.plugin:LocalAuthenticationProvider
ldap = allura.lib.plugin:LdapAuthenticationProvider
[allura.user_prefs]
local = allura.lib.plugin:LocalUserPreferencesProvider
[allura.project_registration]
local = allura.lib.plugin:LocalProjectRegistrationProvider
[allura.theme]
allura = allura.lib.plugin:ThemeProvider
[paste.paster_command]
taskd = allura.command.taskd:TaskdCommand
task = allura.command.taskd:TaskCommand
models = allura.command:ShowModelsCommand
reindex = allura.command:ReindexCommand
ensure_index = allura.command:EnsureIndexCommand
script = allura.command:ScriptCommand
set-tool-access = allura.command:SetToolAccessCommand
smtp_server=allura.command:SMTPServerCommand
create-neighborhood = allura.command:CreateNeighborhoodCommand
update-neighborhood-home-tool = allura.command:UpdateNeighborhoodCommand
create-trove-categories = allura.command:CreateTroveCategoriesCommand
set-neighborhood-features = allura.command:SetNeighborhoodFeaturesCommand
[easy_widgets.resources]
ew_resources=allura.config.resources:register_ew_resources
[easy_widgets.engines]
jinja = allura.config.app_cfg:JinjaEngine
""",
)
| apache-2.0 | -6,682,024,921,666,146,000 | 34.046512 | 77 | 0.628622 | false | 3.727123 | true | false | false |
gwu-libraries/vivo2notld | vivo2notld_service.py | 1 | 9071 | from flask import Flask, render_template, request, session, Response
import argparse
from vivo2notld.definitions import definitions, list_definitions
from vivo2notld.utility import execute, execute_list
formats = {
"xml": "text/xml",
"json": "text/json",
"yaml": "text/yaml"
}
app = Flask(__name__)
default_definition = None
default_list_definition = None
default_subject_namespace = None
default_subject_identifier = None
default_list_subject_namespace = None
default_list_subject_identifier = None
default_endpoint = None
default_username = None
default_password = None
default_format = None
default_definition_type = "list"
default_list_limit = 10
default_is_limited = False
default_list_offset = None
default_is_offset = False
def get_definitions(defs):
return {definition: definition.replace("_", " ") for definition in defs}
@app.route('/', methods=["GET"])
def crosswalk_form(output=None, obj=None, graph=None, query=None, select_query=None, count_query=None):
return render_template("crosswalk_form.html",
definition_type=session.get("definition_type") or default_definition_type,
definitions=get_definitions(definitions),
list_definitions=get_definitions(list_definitions),
definition=session.get("definition") or default_definition,
list_definition=session.get("list_definition") or default_list_definition,
subject_namespace=session.get("subject_namespace") or default_subject_namespace,
subject_identifier=session.get("subject_identifier") or default_subject_identifier,
list_subject_namespace=session.get("list_subject_namespace")
or default_list_subject_namespace,
list_subject_identifier=session.get("list_subject_identifier")
or default_list_subject_identifier,
list_limit=session.get("list_limit") or default_list_limit,
is_limited=session.get("is_limited") or default_is_limited,
list_offset=session.get("list_offset") or default_list_offset,
is_offset=session.get("is_offset") or default_is_offset,
endpoint=session.get("endpoint") or default_endpoint,
username=session.get("username") or default_username,
password=session.get("password") or default_password,
format=session.get("format") or default_format,
output_html=session.get("output_html", True),
output=output,
obj=obj,
graph=graph.serialize(format="turtle").decode("utf-8") if graph else None,
query=query,
select_query=select_query,
count_query=count_query)
@app.route('/', methods=["POST"])
def crosswalk():
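    # Keep the submitted form values in the session so the form is
    # re-populated with them on the next GET request.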
session["definition"] = request.form.get("definition")
session["list_definition"] = request.form.get("list_definition")
session["subject_namespace"] = request.form.get("subject_namespace")
session["subject_identifier"] = request.form.get("subject_identifier")
session["list_subject_namespace"] = request.form.get("list_subject_namespace")
session["list_subject_identifier"] = request.form.get("list_subject_identifier")
session["list_limit"] = request.form.get("list_limit")
session["is_limited"] = True if "is_limited" in request.form else False
session["list_offset"] = request.form.get("list_offset")
session["is_offset"] = True if "is_offset" in request.form else False
session["endpoint"] = request.form.get("endpoint")
session["username"] = request.form.get("username")
session["password"] = request.form.get("password")
session["format"] = request.form.get("format")
session["output_html"] = True if "output_html" in request.form else False
session["definition_type"] = request.form.get("definition_type")
select_q = None
count_q = None
definition_type = request.form.get("definition_type")
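    # If the form did not state the definition type, infer it from which
    # definition field was submitted.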
if not definition_type:
if "definition" in request.form and "list_definition" not in request.form:
definition_type = "individual"
elif "definition" not in request.form and "list_definition" in request.form:
definition_type = "list"
else:
definition_type = default_definition_type
if definition_type == "individual":
o, s, g, q = execute(definitions[request.form.get("definition", default_definition)],
request.form.get("subject_namespace", default_subject_namespace),
request.form.get("subject_identifier", default_subject_identifier),
request.form.get("endpoint", default_endpoint),
request.form.get("username", default_username),
request.form.get("password", default_password),
serialization_format=request.form.get("format", default_format))
else:
o, s, g, q, select_q, count_q = execute_list(
list_definitions[request.form.get("list_definition", default_list_definition)],
request.form.get("list_subject_namespace", default_subject_namespace),
request.form.get("list_subject_identifier", default_subject_identifier),
request.form.get("endpoint", default_endpoint),
request.form.get("username", default_username),
request.form.get("password", default_password),
serialization_format=request.form.get("format", default_format),
offset=request.form.get("list_offset", default_list_offset) if "is_offset" in request.form else None,
limit=request.form.get("list_limit", default_list_limit) if "is_limited" in request.form else None,
)
if "output_html" in request.form:
return crosswalk_form(output=o, obj=s, graph=g, query=q, select_query=select_q,
count_query=count_q)
else:
return Response(o, content_type=formats[request.form.get("format", default_format)])
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--debug", action="store_true")
parser.add_argument("--port", type=int, default="5000", help="The port the service should run on. Default is 5000.")
parser.add_argument("--format", default="json", choices=formats.keys(),
help="The format for serializing. Default is json.")
parser.add_argument("--endpoint", dest="endpoint",
help="Endpoint for SPARQL Query of VIVO instance,e.g., http://localhost/vivo/api/sparqlQuery.")
parser.add_argument("--username", dest="username", help="Username for VIVO root.")
parser.add_argument("--password", dest="password",
help="Password for VIVO root.")
parser.add_argument("--namespace", default="http://vivo.mydomain.edu/individual/",
help="Namespace for the subject. Default is http://vivo.mydomain.edu/individual/.")
parser.add_argument("--identifier", help="Identifier for the subject, e.g., n123.")
parser.add_argument("--list-namespace", default="http://vivo.mydomain.edu/individual/",
help="Namespace for the list subject. Default is http://vivo.mydomain.edu/individual/.")
parser.add_argument("--list-identifier", help="Identifier for the list subject, e.g., n123.")
parser.add_argument("--definition", default="person", choices=definitions.keys(),
help="Default is person.")
parser.add_argument("--list-definition", default="person_summary_with_positions_in",
choices=list_definitions.keys(),
help="Default is person_summary_with_positions_in.")
parser.add_argument("--limit", type=int, help="List limit.")
parser.add_argument("--offset", type=int, help="List offset.")
    # Parse the command-line arguments.
args = parser.parse_args()
app.debug = args.debug
app.secret_key = "vivo2notld"
default_definition = args.definition
default_list_definition = args.list_definition
default_subject_namespace = args.namespace
default_subject_identifier = args.identifier
default_list_subject_namespace = args.list_namespace
default_list_subject_identifier = args.list_identifier
default_endpoint = args.endpoint
default_username = args.username
default_password = args.password
default_format = args.format
if args.limit:
default_list_limit = args.limit
default_is_limited = True
if args.offset:
default_list_offset = args.offset
default_is_offset = True
app.run(host="0.0.0.0", port=args.port) | mit | -3,851,115,307,438,112,000 | 51.439306 | 120 | 0.625179 | false | 4.280793 | false | false | false |
timstaley/drive-ami | driveami/__init__.py | 1 | 2912 | from __future__ import absolute_import
import logging
import os
import json
from colorlog import ColoredFormatter
import driveami.keys as keys
import driveami.scripts as scripts
from driveami.reduce import (Reduce, AmiVersion)
from driveami.serialization import (Datatype, make_serializable,
save_calfile_listing, save_rawfile_listing,
load_listing)
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
logger = logging.getLogger('ami')
def ensure_dir(dirname):
if not os.path.isdir(dirname):
os.makedirs(dirname)
def process_rawfile(rawfile, output_dir,
reduce,
script,
file_logging=True
):
"""
A convenience function applying sensible defaults to reduce a rawfile.
Args:
rawfile: Name of a file in the ami data dir, e.g. "SWIFT121101-121101.raw"
output_dir: Folder where UVFITS for the target and calibrator will be output.
reduce: instance of ami.Reduce
        script: Reduction commands.
        file_logging: If True, also write the reduction log file to output_dir.
Returns:
- A dictionary containing information about the rawfile,
e.g. pointing, calibrator name, rain modulation.
See also: ``ami.keys``
"""
r = reduce
if file_logging:
file_logdir = output_dir
else:
file_logdir = None
r.set_active_file(rawfile, file_logdir)
r.run_script(script)
r.update_flagging_info()
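    # Override options for the write step: legacy-format data is written with
    # channels 3-8, and raster observations are written as multi-offset files
    # covering all offsets.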
write_command_overrides = {}
if r.ami_version=='legacy':
write_command_overrides['channels'] = '3-8'
if r.files[rawfile]['raster']:
write_command_overrides['fits_or_multi'] = 'multi'
write_command_overrides['offsets'] = 'all'
r.write_files(rawfile, output_dir,
write_command_overrides=write_command_overrides)
r.files[rawfile][keys.obs_name] = os.path.splitext(rawfile)[0]
info_filename = os.path.splitext(rawfile)[0] + '.json'
with open(os.path.join(output_dir, info_filename), 'w') as f:
json.dump(make_serializable(r.files[rawfile]), f,
sort_keys=True, indent=4)
return r.files[rawfile]
def get_color_log_formatter():
date_fmt = "%y-%m-%d (%a) %H:%M:%S"
color_formatter = ColoredFormatter(
"%(log_color)s%(asctime)s:%(levelname)-8s%(reset)s %(blue)s%(message)s",
datefmt=date_fmt,
reset=True,
log_colors={
'DEBUG': 'cyan',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red',
}
)
return color_formatter
def get_color_stdout_loghandler(level):
stdout_loghandler = logging.StreamHandler()
stdout_loghandler.setFormatter(get_color_log_formatter())
stdout_loghandler.setLevel(level)
return stdout_loghandler
| bsd-3-clause | 486,487,046,025,874,240 | 29.652632 | 81 | 0.616758 | false | 3.723785 | false | false | false |
poppogbr/genropy | tutorial/projects/musiccd/packages/libcd/webpages/album.py | 1 | 1253 | #!/usr/bin/env python
# encoding: utf-8
"""
Created by Softwell on 2008-07-10.
Copyright (c) 2008 Softwell. All rights reserved.
"""
class GnrCustomWebPage(object):
maintable = 'libcd.album'
py_requires = 'public:Public,standard_tables:TableHandler,public:IncludedView'
######################## STANDARD TABLE OVERRIDDEN METHODS ###############
def windowTitle(self):
return '!!Album'
def pageAuthTags(self, method=None, **kwargs):
return ''
def tableWriteTags(self):
return ''
def tableDeleteTags(self):
return ''
def barTitle(self):
return '!!Album'
    def columnsBase(self):
return """year,title,rating"""
def orderBase(self):
return 'title'
def conditionBase(self):
pass
def queryBase(self):
return dict(column='title', op='contains', val='%')
############################## FORM METHODS ##################################
def formBase(self, parentBC, disabled=False, **kwargs):
pane = parentBC.contentPane(**kwargs)
fb = pane.formbuilder(cols=2, border_spacing='4px', disabled=disabled)
fb.field('title')
fb.field('year')
fb.field('rating')
fb.field('artist_id') | lgpl-2.1 | 2,486,052,560,708,539,000 | 25.680851 | 82 | 0.569034 | false | 3.940252 | false | false | false |
rahulunair/nova | nova/api/openstack/compute/views/servers.py | 1 | 34816 | # Copyright 2010-2011 OpenStack Foundation
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from oslo_serialization import jsonutils
from nova.api.openstack import api_version_request
from nova.api.openstack import common
from nova.api.openstack.compute.views import addresses as views_addresses
from nova.api.openstack.compute.views import flavors as views_flavors
from nova.api.openstack.compute.views import images as views_images
from nova import availability_zones as avail_zone
from nova.compute import api as compute
from nova.compute import vm_states
from nova import context as nova_context
from nova import exception
from nova.network import security_group_api
from nova import objects
from nova.objects import fields
from nova.objects import virtual_interface
from nova.policies import extended_server_attributes as esa_policies
from nova.policies import flavor_extra_specs as fes_policies
from nova.policies import servers as servers_policies
from nova import utils
LOG = logging.getLogger(__name__)
class ViewBuilder(common.ViewBuilder):
"""Model a server API response as a python dictionary."""
_collection_name = "servers"
_progress_statuses = (
"ACTIVE",
"BUILD",
"REBUILD",
"RESIZE",
"VERIFY_RESIZE",
"MIGRATING",
)
_fault_statuses = (
"ERROR", "DELETED"
)
# These are the lazy-loadable instance attributes required for showing
# details about an instance. Add to this list as new things need to be
# shown.
_show_expected_attrs = ['flavor', 'info_cache', 'metadata']
def __init__(self):
"""Initialize view builder."""
super(ViewBuilder, self).__init__()
self._address_builder = views_addresses.ViewBuilder()
self._image_builder = views_images.ViewBuilder()
self._flavor_builder = views_flavors.ViewBuilder()
self.compute_api = compute.API()
def create(self, request, instance):
"""View that should be returned when an instance is created."""
server = {
"server": {
"id": instance["uuid"],
"links": self._get_links(request,
instance["uuid"],
self._collection_name),
# NOTE(sdague): historically this was the
# os-disk-config extension, but now that extensions
# are gone, we merge these attributes here.
"OS-DCF:diskConfig": (
'AUTO' if instance.get('auto_disk_config') else 'MANUAL'),
},
}
self._add_security_grps(request, [server["server"]], [instance],
create_request=True)
return server
def basic(self, request, instance, show_extra_specs=False,
show_extended_attr=None, show_host_status=None,
show_sec_grp=None, bdms=None, cell_down_support=False,
show_user_data=False):
"""Generic, non-detailed view of an instance."""
if cell_down_support and 'display_name' not in instance:
# NOTE(tssurya): If the microversion is >= 2.69, this boolean will
# be true in which case we check if there are instances from down
# cells (by checking if their objects have missing keys like
# `display_name`) and return partial constructs based on the
# information available from the nova_api database.
return {
"server": {
"id": instance.uuid,
"status": "UNKNOWN",
"links": self._get_links(request,
instance.uuid,
self._collection_name),
},
}
return {
"server": {
"id": instance["uuid"],
"name": instance["display_name"],
"links": self._get_links(request,
instance["uuid"],
self._collection_name),
},
}
def get_show_expected_attrs(self, expected_attrs=None):
"""Returns a list of lazy-loadable expected attributes used by show
This should be used when getting the instances from the database so
that the necessary attributes are pre-loaded before needing to build
the show response where lazy-loading can fail if an instance was
deleted.
:param list expected_attrs: The list of expected attributes that will
be requested in addition to what this view builder requires. This
method will merge the two lists and return what should be
ultimately used when getting an instance from the database.
:returns: merged and sorted list of expected attributes
"""
if expected_attrs is None:
expected_attrs = []
# NOTE(mriedem): We sort the list so we can have predictable test
# results.
return sorted(list(set(self._show_expected_attrs + expected_attrs)))
def _show_from_down_cell(self, request, instance, show_extra_specs,
show_server_groups):
"""Function that constructs the partial response for the instance."""
ret = {
"server": {
"id": instance.uuid,
"status": "UNKNOWN",
"tenant_id": instance.project_id,
"created": utils.isotime(instance.created_at),
"links": self._get_links(
request, instance.uuid, self._collection_name),
},
}
if 'flavor' in instance:
# If the key 'flavor' is present for an instance from a down cell
# it means that the request is ``GET /servers/{server_id}`` and
# thus we include the information from the request_spec of the
# instance like its flavor, image, avz, and user_id in addition to
# the basic information from its instance_mapping.
# If 'flavor' key is not present for an instance from a down cell
# down cell it means the request is ``GET /servers/detail`` and we
# do not expose the flavor in the response when listing servers
# with details for performance reasons of fetching it from the
# request specs table for the whole list of instances.
ret["server"]["image"] = self._get_image(request, instance)
ret["server"]["flavor"] = self._get_flavor(request, instance,
show_extra_specs)
# in case availability zone was not requested by the user during
# boot time, return UNKNOWN.
avz = instance.availability_zone or "UNKNOWN"
ret["server"]["OS-EXT-AZ:availability_zone"] = avz
ret["server"]["OS-EXT-STS:power_state"] = instance.power_state
# in case its an old request spec which doesn't have the user_id
# data migrated, return UNKNOWN.
ret["server"]["user_id"] = instance.user_id or "UNKNOWN"
if show_server_groups:
context = request.environ['nova.context']
ret['server']['server_groups'] = self._get_server_groups(
context, instance)
return ret
@staticmethod
def _get_host_status_unknown_only(context):
# We will use the unknown_only variable to tell us what host status we
# can show, if any:
# * unknown_only = False means we can show any host status.
# * unknown_only = True means that we can only show host
# status: UNKNOWN. If the host status is anything other than
# UNKNOWN, we will not include the host_status field in the
# response.
# * unknown_only = None means we cannot show host status at all and
# we will not include the host_status field in the response.
unknown_only = None
# Check show:host_status policy first because if it passes, we know we
# can show any host status and need not check the more restrictive
# show:host_status:unknown-only policy.
if context.can(
servers_policies.SERVERS % 'show:host_status',
fatal=False):
unknown_only = False
# If we are not allowed to show any/all host status, check if we can at
# least show only the host status: UNKNOWN.
elif context.can(
servers_policies.SERVERS %
'show:host_status:unknown-only',
fatal=False):
unknown_only = True
return unknown_only
def show(self, request, instance, extend_address=True,
show_extra_specs=None, show_AZ=True, show_config_drive=True,
show_extended_attr=None, show_host_status=None,
show_keypair=True, show_srv_usg=True, show_sec_grp=True,
show_extended_status=True, show_extended_volumes=True,
bdms=None, cell_down_support=False, show_server_groups=False,
show_user_data=True):
"""Detailed view of a single instance."""
if show_extra_specs is None:
# detail will pre-calculate this for us. If we're doing show,
# then figure it out here.
show_extra_specs = False
if api_version_request.is_supported(request, min_version='2.47'):
context = request.environ['nova.context']
show_extra_specs = context.can(
fes_policies.POLICY_ROOT % 'index', fatal=False)
if cell_down_support and 'display_name' not in instance:
# NOTE(tssurya): If the microversion is >= 2.69, this boolean will
# be true in which case we check if there are instances from down
# cells (by checking if their objects have missing keys like
# `display_name`) and return partial constructs based on the
# information available from the nova_api database.
return self._show_from_down_cell(
request, instance, show_extra_specs, show_server_groups)
ip_v4 = instance.get('access_ip_v4')
ip_v6 = instance.get('access_ip_v6')
server = {
"server": {
"id": instance["uuid"],
"name": instance["display_name"],
"status": self._get_vm_status(instance),
"tenant_id": instance.get("project_id") or "",
"user_id": instance.get("user_id") or "",
"metadata": self._get_metadata(instance),
"hostId": self._get_host_id(instance),
"image": self._get_image(request, instance),
"flavor": self._get_flavor(request, instance,
show_extra_specs),
"created": utils.isotime(instance["created_at"]),
"updated": utils.isotime(instance["updated_at"]),
"addresses": self._get_addresses(request, instance,
extend_address),
"accessIPv4": str(ip_v4) if ip_v4 is not None else '',
"accessIPv6": str(ip_v6) if ip_v6 is not None else '',
"links": self._get_links(request,
instance["uuid"],
self._collection_name),
# NOTE(sdague): historically this was the
# os-disk-config extension, but now that extensions
# are gone, we merge these attributes here.
"OS-DCF:diskConfig": (
'AUTO' if instance.get('auto_disk_config') else 'MANUAL'),
},
}
if server["server"]["status"] in self._fault_statuses:
_inst_fault = self._get_fault(request, instance)
if _inst_fault:
server['server']['fault'] = _inst_fault
if server["server"]["status"] in self._progress_statuses:
server["server"]["progress"] = instance.get("progress", 0)
context = request.environ['nova.context']
if show_AZ:
az = avail_zone.get_instance_availability_zone(context, instance)
# NOTE(mriedem): The OS-EXT-AZ prefix should not be used for new
# attributes after v2.1. They are only in v2.1 for backward compat
# with v2.0.
server["server"]["OS-EXT-AZ:availability_zone"] = az or ''
if show_config_drive:
server["server"]["config_drive"] = instance["config_drive"]
if show_keypair:
server["server"]["key_name"] = instance["key_name"]
if show_srv_usg:
for k in ['launched_at', 'terminated_at']:
key = "OS-SRV-USG:" + k
# NOTE(danms): Historically, this timestamp has been generated
# merely by grabbing str(datetime) of a TZ-naive object. The
# only way we can keep that with instance objects is to strip
# the tzinfo from the stamp and str() it.
server["server"][key] = (instance[k].replace(tzinfo=None)
if instance[k] else None)
if show_sec_grp:
self._add_security_grps(request, [server["server"]], [instance])
if show_extended_attr is None:
show_extended_attr = context.can(
esa_policies.BASE_POLICY_NAME, fatal=False)
if show_extended_attr:
properties = ['host', 'name', 'node']
if api_version_request.is_supported(request, min_version='2.3'):
# NOTE(mriedem): These will use the OS-EXT-SRV-ATTR prefix
# below and that's OK for microversion 2.3 which is being
# compatible with v2.0 for the ec2 API split out from Nova.
# After this, however, new microversions should not be using
# the OS-EXT-SRV-ATTR prefix.
properties += ['reservation_id', 'launch_index',
'hostname', 'kernel_id', 'ramdisk_id',
'root_device_name']
# NOTE(gmann): Since microversion 2.75, PUT and Rebuild
# response include all the server attributes including these
# extended attributes also. But microversion 2.57 already
# adding the 'user_data' in Rebuild response in API method.
# so we will skip adding the user data attribute for rebuild
# case. 'show_user_data' is false only in case of rebuild.
if show_user_data:
properties += ['user_data']
for attr in properties:
if attr == 'name':
key = "OS-EXT-SRV-ATTR:instance_%s" % attr
elif attr == 'node':
key = "OS-EXT-SRV-ATTR:hypervisor_hostname"
else:
# NOTE(mriedem): Nothing after microversion 2.3 should use
# the OS-EXT-SRV-ATTR prefix for the attribute key name.
key = "OS-EXT-SRV-ATTR:%s" % attr
server["server"][key] = getattr(instance, attr)
if show_extended_status:
# NOTE(gmann): Removed 'locked_by' from extended status
# to make it same as V2. If needed it can be added with
# microversion.
for state in ['task_state', 'vm_state', 'power_state']:
# NOTE(mriedem): The OS-EXT-STS prefix should not be used for
# new attributes after v2.1. They are only in v2.1 for backward
# compat with v2.0.
key = "%s:%s" % ('OS-EXT-STS', state)
server["server"][key] = instance[state]
if show_extended_volumes:
# NOTE(mriedem): The os-extended-volumes prefix should not be used
# for new attributes after v2.1. They are only in v2.1 for backward
# compat with v2.0.
add_delete_on_termination = api_version_request.is_supported(
request, min_version='2.3')
if bdms is None:
bdms = objects.BlockDeviceMappingList.bdms_by_instance_uuid(
context, [instance["uuid"]])
self._add_volumes_attachments(server["server"],
bdms,
add_delete_on_termination)
if (api_version_request.is_supported(request, min_version='2.16')):
if show_host_status is None:
unknown_only = self._get_host_status_unknown_only(context)
# If we're not allowed by policy to show host status at all,
# don't bother requesting instance host status from the compute
# API.
if unknown_only is not None:
host_status = self.compute_api.get_instance_host_status(
instance)
# If we are allowed to show host status of some kind, set
# the host status field only if:
# * unknown_only = False, meaning we can show any status
# OR
# * if unknown_only = True and host_status == UNKNOWN
if (not unknown_only or
host_status == fields.HostStatus.UNKNOWN):
server["server"]['host_status'] = host_status
if api_version_request.is_supported(request, min_version="2.9"):
server["server"]["locked"] = (True if instance["locked_by"]
else False)
if api_version_request.is_supported(request, min_version="2.73"):
server["server"]["locked_reason"] = (instance.system_metadata.get(
"locked_reason"))
if api_version_request.is_supported(request, min_version="2.19"):
server["server"]["description"] = instance.get(
"display_description")
if api_version_request.is_supported(request, min_version="2.26"):
server["server"]["tags"] = [t.tag for t in instance.tags]
if api_version_request.is_supported(request, min_version="2.63"):
trusted_certs = None
if instance.trusted_certs:
trusted_certs = instance.trusted_certs.ids
server["server"]["trusted_image_certificates"] = trusted_certs
if show_server_groups:
server['server']['server_groups'] = self._get_server_groups(
context,
instance)
return server
def index(self, request, instances, cell_down_support=False):
"""Show a list of servers without many details."""
coll_name = self._collection_name
return self._list_view(self.basic, request, instances, coll_name,
False, cell_down_support=cell_down_support)
def detail(self, request, instances, cell_down_support=False):
"""Detailed view of a list of instance."""
coll_name = self._collection_name + '/detail'
context = request.environ['nova.context']
if api_version_request.is_supported(request, min_version='2.47'):
# Determine if we should show extra_specs in the inlined flavor
# once before we iterate the list of instances
show_extra_specs = context.can(fes_policies.POLICY_ROOT % 'index',
fatal=False)
else:
show_extra_specs = False
show_extended_attr = context.can(
esa_policies.BASE_POLICY_NAME, fatal=False)
instance_uuids = [inst['uuid'] for inst in instances]
bdms = self._get_instance_bdms_in_multiple_cells(context,
instance_uuids)
# NOTE(gmann): pass show_sec_grp=False in _list_view() because
# security groups for detail method will be added by separate
# call to self._add_security_grps by passing the all servers
# together. That help to avoid multiple neutron call for each server.
servers_dict = self._list_view(self.show, request, instances,
coll_name, show_extra_specs,
show_extended_attr=show_extended_attr,
# We process host_status in aggregate.
show_host_status=False,
show_sec_grp=False,
bdms=bdms,
cell_down_support=cell_down_support)
if api_version_request.is_supported(request, min_version='2.16'):
unknown_only = self._get_host_status_unknown_only(context)
# If we're not allowed by policy to show host status at all, don't
# bother requesting instance host status from the compute API.
if unknown_only is not None:
self._add_host_status(list(servers_dict["servers"]), instances,
unknown_only=unknown_only)
self._add_security_grps(request, list(servers_dict["servers"]),
instances)
return servers_dict
def _list_view(self, func, request, servers, coll_name, show_extra_specs,
show_extended_attr=None, show_host_status=None,
show_sec_grp=False, bdms=None, cell_down_support=False):
"""Provide a view for a list of servers.
:param func: Function used to format the server data
:param request: API request
:param servers: List of servers in dictionary format
:param coll_name: Name of collection, used to generate the next link
for a pagination query
:param show_extended_attr: If the server extended attributes should be
included in the response dict.
:param show_host_status: If the host status should be included in
the response dict.
:param show_sec_grp: If the security group should be included in
the response dict.
:param bdms: Instances bdms info from multiple cells.
:param cell_down_support: True if the API (and caller) support
returning a minimal instance
construct if the relevant cell is
down.
:returns: Server data in dictionary format
"""
server_list = [func(request, server,
show_extra_specs=show_extra_specs,
show_extended_attr=show_extended_attr,
show_host_status=show_host_status,
show_sec_grp=show_sec_grp, bdms=bdms,
cell_down_support=cell_down_support)["server"]
for server in servers
# Filter out the fake marker instance created by the
# fill_virtual_interface_list online data migration.
if server.uuid != virtual_interface.FAKE_UUID]
servers_links = self._get_collection_links(request,
servers,
coll_name)
servers_dict = dict(servers=server_list)
if servers_links:
servers_dict["servers_links"] = servers_links
return servers_dict
@staticmethod
def _get_metadata(instance):
return instance.metadata or {}
@staticmethod
def _get_vm_status(instance):
# If the instance is deleted the vm and task states don't really matter
if instance.get("deleted"):
return "DELETED"
return common.status_from_state(instance.get("vm_state"),
instance.get("task_state"))
@staticmethod
def _get_host_id(instance):
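        # hostId is an opaque hash of the host and project, letting users see
        # whether two of their instances share a host without exposing the
        # actual hostname.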
host = instance.get("host")
project = str(instance.get("project_id"))
return utils.generate_hostid(host, project)
def _get_addresses(self, request, instance, extend_address=False):
# Hide server addresses while the server is building.
if instance.vm_state == vm_states.BUILDING:
return {}
context = request.environ["nova.context"]
networks = common.get_networks_for_instance(context, instance)
return self._address_builder.index(networks,
extend_address)["addresses"]
def _get_image(self, request, instance):
image_ref = instance["image_ref"]
if image_ref:
image_id = str(common.get_id_from_href(image_ref))
bookmark = self._image_builder._get_bookmark_link(request,
image_id,
"images")
return {
"id": image_id,
"links": [{
"rel": "bookmark",
"href": bookmark,
}],
}
else:
return ""
def _get_flavor_dict(self, request, instance_type, show_extra_specs):
flavordict = {
"vcpus": instance_type.vcpus,
"ram": instance_type.memory_mb,
"disk": instance_type.root_gb,
"ephemeral": instance_type.ephemeral_gb,
"swap": instance_type.swap,
"original_name": instance_type.name
}
if show_extra_specs:
flavordict['extra_specs'] = instance_type.extra_specs
return flavordict
def _get_flavor(self, request, instance, show_extra_specs):
instance_type = instance.get_flavor()
if not instance_type:
LOG.warning("Instance has had its instance_type removed "
"from the DB", instance=instance)
return {}
if api_version_request.is_supported(request, min_version="2.47"):
return self._get_flavor_dict(request, instance_type,
show_extra_specs)
flavor_id = instance_type["flavorid"]
flavor_bookmark = self._flavor_builder._get_bookmark_link(request,
flavor_id,
"flavors")
return {
"id": str(flavor_id),
"links": [{
"rel": "bookmark",
"href": flavor_bookmark,
}],
}
def _load_fault(self, request, instance):
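        # Instance faults live in the instance's cell database, so resolve the
        # cell mapping and target that cell before lazy-loading the fault.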
try:
mapping = objects.InstanceMapping.get_by_instance_uuid(
request.environ['nova.context'], instance.uuid)
if mapping.cell_mapping is not None:
with nova_context.target_cell(instance._context,
mapping.cell_mapping):
return instance.fault
except exception.InstanceMappingNotFound:
pass
# NOTE(danms): No instance mapping at all, or a mapping with no cell,
# which means a legacy environment or instance.
return instance.fault
def _get_fault(self, request, instance):
if 'fault' in instance:
fault = instance.fault
else:
fault = self._load_fault(request, instance)
if not fault:
return None
fault_dict = {
"code": fault["code"],
"created": utils.isotime(fault["created_at"]),
"message": fault["message"],
}
if fault.get('details', None):
is_admin = False
context = request.environ["nova.context"]
if context:
is_admin = getattr(context, 'is_admin', False)
if is_admin or fault['code'] != 500:
fault_dict['details'] = fault["details"]
return fault_dict
def _add_host_status(self, servers, instances, unknown_only=False):
"""Adds the ``host_status`` field to the list of servers
This method takes care to filter instances from down cells since they
do not have a host set and as such we cannot determine the host status.
:param servers: list of detailed server dicts for the API response
body; this list is modified by reference by updating the server
dicts within the list
:param instances: list of Instance objects
:param unknown_only: whether to show only UNKNOWN host status
"""
# Filter out instances from down cells which do not have a host field.
instances = [instance for instance in instances if 'host' in instance]
# Get the dict, keyed by instance.uuid, of host status values.
host_statuses = self.compute_api.get_instances_host_statuses(instances)
for server in servers:
# Filter out anything that is not in the resulting dict because
# we had to filter the list of instances above for down cells.
if server['id'] in host_statuses:
host_status = host_statuses[server['id']]
if unknown_only and host_status != fields.HostStatus.UNKNOWN:
# Filter servers that are not allowed by policy to see
# host_status values other than UNKNOWN.
continue
server['host_status'] = host_status
def _add_security_grps(self, req, servers, instances,
create_request=False):
if not len(servers):
return
# If request is a POST create server we get the security groups
# intended for an instance from the request. This is necessary because
# the requested security groups for the instance have not yet been sent
# to neutron.
# Starting from microversion 2.75, security groups is returned in
# PUT and POST Rebuild response also.
if not create_request:
context = req.environ['nova.context']
sg_instance_bindings = (
security_group_api.get_instances_security_groups_bindings(
context, servers))
for server in servers:
groups = sg_instance_bindings.get(server['id'])
if groups:
server['security_groups'] = groups
# This section is for POST create server request. There can be
# only one security group for POST create server request.
else:
# try converting to json
req_obj = jsonutils.loads(req.body)
# Add security group to server, if no security group was in
# request add default since that is the group it is part of
servers[0]['security_groups'] = req_obj['server'].get(
'security_groups', [{'name': 'default'}])
@staticmethod
def _get_instance_bdms_in_multiple_cells(ctxt, instance_uuids):
inst_maps = objects.InstanceMappingList.get_by_instance_uuids(
ctxt, instance_uuids)
cell_mappings = {}
for inst_map in inst_maps:
if (inst_map.cell_mapping is not None and
inst_map.cell_mapping.uuid not in cell_mappings):
cell_mappings.update(
{inst_map.cell_mapping.uuid: inst_map.cell_mapping})
bdms = {}
results = nova_context.scatter_gather_cells(
ctxt, cell_mappings.values(),
nova_context.CELL_TIMEOUT,
objects.BlockDeviceMappingList.bdms_by_instance_uuid,
instance_uuids)
for cell_uuid, result in results.items():
if isinstance(result, Exception):
LOG.warning('Failed to get block device mappings for cell %s',
cell_uuid)
elif result is nova_context.did_not_respond_sentinel:
LOG.warning('Timeout getting block device mappings for cell '
'%s', cell_uuid)
else:
bdms.update(result)
return bdms
def _add_volumes_attachments(self, server, bdms,
add_delete_on_termination):
# server['id'] is guaranteed to be in the cache due to
# the core API adding it in the 'detail' or 'show' method.
# If that instance has since been deleted, it won't be in the
# 'bdms' dictionary though, so use 'get' to avoid KeyErrors.
instance_bdms = bdms.get(server['id'], [])
volumes_attached = []
for bdm in instance_bdms:
if bdm.get('volume_id'):
volume_attached = {'id': bdm['volume_id']}
if add_delete_on_termination:
volume_attached['delete_on_termination'] = (
bdm['delete_on_termination'])
volumes_attached.append(volume_attached)
# NOTE(mriedem): The os-extended-volumes prefix should not be used for
# new attributes after v2.1. They are only in v2.1 for backward compat
# with v2.0.
key = "os-extended-volumes:volumes_attached"
server[key] = volumes_attached
@staticmethod
def _get_server_groups(context, instance):
try:
sg = objects.InstanceGroup.get_by_instance_uuid(context,
instance.uuid)
return [sg.uuid]
except exception.InstanceGroupNotFound:
return []
| apache-2.0 | -3,837,467,761,640,274,400 | 46.562842 | 79 | 0.554056 | false | 4.637187 | false | false | false |
hmpf/nav | python/nav/web/report/urls.py | 2 | 1432 | #
# Copyright (C) 2012-2018 Uninett AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License version 3 as published by the Free
# Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details. You should have received a copy of the GNU General Public
# License along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""Report backend URL config."""
from django.conf.urls import url
from nav.web.report import views
# Subsystem: Report
# Naming convention: report-<result>-<query>
urlpatterns = [
url(r'^$', views.index,
name='report-index'),
url(r'^matrix$', views.matrix_report,
name='report-matrix'),
url(r'^matrix/(?P<scope>[^&]+)$', views.matrix_report,
name='report-matrix-scope'),
url(r'^reportlist$', views.report_list,
name='report-reportlist'),
url(r'^(?P<report_name>[^/]+)$', views.get_report,
name='report-by-name'),
url(r'^widget/add/', views.add_report_widget,
name='report-add-widget'),
url(r'^widget/(?P<report_name>[^/]+)$', views.get_report_for_widget,
name='widget-report-by-name'),
]
| gpl-3.0 | -8,578,340,943,052,494,000 | 34.8 | 77 | 0.679469 | false | 3.607053 | false | false | false |
Mappy/luigi | test/util_test.py | 4 | 1453 | # Copyright (c) 2012 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import unittest
import luigi
import luigi.util
import luigi.notifications
luigi.notifications.DEBUG = True
class A(luigi.Task):
x = luigi.IntParameter(default=3)
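# Derived(A) produces a task class that inherits A's parameters (x here) and
# exposes the corresponding A instance as parent_obj (see assertions below).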
class B(luigi.util.Derived(A)):
y = luigi.IntParameter(default=4)
class A2(luigi.Task):
x = luigi.IntParameter(default=3)
g = luigi.IntParameter(is_global=True, default=42)
class B2(luigi.util.Derived(A2)):
pass
class UtilTest(unittest.TestCase):
def test_derived_extended(self):
b = B(1, 2)
self.assertEquals(b.x, 1)
self.assertEquals(b.y, 2)
a = A(1)
self.assertEquals(b.parent_obj, a)
def test_derived_extended_default(self):
b = B()
self.assertEquals(b.x, 3)
self.assertEquals(b.y, 4)
def test_derived_global_param(self):
# Had a bug with this
b = B2()
self.assertEquals(b.g, 42)
| apache-2.0 | -7,100,238,086,051,630,000 | 25.418182 | 79 | 0.686167 | false | 3.476077 | true | false | false |
legoktm/pywikipedia-scripts | start_date.py | 1 | 2610 | #!/usr/bin/env python
"""
Copyright (C) 2013 Legoktm
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""
import re
import pywikibot
import mwparserfromhell
SITE = pywikibot.Site()
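# Matches an optional month name followed by a four-digit year,
# e.g. "March 1923" or just "1923".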
months = ['January', 'February', 'March', 'April', 'May', 'June', 'July',
          'August', 'September', 'October', 'November', 'December']
j = '|'.join(months)
regex = re.compile(r'(?P<month>' + j + r'|)\s*(?P<year>\d\d\d\d)')
def gen():
page = pywikibot.Page(SITE, 'Template:Infobox NRHP')
for c in page.getReferences(onlyTemplateInclusion=True,namespaces=[0], content=True):
yield c
def process_page(page):
text = original = page.get()
code = mwparserfromhell.parse(text)
for template in code.filter_templates():
if template.name.lower().strip() == 'infobox nrhp':
if template.has_param('built'):
val = template.get('built').value.strip()
                s = regex.search(val)
                if not s:
                    return
                d = s.groupdict()
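                # {{Start date}} is not meant for dates before 1583, so leave
                # earlier years untouched.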
if int(d['year']) < 1583:
return
if d['month']:
d['month'] = months.index(d['month'])+1
template.get('built').value = '{{Start date|{year}|{month}}}'.format(**d)
else:
template.get('built').value = '{{Start date|{year}}}'.format(**d)
text = unicode(code)
if original == text:
return
page.put(text, 'Bot: Wrapping date in {{start date}} to add [[WP:UF|microformats]]')
def main():
for page in gen():
process_page(page)
if __name__ == "__main__":
try:
main()
finally:
pass
| mit | 4,212,860,460,665,823,000 | 38.545455 | 118 | 0.647126 | false | 3.996937 | false | false | false |
willb/wallaroo | clients/python-wallaroo/wallaroo/client/group.py | 1 | 2344 | # Copyright (c) 2013 Red Hat, Inc.
# Author: William Benton ([email protected])
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from proxy import Proxy, proxied_attr
from proxy import proxied_attr_get as pag, proxied_attr_set as pas, proxied_attr_getset as pags
from arc_utils import arcmethod
import errors
from errors import not_implemented, fail
import urllib
class group(Proxy):
name = property(pag("name"))
features = property(*pags("features"))
parameters = property(*pags("parameters"))
# alias for backwards-compatibility
params = property(pag("parameters"))
modifyFeatures = arcmethod(*pags("features"), heterogeneous=True, preserve_order=True)
def getConfig(self, **options):
if len(options) > 0:
not_implemented()
return self.cm.fetch_json_resource("/config/group/%s" % urllib.quote_plus(self.name))
def explain(self):
not_implemented()
def modifyParams(self, command, params, **options):
command = command.upper()
if command == "ADD":
for k, v in params.iteritems():
self.parameters[k] = v
elif command == "REMOVE":
for k in [k for k in params if k in self.parameters]:
del self.parameters[k]
elif command == "REPLACE":
self.parameters = params
else:
fail(errors.make(errors.BAD_COMMAND, errors.GROUP), "Invalid command %s" % command)
self.update()
def members(self):
all_nodes = [self.cm.make_proxy_object("node", node, True) for node in self.cm.list_objects("node")]
return [node.name for node in all_nodes if self.name in node.memberships]
membership = property(members)
proxied_attr(group, "name")
proxied_attr(group, "features")
proxied_attr(group, "parameters")
| apache-2.0 | 2,279,567,860,863,919,600 | 34.515152 | 108 | 0.665529 | false | 3.893688 | false | false | false |
escobar022/cens_django | ob_census_project/settings.py | 1 | 2712 | """
Django settings for ob_census project.
Generated by 'django-admin startproject' using Django 1.8.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'w58)8zhyk$h8$f!tau@b3k0utr4az1-1n(q_tnl%4f%cow$ofy'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'census',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'ob_census_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'ob_census_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
| mit | -1,832,807,004,185,533,000 | 25.330097 | 71 | 0.692478 | false | 3.415617 | false | false | false |
prasanna08/oppia | core/storage/feedback/gae_models.py | 1 | 28683 | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models for Oppia feedback threads and messages."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
from core.platform import models
import feconf
import python_utils
import utils
from google.appengine.ext import ndb
(base_models, user_models) = models.Registry.import_models([
models.NAMES.base_model, models.NAMES.user])
# Allowed feedback thread statuses.
STATUS_CHOICES_OPEN = 'open'
STATUS_CHOICES_FIXED = 'fixed'
STATUS_CHOICES_IGNORED = 'ignored'
STATUS_CHOICES_COMPLIMENT = 'compliment'
STATUS_CHOICES_NOT_ACTIONABLE = 'not_actionable'
STATUS_CHOICES = [
STATUS_CHOICES_OPEN,
STATUS_CHOICES_FIXED,
STATUS_CHOICES_IGNORED,
STATUS_CHOICES_COMPLIMENT,
STATUS_CHOICES_NOT_ACTIONABLE,
]
# Constants used for generating new ids.
_MAX_RETRIES = 10
_RAND_RANGE = 127 * 127
class GeneralFeedbackThreadModel(base_models.BaseModel):
"""Threads for each entity.
The id of instances of this class has the form
[entity_type].[entity_id].[generated_string]
"""
# The type of entity the thread is linked to.
entity_type = ndb.StringProperty(required=True, indexed=True)
# The ID of the entity the thread is linked to.
entity_id = ndb.StringProperty(required=True, indexed=True)
# ID of the user who started the thread. This may be None if the feedback
# was given anonymously by a learner.
original_author_id = ndb.StringProperty(indexed=True)
# Latest status of the thread.
status = ndb.StringProperty(
default=STATUS_CHOICES_OPEN,
choices=STATUS_CHOICES,
required=True,
indexed=True,
)
# Latest subject of the thread.
subject = ndb.StringProperty(indexed=True, required=True)
# Summary text of the thread.
summary = ndb.TextProperty(indexed=False)
# Specifies whether this thread has a related suggestion.
has_suggestion = (
ndb.BooleanProperty(indexed=True, default=False, required=True))
# Cached value of the number of messages in the thread.
message_count = ndb.IntegerProperty(indexed=True, default=0)
# Cached text of the last message in the thread with non-empty content, or
# None if there is no such message.
last_nonempty_message_text = ndb.TextProperty(indexed=False)
# Cached ID for the user of the last message in the thread with non-empty
# content, or None if the message was made anonymously or if there is no
# such message.
last_nonempty_message_author_id = ndb.StringProperty(indexed=True)
@staticmethod
def get_deletion_policy():
"""General feedback thread needs to be pseudonymized for the user."""
return base_models.DELETION_POLICY.LOCALLY_PSEUDONYMIZE
@classmethod
def get_export_policy(cls):
"""Model contains user data."""
return dict(super(cls, cls).get_export_policy(), **{
'entity_type': base_models.EXPORT_POLICY.EXPORTED,
'entity_id': base_models.EXPORT_POLICY.EXPORTED,
'original_author_id': base_models.EXPORT_POLICY.EXPORTED,
'status': base_models.EXPORT_POLICY.EXPORTED,
'subject': base_models.EXPORT_POLICY.EXPORTED,
'summary': base_models.EXPORT_POLICY.EXPORTED,
'has_suggestion': base_models.EXPORT_POLICY.EXPORTED,
'message_count': base_models.EXPORT_POLICY.EXPORTED,
'last_nonempty_message_text':
base_models.EXPORT_POLICY.EXPORTED,
'last_nonempty_message_author_id':
base_models.EXPORT_POLICY.EXPORTED
})
@classmethod
def has_reference_to_user_id(cls, user_id):
"""Check whether GeneralFeedbackThreadModel exists for user.
Args:
user_id: str. The ID of the user whose data should be checked.
Returns:
bool. Whether any models refer to the given user ID.
"""
return cls.query(ndb.OR(
cls.original_author_id == user_id,
cls.last_nonempty_message_author_id == user_id
)).get(keys_only=True) is not None
@classmethod
def export_data(cls, user_id):
"""Exports the data from GeneralFeedbackThreadModel
into dict format for Takeout.
Args:
user_id: str. The ID of the user whose data should be exported.
Returns:
dict. Dictionary of the data from GeneralFeedbackThreadModel.
"""
user_data = dict()
feedback_models = cls.get_all().filter(
cls.original_author_id == user_id).fetch()
for feedback_model in feedback_models:
user_data[feedback_model.id] = {
'entity_type': feedback_model.entity_type,
'entity_id': feedback_model.entity_id,
'status': feedback_model.status,
'subject': feedback_model.subject,
'has_suggestion': feedback_model.has_suggestion,
'summary': feedback_model.summary,
'message_count': feedback_model.message_count,
'last_updated_msec': utils.get_time_in_millisecs(
feedback_model.last_updated)
}
return user_data
@classmethod
def generate_new_thread_id(cls, entity_type, entity_id):
"""Generates a new thread ID which is unique.
Args:
entity_type: str. The type of the entity.
entity_id: str. The ID of the entity.
Returns:
str. A thread ID that is different from the IDs of all
the existing threads within the given entity.
Raises:
Exception. There were too many collisions with existing thread IDs
when attempting to generate a new thread ID.
"""
for _ in python_utils.RANGE(_MAX_RETRIES):
thread_id = (
entity_type + '.' + entity_id + '.' +
utils.base64_from_int(utils.get_current_time_in_millisecs()) +
utils.base64_from_int(utils.get_random_int(_RAND_RANGE)))
if not cls.get_by_id(thread_id):
return thread_id
raise Exception(
'New thread id generator is producing too many collisions.')
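# Illustrative note (not part of the original model): for entity_type
# 'exploration' and entity_id 'exp_123', the generated id has the shape
# 'exploration.exp_123.<base64(current-time-ms)><base64(random-int)>', so the
# trailing string differs on every call and collisions are retried at most
# _MAX_RETRIES times before the exception above is raised.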
@classmethod
def create(cls, thread_id):
"""Creates a new FeedbackThreadModel entry.
Args:
thread_id: str. Thread ID of the newly-created thread.
Returns:
GeneralFeedbackThreadModel. The newly created FeedbackThreadModel
instance.
Raises:
Exception. A thread with the given thread ID exists already.
"""
if cls.get_by_id(thread_id):
raise Exception('Feedback thread ID conflict on create.')
return cls(id=thread_id)
@classmethod
def get_threads(
cls, entity_type, entity_id, limit=feconf.DEFAULT_QUERY_LIMIT):
"""Returns a list of threads associated with the entity, ordered
by their "last updated" field. The number of entities fetched is
limited by the `limit` argument to this method, whose default
value is equal to the default query limit.
Args:
entity_type: str. The type of the entity.
entity_id: str. The ID of the entity.
limit: int. The maximum possible number of items in the returned
list.
Returns:
list(GeneralFeedbackThreadModel). List of threads associated with
the entity. Doesn't include deleted entries.
"""
return cls.get_all().filter(cls.entity_type == entity_type).filter(
cls.entity_id == entity_id).order(-cls.last_updated).fetch(limit)
class GeneralFeedbackMessageModel(base_models.BaseModel):
"""Feedback messages. One or more of these messages make a thread.
The id of instances of this class has the form [thread_id].[message_id]
"""
# ID corresponding to an entry of FeedbackThreadModel.
thread_id = ndb.StringProperty(required=True, indexed=True)
# 0-based sequential numerical ID. Sorting by this field will create the
# thread in chronological order.
message_id = ndb.IntegerProperty(required=True, indexed=True)
# ID of the user who posted this message. This may be None if the feedback
# was given anonymously by a learner.
author_id = ndb.StringProperty(indexed=True)
# New thread status. Must exist in the first message of a thread. For the
# rest of the thread, should exist only when the status changes.
updated_status = ndb.StringProperty(choices=STATUS_CHOICES, indexed=True)
# New thread subject. Must exist in the first message of a thread. For the
# rest of the thread, should exist only when the subject changes.
updated_subject = ndb.StringProperty(indexed=False)
# Message text. Allowed not to exist (e.g. post only to update the status).
text = ndb.TextProperty(indexed=False)
# Whether the incoming message is received by email (as opposed to via
# the web).
received_via_email = (
ndb.BooleanProperty(default=False, indexed=True, required=True))
@staticmethod
def get_deletion_policy():
"""General feedback message needs to be pseudonymized for the user."""
return base_models.DELETION_POLICY.LOCALLY_PSEUDONYMIZE
@classmethod
def get_export_policy(cls):
"""Model contains user data."""
return dict(super(cls, cls).get_export_policy(), **{
'thread_id': base_models.EXPORT_POLICY.EXPORTED,
'message_id': base_models.EXPORT_POLICY.EXPORTED,
'author_id': base_models.EXPORT_POLICY.EXPORTED,
'updated_status': base_models.EXPORT_POLICY.EXPORTED,
'updated_subject': base_models.EXPORT_POLICY.EXPORTED,
'text': base_models.EXPORT_POLICY.EXPORTED,
'received_via_email': base_models.EXPORT_POLICY.EXPORTED
})
@classmethod
def has_reference_to_user_id(cls, user_id):
"""Check whether GeneralFeedbackMessageModel exists for user.
Args:
user_id: str. The ID of the user whose data should be checked.
Returns:
bool. Whether any models refer to the given user ID.
"""
return cls.query(cls.author_id == user_id).get(
keys_only=True) is not None
@classmethod
def export_data(cls, user_id):
"""Exports the data from GeneralFeedbackMessageModel
into dict format for Takeout.
Args:
user_id: str. The ID of the user whose data should be exported.
Returns:
dict. Dictionary of the data from GeneralFeedbackMessageModel.
"""
user_data = dict()
feedback_models = cls.get_all().filter(cls.author_id == user_id).fetch()
for feedback_model in feedback_models:
user_data[feedback_model.id] = {
'thread_id': feedback_model.thread_id,
'message_id': feedback_model.message_id,
'updated_status': feedback_model.updated_status,
'updated_subject': feedback_model.updated_subject,
'text': feedback_model.text,
'received_via_email': feedback_model.received_via_email
}
return user_data
@classmethod
def _generate_id(cls, thread_id, message_id):
"""Generates full message ID given the thread ID and message ID.
Args:
thread_id: str. Thread ID of the thread to which the message
belongs.
message_id: int. Message ID of the message.
Returns:
str. Full message ID.
"""
return '.'.join([thread_id, python_utils.UNICODE(message_id)])
@property
def entity_id(self):
"""Returns the entity_id corresponding to this thread instance.
Returns:
str. The entity_id.
"""
return self.id.split('.')[1]
@property
def entity_type(self):
"""Returns the entity_type corresponding to this thread instance.
Returns:
str. The entity_type.
"""
return self.id.split('.')[0]
@classmethod
def create(cls, message_identifier):
"""Creates a new GeneralFeedbackMessageModel entry.
Args:
message_identifier: FullyQualifiedMessageIdentifier. The message
identifier consists of the thread_id and its corresponding
message_id.
Returns:
GeneralFeedbackMessageModel. Instance of the new
GeneralFeedbackMessageModel entry.
Raises:
Exception. A message with the same ID already exists
in the given thread.
"""
return cls.create_multi([message_identifier])[0]
@classmethod
def create_multi(cls, message_identifiers):
"""Creates a new GeneralFeedbackMessageModel entry for each
(thread_id, message_id) pair.
Args:
message_identifiers: list(FullyQualifiedMessageIdentifier). Each
message identifier consists of the thread_id and its
corresponding message_id.
Returns:
list(GeneralFeedbackMessageModel). Instances of the new
GeneralFeedbackMessageModel entries.
Raises:
Exception. The number of thread_ids must be equal to the number of
message_ids.
Exception. A message with the same ID already exists
in the given thread.
"""
thread_ids = [
message_identifier.thread_id for message_identifier
in message_identifiers]
message_ids = [
message_identifier.message_id for message_identifier
in message_identifiers]
# Generate the new ids.
instance_ids = [
cls._generate_id(thread_id, message_id) for thread_id, message_id
in python_utils.ZIP(thread_ids, message_ids)
]
# Check if the new ids are valid.
current_instances = cls.get_multi(instance_ids)
conflict_ids = [
current_instance.id for current_instance in current_instances if
current_instance is not None
]
if len(conflict_ids) > 0:
raise Exception(
'The following feedback message ID(s) conflicted on '
'create: %s' % (' '.join(conflict_ids))
)
return [cls(id=instance_id) for instance_id in instance_ids]
@classmethod
def get(cls, thread_id, message_id, strict=True):
"""Gets the GeneralFeedbackMessageModel entry for the given ID. Raises
an error if no undeleted message with the given ID is found and
strict == True.
Args:
thread_id: str. ID of the thread.
message_id: int. ID of the message.
strict: bool. Whether to raise an error if no FeedbackMessageModel
entry is found for the given IDs.
Returns:
GeneralFeedbackMessageModel or None. If strict == False and no
undeleted message with the given message_id exists in the
datastore, then returns None. Otherwise, returns the
GeneralFeedbackMessageModel instance that corresponds to the
given ID.
Raises:
EntityNotFoundError. The value of strict is True and either
(i) message ID is not valid
(ii) message is marked as deleted.
No error will be raised if strict == False.
"""
instance_id = cls._generate_id(thread_id, message_id)
return super(GeneralFeedbackMessageModel, cls).get(
instance_id, strict=strict)
@classmethod
def get_messages(cls, thread_id):
"""Returns a list of messages in the given thread. The number of
messages returned is capped by feconf.DEFAULT_QUERY_LIMIT.
Args:
thread_id: str. ID of the thread.
Returns:
list(GeneralFeedbackMessageModel). A list of messages in the
given thread, up to a maximum of feconf.DEFAULT_QUERY_LIMIT
messages.
"""
return cls.get_all().filter(
cls.thread_id == thread_id).fetch(feconf.DEFAULT_QUERY_LIMIT)
@classmethod
def get_most_recent_message(cls, thread_id):
"""Returns the last message in the thread.
Args:
thread_id: str. ID of the thread.
Returns:
GeneralFeedbackMessageModel. Last message in the thread.
"""
thread = GeneralFeedbackThreadModel.get_by_id(thread_id)
return cls.get(thread_id, thread.message_count - 1)
@classmethod
def get_message_count(cls, thread_id):
"""Returns the number of messages in the thread. Includes the
deleted entries.
Args:
thread_id: str. ID of the thread.
Returns:
int. Number of messages in the thread.
"""
return cls.get_message_counts([thread_id])[0]
@classmethod
def get_message_counts(cls, thread_ids):
"""Returns a list containing the number of messages in the threads.
Includes the deleted entries.
Args:
thread_ids: list(str). ID of the threads.
Returns:
list(int). List of the message counts for the threads.
"""
thread_models = GeneralFeedbackThreadModel.get_multi(thread_ids)
return [thread_model.message_count for thread_model in thread_models]
@classmethod
def get_all_messages(cls, page_size, urlsafe_start_cursor):
"""Fetches a list of all the messages sorted by their last updated
attribute.
Args:
page_size: int. The maximum number of messages to be returned.
urlsafe_start_cursor: str or None. If provided, the list of
returned messages starts from this datastore cursor.
Otherwise, the returned messages start from the beginning
of the full list of messages.
Returns:
3-tuple of (results, cursor, more). Where:
results: List of query results.
cursor: str or None. A query cursor pointing to the next
batch of results. If there are no more results, this might
be None.
more: bool. If True, there are (probably) more results after
this batch. If False, there are no further results after
this batch.
"""
return cls._fetch_page_sorted_by_last_updated(
cls.query(), page_size, urlsafe_start_cursor)
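# Pagination sketch (illustrative, not from the original codebase): callers
# typically start with
# results, cursor, more = GeneralFeedbackMessageModel.get_all_messages(100, None)
# and keep feeding the returned cursor back in while `more` is True.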
class GeneralFeedbackThreadUserModel(base_models.BaseModel):
"""Model for storing the ids of the messages in the thread that are read by
the user.
Instances of this class have keys of the form [user_id].[thread_id]
"""
user_id = ndb.StringProperty(required=True, indexed=True)
thread_id = ndb.StringProperty(required=True, indexed=True)
message_ids_read_by_user = ndb.IntegerProperty(repeated=True, indexed=True)
@staticmethod
def get_deletion_policy():
"""General feedback thread user can be deleted since it only contains
information relevant to the one user.
"""
return base_models.DELETION_POLICY.DELETE
@classmethod
def get_export_policy(cls):
"""Model contains user data."""
return dict(super(cls, cls).get_export_policy(), **{
'user_id': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'thread_id': base_models.EXPORT_POLICY.EXPORTED,
'message_ids_read_by_user':
base_models.EXPORT_POLICY.EXPORTED
})
@classmethod
def apply_deletion_policy(cls, user_id):
"""Delete instance of GeneralFeedbackThreadUserModel for the user.
Args:
user_id: str. The ID of the user whose data should be deleted.
"""
ndb.delete_multi(
cls.query(cls.user_id == user_id).fetch(keys_only=True))
@classmethod
def has_reference_to_user_id(cls, user_id):
"""Check whether GeneralFeedbackThreadUserModel exists for user.
Args:
user_id: str. The ID of the user whose data should be checked.
Returns:
bool. Whether any models refer to the given user ID.
"""
return cls.query(cls.user_id == user_id).get(keys_only=True) is not None
@classmethod
def generate_full_id(cls, user_id, thread_id):
"""Generates the full message id of the format:
<user_id.thread_id>.
Args:
user_id: str. The user id.
thread_id: str. The thread id.
Returns:
str. The full message id.
"""
return '%s.%s' % (user_id, thread_id)
@classmethod
def get(cls, user_id, thread_id):
"""Gets the FeedbackThreadUserModel corresponding to the given user and
the thread.
Args:
user_id: str. The id of the user.
thread_id: str. The id of the thread.
Returns:
FeedbackThreadUserModel. The FeedbackThreadUserModel instance which
matches with the given user_id, and thread id.
"""
instance_id = cls.generate_full_id(user_id, thread_id)
return super(GeneralFeedbackThreadUserModel, cls).get(
instance_id, strict=False)
@classmethod
def create(cls, user_id, thread_id):
"""Creates a new FeedbackThreadUserModel instance and returns it.
Args:
user_id: str. The id of the user.
thread_id: str. The id of the thread.
Returns:
FeedbackThreadUserModel. The newly created FeedbackThreadUserModel
instance.
"""
return cls.create_multi(user_id, [thread_id])[0]
@classmethod
def create_multi(cls, user_id, thread_ids):
"""Creates new FeedbackThreadUserModel instances for user_id for each
of the thread_ids.
Args:
user_id: str. The id of the user.
thread_ids: list(str). The ids of the threads.
Returns:
list(FeedbackThreadUserModel). The newly created
FeedbackThreadUserModel instances.
"""
new_instances = []
for thread_id in thread_ids:
instance_id = cls.generate_full_id(user_id, thread_id)
new_instance = cls(
id=instance_id, user_id=user_id, thread_id=thread_id)
new_instances.append(new_instance)
GeneralFeedbackThreadUserModel.put_multi(new_instances)
return new_instances
@classmethod
def get_multi(cls, user_id, thread_ids):
"""Gets the ExplorationUserDataModel corresponding to the given user and
the thread ids.
Args:
user_id: str. The id of the user.
thread_ids: list(str). The ids of the threads.
Returns:
list(FeedbackThreadUserModel). The FeedbackThreadUserModels
corresponding to the given user and thread ids.
"""
instance_ids = [
cls.generate_full_id(user_id, thread_id)
for thread_id in thread_ids]
return super(GeneralFeedbackThreadUserModel, cls).get_multi(
instance_ids)
@classmethod
def export_data(cls, user_id):
"""Takeout: Export GeneralFeedbackThreadUserModel user-based properties.
Args:
user_id: str. The user_id denotes which user's data to extract.
Returns:
dict. A dict containing the user-relevant properties of
GeneralFeedbackThreadUserModel, i.e., which messages have been
read by the user (as a list of ids) in each thread.
"""
found_models = cls.get_all().filter(cls.user_id == user_id)
user_data = {}
for user_model in found_models:
user_data[user_model.thread_id] = (
user_model.message_ids_read_by_user)
return user_data
class FeedbackAnalyticsModel(base_models.BaseMapReduceBatchResultsModel):
"""Model for storing feedback thread analytics for an exploration.
The key of each instance is the exploration ID.
"""
# The number of open feedback threads for this exploration.
num_open_threads = ndb.IntegerProperty(default=None, indexed=True)
# Total number of feedback threads for this exploration.
num_total_threads = ndb.IntegerProperty(default=None, indexed=True)
@staticmethod
def get_deletion_policy():
"""Feedback analytic model should be kept if the associated exploration
is public.
"""
return base_models.DELETION_POLICY.KEEP_IF_PUBLIC
@classmethod
def get_export_policy(cls):
"""Model does not contain user data."""
return dict(super(cls, cls).get_export_policy(), **{
'num_open_threads': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'num_total_threads': base_models.EXPORT_POLICY.NOT_APPLICABLE
})
@classmethod
def has_reference_to_user_id(cls, unused_user_id):
"""FeedbackAnalyticsModel doesn't reference any user_id directly.
Args:
unused_user_id: str. The (unused) ID of the user whose data
should be checked.
Returns:
bool. Whether any models refer to the given user ID.
"""
return False
@classmethod
def create(cls, model_id, num_open_threads, num_total_threads):
"""Creates a new FeedbackAnalyticsModel entry.
Args:
model_id: str. ID of the model instance to be created. This
is the same as the exploration ID.
num_open_threads: int. Number of open feedback threads for
this exploration.
num_total_threads: int. Total number of feedback threads for
this exploration.
"""
cls(
id=model_id,
num_open_threads=num_open_threads,
num_total_threads=num_total_threads
).put()
class UnsentFeedbackEmailModel(base_models.BaseModel):
"""Model for storing feedback messages that need to be sent to creators.
Instances of this model contain information about feedback messages that
have been received by the site, but have not yet been sent to creators.
The model instances will be deleted once the corresponding email has been
sent.
The id of each model instance is the user_id of the user who should receive
the messages.
"""
# The list of feedback messages that need to be sent to this user.
# Each element in this list is a dict with keys 'entity_type', 'entity_id',
# 'thread_id' and 'message_id'; this information is used to retrieve
# corresponding FeedbackMessageModel instance.
feedback_message_references = ndb.JsonProperty(repeated=True)
# The number of failed attempts that have been made (so far) to
# send an email to this user.
retries = ndb.IntegerProperty(default=0, required=True, indexed=True)
@staticmethod
def get_deletion_policy():
"""Unsent feedback email is kept until sent."""
return base_models.DELETION_POLICY.KEEP
@classmethod
def get_export_policy(cls):
"""Model does not contain user data."""
return dict(super(cls, cls).get_export_policy(), **{
'feedback_message_references':
base_models.EXPORT_POLICY.NOT_APPLICABLE,
'retries': base_models.EXPORT_POLICY.NOT_APPLICABLE
})
@classmethod
def has_reference_to_user_id(cls, user_id):
"""Check whether UnsentFeedbackEmailModel exists for user.
Args:
user_id: str. The ID of the user whose data should be checked.
Returns:
bool. Whether the model for user_id exists.
"""
return cls.get_by_id(user_id) is not None
| apache-2.0 | 907,305,869,170,521,900 | 35.632184 | 80 | 0.627654 | false | 4.288085 | false | false | false |
nkgilley/home-assistant | homeassistant/components/airly/air_quality.py | 5 | 3907 | """Support for the Airly air_quality service."""
from homeassistant.components.air_quality import (
ATTR_AQI,
ATTR_PM_2_5,
ATTR_PM_10,
AirQualityEntity,
)
from homeassistant.const import CONF_NAME
from .const import (
ATTR_API_ADVICE,
ATTR_API_CAQI,
ATTR_API_CAQI_DESCRIPTION,
ATTR_API_CAQI_LEVEL,
ATTR_API_PM10,
ATTR_API_PM10_LIMIT,
ATTR_API_PM10_PERCENT,
ATTR_API_PM25,
ATTR_API_PM25_LIMIT,
ATTR_API_PM25_PERCENT,
DOMAIN,
)
ATTRIBUTION = "Data provided by Airly"
LABEL_ADVICE = "advice"
LABEL_AQI_DESCRIPTION = f"{ATTR_AQI}_description"
LABEL_AQI_LEVEL = f"{ATTR_AQI}_level"
LABEL_PM_2_5_LIMIT = f"{ATTR_PM_2_5}_limit"
LABEL_PM_2_5_PERCENT = f"{ATTR_PM_2_5}_percent_of_limit"
LABEL_PM_10_LIMIT = f"{ATTR_PM_10}_limit"
LABEL_PM_10_PERCENT = f"{ATTR_PM_10}_percent_of_limit"
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Airly air_quality entity based on a config entry."""
name = config_entry.data[CONF_NAME]
coordinator = hass.data[DOMAIN][config_entry.entry_id]
async_add_entities(
[AirlyAirQuality(coordinator, name, config_entry.unique_id)], False
)
def round_state(func):
"""Round state."""
def _decorator(self):
res = func(self)
if isinstance(res, float):
return round(res)
return res
return _decorator
class AirlyAirQuality(AirQualityEntity):
"""Define an Airly air quality."""
def __init__(self, coordinator, name, unique_id):
"""Initialize."""
self.coordinator = coordinator
self._name = name
self._unique_id = unique_id
self._icon = "mdi:blur"
@property
def name(self):
"""Return the name."""
return self._name
@property
def should_poll(self):
"""Return the polling requirement of the entity."""
return False
@property
def icon(self):
"""Return the icon."""
return self._icon
@property
@round_state
def air_quality_index(self):
"""Return the air quality index."""
return self.coordinator.data[ATTR_API_CAQI]
@property
@round_state
def particulate_matter_2_5(self):
"""Return the particulate matter 2.5 level."""
return self.coordinator.data[ATTR_API_PM25]
@property
@round_state
def particulate_matter_10(self):
"""Return the particulate matter 10 level."""
return self.coordinator.data[ATTR_API_PM10]
@property
def attribution(self):
"""Return the attribution."""
return ATTRIBUTION
@property
def unique_id(self):
"""Return a unique_id for this entity."""
return self._unique_id
@property
def available(self):
"""Return True if entity is available."""
return self.coordinator.last_update_success
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {
LABEL_AQI_DESCRIPTION: self.coordinator.data[ATTR_API_CAQI_DESCRIPTION],
LABEL_ADVICE: self.coordinator.data[ATTR_API_ADVICE],
LABEL_AQI_LEVEL: self.coordinator.data[ATTR_API_CAQI_LEVEL],
LABEL_PM_2_5_LIMIT: self.coordinator.data[ATTR_API_PM25_LIMIT],
LABEL_PM_2_5_PERCENT: round(self.coordinator.data[ATTR_API_PM25_PERCENT]),
LABEL_PM_10_LIMIT: self.coordinator.data[ATTR_API_PM10_LIMIT],
LABEL_PM_10_PERCENT: round(self.coordinator.data[ATTR_API_PM10_PERCENT]),
}
async def async_added_to_hass(self):
"""Connect to dispatcher listening for entity data notifications."""
self.async_on_remove(
self.coordinator.async_add_listener(self.async_write_ha_state)
)
async def async_update(self):
"""Update Airly entity."""
await self.coordinator.async_request_refresh()
| apache-2.0 | -7,348,221,353,660,290,000 | 27.518248 | 86 | 0.633222 | false | 3.485281 | false | false | false |
udapi/udapi-python | udapi/block/ud/pt/addmwt.py | 1 | 6120 | """Block ud.pt.AddMwt for heuristic detection of Portuguese contractions.
According to the UD guidelines, contractions such as "dele" = "de ele"
should be annotated using multi-word tokens.
Note that this block should be used only for converting legacy conllu files.
Ideally a tokenizer should have already split the MWTs.
"""
import udapi.block.ud.addmwt
MWTS = {
'à': {'form': 'a a', 'lemma': 'a o'},
'às': {'form': 'a as', 'lemma': 'a o'},
'ao': {'form': 'a o', 'lemma': 'a o'},
'aos': {'form': 'a os', 'lemma': 'a o'},
'da': {'form': 'de a', 'lemma': 'de o'},
'das': {'form': 'de as', 'lemma': 'de o'},
'dessa': {'form': 'de essa', 'lemma': 'de esse'},
'dessas': {'form': 'de essas', 'lemma': 'de esse'},
'desse': {'form': 'de esse', 'lemma': 'de esse'},
'desses': {'form': 'de esses', 'lemma': 'de esse'},
'desta': {'form': 'de esta', 'lemma': 'de este'},
'destas': {'form': 'de estas', 'lemma': 'de este'},
'deste': {'form': 'de este', 'lemma': 'de este'},
'destes': {'form': 'de estes', 'lemma': 'de este'},
'disso': {'form': 'de isso', 'lemma': 'de este'},
'disto': {'form': 'de isto', 'lemma': 'de este'},
'do': {'form': 'de o', 'lemma': 'de o'}, # 'upos': 'ADP PRON', 'deprel': 'case *''
'dos': {'form': 'de os', 'lemma': 'de o'},
'dum': {'form': 'de um', 'lemma': 'de um'},
'duma': {'form': 'de uma', 'lemma': 'de um'},
'dumas': {'form': 'de umas', 'lemma': 'de um'},
'duns': {'form': 'de uns', 'lemma': 'de um'},
'na': {'form': 'em a', 'lemma': 'em o'},
'nas': {'form': 'em as', 'lemma': 'em o'}, # ADP PRON
'nesses': {'form': 'em esses', 'lemma': 'em esse'},
'nesta': {'form': 'em esta', 'lemma': 'em este'},
'neste': {'form': 'em este', 'lemma': 'em este'},
'nisso': {'form': 'em isso', 'lemma': 'em este'},
'nisto': {'form': 'em isto', 'lemma': 'em este',
'upos': 'ADP PRON', 'main': 1, 'shape': 'subtree'},
'no': {'form': 'em o', 'lemma': 'em o'}, # PRON cases are excluded below
'nos': {'form': 'em os', 'lemma': 'em o'}, # PRON cases are excluded below
'num': {'form': 'em um', 'lemma': 'em um'},
'numa': {'form': 'em uma', 'lemma': 'em um'},
'numas': {'form': 'em umas', 'lemma': 'em um'},
'nuns': {'form': 'em uns', 'lemma': 'em um'},
'pela': {'form': 'por a', 'lemma': 'por o'},
'pelas': {'form': 'por as', 'lemma': 'por o'},
'pelos': {'form': 'por os', 'lemma': 'por o'},
'pelo': {'form': 'por o', 'lemma': 'por o'},
# TODO daí = de aí = ADP ADV = case advmod
}
# shared values for all entries in MWTS
for v in MWTS.values():
if not v.get('upos'):
v['upos'] = 'ADP DET'
if not v.get('deprel'):
v['deprel'] = 'case det'
v['feats'] = '_ *'
# The following are the default values
# v['main'] = 0 # which of the two words will inherit the original children (if any)
# v['shape'] = 'siblings', # the newly created nodes will be siblings
for pronoun in 'ela ele eles elas'.split():
MWTS['d' + pronoun] = {
'form': 'de ' + pronoun,
'lemma': 'de ' + pronoun,
'upos': 'ADP PRON',
'deprel': 'case *',
'main': 1,
'shape': 'subtree',
}
class AddMwt(udapi.block.ud.addmwt.AddMwt):
"""Detect and mark MWTs (split them into words and add the words to the tree)."""
def multiword_analysis(self, node):
"""Return a dict with MWT info or None if `node` does not represent a multiword token."""
# "no" can be either a contraction of "em o", or a pronoun
if node.form.lower() in ('no', 'nos') and node.upos == 'PRON':
return
analysis = MWTS.get(node.form.lower(), None)
# If the input is e.g.:
# 1 na _ ADP _ _ deprel_x ?
# 2 verdade _ NOUN _ _ fixed 1
# The expected output is:
# 1-2 na _ _ _ _ _ _
# 1 em _ ADP _ _ deprel_x ?
# 2 a _ DET _ _ fixed 1
# 3 verdade _ NOUN _ _ fixed 1
if analysis and analysis['deprel'] == 'case det' and node.udeprel != 'case':
copy = dict(analysis)
copy['deprel'] = '* det'
copy['shape'] = 'subtree'
first_child = next((c for c in node.children if node.precedes(c)), None)
if first_child is not None and first_child.udeprel == 'fixed':
copy['deprel'] = '* fixed'
return copy
if analysis is not None:
return analysis
if node.form.lower().endswith('-se') and node.upos == 'VERB':
return {
'form': node.form.lower()[:-3] + ' se',
'lemma': '* se',
'upos': '* PRON',
'feats': '* _',
'deprel': '* nsubj', # or '* expl'
'main': 0,
'shape': 'subtree',
}
elif node.form.lower().endswith('-lo') and node.upos == 'VERB':
return {
'form': node.form.lower()[:-3] + ' lo',
'lemma': '* ele',
'upos': '* PRON',
'feats': '* _',
'deprel': '* obj',
'main': 0,
'shape': 'subtree',
}
elif node.form.lower().endswith('-los') and node.upos == 'VERB':
return {
'form': node.form.lower()[:-4] + ' los',
'lemma': '* eles',
'upos': '* PRON',
'feats': '* _',
'deprel': '* obj',
'main': 0,
'shape': 'subtree',
}
elif node.form.lower().endswith('-o') and node.upos == 'VERB':
return {
'form': node.form.lower()[:-2] + ' o',
'lemma': '* ele',
'upos': '* PRON',
'feats': '* _',
'deprel': '* obj',
'main': 0,
'shape': 'subtree',
}
return None
| gpl-3.0 | -9,146,337,873,444,123,000 | 40.324324 | 97 | 0.454872 | false | 3.071823 | false | false | false |
mehulsbhatt/modoboa | modoboa/lib/migrations/0001_initial.py | 4 | 1213 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Parameter',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=255)),
('value', models.CharField(max_length=255)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='UserParameter',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=255)),
('value', models.CharField(max_length=255)),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
]
| isc | 3,374,385,217,575,492,600 | 30.921053 | 114 | 0.538335 | false | 4.594697 | false | false | false |
benzkji/djangocms-misc | djangocms_misc/global_untranslated_placeholder/models.py | 2 | 1427 | # coding: utf-8
from cms.models import Placeholder # noqa - needed, circular import otherwise
from cms.plugin_rendering import ContentRenderer # , RenderedPlaceholder
try:
# cms 3.5 or 3.7+
from cms.plugin_rendering import StructureRenderer
except ImportError:
StructureRenderer = None
# load conf at startup
from .conf import UntranslatedPlaceholderConf # noqa
# import signals at startup
from .signals import * # noqa (will forget to update otherwise!)
from .utils import get_untranslated_default_language_if_enabled
def new_renderer__init__(self, request):
self.__original_init__(request)
lang = get_untranslated_default_language_if_enabled()
if lang:
self.request_language = lang
# monkey patch!
# for normal plugin rendering.
ContentRenderer.__original_init__ = ContentRenderer.__init__
ContentRenderer.__init__ = new_renderer__init__
def new_structure_render_placeholder(self, placeholder, language, page=None):
language = language or self.request_language
return self.__original_render_placeholder(placeholder, language, page)
if StructureRenderer:
# for structure mode
StructureRenderer.__original_init__ = StructureRenderer.__init__
StructureRenderer.__init__ = new_renderer__init__
StructureRenderer.__original_render_placeholder = StructureRenderer.render_placeholder
StructureRenderer.render_placeholder = new_structure_render_placeholder
| mit | -3,587,462,458,460,309,500 | 32.186047 | 90 | 0.751927 | false | 4.197059 | false | false | false |
schleichdi2/OPENNFR-6.3-CORE | bitbake/lib/bb/fetch2/repo.py | 1 | 3020 | """
BitBake "Fetch" repo (git) implementation
"""
# Copyright (C) 2009 Tom Rini <[email protected]>
#
# Based on git.py which is:
# Copyright (C) 2005 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#
import os
import bb
from bb.fetch2 import FetchMethod
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger
class Repo(FetchMethod):
"""Class to fetch a module or modules from repo (git) repositories"""
def supports(self, ud, d):
"""
Check to see if a given url can be fetched with repo.
"""
return ud.type in ["repo"]
def urldata_init(self, ud, d):
"""
We don"t care about the git rev of the manifests repository, but
we do care about the manifest to use. The default is "default".
We also care about the branch or tag to be used. The default is
"master".
"""
ud.basecmd = d.getVar("FETCHCMD_repo") or "/usr/bin/env repo"
ud.proto = ud.parm.get('protocol', 'git')
ud.branch = ud.parm.get('branch', 'master')
ud.manifest = ud.parm.get('manifest', 'default.xml')
if not ud.manifest.endswith('.xml'):
ud.manifest += '.xml'
ud.localfile = d.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch))
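# Illustrative example (assumed URL, derived from the parsing above): a
# SRC_URI such as
# repo://android.googlesource.com/platform/manifest;protocol=https;branch=main;manifest=default.xml
# yields ud.proto='https', ud.branch='main', ud.manifest='default.xml' and a
# cached tarball named
# repo_android.googlesource.com.platform.manifest_default.xml_main.tar.gz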
def download(self, ud, d):
"""Fetch url"""
if os.access(os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK):
logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
return
repodir = d.getVar("REPODIR") or (d.getVar("DL_DIR") + "/repo")
gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
codir = os.path.join(repodir, gitsrcname, ud.manifest)
if ud.user:
username = ud.user + "@"
else:
username = ""
repodir = os.path.join(codir, "repo")
bb.utils.mkdirhier(repodir)
if not os.path.exists(os.path.join(repodir, ".repo")):
bb.fetch2.check_network_access(d, "%s init -m %s -b %s -u %s://%s%s%s" % (ud.basecmd, ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
runfetchcmd("%s init -m %s -b %s -u %s://%s%s%s" % (ud.basecmd, ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d, workdir=repodir)
bb.fetch2.check_network_access(d, "%s sync %s" % (ud.basecmd, ud.url), ud.url)
runfetchcmd("%s sync" % ud.basecmd, d, workdir=repodir)
scmdata = ud.parm.get("scmdata", "")
if scmdata == "keep":
tar_flags = ""
else:
tar_flags = "--exclude='.repo' --exclude='.git'"
# Create a cache
runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d, workdir=codir)
def supports_srcrev(self):
return False
def _build_revision(self, ud, d):
return ud.manifest
def _want_sortable_revision(self, ud, d):
return False
| gpl-2.0 | 6,874,726,667,973,711,000 | 33.712644 | 168 | 0.578146 | false | 3.185654 | false | false | false |
ehashman/oh-mainline | vendor/packages/PyYaml/lib/yaml/serializer.py | 560 | 4171 |
__all__ = ['Serializer', 'SerializerError']
from error import YAMLError
from events import *
from nodes import *
class SerializerError(YAMLError):
pass
class Serializer(object):
ANCHOR_TEMPLATE = u'id%03d'
def __init__(self, encoding=None,
explicit_start=None, explicit_end=None, version=None, tags=None):
self.use_encoding = encoding
self.use_explicit_start = explicit_start
self.use_explicit_end = explicit_end
self.use_version = version
self.use_tags = tags
self.serialized_nodes = {}
self.anchors = {}
self.last_anchor_id = 0
self.closed = None
def open(self):
if self.closed is None:
self.emit(StreamStartEvent(encoding=self.use_encoding))
self.closed = False
elif self.closed:
raise SerializerError("serializer is closed")
else:
raise SerializerError("serializer is already opened")
def close(self):
if self.closed is None:
raise SerializerError("serializer is not opened")
elif not self.closed:
self.emit(StreamEndEvent())
self.closed = True
#def __del__(self):
# self.close()
def serialize(self, node):
if self.closed is None:
raise SerializerError("serializer is not opened")
elif self.closed:
raise SerializerError("serializer is closed")
self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
version=self.use_version, tags=self.use_tags))
self.anchor_node(node)
self.serialize_node(node, None, None)
self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
self.serialized_nodes = {}
self.anchors = {}
self.last_anchor_id = 0
def anchor_node(self, node):
if node in self.anchors:
if self.anchors[node] is None:
self.anchors[node] = self.generate_anchor(node)
else:
self.anchors[node] = None
if isinstance(node, SequenceNode):
for item in node.value:
self.anchor_node(item)
elif isinstance(node, MappingNode):
for key, value in node.value:
self.anchor_node(key)
self.anchor_node(value)
def generate_anchor(self, node):
self.last_anchor_id += 1
return self.ANCHOR_TEMPLATE % self.last_anchor_id
def serialize_node(self, node, parent, index):
alias = self.anchors[node]
if node in self.serialized_nodes:
self.emit(AliasEvent(alias))
else:
self.serialized_nodes[node] = True
self.descend_resolver(parent, index)
if isinstance(node, ScalarNode):
detected_tag = self.resolve(ScalarNode, node.value, (True, False))
default_tag = self.resolve(ScalarNode, node.value, (False, True))
implicit = (node.tag == detected_tag), (node.tag == default_tag)
self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
style=node.style))
elif isinstance(node, SequenceNode):
implicit = (node.tag
== self.resolve(SequenceNode, node.value, True))
self.emit(SequenceStartEvent(alias, node.tag, implicit,
flow_style=node.flow_style))
index = 0
for item in node.value:
self.serialize_node(item, node, index)
index += 1
self.emit(SequenceEndEvent())
elif isinstance(node, MappingNode):
implicit = (node.tag
== self.resolve(MappingNode, node.value, True))
self.emit(MappingStartEvent(alias, node.tag, implicit,
flow_style=node.flow_style))
for key, value in node.value:
self.serialize_node(key, node, None)
self.serialize_node(value, node, key)
self.emit(MappingEndEvent())
self.ascend_resolver()
| agpl-3.0 | -1,717,988,015,721,316,000 | 36.576577 | 82 | 0.566771 | false | 4.291152 | false | false | false |
xjw1001001/IGCexpansion | IdenticalParalogTest/GeneratePamlResults.py | 1 | 4127 | import os
import subprocess
from Bio import Seq, SeqIO, AlignIO
from Bio.Phylo.PAML import codeml, baseml
import numpy as np
if __name__ == '__main__':
path = '/Users/xji3/Genconv/IdenticalParalogAlignment/'
pairs = []
with open('./All_Pairs.txt', 'r') as f:
for line in f.readlines():
pairs.append(line.replace('\n','').split('_'))
pairs.remove(['YLR028C', 'YMR120C'])
pairs.append(['YLR284C','YOR180C']) # this pair didn't appear this time
#pairs.remove(['YML026C', 'YDR450W'])# remove it for now
#pairs = [pairs[-1]]
tree_pair = ['YML026C', 'YDR450W']
with open('./YeastTree_paml.newick', 'r') as f:
all_tree_lines = f.readlines()
with open('./codeml_tail.ctl', 'r') as f:
all_codeml_ctl_lines = f.readlines()
with open('./baseml_tail.ctl', 'r') as f:
all_baseml_ctl_lines = f.readlines()
# Paths to the PAML executables; named *_bin so they do not shadow the
# Bio.Phylo.PAML codeml/baseml modules imported above.
codeml_bin = '/Users/xji3/Downloads/paml4.8/bin/codeml'
baseml_bin = '/Users/xji3/Downloads/paml4.8/bin/baseml'
for pair in pairs:
print 'Now run paml on pair ' + ' '.join(pair)
seqfile = path + '_'.join(pair) + '/' + '_'.join(pair) + '_IdenticalParalog_paml_input.fasta'
treefile = path + '_'.join(pair) + '/' + '_'.join(pair) + '_tree.newick'
with open(treefile, 'w+') as f:
for line in all_tree_lines:
new_line = line.replace(tree_pair[0], pair[0])
new_line = new_line.replace(tree_pair[1], pair[1])
f.write(new_line)
outfile_codeml = path + '_'.join(pair) + '/' + '_'.join(pair) + '_IdenticalParalog_codeml'
codeml_ctlfile = path + '_'.join(pair) + '/' + '_'.join(pair) + '_IdenticalParalog_codeml_control.ctl'
with open(codeml_ctlfile, 'w+') as f:
f.writelines(['seqfile = ' + seqfile + '\n', 'treefile = ' + treefile + '\n', 'outfile = ' + outfile_codeml + '\n'])
f.writelines(all_codeml_ctl_lines)
codeml_cmd = [codeml_bin, '_'.join(pair) + '_IdenticalParalog_codeml_control.ctl']
os.chdir(path + '_'.join(pair) + '/')
#os.system(' '.join(codeml_cmd))
subprocess.check_output(codeml_cmd)
outfile_baseml = path + '_'.join(pair) + '/' + '_'.join(pair) + '_IdenticalParalog_baseml'
baseml_ctlfile = path + '_'.join(pair) + '/' + '_'.join(pair) + '_IdenticalParalog_baseml_control.ctl'
with open(baseml_ctlfile, 'w+') as f:
f.writelines(['seqfile = ' + seqfile + '\n', 'treefile = ' + treefile + '\n', 'outfile = ' + outfile_baseml + '\n'])
f.writelines(all_baseml_ctl_lines)
baseml_cmd = [baseml_bin, '_'.join(pair) + '_IdenticalParalog_baseml_control.ctl']
subprocess.check_output(baseml_cmd)
## summary_mat = []
## finished_list = []
## label = ['MG94_codeml_tree_length', 'MG94_codeml_lnL', 'MG94_codeml_omega', 'MG94_codeml_kappa',
## 'HKY_baseml_tree_length', 'HKY_baseml_lnL', 'HKY_baseml_kappa']
## footer = ' '.join(label)
##
## for pair in pairs:
## codeml_result = codeml.read('/Users/xji3/Genconv/NewClusterPackRun/NewPairsAlignment/' + '_'.join(pair) + '/' + '_'.join(pair) + '_codeml')
## baseml_result = baseml.read('/Users/xji3/Genconv/NewClusterPackRun/NewPairsAlignment/' + '_'.join(pair) + '/' + '_'.join(pair) + '_baseml')
## summary_mat.append([codeml_result['NSsites'][0]['tree length'],
## codeml_result['NSsites'][0]['lnL'],
## codeml_result['NSsites'][0]['parameters']['omega'],
## codeml_result['NSsites'][0]['parameters']['kappa'],
## baseml_result['tree length'],
## baseml_result['lnL'],
## baseml_result['parameters']['kappa']])
## finished_list.append(pair)
##
## header = ' '.join(['_'.join(pair) for pair in finished_list]) # column labels
## np.savetxt(open('/Users/xji3/Genconv/NewClusterPackRun/NewPairsAlignment/paml_summary.txt', 'w+'), np.matrix(summary_mat).T, delimiter = ' ', footer = footer, header = header)
| gpl-3.0 | 5,950,376,985,468,264,000 | 48.130952 | 181 | 0.561182 | false | 3.124148 | false | false | false |
munhyunsu/Hobby | Fitcraft/fitness_organize/data_organize_second.py | 1 | 1765 | #!/usr/bin/env python3
import sys
import os
import json
def main(argv):
"""
Read all files in the given directories recursively, then rename and
copy (instead of moving, for safety) each file according to the rules below.
"""
os.makedirs('./outputs', exist_ok = True)
dir_queue = argv[1:]
file_queue = get_all_files(dir_queue)
process_files(file_queue)
def process_files(file_queue):
for target in file_queue:
filename = target.split('/')[-1]
user = filename.split('_')[0]
user_num = user[1:]
user_num = int(user_num)
user = 'B' + '{:02}'.format(user_num)
date = filename.split('_')[1]
data = (filename.split('_')[2]).split('.')[0]
target_path = (data
+ '/'
+ date
+ '/'
+ user
+ '.json')
target_path = './outputs/' + target_path
os.makedirs('./outputs/' + data + '/' + date, exist_ok = True)
with open(target, 'r') as read_filep:
with open(target_path, 'w') as write_filep:
read_json = json.load(read_filep)
json.dump(read_json, write_filep,
indent = 4)
print(target_path)
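# Illustrative mapping (assumed input name, following the parsing above): a
# file './raw/u7_2017-03-01_heartrate.json' is read as user 'u7' -> 'B07',
# date '2017-03-01' and data type 'heartrate', so its contents are rewritten
# to './outputs/heartrate/2017-03-01/B07.json'.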
def get_all_files(dir_queue):
file_queue = list()
while len(dir_queue) > 0:
path = dir_queue.pop()
with os.scandir(path) as it:
for entry in it:
if not entry.name.startswith('.') and entry.is_file():
file_queue.append(entry.path)
else:
dir_queue.append(entry.path)
return file_queue
# Standard entry point: run main() when the module is executed as a script.
if __name__ == '__main__':
sys.exit(main(sys.argv))
| gpl-3.0 | 31,466,746,324,951,550 | 23.513889 | 70 | 0.49915 | false | 3.747346 | false | false | false |
turbokongen/home-assistant | homeassistant/components/synology_dsm/camera.py | 1 | 4928 | """Support for Synology DSM cameras."""
import logging
from typing import Dict
from synology_dsm.api.surveillance_station import SynoSurveillanceStation
from synology_dsm.exceptions import SynologyDSMAPIErrorException
from homeassistant.components.camera import SUPPORT_STREAM, Camera
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from . import SynoApi, SynologyDSMCoordinatorEntity
from .const import (
COORDINATOR_SURVEILLANCE,
DOMAIN,
ENTITY_CLASS,
ENTITY_ENABLE,
ENTITY_ICON,
ENTITY_NAME,
ENTITY_UNIT,
SYNO_API,
)
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Set up the Synology NAS cameras."""
data = hass.data[DOMAIN][entry.unique_id]
api = data[SYNO_API]
if SynoSurveillanceStation.CAMERA_API_KEY not in api.dsm.apis:
return
# initial data fetch
coordinator = data[COORDINATOR_SURVEILLANCE]
await coordinator.async_refresh()
async_add_entities(
SynoDSMCamera(api, coordinator, camera_id)
for camera_id in coordinator.data["cameras"]
)
class SynoDSMCamera(SynologyDSMCoordinatorEntity, Camera):
"""Representation a Synology camera."""
def __init__(
self, api: SynoApi, coordinator: DataUpdateCoordinator, camera_id: int
):
"""Initialize a Synology camera."""
super().__init__(
api,
f"{SynoSurveillanceStation.CAMERA_API_KEY}:{camera_id}",
{
ENTITY_NAME: coordinator.data["cameras"][camera_id].name,
ENTITY_ENABLE: coordinator.data["cameras"][camera_id].is_enabled,
ENTITY_CLASS: None,
ENTITY_ICON: None,
ENTITY_UNIT: None,
},
coordinator,
)
Camera.__init__(self)
self._camera_id = camera_id
self._api = api
@property
def camera_data(self):
"""Camera data."""
return self.coordinator.data["cameras"][self._camera_id]
@property
def device_info(self) -> Dict[str, any]:
"""Return the device information."""
return {
"identifiers": {
(
DOMAIN,
self._api.information.serial,
self.camera_data.id,
)
},
"name": self.camera_data.name,
"model": self.camera_data.model,
"via_device": (
DOMAIN,
self._api.information.serial,
SynoSurveillanceStation.INFO_API_KEY,
),
}
@property
def available(self) -> bool:
"""Return the availability of the camera."""
return self.camera_data.is_enabled and self.coordinator.last_update_success
@property
def supported_features(self) -> int:
"""Return supported features of this camera."""
return SUPPORT_STREAM
@property
def is_recording(self):
"""Return true if the device is recording."""
return self.camera_data.is_recording
@property
def motion_detection_enabled(self):
"""Return the camera motion detection status."""
return self.camera_data.is_motion_detection_enabled
def camera_image(self) -> bytes:
"""Return bytes of camera image."""
_LOGGER.debug(
"SynoDSMCamera.camera_image(%s)",
self.camera_data.name,
)
if not self.available:
return None
try:
return self._api.surveillance_station.get_camera_image(self._camera_id)
except (SynologyDSMAPIErrorException) as err:
_LOGGER.debug(
"SynoDSMCamera.camera_image(%s) - Exception:%s",
self.camera_data.name,
err,
)
return None
async def stream_source(self) -> str:
"""Return the source of the stream."""
_LOGGER.debug(
"SynoDSMCamera.stream_source(%s)",
self.camera_data.name,
)
if not self.available:
return None
return self.camera_data.live_view.rtsp
def enable_motion_detection(self):
"""Enable motion detection in the camera."""
_LOGGER.debug(
"SynoDSMCamera.enable_motion_detection(%s)",
self.camera_data.name,
)
self._api.surveillance_station.enable_motion_detection(self._camera_id)
def disable_motion_detection(self):
"""Disable motion detection in camera."""
_LOGGER.debug(
"SynoDSMCamera.disable_motion_detection(%s)",
self.camera_data.name,
)
self._api.surveillance_station.disable_motion_detection(self._camera_id)
| apache-2.0 | 2,447,922,313,075,051,500 | 29.8 | 83 | 0.601461 | false | 4.026144 | false | false | false |
Perkville/django-tastypie | docs/code/myproject/settings.py | 6 | 2067 | """
Django settings for myproject project.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '%ovvje%lh&k-%0v!@_c1gygt#aq-!o3*t$(hpee7@aj&35cr3a'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'myapp',
'tastypie',
)
MIDDLEWARE = MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'myproject.urls'
WSGI_APPLICATION = 'myproject.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
| bsd-3-clause | -3,888,527,607,295,375,400 | 23.317647 | 71 | 0.721335 | false | 3.3125 | false | false | false |
PersonalGenomesOrg/vcf2clinvar | vcf2clinvar/clinvar.py | 1 | 4488 | from __future__ import unicode_literals
from collections import OrderedDict
import json
from .common import Allele, VCFLine
CLNSIG_INDEX = {
'0': "unknown",
'1': "untested",
'2': "non-pathogenic",
'3': "probably non-pathogenic",
'4': "probably pathogenic",
'5': "pathogenic",
'6': "affecting drug response",
'7': "affecting histocompatibility",
'255': "other"}
class ClinVarAllele(Allele):
"""Store ClinVar data relating to one allele."""
def __init__(self, *args, **kwargs):
"""
Initialize ClinVarAllele object
A ClinVarAllele is an allele for a genomic position that has data
from ClinVar associated with it.
Required arguments:
sequence: String of DNA letters (A, C, G, or T) for the allele;
may be empty (to represent a deletion)
frequency: Preferred allele frequency
alleleid: ClinVar Allele ID
clnhgvs: HGVS nomenclature for this allele
clnsig: ClinVar clinical significance
clndn: ClinVar disease name
clndisdb: Database IDs of disease database entries (tag-value pairs)
clnvi: Database IDs of clinical sources (tag-value pairs)
"""
(self.clnalleleid, self.hgvs, self.clnsig,
self.clndn, self.clndisdb, self.clnvi) = [
kwargs[x] for x in
['alleleid', 'clnhgvs', 'clnsig', 'clndn', 'clndisdb', 'clnvi']]
super(ClinVarAllele, self).__init__(*args, **kwargs)
def as_dict(self, *args, **kwargs):
"""Return ClinVarAllele data as dict object."""
self_as_dict = super(ClinVarAllele, self).as_dict(*args, **kwargs)
self_as_dict['hgvs'] = self.hgvs
self_as_dict['clnalleleid'] = self.clnalleleid
self_as_dict['clnsig'] = self.clnsig
self_as_dict['clndn'] = self.clndn
self_as_dict['clndisdb'] = self.clndisdb
self_as_dict['clnvi'] = self.clnvi
return self_as_dict
class ClinVarVCFLine(VCFLine):
"""Store ClinVar data from a VCF line."""
def __init__(self, *args, **kwargs):
"""Initialize ClinVarVCFLine with VCF line"""
kwargs['skip_info'] = False
super(ClinVarVCFLine, self).__init__(self, *args, **kwargs)
def as_dict(self):
"""Dict representation of parsed ClinVar VCF line"""
return {'chrom': self.chrom,
'start': self.start,
'ref_allele': self.ref_allele,
'alt_alleles': self.alt_alleles,
'info': self.info,
'alleles': [x.as_dict() for x in self.alleles]}
def _parse_frequencies(self):
"""Parse frequency data in ClinVar VCF"""
frequencies = OrderedDict([
('EXAC', 'Unknown'),
('ESP', 'Unknown'),
('TGP', 'Unknown')])
pref_freq = 'Unknown'
for source in frequencies.keys():
freq_key = 'AF_' + source
if freq_key in self.info:
frequencies[source] = self.info[freq_key]
if pref_freq == 'Unknown':
pref_freq = frequencies[source]
return pref_freq, frequencies
def _parse_allele_data(self):
"""Parse alleles for ClinVar VCF, overrides parent method."""
# Get allele frequencies if they exist.
pref_freq, frequencies = self._parse_frequencies()
info_clnvar_single_tags = ['ALLELEID', 'CLNSIG', 'CLNHGVS']
cln_data = {x.lower(): self.info[x] if x in self.info else None
for x in info_clnvar_single_tags}
cln_data.update(
{'clndisdb': [x.split(',') for x in
self.info['CLNDISDB'].split('|')]
if 'CLNDISDB' in self.info else []})
cln_data.update({'clndn': self.info['CLNDN'].split('|') if
'CLNDN' in self.info else []})
cln_data.update({'clnvi': self.info['CLNVI'].split(',')
if 'CLNVI' in self.info else []})
try:
sequence = self.alt_alleles[0]
except IndexError:
sequence = self.ref_allele
allele = ClinVarAllele(frequency=pref_freq, sequence=sequence,
**cln_data)
# A few ClinVar variants are only reported as a combination with
# other variants, and no single-variant effect is proposed. Skip these.
if not cln_data['clnsig']:
return []
return [allele]
| mit | -9,173,766,841,808,944,000 | 35.786885 | 79 | 0.568627 | false | 3.487179 | false | false | false |
CompassionCH/compassion-switzerland | report_compassion/models/res_partner.py | 3 | 2342 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <[email protected]>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo.addons.thankyou_letters.models.res_partner import setlocale
from datetime import date, datetime
from odoo import api, models, fields, _
class ResPartner(models.Model):
""" Add fields for retrieving values for communications.
"""
_inherit = 'res.partner'
@api.multi
def get_receipt_text(self, year):
""" Formats the donation amount for the tax receipt. """
return '{:,.2f}'.format(self.get_receipt(year)).replace(
'.00', '.-').replace(',', "'")
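# Worked example (illustrative): a yearly total of 1234.0 renders as "1'234.-"
# and 1234.5 as "1'234.50" (Swiss thousands separator, ".00" collapsed to ".-").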
@api.multi
def get_receipt(self, year):
"""
Return the amount paid from the partner in the given year
:param year: int: year of selection
:return: float: total amount
"""
self.ensure_one()
start_date = date(year, 1, 1)
end_date = date(year, 12, 31)
invoice_lines = self.env['account.invoice.line'].search([
('last_payment', '>=', fields.Date.to_string(start_date)),
('last_payment', '<=', fields.Date.to_string(end_date)),
('state', '=', 'paid'),
('product_id.requires_thankyou', '=', True),
'|', ('partner_id', '=', self.id),
('partner_id.parent_id', '=', self.id),
])
return sum(invoice_lines.mapped('price_subtotal'))
@api.multi
def _compute_date_communication(self):
lang_map = {
'fr_CH': u'le %d %B %Y',
'fr': u'le %d %B %Y',
'de_DE': u'%d. %B %Y',
'de_CH': u'%d. %B %Y',
'en_US': u'%d %B %Y',
'it_IT': u'%d %B %Y',
}
today = datetime.today()
city = _("Yverdon-les-Bains")
for partner in self:
lang = partner.lang
with setlocale(lang):
date_str = today.strftime(
lang_map.get(lang, lang_map['en_US'])).decode('utf-8')
partner.date_communication = city + u", " + date_str
| agpl-3.0 | -7,011,012,724,866,295,000 | 35.030769 | 78 | 0.496157 | false | 3.711569 | false | false | false |
MaStr/UBox | src/core/python_lib/messages.py | 2 | 1170 | # PirateBox Message lib (C)2012-2014
# Matthias Strubel
import string
import socket
import base64
import sys
class message:
def __init__(self, name="generate"):
if name == "generate":
self.name = socket.gethostname()
else:
self.name = name
self.type = "gc"
self.decoded = ""
def set(self, content=" "):
base64content = base64.b64encode(content)
self.decoded = "piratebox;" + self.type + ";01;" + self.name + ";" + \
base64content
def get(self):
# TODO Split decoded part
message_parts = string.split(self.decoded, ";")
if message_parts[0] != "piratebox":
return None
b64_content_part = message_parts[4]
content = base64.b64decode(b64_content_part)
return content
def get_sendername(self):
return self.name
def get_message(self):
return self.decoded
def set_message(self, decoded):
self.decoded = decoded
class shoutbox_message(message):
def __init__(self, name="generate"):
message.__init__(self, name)
self.type = "sb"
| gpl-3.0 | 5,100,245,739,514,293,000 | 22.4 | 78 | 0.568376 | false | 3.75 | false | false | false |
aequologica/acigolouqea | transitiveReduction.py | 1 | 1191 | # cf. Michael Clerx answer @ http://stackoverflow.com/questions/1690953/transitive-reduction-algorithm-pseudocode
def prima(m, title=None):
""" Prints a matrix to the terminal """
if title:
print(title)
for row in m:
print(', '.join([str(x) for x in row]))
print ('')
def path(m):
""" Returns a path matrix """
p = [list(row) for row in m]
n = len(p)
for i in range(0, n):
for j in range(0, n):
if i == j:
continue
if p[j][i]:
for k in range(0, n):
if p[j][k] == 0:
p[j][k] = p[i][k]
return p
def hsu(m):
""" Transforms a given directed acyclic graph into its minimal equivalent """
n = len(m)
for j in range(n):
for i in range(n):
if m[i][j]:
for k in range(n):
if m[j][k]:
m[i][k] = 0
m = [ [0, 1, 1, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 1, 1],
[0, 0, 0, 0, 1],
[0, 1, 0, 0, 0]]
prima(m, 'Original matrix')
hsu(m)
prima(m, 'After Hsu')
p = path(m)
prima(p, 'Path matrix')
hsu(p)
prima(p, 'After Hsu') | mit | 6,401,661,161,687,506,000 | 23.833333 | 113 | 0.453401 | false | 2.962687 | false | false | false |
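# Hand-worked note on the demo above (illustrative): the first hsu() pass on
# the raw adjacency matrix only removes length-2 shortcuts, so it drops edge
# 2->4 (redundant given 2->3 and 3->4) but keeps 0->1 even though the longer
# path 0->2->3->4->1 exists; applying hsu() again to the path (reachability)
# matrix is what removes the remaining transitive edges.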
atcsecure/blocknet | blocknet-routing-protocol/pyblockrouternode/pyblocknet.py | 1 | 27248 | #!/usr/bin/python
#
# Originally from node.py - Bitcoin P2P network half-a-node
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Forked by atcsecure
#
# Blocknet's Routing Protocol node
import struct
import socket
import asyncore
import binascii
import time
import sys
import re
import random
import cStringIO
import hashlib
MY_VERSION = 71037
MY_SUBVERSION = "/pynode:0.0.2/"
# Default Settings if no configuration file is given
settings = {
"host": "104.131.186.93",
"port": 21357,
"debug": True,
"network": "mainnet"
}
def new_block_event(block):
if block.is_valid():
print " - Valid Block: %s" % block.hash
else:
print " - Invalid Block: %s" % block.hash
def new_transaction_event(tx):
if tx.is_valid():
print " - Valid TX: %s" % tx.hash
else:
print " - Invalid TX: %s" % tx.hash
def sha256(s):
return hashlib.new('sha256', s).digest()
def hash256(s):
return sha256(sha256(s))
def deser_string(f):
nit = struct.unpack("<B", f.read(1))[0]
if nit == 253:
nit = struct.unpack("<H", f.read(2))[0]
elif nit == 254:
nit = struct.unpack("<I", f.read(4))[0]
elif nit == 255:
nit = struct.unpack("<Q", f.read(8))[0]
return f.read(nit)
def ser_string(s):
if len(s) < 253:
return chr(len(s)) + s
elif len(s) < 0x10000:
return chr(253) + struct.pack("<H", len(s)) + s
elif len(s) < 0x100000000L:
return chr(254) + struct.pack("<I", len(s)) + s
return chr(255) + struct.pack("<Q", len(s)) + s
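# Encoding sketch (illustrative): ser_string and the ser_*_vector helpers use
# Bitcoin's CompactSize length prefix -- lengths below 253 are a single byte
# (ser_string("abc") == "\x03abc"), lengths up to 0xffff get a 0xfd marker plus
# a 2-byte little-endian length, up to 0xffffffff a 0xfe marker plus 4 bytes,
# and anything larger a 0xff marker plus 8 bytes.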
def deser_uint256(f):
r = 0L
for i in xrange(8):
t = struct.unpack("<I", f.read(4))[0]
r += t << (i * 32)
return r
def ser_uint256(u):
rs = ""
for i in xrange(8):
rs += struct.pack("<I", u & 0xFFFFFFFFL)
u >>= 32
return rs
def uint256_from_str(s):
r = 0L
t = struct.unpack("<IIIIIIII", s[:32])
for i in xrange(8):
r += t[i] << (i * 32)
return r
def uint256_from_compact(c):
nbytes = (c >> 24) & 0xFF
v = (c & 0xFFFFFFL) << (8 * (nbytes - 3))
return v
def deser_vector(f, c):
nit = struct.unpack("<B", f.read(1))[0]
if nit == 253:
nit = struct.unpack("<H", f.read(2))[0]
elif nit == 254:
nit = struct.unpack("<I", f.read(4))[0]
elif nit == 255:
nit = struct.unpack("<Q", f.read(8))[0]
r = []
for i in xrange(nit):
t = c()
t.deserialize(f)
r.append(t)
return r
def ser_vector(l):
r = ""
if len(l) < 253:
r = chr(len(l))
elif len(l) < 0x10000:
r = chr(253) + struct.pack("<H", len(l))
elif len(l) < 0x100000000L:
r = chr(254) + struct.pack("<I", len(l))
else:
r = chr(255) + struct.pack("<Q", len(l))
for i in l:
r += i.serialize()
return r
def deser_uint256_vector(f):
nit = struct.unpack("<B", f.read(1))[0]
if nit == 253:
nit = struct.unpack("<H", f.read(2))[0]
elif nit == 254:
nit = struct.unpack("<I", f.read(4))[0]
elif nit == 255:
nit = struct.unpack("<Q", f.read(8))[0]
r = []
for i in xrange(nit):
t = deser_uint256(f)
r.append(t)
return r
def ser_uint256_vector(l):
r = ""
if len(l) < 253:
r = chr(len(l))
    elif len(l) < 0x10000:
        r = chr(253) + struct.pack("<H", len(l))
    elif len(l) < 0x100000000L:
r = chr(254) + struct.pack("<I", len(l))
else:
r = chr(255) + struct.pack("<Q", len(l))
for i in l:
r += ser_uint256(i)
return r
def deser_string_vector(f):
nit = struct.unpack("<B", f.read(1))[0]
if nit == 253:
nit = struct.unpack("<H", f.read(2))[0]
elif nit == 254:
nit = struct.unpack("<I", f.read(4))[0]
elif nit == 255:
nit = struct.unpack("<Q", f.read(8))[0]
r = []
for i in xrange(nit):
t = deser_string(f)
r.append(t)
return r
def ser_string_vector(l):
r = ""
if len(l) < 253:
r = chr(len(l))
    elif len(l) < 0x10000:
        r = chr(253) + struct.pack("<H", len(l))
    elif len(l) < 0x100000000L:
r = chr(254) + struct.pack("<I", len(l))
else:
r = chr(255) + struct.pack("<Q", len(l))
for sv in l:
r += ser_string(sv)
return r
def deser_int_vector(f):
nit = struct.unpack("<B", f.read(1))[0]
if nit == 253:
nit = struct.unpack("<H", f.read(2))[0]
elif nit == 254:
nit = struct.unpack("<I", f.read(4))[0]
elif nit == 255:
nit = struct.unpack("<Q", f.read(8))[0]
r = []
for i in xrange(nit):
t = struct.unpack("<i", f.read(4))[0]
r.append(t)
return r
def ser_int_vector(l):
r = ""
if len(l) < 253:
r = chr(len(l))
    elif len(l) < 0x10000:
        r = chr(253) + struct.pack("<H", len(l))
    elif len(l) < 0x100000000L:
r = chr(254) + struct.pack("<I", len(l))
else:
r = chr(255) + struct.pack("<Q", len(l))
for i in l:
r += struct.pack("<i", i)
return r
def show_debug_msg(msg):
if settings['debug']:
print "DEBUG: " + msg
class CAddress(object):
def __init__(self):
self.nServices = 1
self.pchReserved = "\x00" * 10 + "\xff" * 2
self.ip = "0.0.0.0"
self.port = 0
def deserialize(self, f):
self.nServices = struct.unpack("<Q", f.read(8))[0]
self.pchReserved = f.read(12)
self.ip = socket.inet_ntoa(f.read(4))
self.port = struct.unpack(">H", f.read(2))[0]
def serialize(self):
r = ""
r += struct.pack("<Q", self.nServices)
r += self.pchReserved
r += socket.inet_aton(self.ip)
r += struct.pack(">H", self.port)
return r
def __repr__(self):
return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices, self.ip, self.port)
class CInv(object):
typemap = {
0: "Error",
1: "TX",
2: "Block"}
def __init__(self):
self.type = 0
self.hash = 0L
def deserialize(self, f):
self.type = struct.unpack("<i", f.read(4))[0]
self.hash = deser_uint256(f)
def serialize(self):
r = ""
r += struct.pack("<i", self.type)
r += ser_uint256(self.hash)
return r
def __repr__(self):
return "CInv(type=%s hash=%064x)" % (self.typemap[self.type], self.hash)
class CBlockLocator(object):
def __init__(self):
self.nVersion = MY_VERSION
self.vHave = []
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
self.vHave = deser_uint256_vector(f)
def serialize(self):
r = ""
r += struct.pack("<i", self.nVersion)
r += ser_uint256_vector(self.vHave)
return r
def __repr__(self):
return "CBlockLocator(nVersion=%i vHave=%s)" % (self.nVersion, repr(self.vHave))
class COutPoint(object):
def __init__(self):
self.hash = 0
self.n = 0
def deserialize(self, f):
self.hash = deser_uint256(f)
self.n = struct.unpack("<I", f.read(4))[0]
def serialize(self):
r = ""
r += ser_uint256(self.hash)
r += struct.pack("<I", self.n)
return r
def __repr__(self):
return "COutPoint(hash=%064x n=%i)" % (self.hash, self.n)
class CTxIn(object):
def __init__(self):
self.prevout = COutPoint()
self.scriptSig = ""
self.nSequence = 0
def deserialize(self, f):
self.prevout = COutPoint()
self.prevout.deserialize(f)
self.scriptSig = deser_string(f)
self.nSequence = struct.unpack("<I", f.read(4))[0]
def serialize(self):
r = ""
r += self.prevout.serialize()
r += ser_string(self.scriptSig)
r += struct.pack("<I", self.nSequence)
return r
def __repr__(self):
return "CTxIn(prevout=%s scriptSig=%s nSequence=%i)" % (
repr(self.prevout), binascii.hexlify(self.scriptSig), self.nSequence)
class CTxOut(object):
def __init__(self):
self.nValue = 0
self.scriptPubKey = ""
def deserialize(self, f):
self.nValue = struct.unpack("<q", f.read(8))[0]
self.scriptPubKey = deser_string(f)
def serialize(self):
r = ""
r += struct.pack("<q", self.nValue)
r += ser_string(self.scriptPubKey)
return r
def __repr__(self):
return "CTxOut(nValue=%i.%08i scriptPubKey=%s)" % (
self.nValue // 100000000, self.nValue % 100000000, binascii.hexlify(self.scriptPubKey))
class CTransaction(object):
def __init__(self):
self.nVersion = 1
self.vin = []
self.vout = []
self.nLockTime = 0
self.sha256 = None
self.hash = None
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
self.vin = deser_vector(f, CTxIn)
self.vout = deser_vector(f, CTxOut)
self.nLockTime = struct.unpack("<I", f.read(4))[0]
def serialize(self):
r = ""
r += struct.pack("<i", self.nVersion)
r += ser_vector(self.vin)
r += ser_vector(self.vout)
r += struct.pack("<I", self.nLockTime)
return r
def calc_sha256(self):
if self.sha256 is None:
self.sha256 = uint256_from_str(hash256(self.serialize()))
self.hash = hash256(self.serialize())[::-1].encode('hex_codec')
def is_valid(self):
self.calc_sha256()
for tout in self.vout:
if tout.nValue < 0 or tout.nValue > 21000000L * 100000000L:
return False
return True
def __repr__(self):
return "CTransaction(nVersion=%i vin=%s vout=%s nLockTime=%i)" % (
self.nVersion, repr(self.vin), repr(self.vout), self.nLockTime)
class CBlock(object):
def __init__(self):
self.nVersion = 1
self.hashPrevBlock = 0
self.hashMerkleRoot = 0
self.nTime = 0
self.nBits = 0
self.nNonce = 0
self.vtx = []
self.sha256 = None
self.hash = None
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
self.hashPrevBlock = deser_uint256(f)
self.hashMerkleRoot = deser_uint256(f)
self.nTime = struct.unpack("<I", f.read(4))[0]
self.nBits = struct.unpack("<I", f.read(4))[0]
self.nNonce = struct.unpack("<I", f.read(4))[0]
self.vtx = deser_vector(f, CTransaction)
def serialize(self):
r = ""
r += struct.pack("<i", self.nVersion)
r += ser_uint256(self.hashPrevBlock)
r += ser_uint256(self.hashMerkleRoot)
r += struct.pack("<I", self.nTime)
r += struct.pack("<I", self.nBits)
r += struct.pack("<I", self.nNonce)
r += ser_vector(self.vtx)
return r
def calc_sha256(self):
if self.sha256 is None:
r = ""
r += struct.pack("<i", self.nVersion)
r += ser_uint256(self.hashPrevBlock)
r += ser_uint256(self.hashMerkleRoot)
r += struct.pack("<I", self.nTime)
r += struct.pack("<I", self.nBits)
r += struct.pack("<I", self.nNonce)
self.sha256 = uint256_from_str(hash256(r))
self.hash = hash256(r)[::-1].encode('hex_codec')
def is_valid(self):
self.calc_sha256()
target = uint256_from_compact(self.nBits)
if self.sha256 > target:
return False
hashes = []
for tx in self.vtx:
if not tx.is_valid():
return False
tx.calc_sha256()
hashes.append(ser_uint256(tx.sha256))
while len(hashes) > 1:
newhashes = []
for i in xrange(0, len(hashes), 2):
i2 = min(i + 1, len(hashes) - 1)
newhashes.append(hash256(hashes[i] + hashes[i2]))
hashes = newhashes
if uint256_from_str(hashes[0]) != self.hashMerkleRoot:
return False
return True
def __repr__(self):
return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" % (
self.nVersion, self.hashPrevBlock, self.hashMerkleRoot, time.ctime(self.nTime), self.nBits, self.nNonce,
repr(self.vtx))
class CUnsignedAlert(object):
def __init__(self):
self.nVersion = 1
self.nRelayUntil = 0
self.nExpiration = 0
self.nID = 0
self.nCancel = 0
self.setCancel = []
self.nMinVer = 0
self.nMaxVer = 0
self.setSubVer = []
self.nPriority = 0
self.strComment = ""
self.strStatusBar = ""
self.strReserved = ""
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
self.nRelayUntil = struct.unpack("<q", f.read(8))[0]
self.nExpiration = struct.unpack("<q", f.read(8))[0]
self.nID = struct.unpack("<i", f.read(4))[0]
self.nCancel = struct.unpack("<i", f.read(4))[0]
self.setCancel = deser_int_vector(f)
self.nMinVer = struct.unpack("<i", f.read(4))[0]
self.nMaxVer = struct.unpack("<i", f.read(4))[0]
self.setSubVer = deser_string_vector(f)
self.nPriority = struct.unpack("<i", f.read(4))[0]
self.strComment = deser_string(f)
self.strStatusBar = deser_string(f)
self.strReserved = deser_string(f)
def serialize(self):
r = ""
r += struct.pack("<i", self.nVersion)
r += struct.pack("<q", self.nRelayUntil)
r += struct.pack("<q", self.nExpiration)
r += struct.pack("<i", self.nID)
r += struct.pack("<i", self.nCancel)
r += ser_int_vector(self.setCancel)
r += struct.pack("<i", self.nMinVer)
r += struct.pack("<i", self.nMaxVer)
r += ser_string_vector(self.setSubVer)
r += struct.pack("<i", self.nPriority)
r += ser_string(self.strComment)
r += ser_string(self.strStatusBar)
r += ser_string(self.strReserved)
return r
def __repr__(self):
return "CUnsignedAlert(nVersion %d, nRelayUntil %d, nExpiration %d, nID %d, nCancel %d, nMinVer %d, nMaxVer %d, nPriority %d, strComment %s, strStatusBar %s, strReserved %s)" % (
self.nVersion, self.nRelayUntil, self.nExpiration, self.nID, self.nCancel, self.nMinVer, self.nMaxVer,
self.nPriority, self.strComment, self.strStatusBar, self.strReserved)
class CAlert(object):
def __init__(self):
self.vchMsg = ""
self.vchSig = ""
def deserialize(self, f):
self.vchMsg = deser_string(f)
self.vchSig = deser_string(f)
def serialize(self):
r = ""
r += ser_string(self.vchMsg)
r += ser_string(self.vchSig)
return r
def __repr__(self):
return "CAlert(vchMsg.sz %d, vchSig.sz %d)" % (len(self.vchMsg), len(self.vchSig))
class msg_version(object):
command = "version"
def __init__(self):
self.nVersion = MY_VERSION
self.nServices = 1
self.nTime = time.time()
self.addrTo = CAddress()
self.addrFrom = CAddress()
self.nNonce = random.getrandbits(64)
self.strSubVer = MY_SUBVERSION
self.nStartingHeight = -1
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
if self.nVersion == 10300:
self.nVersion = 300
self.nServices = struct.unpack("<Q", f.read(8))[0]
self.nTime = struct.unpack("<q", f.read(8))[0]
self.addrTo = CAddress()
self.addrTo.deserialize(f)
if self.nVersion >= 106:
self.addrFrom = CAddress()
self.addrFrom.deserialize(f)
self.nNonce = struct.unpack("<Q", f.read(8))[0]
self.strSubVer = deser_string(f)
if self.nVersion >= 209:
self.nStartingHeight = struct.unpack("<i", f.read(4))[0]
else:
self.nStartingHeight = None
else:
self.addrFrom = None
self.nNonce = None
self.strSubVer = None
self.nStartingHeight = None
def serialize(self):
r = ""
r += struct.pack("<i", self.nVersion)
r += struct.pack("<Q", self.nServices)
r += struct.pack("<q", self.nTime)
r += self.addrTo.serialize()
r += self.addrFrom.serialize()
r += struct.pack("<Q", self.nNonce)
r += ser_string(self.strSubVer)
r += struct.pack("<i", self.nStartingHeight)
return r
def __repr__(self):
return "msg_version(nVersion=%i nServices=%i nTime=%s addrTo=%s addrFrom=%s nNonce=0x%016X strSubVer=%s nStartingHeight=%i)" % (
self.nVersion, self.nServices, time.ctime(self.nTime), repr(self.addrTo), repr(self.addrFrom), self.nNonce,
self.strSubVer, self.nStartingHeight)
class msg_verack(object):
command = "verack"
def __init__(self):
pass
def deserialize(self, f):
pass
def serialize(self):
return ""
def __repr__(self):
return "msg_verack()"
class msg_addr(object):
command = "addr"
def __init__(self):
self.addrs = []
def deserialize(self, f):
self.addrs = deser_vector(f, CAddress)
def serialize(self):
return ser_vector(self.addrs)
def __repr__(self):
return "msg_addr(addrs=%s)" % (repr(self.addrs))
class msg_alert(object):
command = "alert"
def __init__(self):
self.alert = CAlert()
def deserialize(self, f):
self.alert = CAlert()
self.alert.deserialize(f)
def serialize(self):
r = ""
r += self.alert.serialize()
return r
def __repr__(self):
return "msg_alert(alert=%s)" % (repr(self.alert),)
class msg_inv(object):
command = "inv"
def __init__(self):
self.inv = []
def deserialize(self, f):
self.inv = deser_vector(f, CInv)
def serialize(self):
return ser_vector(self.inv)
def __repr__(self):
return "msg_inv(inv=%s)" % (repr(self.inv))
class msg_getdata(object):
command = "getdata"
def __init__(self):
self.inv = []
def deserialize(self, f):
self.inv = deser_vector(f, CInv)
def serialize(self):
return ser_vector(self.inv)
def __repr__(self):
return "msg_getdata(inv=%s)" % (repr(self.inv))
class msg_getblocks(object):
command = "getblocks"
def __init__(self):
self.locator = CBlockLocator()
self.hashstop = 0L
def deserialize(self, f):
self.locator = CBlockLocator()
self.locator.deserialize(f)
self.hashstop = deser_uint256(f)
def serialize(self):
r = ""
r += self.locator.serialize()
r += ser_uint256(self.hashstop)
return r
def __repr__(self):
return "msg_getblocks(locator=%s hashstop=%064x)" % (repr(self.locator), self.hashstop)
class msg_tx(object):
command = "tx"
def __init__(self):
self.tx = CTransaction()
def deserialize(self, f):
self.tx.deserialize(f)
def serialize(self):
return self.tx.serialize()
def __repr__(self):
return "msg_tx(tx=%s)" % (repr(self.tx))
class msg_block(object):
command = "block"
def __init__(self):
self.block = CBlock()
def deserialize(self, f):
self.block.deserialize(f)
def serialize(self):
return self.block.serialize()
def __repr__(self):
return "msg_block(block=%s)" % (repr(self.block))
class msg_getaddr(object):
command = "getaddr"
def __init__(self):
pass
def deserialize(self, f):
pass
def serialize(self):
return ""
def __repr__(self):
return "msg_getaddr()"
class msg_checkpoint(object):
command = "msg_checkpoint"
def __init__(self):
pass
def deserialize(self, f):
pass
def serialize(self):
return ""
def __repr__(self):
return "msg_checkpoint()"
# msg_checkorder
# msg_submitorder
# msg_reply
class msg_ping(object):
command = "ping"
def __init__(self):
pass
def deserialize(self, f):
pass
def serialize(self):
return ""
def __repr__(self):
return "msg_ping()"
class NodeConn(asyncore.dispatcher):
messagemap = {
"version": msg_version,
"verack": msg_verack,
"addr": msg_addr,
"alert": msg_alert,
"inv": msg_inv,
"getdata": msg_getdata,
"getblocks": msg_getblocks,
"tx": msg_tx,
"block": msg_block,
"getaddr": msg_getaddr,
"ping": msg_ping,
"checkpoint": msg_checkpoint
}
MAGIC_BYTES = {
"mainnet": "\xa1\xa0\xa2\xa3", # mainnet
"testnet3": "\x0b\x11\x09\x07" # testnet3
}
def __init__(self, dstaddr, dstport):
asyncore.dispatcher.__init__(self)
self.dstaddr = dstaddr
self.dstport = dstport
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.sendbuf = ""
self.recvbuf = ""
self.ver_send = 209
self.ver_recv = 209
self.last_sent = 0
self.state = "connecting"
# stuff version msg into sendbuf
vt = msg_version()
vt.addrTo.ip = self.dstaddr
vt.addrTo.port = self.dstport
vt.addrFrom.ip = "0.0.0.0"
vt.addrFrom.port = 0
self.send_message(vt, True)
print "\n PyNode - MiniNode"
print " -------------------------------------------------------------------------"
print " Connecting to Bitcoin Node IP # " + settings['host'] + ":" + str(settings['port'])
try:
self.connect((dstaddr, dstport))
except:
self.handle_close()
self.send_message(vt, True)
def handle_connect(self):
print " Connected & Listening :)\n"
self.state = "connected"
#send version msg
t = msg_version()
t.addrTo.ip = self.dstaddr
t.addrTo.port = self.dstport
t.addrFrom.ip = "0.0.0.0"
t.addrFrom.port = 0
self.send_message(t)
def handle_close(self):
print " Closing Conection ... bye :)"
self.state = "closed"
self.recvbuf = ""
self.sendbuf = ""
try:
self.close()
except:
pass
def handle_read(self):
try:
t = self.recv(8192)
except:
self.handle_close()
return
if len(t) == 0:
print 'len is zero...'
self.handle_close()
return
self.recvbuf += t
self.got_data()
def readable(self):
return True
def writable(self):
return (len(self.sendbuf) > 0)
def handle_write(self):
try:
sent = self.send(self.sendbuf)
except:
self.handle_close()
return
self.sendbuf = self.sendbuf[sent:]
def got_data(self):
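        # Wire format: 4-byte network magic, 12-byte zero-padded command,
        # 4-byte little-endian payload length and, for protocol >= 209, a
        # 4-byte double-SHA256 checksum, followed by the payload itself.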
while True:
if len(self.recvbuf) < 4:
return
if self.recvbuf[:4] != self.MAGIC_BYTES[settings['network']]:
raise ValueError("got garbage %s" % repr(self.recvbuf))
if self.ver_recv < 209:
if len(self.recvbuf) < 4 + 12 + 4:
return
command = self.recvbuf[4:4 + 12].split("\x00", 1)[0]
msglen = struct.unpack("<i", self.recvbuf[4 + 12:4 + 12 + 4])[0]
checksum = None
if len(self.recvbuf) < 4 + 12 + 4 + msglen:
return
msg = self.recvbuf[4 + 12 + 4:4 + 12 + 4 + msglen]
self.recvbuf = self.recvbuf[4 + 12 + 4 + msglen:]
else:
if len(self.recvbuf) < 4 + 12 + 4 + 4:
return
command = self.recvbuf[4:4 + 12].split("\x00", 1)[0]
msglen = struct.unpack("<i", self.recvbuf[4 + 12:4 + 12 + 4])[0]
checksum = self.recvbuf[4 + 12 + 4:4 + 12 + 4 + 4]
if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen:
return
msg = self.recvbuf[4 + 12 + 4 + 4:4 + 12 + 4 + 4 + msglen]
th = sha256(msg)
h = sha256(th)
if checksum != h[:4]:
raise ValueError("got bad checksum %s" % repr(self.recvbuf))
self.recvbuf = self.recvbuf[4 + 12 + 4 + 4 + msglen:]
if command in self.messagemap:
f = cStringIO.StringIO(msg)
t = self.messagemap[command]()
t.deserialize(f)
self.got_message(t)
else:
show_debug_msg("Unknown command: '" + command + "' " + repr(msg))
def send_message(self, message, pushbuf=False):
if self.state != "connected" and not pushbuf:
return
show_debug_msg("Send %s" % repr(message))
print 'Sending Message...%s' % repr(message)
command = message.command
data = message.serialize()
tmsg = self.MAGIC_BYTES[settings['network']]
tmsg += command
tmsg += "\x00" * (12 - len(command))
tmsg += struct.pack("<I", len(data))
if self.ver_send >= 209:
th = sha256(data)
h = sha256(th)
tmsg += h[:4]
tmsg += data
self.sendbuf += tmsg
self.last_sent = time.time()
def got_message(self, message):
print 'got message'
if self.last_sent + 30 * 60 < time.time():
self.send_message(msg_ping())
show_debug_msg("Recv %s" % repr(message))
if message.command == "version":
if message.nVersion >= 209:
self.send_message(msg_verack())
print 'version is greater than 209'
self.ver_send = min(MY_VERSION, message.nVersion)
if message.nVersion < 209:
self.ver_recv = self.ver_send
print 'version is less than 209'
elif message.command == "verack":
self.ver_recv = self.ver_send
elif message.command == "inv":
want = msg_getdata()
for i in message.inv:
if i.type == 1:
want.inv.append(i)
elif i.type == 2:
want.inv.append(i)
if len(want.inv):
self.send_message(want)
elif message.command == "tx":
new_transaction_event(message.tx)
elif message.command == "block":
new_block_event(message.block)
if __name__ == '__main__':
if len(sys.argv) == 2:
f = open(sys.argv[1])
for line in f:
m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
if m is None:
continue
settings[m.group(1)] = m.group(2)
f.close()
settings['port'] = int(settings['port'])
c = NodeConn(settings['host'], settings['port'])
asyncore.loop()
| mit | 1,407,246,570,491,105,300 | 26.551062 | 186 | 0.527011 | false | 3.30439 | false | false | false |
kizniche/Mycodo | mycodo/scripts/restart_daemon.py | 1 | 2603 | # -*- coding: utf-8 -*-
import argparse
import logging
import time
import sys
import os
sys.path.append(os.path.abspath(os.path.join(__file__, "../../..")))
from mycodo.config import DAEMON_PID_FILE
from mycodo.config import KEEPUP_LOG_FILE
def check_daemon(print_msg=True, start_daemon=True):
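    # A PID file whose process no longer appears under /proc is stale: the
    # daemon died without cleaning up, so the file is removed and, optionally,
    # the service is restarted.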
if os.path.exists(DAEMON_PID_FILE):
with open(DAEMON_PID_FILE, 'r') as pid_file:
if not os.path.exists("/proc/{pid}".format(pid=pid_file.read())):
message = "Daemon is not running, restarting"
logging.info(message)
if print_msg:
print(message)
try:
os.remove(DAEMON_PID_FILE)
if start_daemon:
rcode = os.system('/usr/sbin/service mycodo restart')
if rcode != 0:
logging.error("Unable to execute restart command "
"{}".format(rcode))
except OSError as e:
message = "Unable to remove pid file: {}".format(e)
logging.warning(message)
if print_msg:
print(message)
else:
if print_msg:
message = "Daemon is currently running"
logging.info(message)
print(message)
elif print_msg:
message = "Mycodo previously shut down properly"
logging.info(message)
print(message)
def parseargs(par):
par.add_argument('-c', '--continuouscheck', action='store_true',
help="Continually check if the daemon has crashed and start it")
par.add_argument('-d', '--deletepid', action='store_true',
help="Only delete the PID file if the daemon isn't running. Don't start it.")
return par.parse_args()
if __name__ == '__main__':
log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
logging.basicConfig(filename=KEEPUP_LOG_FILE, format=log_format, level=logging.DEBUG)
parser = argparse.ArgumentParser(
description="Script to check if the Mycodo daemon has crashed and "
"restart it if so.")
args = parseargs(parser)
if args.continuouscheck:
print("Beginning monitor of the Mycodo daemon and start it if it is found to not be running")
while True:
check_daemon(print_msg=False)
time.sleep(30)
elif args.deletepid:
check_daemon(start_daemon=False)
else:
check_daemon()
| gpl-3.0 | 4,466,207,919,963,957,000 | 35.661972 | 101 | 0.551671 | false | 4.151515 | false | false | false |
marinho/PyNFe | pynfe/entidades/cliente.py | 1 | 1189 | # -*- coding: utf-8 -*-
from base import Entidade
from pynfe.utils.flags import TIPOS_DOCUMENTO, CODIGO_BRASIL
class Cliente(Entidade):
# Dados do Cliente
# - Nome/Razão Social (obrigatorio)
razao_social = str()
# - Tipo de Documento (obrigatorio) - default CNPJ - TIPOS_DOCUMENTO
tipo_documento = 'CNPJ'
# - Numero do Documento (obrigatorio)
numero_documento = str()
# - Inscricao Estadual
inscricao_estadual = str()
# - Inscricao SUFRAMA
inscricao_suframa = str()
# - Isento do ICMS (Sim/Nao)
isento_icms = False
# Endereco
# - Logradouro (obrigatorio)
endereco_logradouro = str()
# - Numero (obrigatorio)
endereco_numero = str()
# - Complemento
endereco_complemento = str()
# - Bairro (obrigatorio)
endereco_bairro = str()
# - CEP
endereco_cep = str()
# - Pais (seleciona de lista)
endereco_pais = CODIGO_BRASIL
# - UF (obrigatorio)
endereco_uf = str()
# - Municipio (obrigatorio)
endereco_municipio = str()
# - Telefone
endereco_telefone = str()
def __str__(self):
return ' '.join([self.tipo_documento, self.numero_documento])
| lgpl-3.0 | -1,672,870,928,814,430,500 | 20.6 | 72 | 0.62037 | false | 2.605263 | false | false | false |
samdroid-apps/ManageBacToTheFuture | lib/__init__.py | 1 | 4085 | # ManageBacToTheFuture: ManageBac for Humans
# Copyright (C) 2015 Sam Parkinson
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
import requests
from datetime import datetime
from bs4 import BeautifulSoup
from dateutil.relativedelta import relativedelta
from files import Files
from message import Messages
from calender import Calender
import errors
def login(username, password):
'''
Logs into ManageBac
Returns a token
Raises:
ManageBacCommunicationException, BadLogin
'''
r = requests.post('https://telopeapark.managebac.com/sessions',
data={'login': username, 'password': password})
if r.ok and r.status_code == 200:
if 'Invalid login or password, please try again.' in r.text:
# I wish managebac was more RESTful
raise errors.BadLogin
else:
return {'_managebac_session': r.cookies['_managebac_session']}
else:
raise errors.ManageBacCommunicationException
class Class():
'''
Represents a class on managebac
'''
def __init__(self, id_, name=None):
self.id_ = id_
self.name = name
def get_files(self, token):
'''
Get the class's files section
Returns :class:`managebac.files.Files`
'''
return Files('https://telopeapark.managebac.com/classes/'
'{}/assets'.format(self.id_), token)
def get_messages(self, token):
'''
Get the class's files section
Returns :class:`managebac.message.Messages`
'''
return Messages('https://telopeapark.managebac.com/classes/'
'{}/messages'.format(self.id_), token)
def get_calender(self, token, start=0, end=3000000000):
'''
Get the class's calender section
Returns :class:`managebac.calender.Calender`
'''
return Calender(self.id_, token, start=start, end=end)
def get_merged(self, token):
fil = self.get_files(token)
msg = self.get_messages(token)
cal = self.get_calender(token)
for m in msg:
if not m.loaded:
m.load(token)
l = fil + msg + cal
# <HACK>
# Naive convertion between tz and non-tz objects
for x in l:
x.time = x.time.replace(tzinfo=None)
# </HACK>
l.sort(key=lambda x: x.time)
l.reverse()
return l
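# Rough usage sketch (the credentials and class id below are placeholders):
# log in once, wrap a known class id and pull its merged activity stream.
def _example_fetch_class_feed(username, password, class_id):
    token = login(username, password)
    return Class(class_id).get_merged(token)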
class Classes(list):
'''
    Gets and holds a list of :class:`Class` objects for a given user.
Downloads the classes of the user behind the token.
Raises:
BadToken, ManageBacCommunicationException
'''
def __init__(self, token):
r = requests.get('https://telopeapark.managebac.com/home',
cookies=token)
if r.ok and r.status_code == 200:
soup = BeautifulSoup(r.text)
# Dashboard | Profile | MYP | [Classes] | Groups
menu = soup.find(id='menu').findAll('li')[3]
# The 1st a is just a link to a classes list
for a in menu.findAll('a')[1:]:
self.append(Class(
id_=int(re.search(
'/classes/([0-9]+)', a['href']).group(1)),
name=a.text[len('\nIB MYP\n\n'):].strip('\n')
))
elif r.status_code == 302:
raise errors.BadToken
else:
raise errors.ManageBacCommunicationException
| agpl-3.0 | 4,964,905,154,468,792,000 | 28.601449 | 74 | 0.602448 | false | 4.064677 | false | false | false |
miquelo/exectask | packages/exectask/__main__.py | 1 | 4231 | #!/.../python3
#
# This file is part of EXECTASK.
#
# EXECTASK is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# EXECTASK is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EXECTASK. If not, see <http://www.gnu.org/licenses/>.
#
from exectask.actions import *
from exectask.context import *
from exectask.merge import *
from exectask.printer import *
import argparse
import importlib
import json
import os
import os.path
import sys
import traceback
import types
#
# Main function
#
def main():
# Parse arguments
def argparse_directory(path):
if not os.path.exists(path):
msg = '\'{}\' directory does not exist'.format(path)
raise argparse.ArgumentTypeError(msg)
if not os.path.isdir(path):
msg = '\'{}\' is not a directory'.format(path)
raise argparse.ArgumentTypeError(msg)
if os.path.isabs(path):
return path
return os.path.abspath(path)
parser = argparse.ArgumentParser(
description='Executes tasks in a coordinated manner.'
)
parser.add_argument(
'taskfile',
type=argparse.FileType('r'),
nargs='+',
help='file containing a task definition'
)
parser.add_argument(
'-a',
metavar='actionsdir',
type=argparse_directory,
nargs='+',
required=False,
default=[],
dest='actionsdir',
help='action modules directory'
)
parser.add_argument(
'-s',
metavar='settingsfile',
type=argparse.FileType('r'),
nargs='+',
required=False,
default=[],
dest='settingsfile',
help='file with settings'
)
parser.add_argument(
'-v',
required=False,
action='store_true',
default=False,
dest='verbose',
help='verbose output'
)
parser.add_argument(
'-e',
required=False,
action='store_true',
default=False,
dest='exceptions',
help='show exceptions stack trace'
)
    args = parser.parse_args(sys.argv[1:])
# Create printer factory
if args.verbose:
printer_fact_level = 1
else:
printer_fact_level = 0
printer_fact = PrinterFactory(printer_fact_level)
# Create action catalog
actions = {}
catalog = ActionCatalog(actions, printer_fact)
for actionsdir in args.actionsdir:
for fname in os.listdir(actionsdir):
path = '{}/{}'.format(actionsdir, fname)
if os.path.isfile(path) and fname.endswith('.py'):
name = 'exectask.modules.{}'.format(fname[0:len(fname)-3])
action_catalog_load(catalog, printer_fact, name, path)
# Gather top level variables
variables = {}
for settingsfile in args.settingsfile:
try:
merge_dict(variables, json.loads(settingsfile.read()))
except:
printer = printer_fact.printer(sys.stderr)
msg = 'Warning: Could not read settings from'
msg = '{} file \'{}\''.format(msg, settingsfile.name)
printer.print(msg, 0, 'yellow')
# Execute tasks
    if args.taskfile:
context = ExecuteTaskContext(actions, printer_fact)
        for taskfile in args.taskfile:
try:
task = json.loads(taskfile.read())
except:
task = None
printer = printer_fact.printer(sys.stderr)
msg = 'Warning: Could not read task from'
msg = '{} file \'{}\''.format(msg, taskfile.name)
printer.print(msg, 0, 'yellow')
if task is not None:
try:
# Define built-in variable 'basedir'
dirname = os.path.dirname(taskfile.name)
basedir = '\'{}\''.format(os.path.abspath(dirname))
variables['basedir'] = basedir
# Execute task
context.execute_task(task, variables)
except BaseException as err:
printer = printer_fact.printer(sys.stderr)
msg = 'Error: There was a problem executing task'
msg = '{} from file \'{}\''.format(msg, taskfile.name)
msg = '{}\nCause: {}'.format(msg, err)
if args.exceptions:
msg = '{}:\n{}'.format(msg, traceback.format_exc())
printer.print(msg, 0, 'red', 'bright')
# Tasks was already executed
return 0
| gpl-3.0 | 5,693,101,122,583,059,000 | 25.778481 | 70 | 0.684708 | false | 3.272235 | false | false | false |
dougbenjamin/panda-harvester | pandaharvester/harvestercore/pilot_errors.py | 2 | 17567 | # Error codes : Taken from pilot1. To be removed once pilot2 API is ready
class PilotErrors(object):
""" Pilot error handling """
# error codes
ERR_UNKNOWNERROR = 0
ERR_GENERALERROR = 1008
ERR_DIRECTIOFILE = 1009 # harmless, just means that copy-to-scratch was skipped in favor or direct i/o access
ERR_GETDATAEXC = 1097
ERR_NOLOCALSPACE = 1098
ERR_STAGEINFAILED = 1099
ERR_REPNOTFOUND = 1100
ERR_LRCREGCONNREF = 1101
ERR_NOSUCHFILE = 1103
ERR_USERDIRTOOLARGE = 1104
ERR_LFCADDCSUMFAILED = 1105
ERR_STDOUTTOOBIG = 1106
ERR_MISSDBREL = 1107
ERR_FAILEDLCGREG = 1108
ERR_CMTCONFIG = 1109
ERR_SETUPFAILURE = 1110
ERR_RUNJOBEXC = 1111
ERR_PILOTEXC = 1112
ERR_GETLFCIMPORT = 1113
ERR_PUTLFCIMPORT = 1114
ERR_NFSSQLITE = 1115
ERR_QUEUEDATA = 1116
ERR_QUEUEDATANOTOK = 1117
ERR_CURLSPACE = 1118
ERR_DDMSPACE = 1119
ERR_NOSTMATCHDEST = 1120 # not used
ERR_NOLFCSFN = 1122
ERR_MISSINGGUID = 1123
ERR_OUTPUTFILETOOLARGE = 1124
ERR_NOPFC = 1130
ERR_PUTFUNCNOCALL = 1131
ERR_LRCREG = 1132
ERR_NOSTORAGE = 1133
ERR_MKDIR = 1134
ERR_FAILEDSIZELOCAL = 1135
ERR_FAILEDMD5LOCAL = 1136
ERR_STAGEOUTFAILED = 1137
ERR_FAILEDSIZE = 1138
ERR_PUTWRONGSIZE = 1139
ERR_FAILEDMD5 = 1140
ERR_PUTMD5MISMATCH = 1141
ERR_CHMODTRF = 1143
ERR_PANDAKILL = 1144
ERR_GETMD5MISMATCH = 1145
ERR_DYNTRFINST = 1146
ERR_FAILEDRM = 1148
ERR_TRFDOWNLOAD = 1149
ERR_LOOPINGJOB = 1150
ERR_GETTIMEOUT = 1151
ERR_PUTTIMEOUT = 1152
ERR_LOSTJOBNOTFINISHED = 1153
ERR_LOSTJOBLOGREG = 1154
ERR_LOSTJOBFILETRANSFER = 1155
ERR_LOSTJOBRECOVERY = 1156
ERR_LOSTJOBMAXEDOUT = 1158
ERR_LOSTJOBPFC = 1159
ERR_LRCREGSTRSIZE = 1160
ERR_LOSTJOBXML = 1161
ERR_LRCREGDUP = 1162
ERR_NOPROXY = 1163
ERR_MISSINGLOCALFILE = 1164
ERR_MISSINGOUTPUTFILE = 1165
ERR_SIGPIPE = 1166
ERR_MISSFILEXML = 1167
ERR_SIZETOOLARGE = 1168
ERR_FAILEDLFCREG = 1169
ERR_FAILEDADLOCAL = 1170
ERR_GETADMISMATCH = 1171
ERR_PUTADMISMATCH = 1172
ERR_PANDAMOVERFILENOTCACHED = 1173
ERR_PANDAMOVERTRANSFER = 1174
ERR_GETWRONGSIZE = 1175
ERR_NOCHILDPROCESSES = 1176
ERR_NOVOMSPROXY = 1177
ERR_NOSTAGEDFILES = 1178
ERR_FAILEDLFCGETREPS = 1179
ERR_GETGLOBUSSYSERR = 1180
ERR_PUTGLOBUSSYSERR = 1181
ERR_FAILEDLFCGETREP = 1182
ERR_GUIDSEXISTSINLRC = 1183
ERR_MISSINGPFC = 1184
ERR_NOSOFTWAREDIR = 1186
ERR_NOPAYLOADMETADATA = 1187
ERR_LCGGETTURLS = 1188
ERR_LCGGETTURLSTIMEOUT = 1189
ERR_LFNTOOLONG = 1190
ERR_ZEROFILESIZE = 1191
ERR_DBRELNOTYETTRANSFERRED = 1192
ERR_SEPROBLEM = 1193
ERR_NOFILEVERIFICATION = 1194
ERR_COMMANDTIMEOUT = 1195
ERR_GETFAILEDTOMOUNTNFS4 = 1196
ERR_GETPNFSSYSTEMERROR = 1197
ERR_MKDIRWORKDIR = 1199
ERR_KILLSIGNAL = 1200
ERR_SIGTERM = 1201
ERR_SIGQUIT = 1202
ERR_SIGSEGV = 1203
ERR_SIGXCPU = 1204
# ERR_USERKILL = 1205 # not used by pilot
ERR_SIGBUS = 1206
ERR_SIGUSR1 = 1207
ERR_NOPAYLOADOUTPUT = 1210
ERR_MISSINGINSTALLATION = 1211
ERR_PAYLOADOUTOFMEMORY = 1212
ERR_REACHEDMAXTIME = 1213
ERR_DAFSNOTALLOWED = 1214
ERR_NOTCPCONNECTION = 1215
ERR_NOPILOTTCPSERVER = 1216
ERR_CORECOUNTMISMATCH = 1217
ERR_RUNEVENTEXC = 1218
ERR_UUIDGEN = 1219
ERR_UNKNOWN = 1220
ERR_FILEEXIST = 1221
ERR_GETKEYPAIR = 1222
ERR_BADALLOC = 1223
ERR_ESRECOVERABLE = 1224
ERR_ESMERGERECOVERABLE = 1225
ERR_GLEXEC = 1226
ERR_ESATHENAMPDIED = 1227
ERR_ESFATAL = 1228
ERR_TEFATAL = 1229
ERR_TEBADURL = 1230
ERR_TEINVALIDGUID = 1231
ERR_TEWRONGGUID = 1232
ERR_TEHOSTNAME = 1233
ERR_EXECUTEDCLONEJOB = 1234
ERR_PAYLOADEXCEEDMAXMEM = 1235
ERR_FAILEDBYSERVER = 1236
ERR_ESKILLEDBYSERVER = 1237
ERR_NOEVENTS = 1238
ERR_OVERSUBSCRIBEDEVENTS = 1239
ERR_ESMESSAGESERVER = 1240
ERR_ESOBJECTSTORESETUP = 1241
ERR_CHKSUMNOTSUP = 1242
ERR_ESPREFETCHERDIED = 1243
ERR_NORELEASEFOUND = 1244
ERR_TOOFEWEVENTS = 1245
# internal error codes
ERR_DDMREG = 1
ERR_FILEONTAPE = 2
pilotError = {
ERR_UNKNOWNERROR : "",
ERR_GENERALERROR : "General pilot error, consult batch log",
ERR_GETDATAEXC : "Get function can not be called for staging input file",
ERR_NOLOCALSPACE : "No space left on local disk",
ERR_STAGEINFAILED : "Get error: Staging input file failed",
ERR_REPNOTFOUND : "Get error: Replica not found",
ERR_LRCREGCONNREF : "LRC registration error: Connection refused",
# 1102 : "Expected output file does not exist", # not used, see ERR_MISSINGOUTPUTFILE below
ERR_NOSUCHFILE : "No such file or directory",
ERR_USERDIRTOOLARGE : "User work directory too large",
ERR_LFCADDCSUMFAILED : "Put error: Failed to add file size and checksum to LFC",
ERR_STDOUTTOOBIG : "Payload stdout file too big",
ERR_MISSDBREL : "Get error: Missing DBRelease file",
ERR_FAILEDLCGREG : "Put error: LCG registration failed",
ERR_CMTCONFIG : "Required CMTCONFIG incompatible with WN",
ERR_SETUPFAILURE : "Failed during setup",
ERR_RUNJOBEXC : "Exception caught by RunJob*",
ERR_PILOTEXC : "Exception caught by pilot",
ERR_GETLFCIMPORT : "Get error: Failed to import LFC python module",
ERR_PUTLFCIMPORT : "Put error: Failed to import LFC python module",
ERR_NFSSQLITE : "NFS SQLite locking problems",
ERR_QUEUEDATA : "Pilot could not download queuedata",
ERR_QUEUEDATANOTOK : "Pilot found non-valid queuedata",
ERR_CURLSPACE : "Pilot could not curl space report",
ERR_DDMSPACE : "Pilot aborted due to DDM space shortage",
ERR_NOSTMATCHDEST : "Space token descriptor does not match destination path",
# 1121 : "Can not read the xml file for registering output files to dispatcher", # not used
ERR_NOLFCSFN : "Bad replica entry returned by lfc_getreplicas(): SFN not set in LFC for this guid",
ERR_MISSINGGUID : "Missing guid in output file list",
ERR_OUTPUTFILETOOLARGE : "Output file too large",
ERR_NOPFC : "Get error: Failed to get PoolFileCatalog",
ERR_PUTFUNCNOCALL : "Put function can not be called for staging out",
ERR_LRCREG : "LRC registration error (consult log file)",
ERR_NOSTORAGE : "Put error: Fetching default storage URL failed",
ERR_MKDIR : "Put error: Error in mkdir on localSE, not allowed or no available space",
ERR_FAILEDSIZELOCAL : "Could not get file size in job workdir",
ERR_FAILEDMD5LOCAL : "Error running md5sum on the file in job workdir",
ERR_STAGEOUTFAILED : "Put error: Error in copying the file from job workdir to localSE",
ERR_FAILEDSIZE : "Put error: could not get the file size on localSE",
ERR_PUTWRONGSIZE : "Put error: Problem with copying from job workdir to local SE: size mismatch",
ERR_FAILEDMD5 : "Put error: Error running md5sum on the file on local SE",
ERR_PUTMD5MISMATCH : "Put error: Problem with copying from job workdir to local SE: md5sum mismatch",
# 1142 : "Put error: failed to register the file on local SE", # not used
ERR_CHMODTRF : "Failed to chmod trf",
ERR_PANDAKILL : "This job was killed by panda server",
ERR_GETMD5MISMATCH : "Get error: md5sum mismatch on input file",
ERR_DYNTRFINST : "Trf installation dir does not exist and could not be installed",
# 1147 : "Put error: dccp returned readOnly", # not used
ERR_FAILEDRM : "Put error: Failed to remove readOnly file in dCache",
ERR_TRFDOWNLOAD : "wget command failed to download trf",
ERR_LOOPINGJOB : "Looping job killed by pilot",
ERR_GETTIMEOUT : "Get error: Input file staging timed out",
ERR_PUTTIMEOUT : "Put error: File copy timed out",
ERR_LOSTJOBNOTFINISHED : "Lost job was not finished",
ERR_LOSTJOBLOGREG : "Failed to register log file",
ERR_LOSTJOBFILETRANSFER : "Failed to move output files for lost job",
ERR_LOSTJOBRECOVERY : "Pilot could not recover job",
# 1157 : "Could not create log file", # not used
ERR_LOSTJOBMAXEDOUT : "Reached maximum number of recovery attempts",
ERR_LOSTJOBPFC : "Job recovery could not read PoolFileCatalog.xml file (guids lost)",
ERR_LRCREGSTRSIZE : "LRC registration error: file name string size exceeded limit of 250",
ERR_LOSTJOBXML : "Job recovery could not generate xml for remaining output files",
ERR_LRCREGDUP : "LRC registration error: Non-unique LFN",
ERR_NOPROXY : "Grid proxy not valid",
ERR_MISSINGLOCALFILE : "Get error: Local input file missing",
ERR_MISSINGOUTPUTFILE : "Put error: Local output file missing",
ERR_SIGPIPE : "Put error: File copy broken by SIGPIPE",
ERR_MISSFILEXML : "Get error: Input file missing in PoolFileCatalog.xml",
ERR_SIZETOOLARGE : "Get error: Total file size too large",
ERR_FAILEDLFCREG : "Put error: File registration failed",
ERR_FAILEDADLOCAL : "Error running adler32 on the file in job workdir",
ERR_GETADMISMATCH : "Get error: adler32 mismatch on input file",
ERR_PUTADMISMATCH : "Put error: adler32 mismatch on output file",
ERR_PANDAMOVERFILENOTCACHED : "PandaMover staging error: File is not cached",
ERR_PANDAMOVERTRANSFER : "PandaMover transfer failure",
ERR_GETWRONGSIZE : "Get error: Problem with copying from local SE to job workdir: size mismatch",
ERR_NOCHILDPROCESSES : "Pilot has no child processes (job wrapper has either crashed or did not send final status)",
ERR_NOVOMSPROXY : "Voms proxy not valid",
ERR_NOSTAGEDFILES : "Get error: No input files are staged",
ERR_FAILEDLFCGETREPS : "Get error: Failed to get replicas",
ERR_GETGLOBUSSYSERR : "Get error: Globus system error",
ERR_PUTGLOBUSSYSERR : "Put error: Globus system error",
ERR_FAILEDLFCGETREP : "Get error: Failed to get replica",
ERR_GUIDSEXISTSINLRC : "LRC registration error: Guid-metadata entry already exists",
ERR_MISSINGPFC : "Put error: PoolFileCatalog could not be found in workdir",
# 1185 : "Put error: Error running adler32 on the file in job workdir", # not used
ERR_NOSOFTWAREDIR : "Software directory does not exist",
ERR_NOPAYLOADMETADATA : "Payload metadata is not available",
ERR_LCGGETTURLS : "lcg-getturls failed",
ERR_LCGGETTURLSTIMEOUT : "lcg-getturls was timed-out",
ERR_LFNTOOLONG : "LFN too long (exceeding limit of 150 characters)",
ERR_ZEROFILESIZE : "Illegal zero file size",
ERR_DBRELNOTYETTRANSFERRED : "DBRelease file has not been transferred yet",
ERR_NOFILEVERIFICATION : "File verification failed",
ERR_COMMANDTIMEOUT : "Command timed out",
ERR_GETFAILEDTOMOUNTNFS4 : "Get error: Failed to mount NSF4",
ERR_GETPNFSSYSTEMERROR : "Get error: PNFS system error",
# 1198 : "Can not check the child process status from the heartbeat process", # not used
ERR_MKDIRWORKDIR : "Could not create directory",
ERR_KILLSIGNAL : "Job terminated by unknown kill signal",
ERR_SIGTERM : "Job killed by signal: SIGTERM",
ERR_SIGQUIT : "Job killed by signal: SIGQUIT",
ERR_SIGSEGV : "Job killed by signal: SIGSEGV",
ERR_SIGXCPU : "Job killed by signal: SIGXCPU",
ERR_SIGUSR1 : "Job killed by signal: SIGUSR1",
ERR_SIGBUS : "Job killed by signal: SIGBUS",
ERR_NOPAYLOADOUTPUT : "No payload output",
ERR_MISSINGINSTALLATION : "Missing installation",
ERR_PAYLOADOUTOFMEMORY : "Payload ran out of memory",
ERR_REACHEDMAXTIME : "Reached batch system time limit",
ERR_DAFSNOTALLOWED : "Site does not allow requested direct access or file stager",
ERR_NOTCPCONNECTION : "Failed to open TCP connection to localhost (worker node network problem)",
ERR_NOPILOTTCPSERVER : "Pilot TCP server has died",
ERR_CORECOUNTMISMATCH : "Mismatch between core count in job and queue definition",
ERR_RUNEVENTEXC : "Exception caught by runEvent",
ERR_UUIDGEN : "uuidgen failed to produce a guid",
ERR_UNKNOWN : "Job failed due to unknown reason (consult log file)",
ERR_FILEEXIST : "File already exist",
ERR_GETKEYPAIR : "Failed to get security key pair",
ERR_BADALLOC : "TRF failed due to bad_alloc",
ERR_ESMERGERECOVERABLE : "Recoverable Event Service Merge error",
ERR_ESRECOVERABLE: "Recoverable Event Service error",
ERR_GLEXEC: "gLExec related error",
ERR_ESATHENAMPDIED: "AthenaMP ended Event Service job prematurely",
ERR_ESFATAL: "Fatal Event Service error",
ERR_TEFATAL: "Fatal Token Extractor error",
ERR_TEHOSTNAME: "Token Extractor error: Host name could not be resolved",
ERR_TEBADURL: "Token Extractor error: Bad URL",
ERR_TEINVALIDGUID: "Token Extractor error: Invalid GUID length",
ERR_TEWRONGGUID: "Token Extractor error: No tokens for this GUID",
ERR_EXECUTEDCLONEJOB: "Already executed clone job",
ERR_PAYLOADEXCEEDMAXMEM: "Payload exceeded maximum allowed memory",
ERR_FAILEDBYSERVER: "Failed by server",
ERR_ESKILLEDBYSERVER: "Event Service job killed by server",
ERR_NOEVENTS: "Event Service no available events",
ERR_OVERSUBSCRIBEDEVENTS: "Event Service over subscribed events",
ERR_ESMESSAGESERVER: "Event service message server error",
ERR_ESOBJECTSTORESETUP: "Event service objectstore setup",
ERR_CHKSUMNOTSUP: "Mover error: query checksum is not supported",
ERR_ESPREFETCHERDIED: "Prefetcher ended Event Service job prematurely",
ERR_NORELEASEFOUND: "No release candidates found",
ERR_TOOFEWEVENTS: "Too few events, less events than minimal requirement",
}
getErrorCodes = [1097, 1099, 1100, 1103, 1107, 1113, 1130, 1145, 1151, 1164, 1167, 1168, 1171, 1175, 1178, 1179, 1180, 1182]
putErrorCodes = [1101, 1114, 1122, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1140, 1141, 1152, 1154, 1155, 1181]
recoverableErrorCodes = [0] + putErrorCodes
# Error codes that will issue a Pilot-controlled resubmission
PilotResubmissionErrorCodes = [1008, 1098, 1099, 1110, 1113, 1114, 1115, 1116, 1117, 1137, 1139, 1151, 1152, 1171, 1172, 1177, 1179, 1180, 1181, 1182, 1188, 1189, 1195, 1196, 1197, 1219]
# Error codes used with FAX fail-over (only an error code in this list will allow FAX fail-over)
PilotFAXErrorCodes = [1103] + PilotResubmissionErrorCodes
def getPilotErrorDiag(self, code=0):
""" Return text corresponding to error code """
pilotErrorDiag = ""
if code in self.pilotError.keys():
pilotErrorDiag = self.pilotError[code]
else:
pilotErrorDiag = "Unknown pilot error code"
return pilotErrorDiag
def isGetErrorCode(self, code=0):
""" Determine whether code is in the put error list or not """
state = False
if code in self.getErrorCodes:
state = True
return state
def isPutErrorCode(self, code=0):
""" Determine whether code is in the put error list or not """
state = False
if code in self.putErrorCodes:
state = True
return state
@classmethod
    def isRecoverableErrorCode(cls, code=0):
        """ Determine whether code is a recoverable error code or not """
        return code in cls.recoverableErrorCodes
def isPilotResubmissionErrorCode(self, code=0):
""" Determine whether code issues a Pilot-controlled resubmission """
state = False
if code in self.PilotResubmissionErrorCodes:
state = True
return state
def isPilotFAXErrorCode(self, code=0):
""" Determine whether code allows for a FAX fail-over """
state = False
if code in self.PilotFAXErrorCodes:
state = True
return state
@classmethod
    def getErrorStr(cls, code):
        """
        Avoids exception if an error is not in the dictionary.
        An empty string is returned if the error is not in the dictionary.
        """
        return cls.pilotError.get(code, '')
def getErrorName(self, code):
""" From the error code to get the error name"""
for k in self.__class__.__dict__.keys():
if self.__class__.__dict__[k] == code:
return k
return None
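# Small lookup sketch: map a known error code to its description and check
# whether the pilot treats it as recoverable.
def _example_error_lookup():
    errors = PilotErrors()
    code = PilotErrors.ERR_STAGEINFAILED
    return errors.getPilotErrorDiag(code), PilotErrors.isRecoverableErrorCode(code)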
class PilotException(Exception):
def __init__(self, message, code=PilotErrors.ERR_GENERALERROR, state='', *args):
self.code = code
self.state = state
self.message = message
super(PilotException, self).__init__(*args)
@property
def code(self):
return self._code
@code.setter
def code(self, code):
self._code = code
self.code_description = PilotErrors.getErrorStr(code)
def __str__(self):
return "%s: %s: %s%s" % (self.__class__.__name__, self.code, self.message, ' : %s' % self.args if self.args else '')
def __repr__(self):
return "%s: %s: %s%s" % (self.__class__.__name__, repr(self.code), repr(self.message), ' : %s' % repr(self.args) if self.args else '')
| apache-2.0 | -2,309,670,009,693,079,600 | 43.473418 | 190 | 0.667729 | false | 3.475173 | false | false | false |
filannim/ManTIME | mantime.py | 1 | 3325 | #!/usr/bin/env python
#
# Copyright 2014 Michele Filannino
#
# gnTEAM, School of Computer Science, University of Manchester.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the GNU General Public License.
#
# author: Michele Filannino
# email: [email protected]
#
# For details, see www.cs.man.ac.uk/~filannim/
import argparse
import codecs
import glob
import logging
import os
from mantime.mantime import ManTIME
from mantime.readers import TempEval3FileReader
from mantime.writers import TempEval3Writer
from mantime.attributes_extractor import FullExtractor
def main():
""" It annotates documents in a specific folder.
"""
logging.basicConfig(format='%(asctime)s: %(message)s',
level=logging.DEBUG,
datefmt='%m/%d/%Y %I:%M:%S %p')
# Parse input
parser = argparse.ArgumentParser(
description='ManTIME: temporal information extraction')
parser.add_argument('mode', choices=['train', 'test'],
help='Train or Test mode?')
parser.add_argument('input_folder', help='Input data folder path')
parser.add_argument('model',
help='Name of the model to use (case sensitive)')
parser.add_argument('-v', '--version', help='show the version and exit',
action='store_true')
parser.add_argument('-ppp', '--post_processing_pipeline',
action='store_true',
help='it uses the post processing pipeline.')
args = parser.parse_args()
# ManTIME
mantime = ManTIME(reader=TempEval3FileReader(),
writer=TempEval3Writer(),
extractor=FullExtractor(),
model_name=args.model,
pipeline=args.post_processing_pipeline)
if args.mode == 'train':
# Training
mantime.train(args.input_folder)
else:
# Testing
        assert os.path.exists(args.input_folder), 'Input folder not found.'
input_files = os.path.join(args.input_folder, '*.*')
documents = sorted(glob.glob(input_files))
assert documents, 'Input folder is empty.'
for index, doc in enumerate(documents, start=1):
basename = os.path.basename(doc)
writein = os.path.join('./output/', basename)
position = '[{}/{}]'.format(index, len(documents))
# if writein not in glob.glob('./output/*.*'):
file_path = '.'.join(writein.split('.')[:-1])
with codecs.open(file_path, 'w', encoding='utf8') as output:
# try:
logging.info('{} Doc {}.'.format(position, basename))
output.write(mantime.label(doc)[0])
logging.info('{} Doc {} annotated.'.format(position,
basename))
# except Exception:
# logging.error('{} Doc {} ** skipped **!'.format(
# position, basename))
# os.remove(file_path)
# else:
# logging.info('{} Doc {} already in output folder.'.format(
# position, basename))
if __name__ == '__main__':
main()
| bsd-2-clause | 6,540,255,438,117,947,000 | 38.583333 | 76 | 0.563609 | false | 4.268293 | false | false | false |
chrox/RealTimeElectrophy | Experimenter/Experiments/script/mangrating.py | 1 | 2018 | # Demo program using ManGrating.
#
# Copyright (C) 2010-2011 Huang Xin
#
# See LICENSE.TXT that came with this file.
"""
USAGE:
Move the mouse cursor to change the position of the grating.
Scroll the mouse wheel to change the orientation.
Press right arrow to increase the spatial frequency.
Press left arrow to decrease the spatial frequency.
Press up arrow to increase the temporal frequency.
...
"""
from __future__ import division
from StimControl.LightStim.Core import DefaultScreen
from StimControl.LightStim.LightData import dictattr
from StimControl.LightStim.FrameControl import FrameSweep
from StimControl.LightStim.ManGrating import ManGrating
# Manual Grating experiment parameters, all must be scalars
DefaultScreen(['control','left','right'])
p = dictattr()
# mask, one of: None, 'gaussian', or 'circle'
p.mask = 'circle'
p.maskSizeStepDeg = 0.5
# initial grating phase
p.phase0 = 0
# grating mean luminance (0-1)
p.ml = 0.5
# grating contrast (0-1)
p.contrast = 1
# background brightness (0-1)
p.bgbrightness = 0
# antialiase the bar?
p.antialiase = True
# flash the grating?
p.flash = False
# duration of each on period (sec)
p.flashduration = 0.5
# duration of each off period (sec)
p.flashinterval = 0.3
# factor to chage bar width and height by left/right/up/down key
p.sizemultiplier = 1.02
# factor to change temporal freq by on up/down
p.tfreqmultiplier = 1.01
# factor to change spatial freq by on left/right
p.sfreqmultiplier = 1.01
# factor to change contrast by on +/-
p.contrastmultiplier = 1.005
# orientation step size to snap to when scrolling mouse wheel (deg)
p.snapDeg = 12
stimulus_control = ManGrating(disp_info=True, params=p, viewport='control')
stimulus_left = ManGrating(disp_info=False, params=p, viewport='left')
stimulus_right = ManGrating(disp_info=False, params=p, viewport='right')
sweep = FrameSweep()
sweep.add_stimulus(stimulus_control)
sweep.add_stimulus(stimulus_left)
sweep.add_stimulus(stimulus_right)
sweep.go() | bsd-2-clause | 2,272,019,656,601,708,800 | 30.546875 | 75 | 0.744301 | false | 3.244373 | false | true | false |
feltus/BDSS | client/client/actions/transfer_action.py | 1 | 6074 | # Big Data Smart Socket
# Copyright (C) 2016 Clemson University
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
import argparse
import logging
import os
import traceback
import tempfile
import requests
from ..config import client_destination, metadata_repository_url, dtn_host, dtn_user, dtn_path
from ..transfer.base import Transfer
from ..transfer.data import run_data_transfer
from ..transfer.mechanisms import available_mechanisms
from ..transfer.reporting import ReportsFile, send_report
from ..util import run_subprocess
cli_help = "Download data file(s)."
logger = logging.getLogger("bdss")
def configure_parser(parser):
input_group = parser.add_mutually_exclusive_group(required=True)
input_group.add_argument("manifest_file",
help="File containing a list of URLs to transfer",
nargs="?",
type=argparse.FileType("r"))
input_group.add_argument("--urls", "-u",
dest="urls",
help="URL(s) of data files to transfer",
metavar="URL",
nargs="+")
parser.add_argument("--destination", "-d",
dest="destination_directory",
default=os.getcwd(),
help="Path to directory to store transferred files in")
parser.add_argument("--dry-run",
action="store_true",
help="Display available sources for files, but do not transfer")
parser.add_argument("--transfer-report", "-t",
dest="report_file",
help="Path to write transfer report to",
type=argparse.FileType("w"))
def output_file_name(url):
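    # Strip any query string, then keep whatever follows the final "/":
    # e.g. a hypothetical "http://ftp.example.org/reads/s1.fastq?token=abc"
    # reduces to "s1.fastq".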
return url.partition("?")[0].rpartition("/")[2]
def get_transfers(url, mechanisms):
transfers = []
data = dict(
available_mechanisms=mechanisms,
url=url
)
if client_destination:
data["destination"] = client_destination
logger.info("Requesting transfers for %s" % url)
try:
response = requests.post("%s/transfers" % metadata_repository_url,
data=data,
headers={"Accept": "application/json"})
response = response.json()
transfers = [Transfer(**r) for r in response["transfers"]]
if not transfers:
logger.warn("Received no transfers")
except:
logger.warn("Request for transfers failed")
logger.debug(traceback.format_exc())
# As a last resort, fall back to original URL and its default mechanism
# Defaults are defined in mechanisms/__init__ module
default_transfer = Transfer(url)
if default_transfer not in transfers:
transfers.append(default_transfer)
return transfers
def handle_dtn_action(args, parser, reports_file):
# We want to download into a temporary file.
tf = tempfile.NamedTemporaryFile()
dest_dir = "".join([dtn_path, '/', tf.name])
conn_str = "%s@%s" % (dtn_user, dtn_host)
logger.info("Initiating transfer with DTN: %s", conn_str)
# Download the files using the DTN by calling BDSS on that server instead.
bdss_cmd = " ".join(['bdss', 'transfer', '--urls', " ".join(args.urls), '--destination', dest_dir])
run_subprocess(["ssh", conn_str, bdss_cmd])
# Move the files to where they should be.
logger.info("Copying files from DTN...")
run_subprocess(["scp", "-r", "%s:%s/*" % (conn_str, dest_dir), args.destination_directory])
# Finally delete the files on the remote server
logger.info("Removing from from DTN...")
run_subprocess(["ssh", conn_str, "rm -rf %s" % dest_dir])
def handle_local_action(args, parser, reports_file):
for url in args.urls:
output_path = os.path.abspath(os.path.join(args.destination_directory, output_file_name(url)))
if os.path.isfile(output_path):
logger.warn("File at %s already exists at %s", url, output_path)
continue
transfers = get_transfers(url, available_mechanisms())
logger.info("%d transfer(s) for %s", len(transfers), url)
logger.info("------------------")
for t in transfers:
logger.info(str(t))
if args.dry_run:
continue
transfer_success = False
for t in transfers:
report = run_data_transfer(t, output_path)
if report.success:
transfer_success = True
logger.info("Transfer successful")
logger.debug(report)
send_report(report)
if reports_file:
reports_file.write_report(report)
break
else:
logger.warn("Transfer failed")
send_report(report)
if not transfer_success:
logger.error("Failed to transfer file")
def handle_action(args, parser):
if args.manifest_file:
args.urls = [line.strip() for line in args.manifest_file if line.strip()]
os.makedirs(args.destination_directory, exist_ok=True)
reports_file = ReportsFile(args.report_file) if args.report_file else None
if dtn_host:
handle_dtn_action(args, parser, reports_file)
else:
handle_local_action(args, parser, reports_file)
| gpl-2.0 | 3,516,957,681,346,837,000 | 33.511364 | 103 | 0.611129 | false | 4.143247 | false | false | false |
wangyum/tensorflow | tensorflow/contrib/session_bundle/gc.py | 47 | 6397 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""System for specifying garbage collection (GC) of path based data.
This framework allows for GC of data specified by path names, for example files
on disk. gc.Path objects each represent a single item stored at a path and may
be a base directory,
/tmp/exports/0/...
/tmp/exports/1/...
...
or a fully qualified file,
/tmp/train-1.ckpt
/tmp/train-2.ckpt
...
A gc filter function takes and returns a list of gc.Path items. Filter
functions are responsible for selecting Path items for preservation or deletion.
Note that functions should always return a sorted list.
For example,
base_dir = "/tmp"
# create the directories
for e in xrange(10):
os.mkdir("%s/%d" % (base_dir, e), 0o755)
# create a simple parser that pulls the export_version from the directory
def parser(path):
match = re.match("^" + base_dir + "/(\\d+)$", path.path)
if not match:
return None
return path._replace(export_version=int(match.group(1)))
  all_paths = gc.get_paths("/tmp", parser) # contains all ten Paths
  every_fifth = gc.mod_export_version(5)
  print every_fifth(all_paths) # shows ["/tmp/0", "/tmp/5"]
largest_three = gc.largest_export_versions(3)
print largest_three(all_paths) # shows ["/tmp/7", "/tmp/8", "/tmp/9"]
both = gc.union(every_fifth, largest_three)
print both(all_paths) # shows ["/tmp/0", "/tmp/5",
# "/tmp/7", "/tmp/8", "/tmp/9"]
# delete everything not in 'both'
to_delete = gc.negation(both)
for p in to_delete(all_paths):
gfile.DeleteRecursively(p.path) # deletes: "/tmp/1", "/tmp/2",
# "/tmp/3", "/tmp/4", "/tmp/6",
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import heapq
import math
import os
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.platform import gfile
from tensorflow.python.util.deprecation import deprecated
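# A Path pairs a filesystem path with an optional integer export_version.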
Path = collections.namedtuple('Path', 'path export_version')
@deprecated('2017-06-30', 'Please use SavedModel instead.')
def largest_export_versions(n):
"""Creates a filter that keeps the largest n export versions.
Args:
n: number of versions to keep.
Returns:
A filter function that keeps the n largest paths.
"""
def keep(paths):
heap = []
for idx, path in enumerate(paths):
if path.export_version is not None:
heapq.heappush(heap, (path.export_version, idx))
keepers = [paths[i] for _, i in heapq.nlargest(n, heap)]
return sorted(keepers)
return keep
@deprecated('2017-06-30', 'Please use SavedModel instead.')
def one_of_every_n_export_versions(n):
r"""Creates a filter that keeps one of every n export versions.
Args:
n: interval size.
Returns:
A filter function that keeps exactly one path from each interval
[0, n], (n, 2n], (2n, 3n], etc... If more than one path exists in an
interval the largest is kept.
"""
def keep(paths):
keeper_map = {} # map from interval to largest path seen in that interval
for p in paths:
if p.export_version is None:
# Skip missing export_versions.
continue
# Find the interval (with a special case to map export_version = 0 to
# interval 0.
interval = math.floor(
(p.export_version - 1) / n) if p.export_version else 0
existing = keeper_map.get(interval, None)
if (not existing) or (existing.export_version < p.export_version):
keeper_map[interval] = p
return sorted(keeper_map.values())
return keep
@deprecated('2017-06-30', 'Please use SavedModel instead.')
def mod_export_version(n):
"""Creates a filter that keeps every export that is a multiple of n.
Args:
n: step size.
Returns:
A filter function that keeps paths where export_version % n == 0.
"""
def keep(paths):
keepers = []
for p in paths:
if p.export_version % n == 0:
keepers.append(p)
return sorted(keepers)
return keep
@deprecated('2017-06-30', 'Please use SavedModel instead.')
def union(lf, rf):
"""Creates a filter that keeps the union of two filters.
Args:
lf: first filter
rf: second filter
Returns:
A filter function that keeps the n largest paths.
"""
def keep(paths):
l = set(lf(paths))
r = set(rf(paths))
return sorted(list(l|r))
return keep
@deprecated('2017-06-30', 'Please use SavedModel instead.')
def negation(f):
"""Negate a filter.
Args:
f: filter function to invert
Returns:
A filter function that returns the negation of f.
"""
def keep(paths):
l = set(paths)
r = set(f(paths))
return sorted(list(l-r))
return keep
@deprecated('2017-06-30', 'Please use SavedModel instead.')
def get_paths(base_dir, parser):
"""Gets a list of Paths in a given directory.
Args:
base_dir: directory.
parser: a function which gets the raw Path and can augment it with
information such as the export_version, or ignore the path by returning
None. An example parser may extract the export version from a path
such as "/tmp/exports/100" an another may extract from a full file
name such as "/tmp/checkpoint-99.out".
Returns:
A list of Paths contained in the base directory with the parsing function
applied.
By default the following fields are populated,
- Path.path
The parsing function is responsible for populating,
- Path.export_version
"""
raw_paths = gfile.ListDirectory(base_dir)
paths = []
for r in raw_paths:
p = parser(Path(os.path.join(base_dir, r), None))
if p:
paths.append(p)
return sorted(paths)
| apache-2.0 | -2,593,996,059,534,440,400 | 29.317536 | 80 | 0.663436 | false | 3.670109 | false | false | false |
ohadshacham/phoenix | bin/queryserver.py | 2 | 7815 | #!/usr/bin/env python
############################################################################
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
############################################################################
#
# Script to handle launching the query server process.
#
# usage: queryserver.py [start|stop|makeWinServiceDesc] [-Dhadoop=configs]
#
import datetime
import getpass
import os
import os.path
import signal
import subprocess
import sys
import tempfile
try:
import daemon
daemon_supported = True
except ImportError:
# daemon script not supported on some platforms (windows?)
daemon_supported = False
import phoenix_utils
phoenix_utils.setPath()
command = None
args = sys.argv
if len(args) > 1:
if args[1] == 'start':
command = 'start'
elif args[1] == 'stop':
command = 'stop'
elif args[1] == 'makeWinServiceDesc':
command = 'makeWinServiceDesc'
if command:
# Pull off queryserver.py and the command
args = args[2:]
else:
# Just pull off queryserver.py
args = args[1:]
if os.name == 'nt':
args = subprocess.list2cmdline(args)
else:
import pipes # pipes module isn't available on Windows
args = " ".join([pipes.quote(v) for v in args])
# HBase configuration folder path (where hbase-site.xml reside) for
# HBase/Phoenix client side property override
hbase_config_path = phoenix_utils.hbase_conf_dir
hadoop_config_path = phoenix_utils.hadoop_conf
hadoop_classpath = phoenix_utils.hadoop_classpath
# TODO: add windows support
phoenix_file_basename = 'phoenix-%s-queryserver' % getpass.getuser()
phoenix_log_file = '%s.log' % phoenix_file_basename
phoenix_out_file = '%s.out' % phoenix_file_basename
phoenix_pid_file = '%s.pid' % phoenix_file_basename
# load hbase-env.??? to extract JAVA_HOME, HBASE_PID_DIR, HBASE_LOG_DIR
hbase_env_path = None
hbase_env_cmd = None
if os.name == 'posix':
hbase_env_path = os.path.join(hbase_config_path, 'hbase-env.sh')
hbase_env_cmd = ['bash', '-c', 'source %s && env' % hbase_env_path]
elif os.name == 'nt':
hbase_env_path = os.path.join(hbase_config_path, 'hbase-env.cmd')
hbase_env_cmd = ['cmd.exe', '/c', 'call %s & set' % hbase_env_path]
if not hbase_env_path or not hbase_env_cmd:
print >> sys.stderr, "hbase-env file unknown on platform %s" % os.name
sys.exit(-1)
hbase_env = {}
if os.path.isfile(hbase_env_path):
p = subprocess.Popen(hbase_env_cmd, stdout = subprocess.PIPE)
for x in p.stdout:
(k, _, v) = x.partition('=')
hbase_env[k.strip()] = v.strip()
java_home = hbase_env.get('JAVA_HOME') or os.getenv('JAVA_HOME')
if java_home:
java = os.path.join(java_home, 'bin', 'java')
else:
java = 'java'
tmp_dir = os.path.join(tempfile.gettempdir(), 'phoenix')
opts = os.getenv('PHOENIX_QUERYSERVER_OPTS') or hbase_env.get('PHOENIX_QUERYSERVER_OPTS') or ''
pid_dir = os.getenv('PHOENIX_QUERYSERVER_PID_DIR') or hbase_env.get('HBASE_PID_DIR') or tmp_dir
log_dir = os.getenv('PHOENIX_QUERYSERVER_LOG_DIR') or hbase_env.get('HBASE_LOG_DIR') or tmp_dir
pid_file_path = os.path.join(pid_dir, phoenix_pid_file)
log_file_path = os.path.join(log_dir, phoenix_log_file)
out_file_path = os.path.join(log_dir, phoenix_out_file)
# " -Xdebug -Xrunjdwp:transport=dt_socket,address=5005,server=y,suspend=n " + \
# " -XX:+UnlockCommercialFeatures -XX:+FlightRecorder -XX:FlightRecorderOptions=defaultrecording=true,dumponexit=true" + \
# The command is run through subprocess so environment variables are automatically inherited
java_cmd = '%(java)s -cp ' + hbase_config_path + os.pathsep + hadoop_config_path + os.pathsep + \
phoenix_utils.phoenix_client_jar + os.pathsep + phoenix_utils.phoenix_loadbalancer_jar + \
os.pathsep + phoenix_utils.phoenix_queryserver_jar + os.pathsep + hadoop_classpath + \
" -Dproc_phoenixserver" + \
" -Dlog4j.configuration=file:" + os.path.join(phoenix_utils.current_dir, "log4j.properties") + \
" -Dpsql.root.logger=%(root_logger)s" + \
" -Dpsql.log.dir=%(log_dir)s" + \
" -Dpsql.log.file=%(log_file)s" + \
" " + opts + \
" org.apache.phoenix.queryserver.server.QueryServer " + args
if command == 'makeWinServiceDesc':
cmd = java_cmd % {'java': java, 'root_logger': 'INFO,DRFA,console', 'log_dir': log_dir, 'log_file': phoenix_log_file}
slices = cmd.split(' ')
print "<service>"
print " <id>queryserver</id>"
print " <name>Phoenix Query Server</name>"
print " <description>This service runs the Phoenix Query Server.</description>"
print " <executable>%s</executable>" % slices[0]
print " <arguments>%s</arguments>" % ' '.join(slices[1:])
print "</service>"
sys.exit()
if command == 'start':
if not daemon_supported:
print >> sys.stderr, "daemon mode not supported on this platform"
sys.exit(-1)
# run in the background
d = os.path.dirname(out_file_path)
if not os.path.exists(d):
os.makedirs(d)
with open(out_file_path, 'a+') as out:
context = daemon.DaemonContext(
pidfile = daemon.PidFile(pid_file_path, 'Query Server already running, PID file found: %s' % pid_file_path),
stdout = out,
stderr = out,
)
print 'starting Query Server, logging to %s' % log_file_path
with context:
# this block is the main() for the forked daemon process
child = None
cmd = java_cmd % {'java': java, 'root_logger': 'INFO,DRFA', 'log_dir': log_dir, 'log_file': phoenix_log_file}
# notify the child when we're killed
def handler(signum, frame):
if child:
child.send_signal(signum)
sys.exit(0)
signal.signal(signal.SIGTERM, handler)
print '%s launching %s' % (datetime.datetime.now(), cmd)
child = subprocess.Popen(cmd.split())
sys.exit(child.wait())
elif command == 'stop':
if not daemon_supported:
print >> sys.stderr, "daemon mode not supported on this platform"
sys.exit(-1)
if not os.path.exists(pid_file_path):
print >> sys.stderr, "no Query Server to stop because PID file not found, %s" % pid_file_path
sys.exit(0)
if not os.path.isfile(pid_file_path):
print >> sys.stderr, "PID path exists but is not a file! %s" % pid_file_path
sys.exit(1)
pid = None
with open(pid_file_path, 'r') as p:
pid = int(p.read())
if not pid:
sys.exit("cannot read PID file, %s" % pid_file_path)
print "stopping Query Server pid %s" % pid
with open(out_file_path, 'a+') as out:
print >> out, "%s terminating Query Server" % datetime.datetime.now()
os.kill(pid, signal.SIGTERM)
else:
# run in the foreground using defaults from log4j.properties
cmd = java_cmd % {'java': java, 'root_logger': 'INFO,console', 'log_dir': '.', 'log_file': 'psql.log'}
# Because shell=True is not set, we don't have to alter the environment
child = subprocess.Popen(cmd.split())
sys.exit(child.wait())
| apache-2.0 | -2,199,263,458,977,220,900 | 36.753623 | 125 | 0.64517 | false | 3.216049 | true | false | false |
Fat-Zer/FreeCAD_sf_master | src/Mod/TechDraw/TDTest/DVSectionTest.py | 27 | 1841 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# test script for TechDraw module
# creates a page, 1 view and 1 section view
from __future__ import print_function
import FreeCAD
import Part
import Measure
import TechDraw
import os
def DVSectionTest():
path = os.path.dirname(os.path.abspath(__file__))
print ('TDSection path: ' + path)
templateFileSpec = path + '/TestTemplate.svg'
FreeCAD.newDocument("TDSection")
FreeCAD.setActiveDocument("TDSection")
FreeCAD.ActiveDocument=FreeCAD.getDocument("TDSection")
box = FreeCAD.ActiveDocument.addObject("Part::Box","Box")
page = FreeCAD.ActiveDocument.addObject('TechDraw::DrawPage','Page')
FreeCAD.ActiveDocument.addObject('TechDraw::DrawSVGTemplate','Template')
FreeCAD.ActiveDocument.Template.Template = templateFileSpec
FreeCAD.ActiveDocument.Page.Template = FreeCAD.ActiveDocument.Template
page.Scale = 5.0
# page.ViewObject.show() # unit tests run in console mode
print("page created")
view = FreeCAD.ActiveDocument.addObject('TechDraw::DrawViewPart','View')
rc = page.addView(view)
view.Source = [box]
view.Direction = (0.0,0.0,1.0)
view.Rotation = 0.0
view.X = 30.0
view.Y = 150.0
print("view created")
section = FreeCAD.ActiveDocument.addObject('TechDraw::DrawViewSection','Section')
rc = page.addView(section)
section.Source = [box]
section.BaseView = view
section.Direction = (0.0,1.0,0.0)
section.SectionNormal = (0.0,1.0,0.0)
section.SectionOrigin = (5.0,5.0,5.0)
view.touch()
print("section created")
FreeCAD.ActiveDocument.recompute()
rc = False
if ("Up-to-date" in view.State) and ("Up-to-date" in section.State):
rc = True
FreeCAD.closeDocument("TDSection")
return rc
if __name__ == '__main__':
DVSectionTest()
| lgpl-2.1 | -8,766,120,630,367,836,000 | 29.683333 | 85 | 0.682238 | false | 3.421933 | false | false | false |
andela-kndungu/ponycheckup | ponycheckup/check/models.py | 3 | 2407 | from django.db import models
from django_extensions.db.models import TimeStampedModel
class Check(TimeStampedModel):
url = models.URLField()
no_of_recommendations = models.IntegerField(default=0)
runs_debug = models.BooleanField()
supports_https = models.BooleanField()
heartbleed_vuln = models.BooleanField()
hsts_header_found = models.BooleanField()
xframe_header_found = models.BooleanField()
admin_found = models.BooleanField()
admin_forces_https = models.NullBooleanField()
login_found = models.BooleanField()
login_forces_https = models.NullBooleanField()
allows_trace = models.BooleanField()
csrf_cookie_found = models.BooleanField()
session_cookie_found = models.BooleanField()
session_cookie_secure = models.NullBooleanField()
session_cookie_httponly = models.NullBooleanField()
def update_recommendation_count(self):
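        """Recount recommendations from scratch: one per failed security check."""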
self.no_of_recommendations = 0
if self.runs_debug: self.no_of_recommendations += 1
if not self.supports_https: self.no_of_recommendations += 1
if self.heartbleed_vuln: self.no_of_recommendations += 1
if not self.hsts_header_found: self.no_of_recommendations += 1
if not self.xframe_header_found: self.no_of_recommendations += 1
if self.admin_found and not self.admin_forces_https: self.no_of_recommendations += 1
if self.login_found and not self.login_forces_https: self.no_of_recommendations += 1
if self.allows_trace: self.no_of_recommendations += 1
#if not self.csrf_cookie_found: self.no_of_recommendations += 1
if self.session_cookie_found and not self.session_cookie_secure: self.no_of_recommendations += 1
if self.session_cookie_found and not self.session_cookie_httponly: self.no_of_recommendations += 1
@property
def secure_percentage(self):
# worst is 10, best is 0
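        # e.g. 3 recommendations -> int(100 - round(10*3)) = 70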
return int(100-round(10*self.no_of_recommendations))
@property
def proven_django(self):
return self.runs_debug or self.csrf_cookie_found or self.session_cookie_found or self.admin_found
| bsd-3-clause | -3,438,242,315,357,026,000 | 45.288462 | 106 | 0.619028 | false | 3.985099 | false | false | false |
intangere/NewHope_X25519_XSalsa20_Poly1305 | example.py | 1 | 2711 | from nhxpoly.nhxpoly import NewHopeXSPolyBox
from newhope import newhope
from hashlib import sha256
"""This file shows a test run of
the NewHope_X25519_XSalsa20_Poly1305
key exchange protocol"""
user1 = NewHopeXSPolyBox()
user1.name = 'Alice'
user1.genPrekeyChunk()
user1.log('Prekey chunk created: %s' % repr([x for x in user1.prekey_chunk_1]))
user2 = NewHopeXSPolyBox()
user2.name = 'Bob'
user2.createSealedBox()
user1.setSenderPubKey(user2.pk)
user2.log('Created a X25519_XSalsa20_Poly1305 sealed box and sent public key to %s' % user1.name)
user1.encryptPrekeyChunk()
user1.log('Encrypted prekey chunk using the received X25519 Public Key via unidentifiable authentication.')
user2.enc_prekey_chunk_1 = user1.enc_prekey_chunk_1
user2.openSealedBox()
user2.log('First prekey chunk decrypted %s' % repr([x for x in user2.prekey_chunk_1]))
user1.genSeed()
user2.genSeed()
user1.initNewHope()
user1.genCommitmentHash()
user2.recv_commitment = user1.commitment #user1 sends commitment hash
user2.log('NewHope Seed commitment received with hash %s' % user2.recv_commitment)
user2.recv_message = user1.message
user2.recv_seed = user1.seed #user1 sends (reveals) seed
user2.verifyCommitment()
user2.log('Seed verification succeeded. Authentic NewHope message received')
user2.message = newhope.sharedb(user1.message)
user1.log('NewHope initial authenticated message exchanged')
user2.genCommitmentHash()
user1.recv_commitment = user2.commitment #User2 sends commitment
user1.log('NewHope Seed commitment received with hash %s' % user1.recv_commitment)
user1.recv_message = user2.message
user1.recv_seed = user2.seed
user1.verifyCommitment()
user1.log('Seed verification succeeded. Authentic NewHope message received')
user1.message = newhope.shareda(user2.message)
user1.log('NewHope final authenticated message exchanged')
user2.createNewHopeSharedKeyb()
user1.createNewHopeSharedKeya()
assert user2.shared_newhope_key == user1.shared_newhope_key
user2.log('NewHope shared key %s' % user2.shared_newhope_key)
user1.log('NewHope shared key %s' % user1.shared_newhope_key)
user2.combine_prekey_chunks()
user1.combine_prekey_chunks()
user2.log('Combined prekey %s' % user2.prekey)
user1.log('Combined prekey %s' % user1.prekey)
print('NewHope_X25519_XSalsa20_Poly1305 key exchange successful.')
print('Pre-key ready for key derivation..')
#Very simple possible key-derivation...
def derive(prekey):
prekey = ''.join([chr(_) for _ in prekey])
prekey = prekey.encode()
return sha256(prekey).hexdigest()
user1.log('Derived key:' + derive(user1.prekey))
user2.log('Derived key:' + derive(user2.prekey))
assert derive(user1.prekey) == derive(user2.prekey)
print('Derived keys match!')
| mit | 6,902,199,356,694,869,000 | 30.16092 | 107 | 0.772409 | false | 2.806418 | false | false | false |
scipy/scipy-svn | scipy/linalg/interface_gen.py | 11 | 6803 | #!/usr/bin/env python
import os
import re
from distutils.dir_util import mkpath
def all_subroutines(interface_in):
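    """Return the subroutine and function bodies found in interface_in,
    after stripping C-style /* ... */ comment blocks."""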
# remove comments
comment_block_exp = re.compile(r'/\*(?:\s|.)*?\*/')
subroutine_exp = re.compile(r'subroutine (?:\s|.)*?end subroutine.*')
function_exp = re.compile(r'function (?:\s|.)*?end function.*')
interface = comment_block_exp.sub('',interface_in)
subroutine_list = subroutine_exp.findall(interface)
function_list = function_exp.findall(interface)
subroutine_list = subroutine_list + function_list
subroutine_list = map(lambda x: x.strip(),subroutine_list)
return subroutine_list
def real_convert(val_string):
return val_string
def complex_convert(val_string):
return '(' + val_string + ',0.)'
def convert_types(interface_in,converter):
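    """Replace each <type_convert=...> tag with its value passed through
    converter (e.g. complex_convert wraps a real literal as '(x,0.)')."""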
regexp = re.compile(r'<type_convert=(.*?)>')
interface = interface_in[:]
while 1:
sub = regexp.search(interface)
if sub is None: break
converted = converter(sub.group(1))
interface = interface.replace(sub.group(),converted)
return interface
def generic_expand(generic_interface,skip_names=[]):
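    """Expand each templated subroutine once per type code in its <tchar=...> tag.

    Single codes ('s','d','c','z') substitute one precision; two-character
    codes such as 'cs' or 'zd' substitute different types for <type_in> and
    <type_out>."""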
generic_types ={'s' :('real', 'real', real_convert,
'real'),
'd' :('double precision','double precision',real_convert,
'double precision'),
'c' :('complex', 'complex',complex_convert,
'real'),
'z' :('double complex', 'double complex',complex_convert,
'double precision'),
'cs':('complex', 'real',complex_convert,
'real'),
'zd':('double complex', 'double precision',complex_convert,
'double precision'),
'sc':('real', 'complex',real_convert,
'real'),
'dz':('double precision','double complex', real_convert,
'double precision')}
generic_c_types = {'real':'float',
'double precision':'double',
'complex':'complex_float',
'double complex':'complex_double'}
# cc_types is specific in ATLAS C BLAS, in particular, for complex arguments
generic_cc_types = {'real':'float',
'double precision':'double',
'complex':'void',
'double complex':'void'}
#2. get all subroutines
subs = all_subroutines(generic_interface)
print len(subs)
#loop through the subs
type_exp = re.compile(r'<tchar=(.*?)>')
TYPE_EXP = re.compile(r'<TCHAR=(.*?)>')
routine_name = re.compile(r'(subroutine|function)\s*(?P<name>\w+)\s*\(')
interface = ''
for sub in subs:
#3. Find the typecodes to use:
m = type_exp.search(sub)
if m is None:
interface = interface + '\n\n' + sub
continue
type_chars = m.group(1)
# get rid of spaces
type_chars = type_chars.replace(' ','')
# get a list of the characters (or character pairs)
type_chars = type_chars.split(',')
# Now get rid of the special tag that contained the types
sub = re.sub(type_exp,'<tchar>',sub)
m = TYPE_EXP.search(sub)
if m is not None:
sub = re.sub(TYPE_EXP,'<TCHAR>',sub)
sub_generic = sub.strip()
for char in type_chars:
type_in,type_out,converter, rtype_in = generic_types[char]
sub = convert_types(sub_generic,converter)
function_def = sub.replace('<tchar>',char)
function_def = function_def.replace('<TCHAR>',char.upper())
function_def = function_def.replace('<type_in>',type_in)
function_def = function_def.replace('<type_in_c>',
generic_c_types[type_in])
function_def = function_def.replace('<type_in_cc>',
generic_cc_types[type_in])
function_def = function_def.replace('<rtype_in>',rtype_in)
function_def = function_def.replace('<rtype_in_c>',
generic_c_types[rtype_in])
function_def = function_def.replace('<type_out>',type_out)
function_def = function_def.replace('<type_out_c>',
generic_c_types[type_out])
m = routine_name.match(function_def)
if m:
if m.group('name') in skip_names:
print 'Skipping',m.group('name')
continue
else:
                print 'Possible bug: Failed to determine routine name'
interface = interface + '\n\n' + function_def
return interface
#def interface_to_module(interface_in,module_name,include_list,sdir='.'):
def interface_to_module(interface_in,module_name):
pre_prefix = "!%f90 -*- f90 -*-\n"
# heading and tail of the module definition.
file_prefix = "\npython module " + module_name +" ! in\n" \
"!usercode '''#include \"cblas.h\"\n"\
"!'''\n"\
" interface \n"
file_suffix = "\n end interface\n" \
"end module %s" % module_name
return pre_prefix + file_prefix + interface_in + file_suffix
def process_includes(interface_in,sdir='.'):
include_exp = re.compile(r'\n\s*[^!]\s*<include_file=(.*?)>')
include_files = include_exp.findall(interface_in)
for filename in include_files:
f = open(os.path.join(sdir,filename))
interface_in = interface_in.replace('<include_file=%s>'%filename,
f.read())
f.close()
return interface_in
def generate_interface(module_name,src_file,target_file,skip_names=[]):
print "generating",module_name,"interface"
f = open(src_file)
generic_interface = f.read()
f.close()
sdir = os.path.dirname(src_file)
generic_interface = process_includes(generic_interface,sdir)
generic_interface = generic_expand(generic_interface,skip_names)
module_def = interface_to_module(generic_interface,module_name)
mkpath(os.path.dirname(target_file))
f = open(target_file,'w')
user_routines = os.path.join(sdir,module_name+"_user_routines.pyf")
if os.path.exists(user_routines):
f2 = open(user_routines)
f.write(f2.read())
f2.close()
f.write(module_def)
f.close()
def process_all():
# process the standard files.
for name in ['fblas','cblas','clapack','flapack']:
generate_interface(name,'generic_%s.pyf'%(name),name+'.pyf')
if __name__ == "__main__":
process_all()
| bsd-3-clause | -5,580,993,742,058,274,000 | 40.993827 | 80 | 0.550933 | false | 3.885208 | false | false | false |
zhoupeiling/Colibri | usernameless/urls.py | 1 | 4083 | """
Mostly a copy of (django-registration/)registration/backends/default/urls.py and
registration/auth_urls.py, but using our own backends:
* UsernameLessAuthenticationBackend for authentication
* UsernameLessRegistrationBackend for registration
"""
from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template
from django.contrib.sites.models import Site
from django.contrib.auth import views as auth_views
from registration.views import activate
from registration.views import register
from forms import UsernameLessAuthenticationForm
auth_urls = patterns('',
url(r'^login/$',
auth_views.login,
{'template_name': 'registration/login.html',
'authentication_form': UsernameLessAuthenticationForm, },
name='auth_login'),
url(r'^logout/$',
auth_views.logout,
{'template_name': 'registration/logout.html'},
name='auth_logout'),
url(r'^password/change/$',
auth_views.password_change,
name='auth_password_change'),
url(r'^password/change/done/$',
auth_views.password_change_done,
name='auth_password_change_done'),
url(r'^password/reset/$',
auth_views.password_reset,
name='auth_password_reset'),
url(r'^password/reset/confirm/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
auth_views.password_reset_confirm,
name='auth_password_reset_confirm'),
url(r'^password/reset/complete/$',
auth_views.password_reset_complete,
name='auth_password_reset_complete'),
url(r'^password/reset/done/$',
auth_views.password_reset_done,
name='auth_password_reset_done'),
)
urlpatterns = patterns('',
url(r'^activate/complete/$',
direct_to_template,
{
'template': 'registration/activation_complete.html',
'extra_context': { 'site' : Site.objects.get_current() },
}, name='registration_activation_complete'),
# Activation keys get matched by \w+ instead of the more specific
# [a-fA-F0-9]{40} because a bad activation key should still get to the view;
# that way it can return a sensible "invalid key" message instead of a
# confusing 404.
url(r'^activate/(?P<activation_key>\w+)/$',
activate,
{'backend': 'usernameless.auth.UsernameLessRegistrationBackend'},
name='registration_activate'),
url(r'^register/$',
register,
{'backend': 'usernameless.auth.UsernameLessRegistrationBackend'},
name='registration_register'),
url(r'^register/complete/$',
direct_to_template,
{
'template': 'registration/registration_complete.html',
'extra_context': { 'site' : Site.objects.get_current() },
}, name='registration_complete'),
url(r'^register/closed/$',
direct_to_template,
{'template': 'registration/registration_closed.html'},
name='registration_disallowed'),
(r'', include(auth_urls)),
)
| gpl-3.0 | 8,162,398,747,961,945,000 | 50.0375 | 99 | 0.478815 | false | 5.495289 | false | false | false |
zookeepr/zookeepr | zk/model/rego_note.py | 5 | 1592 | import sqlalchemy as sa
from meta import Base
from meta import Session
from lib.model import CommaList
from person import Person
from registration import Registration
from registration_product import RegistrationProduct
class RegoNote(Base):
"""Misc notes from the organising team on a person
"""
__tablename__ = 'rego_note'
id = sa.Column(sa.types.Integer, primary_key=True)
rego_id = sa.Column(sa.types.Integer, sa.ForeignKey('registration.id'))
note = sa.Column(sa.types.Text)
block = sa.Column(sa.types.Boolean, nullable=False)
by_id = sa.Column(sa.types.Integer, sa.ForeignKey('person.id'), nullable=False)
creation_timestamp = sa.Column(sa.types.DateTime, nullable=False, default=sa.func.current_timestamp())
last_modification_timestamp = sa.Column(sa.types.DateTime, nullable=False, default=sa.func.current_timestamp(), onupdate=sa.func.current_timestamp())
# relations
by = sa.orm.relation(Person, backref=sa.orm.backref('notes_made', cascade="all, delete-orphan", lazy=True))
rego = sa.orm.relation(Registration, backref=sa.orm.backref('notes', cascade="all, delete-orphan", lazy=True))
def __init__(self, **kwargs):
super(RegoNote, self).__init__(**kwargs)
@classmethod
def find_by_id(cls, id, abort_404 = True):
result = Session.query(RegoNote).filter_by(id=id).first()
if result is None and abort_404:
abort(404, "No such rego note object")
return result
@classmethod
def find_all(cls):
return Session.query(RegoNote).order_by(RegoNote.id).all()
| gpl-2.0 | -7,972,538,946,433,576,000 | 36.904762 | 153 | 0.697864 | false | 3.537778 | false | false | false |
xiaocong/github-timeline | ghdata/fetch.py | 1 | 8836 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import os.path
import re
import json
import requests
import shutil
import gzip
from tempfile import NamedTemporaryFile
import StringIO
from datetime import date
from collections import defaultdict
from .db import redis, mongodb, pipe as _pipe, format_key as _format
# The URL template for the GitHub Archive.
archive_url = ("http://data.githubarchive.org/"
"{year}-{month:02d}-{day:02d}-{hour}.json.gz")
local_url = "./data/{year}-{month:02d}-{day:02d}-{hour}.json.gz"
date_re = re.compile(r"([0-9]{4})-([0-9]{2})-([0-9]{2})-([0-9]+)\.json.gz")
# mkdir data directory
os.path.exists('./data') or os.mkdir('./data')
def fetch_one(year, month, day, hour):
    '''Fetch one archived timeline.'''
local_fn = local_url.format(year=year, month=month, day=day, hour=hour)
if os.path.exists(local_fn):
print '%s exists.' % local_fn
return local_fn
else:
url = archive_url.format(year=year, month=month, day=day, hour=hour)
r = None
try:
r = requests.get(url, timeout=120)
if r.status_code == 200:
f = NamedTemporaryFile("wb", delete=False)
f.write(r.content)
f.flush()
os.fsync(f.fileno())
f.close()
shutil.move(f.name, local_fn)
print("Fetching %s successded." % url)
return local_fn
else:
print("Fetching %s failed." % url)
except:
return None
finally:
if r is not None:
r.close()
return None
def _gen_json(buf):
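    '''Yield one parsed JSON object per line of buf, skipping lines that fail to parse.'''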
line = buf.readline()
while line:
try:
yield json.loads(line)
except Exception as e:
print "Error during load json: %s" % e
line = buf.readline()
def file_process(filename, fns):
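    '''Run each function in fns over the events in a gzipped archive file,
    using a Redis set to skip (function, hour) pairs already processed.'''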
if not filename or not os.path.exists(filename):
return
fns = fns if type(fns) is list else [fns]
year, month, day, hour = map(int, date_re.findall(filename)[0])
r = redis()
repl = lambda m: ''
for fn in fns:
print('Processing %s with %s' % (filename, fn.__name__))
fn_key = _format('function:%s' % fn.__name__)
fn_value = "{year}-{month:02d}-{day:02d}-{hour}".format(
year=year, month=month, day=day, hour=hour
)
if not r.sismember(fn_key, fn_value):
with gzip.GzipFile(filename) as f:
content = f.read().decode("utf-8", errors="ignore")
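                # Strip newlines that appear inside event records and re-split
                # records concatenated onto one line, so each line holds one JSON object.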
content = re.sub(u"[^\\}]([\\n\\r\u2028\u2029]+)[^\\{]", repl, content)
content = '}\n{"'.join(content.split('}{"'))
buf = StringIO.StringIO(content)
fn(_gen_json(buf), year, month, day, hour)
r.sadd(fn_key, fn_value)
def _mongo_default():
return defaultdict(lambda: defaultdict(int))
def events_process(events, year, month, day, hour):
'''main events process method.'''
weekday = date(year=year, month=month, day=day).strftime("%w")
year_month = "{year}-{month:02d}".format(year=year, month=month)
pipe = _pipe()
users = defaultdict(_mongo_default)
repos = defaultdict(_mongo_default)
languages = defaultdict(_mongo_default)
for event in events:
actor = event["actor"]
attrs = event.get("actor_attributes", {})
if actor is None or attrs.get("type") != "User":
# This was probably an anonymous event (like a gist event)
# or an organization event.
continue
# Normalize the user name.
key = actor.lower()
# Get the type of event.
evttype = event["type"]
nevents = 1
# Can this be called a "contribution"?
contribution = evttype in ["IssuesEvent", "PullRequestEvent",
"PushEvent"]
# Increment the global sum histograms.
pipe.incr(_format("total"), nevents)
pipe.hincrby(_format("day"), weekday, nevents)
pipe.hincrby(_format("hour"), hour, nevents)
pipe.hincrby(_format("month"), year_month, nevents)
pipe.zincrby(_format("user"), key, nevents)
pipe.zincrby(_format("event"), evttype, nevents)
# Event histograms.
pipe.hincrby(_format("event:{0}:day".format(evttype)),
weekday, nevents)
pipe.hincrby(_format("event:{0}:hour".format(evttype)),
hour, nevents)
pipe.hincrby(_format("evnet:{0}:month".format(evttype)),
year_month, nevents)
# User schedule histograms.
incs = [
'total',
'day.%s' % weekday,
'hour.%02d' % hour,
'month.%04d.%02d' % (year, month),
'event.%s.day.%s' % (evttype, weekday),
'event.%s.hour.%02d' % (evttype, hour),
'event.%s.month.%04d.%02d' % (evttype, year, month)
]
for inc in incs:
users[key]['$inc'][inc] += nevents
# Parse the name and owner of the affected repository.
repo = event.get("repository", {})
owner, name, org = (repo.get("owner"), repo.get("name"),
repo.get("organization"))
if owner and name:
repo_name = "{0}/{1}".format(owner, name)
# Save the social graph.
users[key]['repos'][repo_name] += nevents
repos[repo_name]['$inc']['total'] += nevents
repos[repo_name]['$inc']['events.%s' % evttype] += nevents
repos[repo_name]['$inc']['users.%s' % key] += nevents
# Do we know what the language of the repository is?
language = repo.get("language")
if language:
# Which are the most popular languages?
languages[language]['$inc']['total'] += nevents
languages[language]['$inc']['events.%s' % evttype] += nevents
                languages[language]['$inc']['month.%04d.%02d' % (year, month)] += nevents
# The most used language of users
users[key]['$inc']['lang.%s' % language] += nevents
# Who are the most important users of a language?
if contribution:
pipe.zincrby(_format("lang:{0}:user".format(language)),
key, nevents)
users_stats = mongodb().users_stats
for key in users:
users_stats.update({'_id': key}, {'$inc': users[key]['$inc']}, True)
for repo_name in users[key]['repos']:
users_stats.update(
{'_id': key, 'repos.repo': {'$ne': repo_name}},
{'$addToSet': {'repos': {'repo': repo_name, 'events': 0}}},
False
)
users_stats.update(
{'_id': key, 'repos.repo': repo_name},
{'$inc': {'repos.$.events': users[key]['repos'][repo_name]}},
False
)
del users
languages_stats = mongodb().languages
for key in languages:
languages_stats.update({'_id': key},
{'$inc': languages[key]['$inc']},
True)
del languages
repos_stats = mongodb().repositories
for key in repos:
repos_stats.update({'_id': key},
{'$inc': repos[key]['$inc']},
True)
del repos
pipe.execute()
def events_process_lang_contrib(events, year, month, day, hour):
'''lang contribution process method.'''
users = defaultdict(_mongo_default)
for event in events:
actor = event["actor"]
attrs = event.get("actor_attributes", {})
if actor is None or attrs.get("type") != "User":
# This was probably an anonymous event (like a gist event)
# or an organization event.
continue
# Normalize the user name.
key = actor.lower()
# Get the type of event.
evttype = event["type"]
nevents = 1
# Can this be called a "contribution"?
contribution = evttype in ["IssuesEvent", "PullRequestEvent", "PushEvent"]
repo = event.get("repository", {})
owner, name, org, language = (repo.get("owner"),
repo.get("name"),
repo.get("organization"),
repo.get("language"))
if owner and name and language and contribution:
# The most used language of users
users[key]['$inc']['contrib.%s.%d.%02d' % (language, year, month)] += nevents
users_stats = mongodb().users_stats
for key in users:
users_stats.update({'_id': key}, {'$inc': users[key]['$inc']}, True)
del users
| mit | 2,357,462,357,480,505,300 | 35.6639 | 89 | 0.528407 | false | 3.868651 | false | false | false |
elitak/catalyst | catalyst/arch/arm.py | 1 | 4516 |
from catalyst import builder
class generic_arm(builder.generic):
"Abstract base class for all arm (little endian) builders"
def __init__(self,myspec):
builder.generic.__init__(self,myspec)
self.settings["CFLAGS"]="-O2 -pipe"
class generic_armeb(builder.generic):
"Abstract base class for all arm (big endian) builders"
def __init__(self,myspec):
builder.generic.__init__(self,myspec)
self.settings["CFLAGS"]="-O2 -pipe"
class arch_arm(generic_arm):
"Builder class for arm (little endian) target"
def __init__(self,myspec):
generic_arm.__init__(self,myspec)
self.settings["CHOST"]="arm-unknown-linux-gnu"
class arch_armeb(generic_armeb):
"Builder class for arm (big endian) target"
def __init__(self,myspec):
generic_armeb.__init__(self,myspec)
self.settings["CHOST"]="armeb-unknown-linux-gnu"
class arch_armv4l(generic_arm):
"Builder class for armv4l target"
def __init__(self,myspec):
generic_arm.__init__(self,myspec)
self.settings["CHOST"]="armv4l-unknown-linux-gnu"
self.settings["CFLAGS"]+=" -march=armv4"
class arch_armv4tl(generic_arm):
"Builder class for armv4tl target"
def __init__(self,myspec):
generic_arm.__init__(self,myspec)
self.settings["CHOST"]="armv4tl-softfloat-linux-gnueabi"
self.settings["CFLAGS"]+=" -march=armv4t"
class arch_armv5tl(generic_arm):
"Builder class for armv5tl target"
def __init__(self,myspec):
generic_arm.__init__(self,myspec)
self.settings["CHOST"]="armv5tl-softfloat-linux-gnueabi"
self.settings["CFLAGS"]+=" -march=armv5t"
class arch_armv5tel(generic_arm):
"Builder class for armv5tel target"
def __init__(self,myspec):
generic_arm.__init__(self,myspec)
self.settings["CHOST"]="armv5tel-softfloat-linux-gnueabi"
self.settings["CFLAGS"]+=" -march=armv5te"
class arch_armv5tejl(generic_arm):
"Builder class for armv5tejl target"
def __init__(self,myspec):
generic_arm.__init__(self,myspec)
self.settings["CHOST"]="armv5tejl-softfloat-linux-gnueabi"
self.settings["CFLAGS"]+=" -march=armv5te"
class arch_armv6j(generic_arm):
"Builder class for armv6j target"
def __init__(self,myspec):
generic_arm.__init__(self,myspec)
self.settings["CHOST"]="armv6j-softfp-linux-gnueabi"
self.settings["CFLAGS"]+=" -march=armv6j -mfpu=vfp -mfloat-abi=softfp"
class arch_armv6z(generic_arm):
"Builder class for armv6z target"
def __init__(self,myspec):
generic_arm.__init__(self,myspec)
self.settings["CHOST"]="armv6z-softfp-linux-gnueabi"
self.settings["CFLAGS"]+=" -march=armv6z -mfpu=vfp -mfloat-abi=softfp"
class arch_armv6zk(generic_arm):
"Builder class for armv6zk target"
def __init__(self,myspec):
generic_arm.__init__(self,myspec)
self.settings["CHOST"]="armv6zk-softfp-linux-gnueabi"
self.settings["CFLAGS"]+=" -march=armv6zk -mfpu=vfp -mfloat-abi=softfp"
class arch_armv7a(generic_arm):
"Builder class for armv7a target"
def __init__(self,myspec):
generic_arm.__init__(self,myspec)
self.settings["CHOST"]="armv7a-softfp-linux-gnueabi"
self.settings["CFLAGS"]+=" -march=armv7-a -mfpu=vfpv3-d16 -mfloat-abi=softfp"
class arch_armv6j_hardfp(generic_arm):
"Builder class for armv6j hardfloat target, needs >=gcc-4.5"
def __init__(self,myspec):
generic_arm.__init__(self,myspec)
self.settings["CHOST"]="armv6j-hardfloat-linux-gnueabi"
self.settings["CFLAGS"]+=" -march=armv6j -mfpu=vfp -mfloat-abi=hard"
class arch_armv7a_hardfp(generic_arm):
"Builder class for armv7a hardfloat target, needs >=gcc-4.5"
def __init__(self,myspec):
generic_arm.__init__(self,myspec)
self.settings["CHOST"]="armv7a-hardfloat-linux-gnueabi"
self.settings["CFLAGS"]+=" -march=armv7-a -mfpu=vfpv3-d16 -mfloat-abi=hard"
class arch_armv5teb(generic_armeb):
"Builder class for armv5teb (XScale) target"
def __init__(self,myspec):
generic_armeb.__init__(self,myspec)
self.settings["CFLAGS"]+=" -mcpu=xscale"
self.settings["CHOST"]="armv5teb-softfloat-linux-gnueabi"
def register():
"Inform main catalyst program of the contents of this plugin."
return ({
"arm" : arch_arm,
"armv4l" : arch_armv4l,
"armv4tl": arch_armv4tl,
"armv5tl": arch_armv5tl,
"armv5tel": arch_armv5tel,
"armv5tejl": arch_armv5tejl,
"armv6j" : arch_armv6j,
"armv6z" : arch_armv6z,
"armv6zk" : arch_armv6zk,
"armv7a" : arch_armv7a,
"armv6j_hardfp" : arch_armv6j_hardfp,
"armv7a_hardfp" : arch_armv7a_hardfp,
"armeb" : arch_armeb,
"armv5teb" : arch_armv5teb
}, ("arm", "armv4l", "armv4tl", "armv5tl", "armv5tel", "armv5tejl", "armv6l",
"armv7l", "armeb", "armv5teb") )
| gpl-2.0 | -2,506,184,892,682,311,000 | 33.473282 | 79 | 0.704163 | false | 2.447696 | false | false | false |
openlabs/trytond-stock-lot-serial | stock.py | 1 | 2283 | # -*- coding: utf-8 -*-
"""
stock_lot_serial.stock
Shipment
:copyright: (c) 2013-2014 by Openlabs Technologies & Consulting (P) Limited
:license: 3-clause BSD, see LICENSE for more details.
"""
from trytond.model import ModelView
from trytond.pool import PoolMeta, Pool
from trytond.pyson import Eval
__metaclass__ = PoolMeta
__all__ = ['ShipmentIn', 'Move']
class ShipmentIn:
"ShipmentIn"
__name__ = "stock.shipment.in"
@classmethod
def __setup__(cls):
super(ShipmentIn, cls).__setup__()
cls._buttons.update({
'split_moves': {
'invisible': Eval('state') != 'draft',
},
})
def _split_moves(self):
"Split incoming moves with quantity greater than 1"
Move = Pool().get('stock.move')
for move in self.incoming_moves:
if not move.product.serialized_inventory_control:
continue
while move.quantity > 1:
Move.copy([move], {'quantity': 1, 'lot': None})
move.quantity -= 1
move.save()
@classmethod
@ModelView.button
def split_moves(cls, shipments):
"Split incoming moves with quantity greater than 1"
for shipment in shipments:
shipment._split_moves()
class Move:
"Move"
__name__ = "stock.move"
@classmethod
def __setup__(cls):
super(Move, cls).__setup__()
cls._error_messages.update({
'quantity_one': "Quantity for moves with products having serialized"
" inventory control cannot be greater than 1."
})
def check_product_serial(self):
"""
Ensure that products with serialized inventory control have only 1 as
quantity for stock moves.
"""
if self.state == 'done' and \
self.product.template.serialized_inventory_control and \
self.quantity != 1.0:
self.raise_user_error('quantity_one')
@classmethod
def validate(cls, moves):
"""
Check if quantity is one when serialized inventory control is true for
each incoming move
"""
super(Move, cls).validate(moves)
for move in moves:
move.check_product_serial()
| bsd-3-clause | -2,645,729,696,213,979,000 | 27.185185 | 80 | 0.572492 | false | 4.219963 | false | false | false |
wphicks/Writing3D | pyw3d/timeline.py | 1 | 5322 | # Copyright (C) 2016 William Hicks
#
# This file is part of Writing3D.
#
# Writing3D is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
"""Tools for working with timelines in W3D projects
"""
import logging
LOGGER = logging.getLogger("pyw3d")
import xml.etree.ElementTree as ET
from .features import W3DFeature
from .actions import W3DAction
from .validators import ListValidator, IsNumeric, IsBoolean, ValidPyString, \
FeatureValidator
from .errors import ConsistencyError, BadW3DXML
from .xml_tools import bool2text, text2bool
from .activators import BlenderTimeline
from .errors import EBKAC
from .structs import SortedList
class W3DTimeline(W3DFeature):
"""Represent timeline for choreography of actions in the W3D
:param str name: Name of timeline
:param bool start_immediately: Start timeline when project starts?
:param list actions: A list of two-element tuples specifying (start time
for action, W3DAction)
"""
argument_validators = {
"name": ValidPyString(),
"start_immediately": IsBoolean(),
"actions": ListValidator(
ListValidator(
[IsNumeric(min_value=0), FeatureValidator(W3DAction)],
item_label="Start Time(s), Action",
required_length=2,
help_string="Start time in seconds, action to perform"
),
help_string="A list of (float, W3DAction) tuples")
}
default_arguments = {
"start_immediately": True
}
def __init__(self, *args, **kwargs):
super(W3DTimeline, self).__init__(*args, **kwargs)
if "actions" not in self:
self["actions"] = SortedList()
else:
self["actions"] = SortedList(self["actions"])
def toXML(self, all_timelines_root):
"""Store W3DTimeline as Timeline node within TimelineRoot node
"""
try:
timeline_attribs = {"name": self["name"]}
except KeyError:
raise ConsistencyError("W3DTimeline must specify a name")
if "start_immediately" in self:
timeline_attribs["start-immediately"] = bool2text(
self["start_immediately"])
timeline_root = ET.SubElement(
all_timelines_root, "Timeline", attrib=timeline_attribs)
for time, action in self["actions"]:
action_root = ET.SubElement(
timeline_root, "TimedActions",
attrib={"seconds-time": str(time)}
)
action.toXML(action_root)
return timeline_root
@classmethod
def fromXML(timeline_class, timeline_root):
"""Create W3DTimeline from Timeline node of W3D XML
:param :py:class:xml.etree.ElementTree.Element timeline_root
"""
new_timeline = timeline_class()
try:
new_timeline["name"] = timeline_root.attrib["name"]
except KeyError:
raise BadW3DXML(
"Timeline node must specify name attribute")
if "start-immediately" in timeline_root.attrib:
new_timeline["start_immediately"] = text2bool(timeline_root.attrib[
"start-immediately"])
for timed_action in timeline_root.findall("TimedActions"):
try:
action_time = float(timed_action.attrib["seconds-time"])
except (KeyError, ValueError):
raise BadW3DXML(
"TimedActions node must specify numeric seconds-time "
"attribute")
for child in timed_action.getchildren():
new_timeline["actions"].add(
(action_time, W3DAction.fromXML(child)))
return new_timeline
def blend(self):
"""Create Blender object to implement W3DTimeline"""
self.activator = BlenderTimeline(
self["name"], self["actions"],
start_immediately=self["start_immediately"])
LOGGER.debug("Creating timeline {}".format(self["name"]))
self.activator.create_blender_objects()
return self.activator.base_object
def link_blender_logic(self):
"""Link BGE logic bricks for this W3DTimeline"""
try:
self.activator.link_logic_bricks()
except AttributeError:
raise EBKAC(
"blend() must be called before link_blender_logic()")
def write_blender_logic(self):
"""Write any necessary game engine logic for this W3DTimeline"""
LOGGER.debug(
"Writing game logic for {}".format(self["name"])
)
try:
self.activator.write_python_logic()
except AttributeError:
raise EBKAC(
"blend() must be called before write_blender_logic()")
| gpl-3.0 | -3,449,748,591,373,062,000 | 37.28777 | 79 | 0.625705 | false | 4.22046 | false | false | false |
kappapolls/kappapolls | polls/views.py | 1 | 1237 | from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse
from polls.models import Poll
import re
import json
# Create your views here.
def index(request):
return render(request, 'polls/index.html')
def top_commenters(request):
"""just returns contents of top_commenters.json"""
with open('polls/top_commenters.json', 'r') as f:
data = f.read()
return HttpResponse(data, content_type="application/json")
def poll_results_json(request, poll_thread_id):
poll = get_object_or_404(Poll, thread_id=poll_thread_id)
if poll.is_active:
data = poll.json_results
else:
data = json.dumps(poll.status.name)
return HttpResponse(data, content_type="application/json")
def poongko_sort(x):
if x == 'Poongko':
return 0
else:
return x.lower()
def poll_detail(request, slug):
poll = get_object_or_404(Poll, slug=slug)
choices = poll.choice_set.all()
#hacky fix to change order for now
choices = sorted(choices, key=lambda x: poongko_sort(x.name))
poll_title = re.sub(r'<.*?>', '', poll.name)
data = {'poll': poll, 'choices': choices, 'poll_title': poll_title}
return render(request, 'polls/poll_detail.html', data)
| gpl-2.0 | 6,726,558,599,940,188,000 | 28.452381 | 71 | 0.668553 | false | 3.325269 | false | false | false |
BBN-Q/PySimulator | tests/SimSpeedTest.py | 1 | 5046 | # -*- coding: utf-8 -*-
"""
Created on Wed Dec 5 21:44:22 2012
@author: cryan
"""
import numpy as np
from numpy import sin, cos
from scipy.constants import pi
from scipy.linalg import expm, eigh
from PySim.SystemParams import SystemParams
from PySim.PulseSequence import PulseSequence
from PySim.Simulation import simulate_sequence_stack, simulate_sequence
from PySim.QuantumSystems import SCQubit, Hamiltonian, Dissipator
from numba import *
#import matplotlib.pyplot as plt
#from timeit import timeit
#Try to load the CPPBackEnd
try:
import PySim.CySim
CPPBackEnd = True
except ImportError:
CPPBackEnd = False
#@jit(c16[:,:](c16[:,:], c16))
def expm_eigen(matIn, mult):
'''
Helper function to compute matrix exponential of Hermitian matrix
'''
dim = matIn.shape[0]
D, V = eigh(matIn)
return V.dot(np.diag(np.exp(mult*D))).dot(V.conj().T)
@autojit()
def mult_a_X(alpha, X):
outArray = np.zeros_like(X)
for rowct in range(X.shape[0]):
for colct in range(X.shape[1]):
outArray[rowct,colct] = alpha*X[rowct, colct]
return outArray
@jit(c16[:,:](c16[:,:], c16[:,:,:], f8[:,:], f8[:]))
#@autojit
def evolution_numba(Hnat, controlHams, controlFields, controlFreqs):
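    '''Piecewise-constant propagation: at each fixed time step, add the
    modulated control Hamiltonians to Hnat and fold the step propagator
    into Uprop.'''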
timeStep = 0.01
curTime = 0.0
Uprop = np.eye(Hnat.shape[0])
for timect in range(controlFields.shape[1]):
tmpH = np.copy(Hnat)
for controlct in range(controlFields.shape[0]):
tmpMult = controlFields[controlct, timect]*cos(2*pi*curTime*controlFreqs[controlct])
for rowct in range(tmpH.shape[0]):
for colct in range(tmpH.shape[1]):
tmpH[rowct,colct] += tmpMult*controlHams[controlct, rowct, colct]
        Uprop = np.dot(expm_eigen(tmpH,-1j*2*pi*timeStep),Uprop)
curTime += timeStep
return Uprop
def evolution_numpy(Hnat, controlHams, controlFields, controlFreqs):
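    '''Same propagation loop as evolution_numba, written with plain numpy
    updates for speed comparison.'''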
timeStep = 0.01
curTime = 0.0
Uprop = np.eye(Hnat.shape[0])
for timect in range(controlFields.shape[1]):
tmpH = np.copy(Hnat)
for controlct in range(controlFields.shape[0]):
tmpH += controlFields[controlct, timect]*cos(2*pi*curTime*controlFreqs[controlct])*controlHams[controlct]
        Uprop = np.dot(expm_eigen(tmpH,-1j*2*pi*timeStep),Uprop)
curTime += timeStep
return Uprop
def sim_setup(dimension, numTimeSteps, numControls):
#Create a random natural hamiltonian
tmpMat = np.random.randn(dimension, dimension) + 1j*np.random.randn(dimension, dimension)
Hnat = tmpMat+tmpMat.conj().T
#Create random control Hamiltonians
controlHams = np.zeros((numControls,dimension, dimension), dtype=np.complex128)
for ct in range(numControls):
tmpMat = np.random.randn(dimension, dimension) + 1j*np.random.randn(dimension, dimension)
controlHams[ct] = tmpMat+tmpMat.conj().T
#Create random controlfields
controlFields = np.random.randn(numControls, numTimeSteps)
#Control frequencies
controlFreqs = np.random.randn(numControls)
return Hnat, controlHams, controlFields, controlFreqs
def sim_setup_cython(Hnat, controlHams, controlFields, controlFreqs):
systemParams = SystemParams()
systemParams.Hnat = Hamiltonian(Hnat)
pulseSeq = PulseSequence()
pulseSeq.controlAmps = controlFields
for ct in range(len(controlHams)):
systemParams.add_control_ham(inphase=Hamiltonian(controlHams[ct]))
pulseSeq.add_control_line(freq = controlFreqs[ct], phase=0, controlType='sinusoidal')
for ct in range(np.int(np.log2(Hnat.shape[0]))):
systemParams.add_sub_system(SCQubit(2,0e9, name='Q1', T1=1e-6))
pulseSeq.timeSteps = 0.01*np.ones(controlFields.shape[1])
pulseSeq.maxTimeStep = 1e6
return systemParams, pulseSeq
if __name__ == '__main__':
dims = 2**np.arange(1,6)
dim = 16
Hnat, controlHams, controlFields, controlFreqs = sim_setup(dim, 1000, 4)
print(evolution_numba(Hnat, controlHams, controlFields, controlFreqs))
# systemParams, pulseSeq = sim_setup_cython(Hnat, controlHams, controlFields, controlFreqs)
# cythonTimes = []
# numpyTimes = []
# for dim in dims:
# print(dim)
# Hnat, controlHams, controlFields, controlFreqs = sim_setup(dim, 2000, 4)
# systemParams, pulseSeq = sim_setup_cython(Hnat, controlHams, controlFields, controlFreqs)
# numpyTimes.append(timeit('evolution_numpy(Hnat, controlHams, controlFields, controlFreqs)',
# setup='from __main__ import evolution_numpy, Hnat, controlHams, controlFields, controlFreqs', number=3)/3)
# cythonTimes.append(timeit('simulate_sequence(pulseSeq, systemParams)', setup='from __main__ import simulate_sequence, pulseSeq, systemParams', number=3)/3)
#
# plt.plot(dims, numpyTimes)
# plt.plot(dims, cythonTimes)
# plt.legend(('Numpy', 'Cython'))
# plt.xlabel('System Dimension')
# plt.show()
| apache-2.0 | 7,210,422,255,238,354,000 | 33.8 | 164 | 0.666072 | false | 3.211967 | false | false | false |
barnone/EigenD | pi/database_test.py | 3 | 9436 | #!/usr/bin/env python
#
# Copyright 2009 Eigenlabs Ltd. http://www.eigenlabs.com
#
# This file is part of EigenD.
#
# EigenD is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# EigenD is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EigenD. If not, see <http://www.gnu.org/licenses/>.
#
import unittest
from pi import database,logic
class PartofTest(unittest.TestCase):
def test1(self):
c = database.RelationCache()
self.failIf(c.relation(1,2))
self.failIf(c.direct_relation(1,2))
c.assert_relation(database.Relation('x',1,2))
self.failUnless(c.relation(1,2))
self.failUnless(c.direct_relation(1,2))
c.assert_relation(database.Relation('x',1,2))
self.failUnless(c.relation(1,2))
self.failUnless(c.direct_relation(1,2))
c.retract_relation(database.Relation('x',1,2))
self.failUnless(c.relation(1,2))
self.failUnless(c.direct_relation(1,2))
c.retract_relation(database.Relation('x',1,2))
self.failIf(c.relation(1,2))
self.failIf(c.direct_relation(1,2))
def test2(self):
c = database.RelationCache()
c.assert_relation(database.Relation('x',1,2))
c.assert_relation(database.Relation('x',3,4))
self.failUnless(c.relation(1,2))
self.failUnless(c.relation(3,4))
self.failUnless(c.direct_relation(1,2))
self.failUnless(c.direct_relation(3,4))
self.failIf(c.relation(2,3))
self.failIf(c.relation(1,3))
self.failIf(c.relation(1,4))
self.failIf(c.direct_relation(2,3))
self.failIf(c.direct_relation(1,3))
self.failIf(c.direct_relation(1,4))
self.assertEqual(set((2,)),c.direct_rights(1))
self.assertEqual(set(),c.direct_rights(2))
self.assertEqual(set((4,)),c.direct_rights(3))
self.assertEqual(set(),c.direct_rights(4))
self.assertEqual(set(),c.direct_lefts(1))
self.assertEqual(set((1,)),c.direct_lefts(2))
self.assertEqual(set(),c.direct_lefts(3))
self.assertEqual(set((3,)),c.direct_lefts(4))
self.assertEqual(set((2,)),c.rights(1))
self.assertEqual(set(),c.rights(2))
self.assertEqual(set((4,)),c.rights(3))
self.assertEqual(set(),c.rights(4))
self.assertEqual(set(),c.lefts(1))
self.assertEqual(set((1,)),c.lefts(2))
self.assertEqual(set(),c.lefts(3))
self.assertEqual(set((3,)),c.lefts(4))
c.assert_relation(database.Relation('x',2,3))
self.assertEqual(set((2,3,4)),c.rights(1))
self.assertEqual(set((3,4)),c.rights(2))
self.assertEqual(set((4,)),c.rights(3))
self.assertEqual(set(),c.rights(4))
self.assertEqual(set(),c.lefts(1))
self.assertEqual(set((1,)),c.lefts(2))
self.assertEqual(set((1,2)),c.lefts(3))
self.assertEqual(set((1,2,3)),c.lefts(4))
self.assertEqual(set((2,)),c.direct_rights(1))
self.assertEqual(set((3,)),c.direct_rights(2))
self.assertEqual(set((4,)),c.direct_rights(3))
self.assertEqual(set(),c.direct_rights(4))
self.assertEqual(set(),c.direct_lefts(1))
self.assertEqual(set((1,)),c.direct_lefts(2))
self.assertEqual(set((2,)),c.direct_lefts(3))
self.assertEqual(set((3,)),c.direct_lefts(4))
c.retract_relation(database.Relation('x',2,3))
self.assertEqual(set((2,)),c.direct_rights(1))
self.assertEqual(set(),c.direct_rights(2))
self.assertEqual(set((4,)),c.direct_rights(3))
self.assertEqual(set(),c.direct_rights(4))
self.assertEqual(set(),c.direct_lefts(1))
self.assertEqual(set((1,)),c.direct_lefts(2))
self.assertEqual(set(),c.direct_lefts(3))
self.assertEqual(set((3,)),c.direct_lefts(4))
self.assertEqual(set((2,)),c.rights(1))
self.assertEqual(set(),c.rights(2))
self.assertEqual(set((4,)),c.rights(3))
self.assertEqual(set(),c.rights(4))
self.assertEqual(set(),c.lefts(1))
self.assertEqual(set((1,)),c.lefts(2))
self.assertEqual(set(),c.lefts(3))
self.assertEqual(set((3,)),c.lefts(4))
class CircleTest(logic.Fixture):
def __init__(self,*a,**k):
logic.Fixture.__init__(self,database.Database(),*a,**k)
def setUp(self):
r = (database.Relation('partof','wheel','car'),
database.Relation('partof','hub','wheel'),
database.Relation('partof','car','hub'))
self.engine.assert_rules(r)
def test1(self):
self.checkVar('@partof(O,car)','O','wheel','hub','car')
self.checkVar('@partof(O,hub)','O','wheel','hub','car')
self.checkTrue('@partof(hub,car)')
self.checkTrue('@partof(car,hub)')
self.checkTrue('@partof(hub,hub)')
self.checkTrue('@partof(car,car)')
def test1d(self):
self.checkVar('@partof_direct(O,car)','O','wheel')
self.checkFalse('@partof_direct(hub,car)')
self.checkTrue('@partof_direct(wheel,car)')
def test1e(self):
self.checkVar('@partof_extended(O,car)','O','wheel','hub','car')
self.checkTrue('@partof_extended(hub,car)')
self.checkTrue('@partof_extended(car,car)')
self.checkTrue('@partof_extended(wheel,car)')
self.checkTrue('@partof_extended(wheel,wheel)')
self.checkTrue('@partof_extended(car,wheel)')
class RulesTest(logic.Fixture):
def __init__(self,*a,**k):
logic.Fixture.__init__(self,database.Database(),*a,**k)
def setUp(self):
r = (database.Relation('partof','wheel','car'),
database.Relation('partof','hub','wheel'),
database.Relation('partof','tyre','wheel'))
self.engine.assert_rules(r)
def test1(self):
self.checkTrue('@partof(wheel,car)')
self.checkTrue('@partof(hub,car)')
self.checkTrue('@partof(tyre,car)')
self.checkTrue('@partof(hub,wheel)')
self.checkTrue('@partof(tyre,wheel)')
self.checkFalse('@partof(car,wheel)')
self.checkFalse('@partof(car,hub)')
self.checkFalse('@partof(car,tyre)')
self.checkFalse('@partof(wheel,hub)')
self.checkFalse('@partof(wheel,tyre)')
self.checkFalse('@partof(car,car)')
self.checkFalse('@partof(wheel,wheel)')
def test1e(self):
self.checkTrue('@partof_extended(wheel,car)')
self.checkTrue('@partof_extended(hub,car)')
self.checkTrue('@partof_extended(tyre,car)')
self.checkTrue('@partof_extended(hub,wheel)')
self.checkTrue('@partof_extended(tyre,wheel)')
self.checkFalse('@partof_extended(car,wheel)')
self.checkFalse('@partof_extended(car,hub)')
self.checkFalse('@partof_extended(car,tyre)')
self.checkFalse('@partof_extended(wheel,hub)')
self.checkFalse('@partof_extended(wheel,tyre)')
self.checkTrue('@partof_extended(car,car)')
self.checkTrue('@partof_extended(wheel,wheel)')
def test1d(self):
self.checkTrue('@partof_direct(wheel,car)')
self.checkFalse('@partof_direct(hub,car)')
self.checkFalse('@partof_direct(tyre,car)')
self.checkTrue('@partof_direct(hub,wheel)')
self.checkTrue('@partof_direct(tyre,wheel)')
self.checkFalse('@partof_direct(car,wheel)')
self.checkFalse('@partof_direct(car,hub)')
self.checkFalse('@partof_direct(car,tyre)')
self.checkFalse('@partof_direct(wheel,hub)')
self.checkFalse('@partof_direct(wheel,tyre)')
self.checkFalse('@partof_direct(car,car)')
self.checkFalse('@partof_direct(wheel,wheel)')
def test2(self):
self.checkVar('@partof(C,car)','C','wheel','tyre','hub')
self.checkVar('@partof(C,wheel)','C','tyre','hub')
self.checkVar('@partof(wheel,C)','C','car')
self.checkVar('@partof(tyre,C)','C','car','wheel')
self.checkVar('@partof(hub,C)','C','car','wheel')
self.checkResults('@partof(O,C)',[])
def test2e(self):
self.checkVar('@partof_extended(C,car)','C','wheel','tyre','hub','car')
self.checkVar('@partof_extended(C,wheel)','C','tyre','hub','wheel')
self.checkTrue('@partof_extended(wheel,wheel)')
self.checkTrue('@partof_extended(wheel,car)')
self.checkVar('@partof_extended(wheel,C)','C','car','wheel')
self.checkVar('@partof_extended(tyre,C)','C','car','wheel','tyre')
self.checkVar('@partof_extended(hub,C)','C','car','wheel','hub')
self.checkResults('@partof_extended(O,C)',[])
def test2d(self):
self.checkVar('@partof_direct(C,car)','C','wheel')
self.checkVar('@partof_direct(C,wheel)','C','tyre','hub')
self.checkVar('@partof_direct(wheel,C)','C','car')
self.checkVar('@partof_direct(tyre,C)','C','wheel')
self.checkVar('@partof_direct(hub,C)','C','wheel')
self.checkResults('@partof_direct(O,C)',[])
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | 387,983,136,235,194,700 | 38.814346 | 79 | 0.61117 | false | 3.261666 | true | false | false |
rajul/ginga | ginga/misc/plugins/Compose.py | 3 | 11292 | #
# Compose.py -- Compose plugin for Ginga reference viewer
#
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
import os
from ginga.gw import Widgets
from ginga.misc import Bunch
from ginga import RGBImage, LayerImage
from ginga import GingaPlugin
import numpy
try:
from PIL import Image
have_PIL = True
except ImportError:
have_PIL = False
class ComposeImage(RGBImage.RGBImage, LayerImage.LayerImage):
def __init__(self, *args, **kwdargs):
RGBImage.RGBImage.__init__(self, *args, **kwdargs)
LayerImage.LayerImage.__init__(self)
class Compose(GingaPlugin.LocalPlugin):
"""
Usage:
Start the Compose plugin from the Operation menu--the tab should
show up under "Dialogs"
- Press "New Image" to start composing a new RGB image.
- drag your three constituent images that will make up the R, G and B
planes to the main viewer window--drag them in the order R (red),
G (green) and B (blue).
    In the plugin, the R, G and B images should show up as three slider
controls in the Layers area of the plugin.
You should now have a composite three color image in the Compose preview
window. Most likely the image does not have good cut levels set, so you
may want to set cut levels on the image using any of the usual cut levels
controls.
- Play with the alpha levels of each layer using the sliders in the
Compose plugin, when you release a slider the image should update.
- When you see something you like you can save it to a file using the
"Save As" button.
"""
def __init__(self, fv, fitsimage):
# superclass defines some variables for us, like logger
super(Compose, self).__init__(fv, fitsimage)
self.limage = None
self.count = 0
self.layertag = 'compose-canvas'
self.dc = fv.getDrawClasses()
canvas = self.dc.DrawingCanvas()
canvas.set_callback('drag-drop', self.drop_file_cb)
canvas.setSurface(self.fitsimage)
self.canvas = canvas
self.gui_up = False
def build_gui(self, container):
top = Widgets.VBox()
top.set_border_width(4)
vbox, sw, orientation = Widgets.get_oriented_box(container)
vbox.set_border_width(4)
vbox.set_spacing(2)
self.msgFont = self.fv.getFont("sansFont", 12)
tw = Widgets.TextArea(wrap=True, editable=False)
tw.set_font(self.msgFont)
self.tw = tw
fr = Widgets.Expander("Instructions")
fr.set_widget(tw)
vbox.add_widget(fr, stretch=0)
fr = Widgets.Frame("Compositing")
captions = (("Compose Type:", 'label', "Compose Type", 'combobox'),
("New Image", 'button', "Insert Layer", 'button'),
)
w, b = Widgets.build_info(captions)
self.w.update(b)
fr.set_widget(w)
vbox.add_widget(fr, stretch=0)
combobox = b.compose_type
index = 0
for name in ('Alpha', 'RGB'):
combobox.append_text(name)
index += 1
combobox.set_index(1)
#combobox.add_callback('activated', self.set_combine_cb)
b.new_image.add_callback('activated', lambda w: self.new_cb())
b.new_image.set_tooltip("Start a new composite image")
b.insert_layer.add_callback('activated', lambda w: self.insert_cb())
b.insert_layer.set_tooltip("Insert channel image as layer")
fr = Widgets.Frame("Layers")
self.w.scales = fr
vbox.add_widget(fr, stretch=0)
hbox = Widgets.HBox()
hbox.set_border_width(4)
hbox.set_spacing(4)
btn = Widgets.Button("Save Image As")
btn.add_callback('activated', lambda w: self.save_as_cb())
hbox.add_widget(btn, stretch=0)
self.entry2 = Widgets.TextEntry()
hbox.add_widget(self.entry2, stretch=1)
self.entry2.add_callback('activated', lambda *args: self.save_as_cb())
vbox.add_widget(hbox, stretch=0)
# spacer
vbox.add_widget(Widgets.Label(''), stretch=1)
top.add_widget(sw, stretch=1)
btns = Widgets.HBox()
btns.set_border_width(4)
btns.set_spacing(4)
btn = Widgets.Button("Close")
btn.add_callback('activated', lambda w: self.close())
btns.add_widget(btn)
btns.add_widget(Widgets.Label(''), stretch=1)
top.add_widget(btns, stretch=0)
container.add_widget(top, stretch=1)
self.gui_up = True
def _gui_config_layers(self):
# remove all old scales
self.logger.debug("removing layer alpha controls")
self.w.scales.remove_all()
self.logger.debug("building layer alpha controls")
# construct a new vbox of alpha controls
captions = []
num_layers = self.limage.num_layers()
for i in range(num_layers):
layer = self.limage.get_layer(i)
captions.append((layer.name+':', 'label', 'layer_%d' % i, 'hscale'))
w, b = Widgets.build_info(captions)
self.w.update(b)
for i in range(num_layers):
layer = self.limage.get_layer(i)
adj = b['layer_%d' % (i)]
lower, upper = 0, 100
adj.set_limits(lower, upper, incr_value=1)
#adj.set_decimals(2)
adj.set_value(int(layer.alpha * 100.0))
#adj.set_tracking(True)
adj.add_callback('value-changed', self.set_opacity_cb, i)
self.logger.debug("adding layer alpha controls")
self.w.scales.set_widget(w)
def new_cb(self):
#self.fitsimage.clear()
name = "composite%d" % (self.count)
self.limage = ComposeImage(logger=self.logger, order='RGB')
# Alpha or RGB composition?
index = self.w.compose_type.get_index()
if index == 0:
self.limage.compose = 'alpha'
else:
self.limage.compose = 'rgb'
self._gui_config_layers()
self.limage.set(name=name, nothumb=True)
def _get_layer_attributes(self):
# Get layer name
idx = self.limage.num_layers()
if self.limage.compose == 'rgb':
idx = min(idx, 2)
names = ['Red', 'Green', 'Blue']
name = names[idx]
else:
name = 'layer%d' % (idx)
# Get alpha
alpha = 1.0
bnch = Bunch.Bunch(name=name, alpha=alpha, idx=idx)
return bnch
def insert_image(self, image):
if self.limage is None:
self.new_cb()
nlayers = self.limage.num_layers()
if (self.limage.compose == 'rgb') and (nlayers >= 3):
self.fv.show_error("There are already 3 layers")
return
elif nlayers == 0:
# populate metadata from first layer
metadata = image.get_metadata()
self.limage.update_metadata(metadata)
attrs = self._get_layer_attributes()
self.limage.insert_layer(attrs.idx, image, name=attrs.name,
alpha=attrs.alpha)
self._gui_config_layers()
self.logger.debug("setting layer image")
self.fitsimage.set_image(self.limage)
def insert_cb(self):
image = self.fitsimage.get_image()
self.insert_image(image)
def drop_file_cb(self, viewer, paths):
self.logger.info("dropped files: %s" % str(paths))
for path in paths[:3]:
image = self.fv.load_image(path)
self.insert_image(image)
return True
def set_opacity_cb(self, w, val, idx):
alpha = val / 100.0
self.limage.set_alpha(idx, alpha)
def _alphas_controls_to_layers(self):
self.logger.debug("updating layers in %s from controls" % self.limage)
num_layers = self.limage.num_layers()
vals = []
for i in range(num_layers):
alpha = self.w['layer_%d' % i].get_value() / 100.0
vals.append(alpha)
self.logger.debug("%d: alpha=%f" % (i, alpha))
i += 1
self.limage.set_alphas(vals)
def _alphas_layers_to_controls(self):
self.logger.debug("updating controls from %s" % self.limage)
num_layers = self.limage.num_layers()
for i in range(num_layers):
layer = self.limage.get_layer(i)
self.logger.debug("%d: alpha=%f" % (i, layer.alpha))
ctrlname = 'layer_%d' % (i)
if ctrlname in self.w:
self.w[ctrlname].set_value(layer.alpha * 100.0)
i += 1
def add_to_channel_cb(self):
image = self.limage.copy()
name = "composite%d" % (self.count)
self.count += 1
image.set(name=name)
self.fv.add_image(name, image)
def save_as_file(self, path, image, order='RGB'):
if not have_PIL:
raise Exception("You need to install PIL or pillow to save images")
data = image.get_data()
viewer = self.fitsimage
rgbmap = viewer.get_rgbmap()
vmin, vmax = 0, rgbmap.get_hash_size() - 1
# Cut levels on the full image, with settings from viewer
autocuts = viewer.autocuts
loval, hival = viewer.get_cut_levels()
data = autocuts.cut_levels(data, loval, hival,
vmin=vmin, vmax=vmax)
# result becomes an index array fed to the RGB mapper
if not numpy.issubdtype(data.dtype, numpy.dtype('uint')):
data = data.astype(numpy.uint)
# get RGB array using settings from viewer
rgbobj = rgbmap.get_rgbarray(data, order=order,
image_order='RGB')
data = rgbobj.get_array(order)
# Save image using PIL
p_image = Image.fromarray(data)
p_image.save(path)
def save_as_cb(self):
path = str(self.entry2.get_text()).strip()
if not path.startswith('/'):
path = os.path.join('.', path)
image = self.fitsimage.get_image()
self.fv.nongui_do(self.fv.error_wrap, self.save_as_file, path, image)
def instructions(self):
self.tw.set_text("""Drag R, then G then B images to the window. Adjust cut levels and contrast as desired.
Then manipulate channel mix using the sliders.""")
def close(self):
self.fv.stop_local_plugin(self.chname, str(self))
return True
def start(self):
self.instructions()
# start ruler drawing operation
p_canvas = self.fitsimage.get_canvas()
try:
obj = p_canvas.getObjectByTag(self.layertag)
except KeyError:
# Add ruler layer
p_canvas.add(self.canvas, tag=self.layertag)
self.resume()
def pause(self):
self.canvas.ui_setActive(False)
def resume(self):
self.canvas.ui_setActive(True)
def stop(self):
# remove the canvas from the image
p_canvas = self.fitsimage.get_canvas()
try:
p_canvas.deleteObjectByTag(self.layertag)
except:
pass
self.canvas.ui_setActive(False)
self.fv.showStatus("")
self.gui_up = False
def redo(self):
pass
def __str__(self):
return 'compose'
#END
| bsd-3-clause | -1,674,413,334,150,704,600 | 30.988669 | 114 | 0.586344 | false | 3.591603 | false | false | false |
CraigglesO/Ciphers | vigenere_plaintext_encrypt.py | 1 | 3166 | #########################################################################################
#################### HOW TO USE ##########################
#########################################################################################
# This takes input from the terminal so run (in the proper cd): #
# 'python vigenere_plaintext_encrypt.py textFile.txt key' #
# Make sure the file is in the same folder as this script #
# You can also directly input the plain text: #
# 'python vigenere_plaintext_encrypt.py ThisIsPlainTextCamelCasing key' #
# #
# so obviously the first variable is the plaintext with no spaces allowed #
# and the key is an arbitrary length you use to encode the words #
# #
# #
# #
# For decrypting your code check the brother script 'vigenere_plaintext_decrypt.py' #
#########################################################################################
#########################################################################################
# Created by Craig O'Connor - Thursday, August 15, 2013 #
#########################################################################################
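# Illustrative run (example values, not part of the original script):
#   python vigenere_plaintext_encrypt.py attackatdawn lemon
# encrypts 'attackatdawn' with the key 'lemon' and prints the classic cipher text
# 'lxfopvefrnhr', which is also written to cipher_text.txt.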
from sys import argv
script, plain_text, key = argv
plain_text_string = "%s" % plain_text
if ".txt" in plain_text_string:
with open(plain_text_string, 'r') as f:
plain_text_string = f.read()
plain_text_string = plain_text_string.lower()
key_string = "%s" % key
key_string = key_string.lower()
plain_text_num = []
key_num = []
encryption_val = []
encryption_char = ""
#Make sure the key length is long enough to convert the plaintext
while len(key_string) < len(plain_text_string):
key_string += key_string
#This is our value system using a dictionary for a table
num_char = { 0 : 'a', 1 : 'b', 2 : 'c', 3 : 'd', 4 : 'e', 5 : 'f', 6 : 'g', 7 : 'h', 8 : 'i',
9 : 'j', 10 : 'k', 11 : 'l', 12 : 'm', 13 : 'n', 14 : 'o', 15 : 'p', 16 : 'q',
17 : 'r', 18 : 's', 19 : 't', 20 : 'u', 21 : 'v', 22 : 'w', 23 : 'x', 24 : 'y',
25 : 'z' }
#let's convert the plain_text and key into their character values and place each value in its own compartment
for i, c in enumerate(plain_text_string):
for value, char in num_char.iteritems():
if char == c:
plain_text_num.append(value)
for i, c in enumerate(key_string):
for value, char in num_char.iteritems():
if char == c:
key_num.append(value)
#Create encryption values
for i in range(0,len(plain_text_num)):
#Cipher_value = (Message_value + Key_value) mod 26
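	#e.g. (illustrative): plaintext letter 'a' (0) with key letter 'l' (11) -> (0 + 11) % 26 = 11 -> 'l'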
encryption_val.append((plain_text_num[i] + key_num[i]) % 26)
#Finish up, turn those values into the proper characters:
for i in range(0,len(encryption_val)):
for value, char in num_char.iteritems():
if value == encryption_val[i]:
encryption_char += char
print (encryption_char)
with open('cipher_text.txt', 'w') as f:
f.write(encryption_char) | apache-2.0 | 6,300,122,498,283,316,000 | 41.226667 | 108 | 0.482312 | false | 3.778043 | false | false | false |
hdemeyer/king-phisher | king_phisher/server/database/models.py | 1 | 17588 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# king_phisher/server/database/models.py
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import datetime
import logging
import operator
from king_phisher import errors
from king_phisher import utilities
from king_phisher.server import signals
import sqlalchemy
import sqlalchemy.event
import sqlalchemy.ext.declarative
import sqlalchemy.orm
DATABASE_TABLE_REGEX = '[a-z_]+'
"""A regular expression which will match all valid database table names."""
SCHEMA_VERSION = 7
"""The schema version of the database, used for compatibility checks."""
database_tables = {}
"""A dictionary which contains all the database tables and their column names."""
database_table_objects = {}
"""A dictionary which contains all the database tables and their primitive objects."""
logger = logging.getLogger('KingPhisher.Server.Database.Models')
def current_timestamp(*args, **kwargs):
"""
The function used for creating the timestamp used by database objects.
:return: The current timestamp.
:rtype: :py:class:`datetime.datetime`
"""
return datetime.datetime.utcnow()
def get_tables_with_column_id(column_id):
"""
Get all tables which contain a column named *column_id*.
:param str column_id: The column name to get all the tables of.
:return: The list of matching tables.
:rtype: set
"""
return set(x[0] for x in database_tables.items() if column_id in x[1])
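# Illustrative use: once the tables below are registered, get_tables_with_column_id('campaign_id')
# returns a set including 'messages', 'visits', 'credentials' and 'landing_pages'.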
def forward_signal_delete(mapper, connection, target):
signals.safe_send('db-table-delete', logger, target.__tablename__, mapper=mapper, connection=connection, target=target)
def forward_signal_insert(mapper, connection, target):
signals.safe_send('db-table-insert', logger, target.__tablename__, mapper=mapper, connection=connection, target=target)
def forward_signal_update(mapper, connection, target):
signals.safe_send('db-table-update', logger, target.__tablename__, mapper=mapper, connection=connection, target=target)
def register_table(table):
"""
	Register a database table. This will populate the information stored in the
	database_tables dictionary. This also forwards signals to the appropriate
	listeners within the :py:mod:`server.signals` module.
:param cls table: The table to register.
"""
columns = tuple(col.name for col in table.__table__.columns)
database_tables[table.__tablename__] = columns
database_table_objects[table.__tablename__] = table
sqlalchemy.event.listen(table, 'before_delete', forward_signal_delete)
sqlalchemy.event.listen(table, 'before_insert', forward_signal_insert)
sqlalchemy.event.listen(table, 'before_update', forward_signal_update)
return table
class BaseRowCls(object):
"""
The base class from which other database table objects inherit from.
Provides a standard ``__repr__`` method and default permission checks which
are to be overridden as desired by subclasses.
"""
__repr_attributes__ = ()
"""Attributes which should be included in the __repr__ method."""
is_private = False
"""Whether the table is only allowed to be accessed by the server or not."""
def __repr__(self):
description = "<{0} id={1} ".format(self.__class__.__name__, repr(self.id))
for repr_attr in self.__repr_attributes__:
description += "{0}={1!r} ".format(repr_attr, getattr(self, repr_attr))
description += '>'
return description
def assert_session_has_permissions(self, *args, **kwargs):
"""
A convenience function which wraps :py:meth:`~.session_has_permissions`
and raises a :py:exc:`~king_phisher.errors.KingPhisherPermissionError`
if the session does not have the specified permissions.
"""
if self.session_has_permissions(*args, **kwargs):
return
raise errors.KingPhisherPermissionError()
def session_has_permissions(self, access, session):
"""
Check that the authenticated session has the permissions specified in
*access*. The permissions in *access* are abbreviated with the first
letter of create, read, update, and delete.
:param str access: The desired permissions.
:param session: The authenticated session to check access for.
:return: Whether the session has the desired permissions.
:rtype: bool
"""
if self.is_private:
return False
access = access.lower()
for case in utilities.switch(access, comp=operator.contains, swapped=True):
if case('c') and not self.session_has_create_access(session):
break
if case('r') and not self.session_has_read_access(session):
break
if case('u') and not self.session_has_update_access(session):
break
if case('d') and not self.session_has_delete_access(session):
break
else:
return True
return False
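	# Illustrative use (names are hypothetical): row.session_has_permissions('ru', session)
	# is True only when the session has both read ('r') and update ('u') access to the row.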
def session_has_create_access(self, session):
if self.is_private:
return False
return True
def session_has_delete_access(self, session):
if self.is_private:
return False
return True
def session_has_read_access(self, session):
if self.is_private:
return False
return True
def session_has_read_prop_access(self, session, prop):
return self.session_has_read_access(session)
def session_has_update_access(self, session):
if self.is_private:
return False
return True
Base = sqlalchemy.ext.declarative.declarative_base(cls=BaseRowCls)
metadata = Base.metadata
class TagMixIn(object):
__repr_attributes__ = ('name',)
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
name = sqlalchemy.Column(sqlalchemy.String, nullable=False)
description = sqlalchemy.Column(sqlalchemy.String)
@register_table
class AlertSubscription(Base):
__repr_attributes__ = ('campaign_id', 'user_id')
__tablename__ = 'alert_subscriptions'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
user_id = sqlalchemy.Column(sqlalchemy.String, sqlalchemy.ForeignKey('users.id'), nullable=False)
campaign_id = sqlalchemy.Column(sqlalchemy.Integer, sqlalchemy.ForeignKey('campaigns.id'), nullable=False)
type = sqlalchemy.Column(sqlalchemy.Enum('email', 'sms', name='alert_subscription_type'), default='sms', server_default='sms', nullable=False)
mute_timestamp = sqlalchemy.Column(sqlalchemy.DateTime)
def session_has_create_access(self, session):
return session.user == self.user_id
def session_has_delete_access(self, session):
return session.user == self.user_id
def session_has_read_access(self, session):
return session.user == self.user_id
def session_has_update_access(self, session):
return session.user == self.user_id
@register_table
class AuthenticatedSession(Base):
__repr_attributes__ = ('user_id',)
__tablename__ = 'authenticated_sessions'
is_private = True
id = sqlalchemy.Column(sqlalchemy.String, primary_key=True)
created = sqlalchemy.Column(sqlalchemy.Integer, nullable=False)
last_seen = sqlalchemy.Column(sqlalchemy.Integer, nullable=False)
user_id = sqlalchemy.Column(sqlalchemy.String, sqlalchemy.ForeignKey('users.id'), nullable=False)
@register_table
class Campaign(Base):
__repr_attributes__ = ('name',)
__tablename__ = 'campaigns'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
name = sqlalchemy.Column(sqlalchemy.String, unique=True, nullable=False)
description = sqlalchemy.Column(sqlalchemy.String)
user_id = sqlalchemy.Column(sqlalchemy.String, sqlalchemy.ForeignKey('users.id'), nullable=False)
created = sqlalchemy.Column(sqlalchemy.DateTime, default=current_timestamp)
reject_after_credentials = sqlalchemy.Column(sqlalchemy.Boolean, default=False)
expiration = sqlalchemy.Column(sqlalchemy.DateTime)
campaign_type_id = sqlalchemy.Column(sqlalchemy.Integer, sqlalchemy.ForeignKey('campaign_types.id'))
company_id = sqlalchemy.Column(sqlalchemy.Integer, sqlalchemy.ForeignKey('companies.id'))
# relationships
alert_subscriptions = sqlalchemy.orm.relationship('AlertSubscription', backref='campaign', cascade='all, delete-orphan')
credentials = sqlalchemy.orm.relationship('Credential', backref='campaign', cascade='all, delete-orphan')
deaddrop_connections = sqlalchemy.orm.relationship('DeaddropConnection', backref='campaign', cascade='all, delete-orphan')
deaddrop_deployments = sqlalchemy.orm.relationship('DeaddropDeployment', backref='campaign', cascade='all, delete-orphan')
landing_pages = sqlalchemy.orm.relationship('LandingPage', backref='campaign', cascade='all, delete-orphan')
messages = sqlalchemy.orm.relationship('Message', backref='campaign', cascade='all, delete-orphan')
visits = sqlalchemy.orm.relationship('Visit', backref='campaign', cascade='all, delete-orphan')
@property
def has_expired(self):
if self.expiration is None:
return False
if self.expiration > current_timestamp():
return False
return True
@register_table
class CampaignType(TagMixIn, Base):
__tablename__ = 'campaign_types'
# relationships
campaigns = sqlalchemy.orm.relationship('Campaign', backref='campaign_type')
@register_table
class Company(Base):
__repr_attributes__ = ('name',)
__tablename__ = 'companies'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
name = sqlalchemy.Column(sqlalchemy.String, unique=True, nullable=False)
description = sqlalchemy.Column(sqlalchemy.String)
industry_id = sqlalchemy.Column(sqlalchemy.Integer, sqlalchemy.ForeignKey('industries.id'))
url_main = sqlalchemy.Column(sqlalchemy.String)
url_email = sqlalchemy.Column(sqlalchemy.String)
url_remote_access = sqlalchemy.Column(sqlalchemy.String)
# relationships
campaigns = sqlalchemy.orm.relationship('Campaign', backref='company', cascade='all')
@register_table
class CompanyDepartment(TagMixIn, Base):
__tablename__ = 'company_departments'
# relationships
messages = sqlalchemy.orm.relationship('Message', backref='company_department')
@register_table
class Credential(Base):
__repr_attributes__ = ('campaign_id', 'username')
__tablename__ = 'credentials'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
visit_id = sqlalchemy.Column(sqlalchemy.String, sqlalchemy.ForeignKey('visits.id'), nullable=False)
message_id = sqlalchemy.Column(sqlalchemy.String, sqlalchemy.ForeignKey('messages.id'), nullable=False)
campaign_id = sqlalchemy.Column(sqlalchemy.Integer, sqlalchemy.ForeignKey('campaigns.id'), nullable=False)
username = sqlalchemy.Column(sqlalchemy.String)
password = sqlalchemy.Column(sqlalchemy.String)
submitted = sqlalchemy.Column(sqlalchemy.DateTime, default=current_timestamp)
@register_table
class DeaddropDeployment(Base):
__repr_attributes__ = ('campaign_id', 'destination')
__tablename__ = 'deaddrop_deployments'
id = sqlalchemy.Column(sqlalchemy.String, default=lambda: utilities.random_string(16), primary_key=True)
campaign_id = sqlalchemy.Column(sqlalchemy.Integer, sqlalchemy.ForeignKey('campaigns.id'), nullable=False)
destination = sqlalchemy.Column(sqlalchemy.String)
# relationships
deaddrop_connections = sqlalchemy.orm.relationship('DeaddropConnection', backref='deaddrop_deployment', cascade='all, delete-orphan')
@register_table
class DeaddropConnection(Base):
__repr_attributes__ = ('campaign_id', 'deployment_id', 'visitor_ip')
__tablename__ = 'deaddrop_connections'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
deployment_id = sqlalchemy.Column(sqlalchemy.String, sqlalchemy.ForeignKey('deaddrop_deployments.id'), nullable=False)
campaign_id = sqlalchemy.Column(sqlalchemy.Integer, sqlalchemy.ForeignKey('campaigns.id'), nullable=False)
visit_count = sqlalchemy.Column(sqlalchemy.Integer, default=1)
visitor_ip = sqlalchemy.Column(sqlalchemy.String)
local_username = sqlalchemy.Column(sqlalchemy.String)
local_hostname = sqlalchemy.Column(sqlalchemy.String)
local_ip_addresses = sqlalchemy.Column(sqlalchemy.String)
first_visit = sqlalchemy.Column(sqlalchemy.DateTime, default=current_timestamp)
last_visit = sqlalchemy.Column(sqlalchemy.DateTime, default=current_timestamp)
@register_table
class Industry(TagMixIn, Base):
__tablename__ = 'industries'
# relationships
companies = sqlalchemy.orm.relationship('Company', backref='industry')
@register_table
class LandingPage(Base):
__repr_attributes__ = ('campaign_id', 'hostname', 'page')
__tablename__ = 'landing_pages'
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
campaign_id = sqlalchemy.Column(sqlalchemy.Integer, sqlalchemy.ForeignKey('campaigns.id'), nullable=False)
hostname = sqlalchemy.Column(sqlalchemy.String, nullable=False)
page = sqlalchemy.Column(sqlalchemy.String, nullable=False)
@register_table
class StorageData(Base):
__repr_attributes__ = ('namespace', 'key', 'value')
__tablename__ = 'storage_data'
is_private = True
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
created = sqlalchemy.Column(sqlalchemy.DateTime, default=current_timestamp)
namespace = sqlalchemy.Column(sqlalchemy.String)
key = sqlalchemy.Column(sqlalchemy.String, nullable=False)
value = sqlalchemy.Column(sqlalchemy.Binary)
@register_table
class Message(Base):
__repr_attributes__ = ('campaign_id', 'target_email')
__tablename__ = 'messages'
id = sqlalchemy.Column(sqlalchemy.String, default=utilities.make_message_uid, primary_key=True)
campaign_id = sqlalchemy.Column(sqlalchemy.Integer, sqlalchemy.ForeignKey('campaigns.id'), nullable=False)
target_email = sqlalchemy.Column(sqlalchemy.String)
first_name = sqlalchemy.Column(sqlalchemy.String)
last_name = sqlalchemy.Column(sqlalchemy.String)
opened = sqlalchemy.Column(sqlalchemy.DateTime)
opener_ip = sqlalchemy.Column(sqlalchemy.String)
opener_user_agent = sqlalchemy.Column(sqlalchemy.String)
sent = sqlalchemy.Column(sqlalchemy.DateTime, default=current_timestamp)
trained = sqlalchemy.Column(sqlalchemy.Boolean, default=False)
company_department_id = sqlalchemy.Column(sqlalchemy.Integer, sqlalchemy.ForeignKey('company_departments.id'))
# relationships
credentials = sqlalchemy.orm.relationship('Credential', backref='message', cascade='all, delete-orphan')
visits = sqlalchemy.orm.relationship('Visit', backref='message', cascade='all, delete-orphan')
@register_table
class MetaData(Base):
__repr_attributes__ = ('value_type', 'value')
__tablename__ = 'meta_data'
is_private = True
id = sqlalchemy.Column(sqlalchemy.String, primary_key=True)
value_type = sqlalchemy.Column(sqlalchemy.String, default='str')
value = sqlalchemy.Column(sqlalchemy.String)
@register_table
class User(Base):
__tablename__ = 'users'
id = sqlalchemy.Column(sqlalchemy.String, default=lambda: utilities.random_string(16), primary_key=True)
phone_carrier = sqlalchemy.Column(sqlalchemy.String)
phone_number = sqlalchemy.Column(sqlalchemy.String)
email_address = sqlalchemy.Column(sqlalchemy.String)
otp_secret = sqlalchemy.Column(sqlalchemy.String(16))
# relationships
alert_subscriptions = sqlalchemy.orm.relationship('AlertSubscription', backref='user', cascade='all, delete-orphan')
campaigns = sqlalchemy.orm.relationship('Campaign', backref='user', cascade='all, delete-orphan')
def session_has_create_access(self, session):
return False
def session_has_delete_access(self, session):
return False
def session_has_read_access(self, session):
return session.user == self.id
def session_has_read_prop_access(self, session, prop):
if prop in ('id', 'campaigns'): # everyone can read the id
return True
return self.session_has_read_access(session)
def session_has_update_access(self, session):
return session.user == self.id
@register_table
class Visit(Base):
__repr_attributes__ = ('campaign_id', 'message_id')
__tablename__ = 'visits'
id = sqlalchemy.Column(sqlalchemy.String, default=utilities.make_visit_uid, primary_key=True)
message_id = sqlalchemy.Column(sqlalchemy.String, sqlalchemy.ForeignKey('messages.id'), nullable=False)
campaign_id = sqlalchemy.Column(sqlalchemy.Integer, sqlalchemy.ForeignKey('campaigns.id'), nullable=False)
visit_count = sqlalchemy.Column(sqlalchemy.Integer, default=1)
visitor_ip = sqlalchemy.Column(sqlalchemy.String)
visitor_details = sqlalchemy.Column(sqlalchemy.String)
first_visit = sqlalchemy.Column(sqlalchemy.DateTime, default=current_timestamp)
last_visit = sqlalchemy.Column(sqlalchemy.DateTime, default=current_timestamp)
# relationships
credentials = sqlalchemy.orm.relationship('Credential', backref='visit', cascade='all, delete-orphan')
| bsd-3-clause | 8,127,772,163,786,096,000 | 42.107843 | 143 | 0.76194 | false | 3.667987 | false | false | false |
talbrecht/pism_pik | examples/searise-antarctica/showhydro.py | 2 | 2221 | #!/usr/bin/env python
from numpy import *
from matplotlib.pyplot import *
from sys import exit
try:
from netCDF4 import Dataset as NC
except ImportError:
    print "netCDF4 is not installed!"
    exit(1)
nameroot = "routing"
for dx in ("100", "50", "25", "15", "10", "5"):
basename = nameroot + dx + "km"
filename = basename + ".nc"
print "%s: looking for file ..." % filename
try:
nc = NC(filename, 'r')
except:
print " can't read from file ..."
continue
xvar = nc.variables["x"]
yvar = nc.variables["y"]
x = asarray(squeeze(xvar[:]))
y = asarray(squeeze(yvar[:]))
for varname in ("bwat", "bwp", "psi"): # psi must go after bwat, bwp
print " %s: generating pcolor() image ..." % varname
try:
if varname == "psi":
var = nc.variables["topg"]
else:
var = nc.variables[varname]
except:
print "variable '%s' not found ... continuing ..." % varname
continue
data = asarray(squeeze(var[:])).transpose()
if varname == "bwat":
bwatdata = data.copy()
if varname == "bwp":
bwpdata = data.copy()
if varname == "psi":
# psi = bwp + rho_w g (topg + bwat)
data = bwpdata + 1000.0 * 9.81 * (data + bwatdata)
if varname == "bwat":
units = "m"
barmin = 0.0
barmax = 650.0
scale = 1.0
else:
units = "bar"
barmin = -20.0
barmax = 360.0
scale = 1.0e5
print " [stats: max = %9.3f %s, av = %8.3f %s]" % \
(data.max() / scale, units, data.sum() / (scale * x.size * y.size), units)
pcolor(x / 1000.0, y / 1000.0, data / scale, vmin=barmin, vmax=barmax)
colorbar()
gca().set_aspect('equal')
gca().autoscale(tight=True)
xlabel('x (km)')
ylabel('y (km)')
dxpad = "%03d" % int(dx)
pngfilename = varname + "_" + dxpad + "km" + ".png"
print " saving figure in %s ..." % pngfilename
savefig(pngfilename, dpi=300, bbox_inches='tight')
close()
nc.close()
| gpl-3.0 | 3,000,062,345,818,872,300 | 27.844156 | 88 | 0.488969 | false | 3.432767 | false | false | false |
shibinp/google_python | copyspecial/copyspecial.py | 2 | 1679 | #!/usr/bin/pythonfiles
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
import sys
import re
import os
import shutil
import commands
def get_special_path(dirr_name):
paths=[]
files=os.listdir(dirr_name)
for filee in files:
special=re.search(r'__(\w+\w+)__',filee)
if special:
      paths.append(os.path.abspath(os.path.join(dirr_name, filee)))
print paths
return paths
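# Illustrative example: in a directory holding 'zz__something__.jpg' and 'notes.txt',
# only 'zz__something__.jpg' matches the __word__ pattern and is returned as an absolute path.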
def copy_to(path,dirr):
print dirr
efile=get_special_path(path)
if not os.path.exists(dirr):
os.mkdir(dirr)
for each_file in efile:
shutil.copy(each_file,os.path.abspath(dirr))
def zip_to(path,zippath):
efile=get_special_path(path)
for each_file in efile:
cmd='zip -j'+' '+ zippath+' '+ each_file
asd=commands.getstatusoutput(cmd)
def main():
# This basic command line argument parsing code is provided.
# Add code to call your functions below.
# Make a list of command line arguments, omitting the [0] element
# which is the script itself.
args = sys.argv[1:]
if not args:
print "usage: [--todir dir][--tozip zipfile] dir [dir ...]";
sys.exit(1)
# todir and tozip are either set from command line
# or left as the empty string.
# The args array is left just containing the dirs.
todir = ''
if args[0] == '--todir':
todir = args[1]
del args[0:2]
tozip = ''
if args[0] == '--tozip':
tozip = args[1]
del args[0:2]
if len(args) == 0:
print "error: must specify one or more dirs"
sys.exit(1)
  # Call your functions
  if todir:
    copy_to(args[0], todir)
  elif tozip:
    zip_to(args[0], tozip)
  else:
    get_special_path(args[0])
if __name__ == "__main__":
main()
| apache-2.0 | 3,702,862,706,209,384,400 | 20.805195 | 67 | 0.661703 | false | 2.961199 | false | false | false |
jonasluz/mia-cg | pyCohenSutherland/cs.py | 1 | 6358 | # -*- coding: utf-8 -*-
"""
UNIVERSIDADE DE FORTALEZA - UNIFOR
DEPARTAMENTO DE PÓS-GRADUAÇÃO EM INFORMÁTICA APLICADA - PPGIA
    Course: Probabilidade e Estatística (Probability and Statistics)
    Exercise solutions
    ----
    Student: Jonas de Araújo Luz Jr. <[email protected]>
"""
## LIBRARIES
#
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.lines as lines
from mpl_toolkits.axes_grid.axislines import SubplotZero
def initplot(size):
"""
    Initialize the figure and axes used for plotting.
"""
fig = plt.figure(1)
ax = SubplotZero(fig, 111)
fig.add_subplot(ax)
for direction in ["xzero", "yzero"]:
ax.axis[direction].set_axisline_style("-|>")
ax.axis[direction].set_visible(True)
for direction in ["left", "right", "bottom", "top"]:
ax.axis[direction].set_visible(False)
ax.set_xlim(-10, size)
ax.set_ylim(-10, size)
return (fig, ax)
def drawWindow(left, right, bottom, top):
"""
    Draw the clipping (viewing) window.
"""
limR = right + 100
ix, iy = np.linspace(0, limR, limR - left), np.zeros(limR - left)
xw, xh = np.linspace(left, right, right-left+1), np.zeros(top-bottom+1)
yw, yh = np.zeros(right-left+1), np.linspace(bottom, top, top-bottom+1)
for mark in (bottom, top):
ax.plot(ix, iy + mark, linestyle=(0, (1, 5)), linewidth=1.0, color='black')
ax.plot(xw, yw + mark, linestyle=(0, ()), linewidth=1.5, color='black')
for mark in (left, right):
ax.plot(iy + mark, ix, linestyle=(0, (1, 5)), linewidth=1.0, color='black')
ax.plot(xh + mark, yh, linestyle=(0, ()), linewidth=1.5, color='black')
def drawLine(item, p1x, p1y, p2x, p2y, linewidth=1.0, color='blue'):
"""
    Draw the given line segment.
"""
mx, my = p2x - p1x, p2y - p1y # deltas x e y.
pm = (p1x + mx/2, p1y + my/2) # ponto médio.
m = my / mx if mx != 0 else float('inf') # inclinação da reta.
x, y = [], []
if m == float('inf'):
x = np.zeros(100) + p1x
y = np.linspace(p1y, p2y, 100)
else:
x = np.linspace(p1x, p2x, 100)
y = p1y + m * (x - p1x)
ax.plot(x, y, linewidth=linewidth, color=color)
if item:
ax.annotate(item,
xy=pm, xycoords='data',
xytext=(30, -15), textcoords='offset points',
arrowprops=dict(facecolor='black', shrink=0.05),
horizontalalignment='right', verticalalignment='middle')
INF = float('inf') # infinity.
BIT_L = 0B0001
BIT_R = 0B0010
BIT_B = 0B0100
BIT_T = 0B1000
def f(x, m, x1, y1):
"""
    Line equation solved for y (as a function of x).
"""
return y1 + m * (x - x1)
def fi(y, m, x1, y1):
"""
    Line equation solved for x (as a function of y).
"""
return ( x1 + (y - y1) / m ) if m != INF else x1
def csBinaryCode(left, right, bottom, top, x, y):
"""
    Cohen-Sutherland algorithm for two-dimensional clipping against a rectangular window.
    Subroutine that computes the region code of a given point.
"""
result = 0b0000
if x < left: result |= BIT_L
elif x > right: result |= BIT_R
if y < bottom: result |= BIT_B
elif y > top: result |= BIT_T
return result
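# Illustrative example: a point left of and below the window gets code BIT_L | BIT_B = 0b0101,
# while a point inside the window gets 0b0000.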
def csIntersect(left, right, bottom, top, x, y, m, c, verbose=None):
"""
    Compute the valid intersection point starting from the point (x, y).
"""
p = (x, y)
if c:
if c & BIT_L: # ponto à esquerda.
p = (left, f(left, m, x, y)) # intersecção com esquerda.
elif c & BIT_R: # ponto à direita.
p = (right, f(right, m, x, y)) # intersecção à direita.
c = csBinaryCode(left, right, bottom, top, *p)
if verbose: print('{}\'={} - código: {:b}'.format(verbose, p, c))
if c & BIT_B: # ponto abaixo.
p = (fi(bottom, m, x, y), bottom) # intersecção abaixo.
elif c & BIT_T: # ponto acima.
p = (fi(top, m, x, y), top) # intersecção acima.
c = csBinaryCode(left, right, bottom, top, *p)
if verbose: print('{}\'={} - código: {:b}'.format(verbose, p, c))
return (p, c)
def CohenSutherland(left, right, bottom, top, p1x, p1y, p2x, p2y, verbose=False):
"""
    Cohen-Sutherland algorithm for two-dimensional clipping against a rectangular window.
"""
p1, p2 = (p1x, p1y), (p2x, p2y)
c1 = csBinaryCode(left, right, bottom, top, *p1)
c2 = csBinaryCode(left, right, bottom, top, *p2)
if verbose:
print('VERIFICANDO O SEGMENTO DE RETA {}-{} NA JANELA {}'.
format(p1, p2, (left, right, bottom, top)))
print('--------------------------------------------------------------------')
print('Os códigos binários são: P1{}: {:b} e P2{}: {:b}.'.
format(p1, c1, p2, c2))
result, m = None, None
if c1 & c2: # caso trivial de invisitibilidade total.
assert True
    # otherwise, c1 & c2 == 0 - the segment is totally or partially visible.
elif c1 | c2: # parcialmente visível.
mx, my = p2x - p1x, p2y - p1y # deltas x e y.
m = my / mx if mx != 0 else INF # inclinação da reta.
        ## Compute the intersections with the window edges.
#
p1, c1 = csIntersect(left, right, bottom, top, *p1, m, c1,
'P1' if verbose else None)
p2, c2 = csIntersect(left, right, bottom, top, *p2, m, c2,
'P2' if verbose else None)
result = (*p1, *p2)
else: # totalmente visível.
result = (*p1, *p2)
if verbose:
msg = 'TRIVIAL E COMPLETAMENTE IN' if result == None else ('PARCIALMENTE ' if c1 | c2 else 'TOTALMENTE ')
print('O segmento de reta é {}VISÍVEL'.format(msg))
if result != None:
print('A inclinação da reta é {}'.format(m))
print('Deve-se traçar o segmento {}-{}'.
format((result[0], result[1]), (result[2], result[3])))
print('====================================================================\n')
return result
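# Illustrative call (window and segment values are made up for demonstration):
#   CohenSutherland(0, 500, 0, 500, -100, 250, 600, 250)
# clips the horizontal segment to the window and returns (0, 250.0, 500, 250.0).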
def show():
plt.tight_layout()
plt.show()
fig, ax = initplot(1000) | unlicense | -2,511,778,365,239,691,300 | 34.223464 | 113 | 0.528712 | false | 2.944418 | false | false | false |
nordam/PyPPT | python_scripts/alpha_v/communication.py | 1 | 17103 | # script with functions to use in main-file
# this script handle the "communication of particles between ranks"-part
import sys
# Simen Mikkelsen, 2016-11-21
# communication.py
'''
def exchange(X):
# Handle all the communication stuff here
# return the updated particle arrays
# (which may be of a different length now)
return X
'''
### TODO:
#implement 'find biggest factor'-function or another dynamical function to determine number of cells in each direction
import numpy as np
import mpi4py.MPI as MPI
### TODO: import comm from main?
#comm = MPI.COMM_WORLD
#rank_communication_module = communicator.Get_rank() # not used, parameter given from main
#mpi_size_communication_module = communicator.Get_size() # not used, parameter given from main
## INITIALISING start
# number of cells in each direction (only divide x-direction initially)
cell_x_n = 20
cell_y_n = 0
cell_n = cell_x_n + cell_y_n
# scaling factor when expanding/shrinking local arrays
scaling_factor = 1.25 ## variable
shrink_if = 1/(scaling_factor**3)
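# e.g. with scaling_factor = 1.25, shrink_if = 1/1.25**3 = 0.512, so a local array is only
# shrunk when it is less than roughly half full (illustrative arithmetic).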
# the particles are defined with their properties in several arrays
# one tag for each property which is communicated to other ranks
# tags: id, x-pos, y-pos
# other properties: active-status
tag_n = 3
# buffer overhead to use in memory reservation for non-blocking communication
buffer_overhead = 1000
## INITIALISING end
# spatial properties
x_start = 0
x_end = 1
y_start = 0
y_end = 1
x_len = x_end - x_start
y_len = y_end - y_start
## VARIABLES start
## VARIABLES end
## secondary FUNCTIONS start
# function to find the corresponding rank of a cell
# this function determines how the cells are distributed to ranks
### TODO: discussion: how to do this distribution
def find_rank_from_cell(cell_id, mpi_size):
return int(cell_id % mpi_size)
# function to find the corresponding cell of a position
# this function determines how the cells are distributed geometrically
### TODO: discussion: how to do this distribution
def find_cell_from_position(x, y):
return int(((x - x_start)/(x_len))*(cell_x_n)) # for 1D
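# Illustrative example (assuming the defaults above, x in [0, 1) and cell_x_n = 20, with a
# hypothetical 4 ranks): a particle at x = 0.37 falls in cell int(0.37 * 20) = 7,
# and find_rank_from_cell(7, 4) = 3.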
# send_n_array: array to show how many particles should be sent from one rank to the others
# filled out locally in each rank, then communicated to all other ranks
# rows represent particles sent FROM rank = row number (0 indexing)
# columns represent particles sent TO rank = column number (0 indexing)
# function to fill out the array showing how many particles need to be sent from a given rank, given the local particles there
# local particles are the particles who belonged to the rank before the transport of particles.
# some of the particles may have to be moved to a new rank if they have been moved to a cell belonging to a new rank
# send_to: array to show which rank a local particle needs to be sent to, or -1 if it should stay in the same rank
def global_communication_array(mpi_size, rank, particle_n, particle_x, particle_y, particle_active):
#print('global com.array, particle n:', particle_n)
# reset arrays telling which particles are to be sent
send_to = np.zeros(particle_n, dtype=int) # local
send_to[:] = -1
send_n_array = np.zeros((mpi_size, mpi_size), dtype=int)
for i in range(particle_n):
# only check if the particle is active
if particle_active[i]:
            # find the rank of the cell to which the particle (its position) belongs
particle_rank = find_rank_from_cell(find_cell_from_position(particle_x[i], particle_y[i]), mpi_size)
# if the particle's new rank does not equal the current rank (for the given process), it should be moved
if particle_rank != rank:
send_n_array[int(rank)][int(particle_rank)] = send_n_array[int(rank)][int(particle_rank)] + 1
send_to[i] = particle_rank
# converted indices to int to not get 'deprecation warning'
return send_to, send_n_array
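# Illustrative reading of the result (hypothetical 2-rank run): send_n_array[0][1] == 3 means
# rank 0 must ship 3 of its particles to rank 1; send_to[i] == 1 marks particle i as one of them,
# while send_to[i] == -1 means particle i stays on its current rank.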
# function to reallocate active particles to the front of the local arrays
# active_n = number of active particles after deactivation of particles sent to another rank, but before receiving.
# aka. particles that stays in its own rank
def move_active_to_front(particle_id, particle_x, particle_y, particle_active, active_n):
#print('move_active_to_front(), particle_active, active_n:', particle_active.dtype, active_n)
particle_id[:active_n] = particle_id[particle_active]
particle_x[:active_n] = particle_x[particle_active]
particle_y[:active_n] = particle_y[particle_active]
# set the corresponding first particles to active, the rest to false
particle_active[:active_n] = True
particle_active[active_n:] = False
return particle_id, particle_x, particle_y, particle_active
## secondary FUNCTIONS end
## main FUNCTION start
# all variables taken in by exchange() are local variables for the given rank (except mpi_size)
def exchange(communicator,
mpi_size,
rank,
#particle_n, # could also be calculated in function: particle_n = np.size(particle_id)
particle_id,
particle_x,
particle_y,
particle_active):
#print('mpi_size from main module', mpi_size)
#print('mpi_size from communication module', mpi_size_communication_module)
#print('rank from main module', rank)
#print('rank from communication module', rank_communication_module)
# compute "global communication array"
# with all-to-all communication
# length of local particle arrays
particle_n = np.size(particle_id)
    # note: not necessarily equal to the number of active particles
send_to, send_n = global_communication_array(mpi_size, rank, particle_n, particle_x, particle_y, particle_active)
# all nodes receives results with a collective 'Allreduce'
# mpi4py requires that we pass numpy objects (byte-like objects)
send_n_global = np.zeros((mpi_size, mpi_size), dtype=int)
communicator.Allreduce(send_n, send_n_global , op=MPI.SUM)
# each rank communicate with other ranks if it sends or receives particles from that rank
# this information is now given in the "global communication array"
# point-to-point communication of particles
# using list of arrays for communication of particle properties
# initializing "communication arrays": send_*** and recv_***
# send_**: list of arrays to hold particles that are to be sent from a given rank to other ranks,
# where row number corresponds to the rank the the particles are send to
# recv_**: list of arrays to hold particles that are to be received from to a given rank from other ranks,
# where row number corresponds to the rank the particles are sent from
send_id = []
send_x = []
send_y = []
recv_id = []
recv_x = []
recv_y = []
# total number of received particles
received_n = np.sum(send_n_global, axis = 0)[rank]
for irank in range(mpi_size):
# find number of particles to be received from irank (sent to current rank)
Nrecv = send_n_global[irank, rank]
# append recv_id with the corresponding number of elements
recv_id.append(np.zeros(Nrecv, dtype = np.int64))
recv_x.append(np.zeros(Nrecv, dtype = np.float64))
recv_y.append(np.zeros(Nrecv, dtype = np.float64))
# find number of particles to be sent to irank (from current rank)
Nsend = send_n_global[rank, irank]
# append send_id with the corresponding number of elements
send_id.append(np.zeros(Nsend, dtype = np.int64))
send_x.append(np.zeros(Nsend, dtype = np.float64))
send_y.append(np.zeros(Nsend, dtype = np.float64))
# counter to get position in send_** for a particle to be sent
send_count = np.zeros(mpi_size, dtype=int)
# iterate over all local particles to allocate them to send_** if they belong in another rank
for i in range(particle_n):
# if particle is active (still a local particle) and should be sent to a rank (-1 means that the particle already is in the correct rank)
if (particle_active[i] and send_to[i] != -1):
# fill the temporary communication arrays (send_**) with particle and it's properties
send_id[send_to[i]][send_count[send_to[i]]] = i
send_x[send_to[i]][send_count[send_to[i]]] = particle_x[i]
send_y[send_to[i]][send_count[send_to[i]]] = particle_y[i]
# deactivate sent particle
particle_active[i] = False
# increment counter to update position in temporary communication arrays (send_**)
send_count[send_to[i]] = send_count[send_to[i]] + 1
# actual exchange of particle properties follows
# must convert the list of arrays which are to be communicated to numpy objects (byte-like objects)
# this is not done before because np.ndarrays does not support a "list of arrays" if the arrays does not have equal dimensions
#send_id_np = np.array(send_id)
#recv_id_np = np.array(recv_id)
#send_x_np = np.array(send_x)
#recv_x_np = np.array(recv_x)
#send_y_np = np.array(send_y)
#recv_y_np = np.array(recv_y)
# requests to be used for non-blocking send and receives
send_request_id = [0] * mpi_size
send_request_x = [0] * mpi_size
send_request_y = [0] * mpi_size
recv_request_id = [0] * mpi_size
recv_request_x = [0] * mpi_size
recv_request_y = [0] * mpi_size
# sending
for irank in range(mpi_size):
if (irank != rank):
# number of particles rank sends to irank
Nsend = send_n_global[rank, irank]
            # only send if there is something to send
if (Nsend > 0):
#print('rank:', rank, 'sending', Nsend, 'particles to', irank)
# use tags to separate communication of different arrays/properties
# tag uses 1-indexing so there will be no confusion with the default tag = 0
send_request_id[irank] = communicator.isend(send_id[irank][0:Nsend], dest = irank, tag = 1)
send_request_x[irank] = communicator.isend(send_x[irank][0:Nsend], dest = irank, tag = 2)
send_request_y[irank] = communicator.isend(send_y[irank][0:Nsend], dest = irank, tag = 3)
# receiving
for irank in range(mpi_size):
if (irank != rank):
            # number of particles irank sends to rank (number of particles rank receives from irank)
Nrecv = send_n_global[irank, rank]
            # only receive if there is something to receive
if (Nrecv > 0):
#print('rank:', rank, 'receiving', Nrecv, 'particles from', irank)
buf_id = np.zeros(Nrecv+buffer_overhead, dtype = np.int64)
buf_x = np.zeros(Nrecv+buffer_overhead, dtype = np.float64)
buf_y = np.zeros(Nrecv+buffer_overhead, dtype = np.float64)
# use tags to separate communication of different arrays/properties
# tag uses 1-indexing so there will be no confusion with the default tag = 0
recv_request_id[irank] = communicator.irecv(buf = buf_id, source = irank, tag = 1)
recv_request_x[irank] = communicator.irecv(buf = buf_x, source = irank, tag = 2)
recv_request_y[irank] = communicator.irecv(buf = buf_y, source = irank, tag = 3)
# obtain data from completed requests
# only at this step is the data actually returned.
for irank in range(mpi_size):
if irank != rank:
# if there is something to receive
if send_n_global[irank, rank] > 0: # Nrecv > 0
recv_id[irank][:] = recv_request_id[irank].wait()
recv_x[irank][:] = recv_request_x[irank].wait()
recv_y[irank][:] = recv_request_y[irank].wait()
#print('recv_id_np:', recv_id_np)
#print("recv_x_np:", recv_x_np)
#print("recv_y_np:", recv_y_np)
# make sure this rank does not exit until sends have completed
for irank in range(mpi_size):
if irank != rank:
# if there is something to send
if send_n_global[rank, irank] > 0: # Nsend > 0
send_request_id[irank].wait()
send_request_x[irank].wait()
send_request_y[irank].wait()
# total number of received and sent particles
# total number of active particles after communication
sent_n = int(np.sum(send_n_global, axis = 1)[rank])
received_n = int(np.sum(send_n_global, axis = 0)[rank])
active_n = int(np.sum(particle_active))
# move all active particles to front of local arrays
if (active_n > 0):
particle_id, particle_x, particle_y, particle_active = move_active_to_front(particle_id, particle_x, particle_y, particle_active, active_n)
# resize local arrays if needed
# current scaling factor = 1.25
### TODO: add ceil/floor directly in if-check?
# check if local arrays have enough free space, if not, allocate a 'scaling_factor' more than needed
if (active_n + received_n > particle_n):
new_length = int(np.ceil((active_n + received_n)*scaling_factor))
# if new length is not equal old length: resize all local arrays
if new_length != particle_n:
#print('extending arrays to new length:', new_length)
# with .resize-method, missing/extra/new entries are filled with zero (false in particle_active)
### TODO: change from resize function to method
particle_active = np.resize(particle_active, new_length)
particle_id = np.resize(particle_id, new_length)
particle_x = np.resize(particle_x, new_length)
particle_y = np.resize(particle_y, new_length)
# particle_active.resize(new_length, refcheck = False)# refcheck = True by default
# particle_id.resize(new_length, refcheck = False)
# particle_x.resize(new_length, refcheck = False)
# particle_y.resize(new_length, refcheck = False)
# check if local arrays are bigger than needed (with a factor: shrink_if = 1/scaling_factor**3)
# old + new particles < shrink_if*old_size
# if they are, shrink them with a scaling_factor
if (active_n + received_n < shrink_if*particle_n):
new_length = int(np.ceil(particle_n/scaling_factor))
# if new length is not equal old length: resize all local arrays
if new_length != particle_n:
#print('shrinking arrays to new length:', new_length)
### TODO: change from resize function to method
particle_active = np.resize(particle_active, new_length)
particle_id = np.resize(particle_id, new_length)
particle_x = np.resize(particle_x, new_length)
particle_y = np.resize(particle_y, new_length)
# particle_active.resize(new_length, refcheck = false)# refcheck = true by default
# particle_id.resize(new_length, refcheck = false)
# particle_x.resize(new_length, refcheck = false)
# particle_y.resize(new_length, refcheck = False)
# add the received particles to local arrays
# unpack (hstack) the list of arrays, (ravel/flatten can not be used for dtype=object)
if received_n > 0:
particle_id[active_n:active_n+received_n] = np.hstack(recv_id)
particle_x[active_n:active_n+received_n] = np.hstack(recv_x)
particle_y[active_n:active_n+received_n] = np.hstack(recv_y)
# set the received particles to active
particle_active[active_n:active_n+received_n] = np.ones(received_n, dtype = np.bool)
# optional printing for debugging
# print values for debugging
#print('particle_n (old value):', particle_n)
#print("old active_n:", active_n)
#print("sent_n:", sent_n)
#print("received_n:", received_n)
#print("new active_n:", np.sum(particle_active))
#print('new length of local arrays:', np.size(particle_id))
#print("new local particles:", particle_id)
#print("new active particles:", particle_active*1) # *1 to turn the output into 0 and 1 instead of False and True
else:
print("\nno received particles")
# print global array
if rank == 0:
print('\nglobal_array:\n', send_n_global)
# return the updated particle arrays
return (particle_id,
np.array([ particle_x, # x component
                      particle_y]),# y component
particle_active)
| mit | 3,886,690,969,642,233,300 | 44.986264 | 147 | 0.632813 | false | 3.756424 | false | false | false |
bpkeene/pythonPlayground | GUI_Template.py | 1 | 16981 | #********************************************************************************
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#********************************************************************************
#
# Created by Brian Keene on 8 September 2016
#
# Revision history:
#
#
#********************************************************************************
# note - on OSX, requires framework build of python/2.7 to run, as this
# application requires access to the screen (this might only apply to systems
# running Mavericks or later)
# This script is intended to serve as a template for graphical user interfaces;
# it is primarily a simplified front-end to wxPython (wxWidgets), and allows for
# easy setup of dynamic hiding that may involve cross-communication between
# objects at assorted levels of the parent-child hierarchy. It supports an
# object-oriented (OOP) construction of a GUI, with emphasis on easy
# modification in the accompanying script.
# import the needed modules
import wx, os
# global dictionary in which we store data
myDict = {}
class wxFrame(wx.Frame):
# note to others: we pass another class (an instance of Frame) to this wx.Frame derived class;
# the ambiguity of parent in the class __init__ vs the wx.Frame.__init__ is due to parent in the
# wx.Frame.__init__ function being a /keyword/ argument, rather than a python convention, as is used
# in the class __init__ function. The wx.Frame.__init__ parent argument /must/ be a wx.Window object,
# or simply value "None", which is what we usually use
# __init__ takes the implicit self argument as usual
# and 'sibling' here is an instance of our 'Frame' class defined immediately below this class.
# 'sibling' holds all the necessary data needed to define a wx.Frame object.
def __init__(self,sibling):
wx.Frame.__init__(self,parent=sibling._parent,title=sibling._title)
self.SetInitialSize(sibling._size)
# we define our own Frame() class, because we don't instantly want to create an actual wx.Frame object yet
class Frame:
# a static class object we can access using Frame._register[index] - we don't access this via an instance of
# the class; we can also iterate over it, looking for instances with specific data
_register = []
_typeName = "Frame"
# implicit argument self
# parent: typically None, but if a frame is spawned dynamically it may be useful to pass the relevant object
# title: string displayed at the top of the frame (the name)
# size: integer tuple (e.g., (100,100)) specifying the size of the frame in pixels
def __init__(self, parent, title, size, **kwargs):
self._parent = parent;
self._title = title;
self._size = size;
# an instance variable holding other instances that are children of this instance
self._children = []
def initObj(self):
# make an instance of the frame, that is a derived class of the wx.Frame class
self._obj = wxFrame(self)
Frame._register.append(self)
# iterate over this instance's children and initialize them.
for obj in self._children:
obj.initObj();
# we have now instantiated all of the objects on this frame; show the frame
self._obj.Show()
# a wxNotebook class
class wxNotebook(wx.Notebook):
# the implicit self argument, as usual
# and 'sibling' - the instance of 'Notebook' class (defined below) holding all
# necessary data needed to define the wx.Notebook
def __init__(self,sibling):
        wx.Notebook.__init__(self, sibling._parent._obj)
self._pages = [];
for index, item in enumerate(sibling._children):
item.initObj();
self._pages.append(item._obj)
self.AddPage(self._pages[index], item._name);
self.NBSizer = wx.BoxSizer();
self.NBSizer.Add(self,1,wx.EXPAND)
sibling._parent._obj.SetSizer(self.NBSizer)
# our notebook class that collates information before making a wx.Notebook notebook
class Notebook:
# the implicit self argument
# parent panel object
# the pages to be added to this notebook
# and the names of the pages
_register = []
_typeName = "Notebook"
def __init__(self,parent, **kwargs):
# instantiate the notebook
self._parent = parent;
# an instance variable holding other instances that are children of this instance
self._children = [];
self._pages = [];
# append this instance to a list belonging to the parent, so that the parent knows
parent._children.append(self);
def initObj(self):
# our wxNotebook method initiates the instantiation of the self._children objects
self._obj = wx.Notebook(self._parent._obj)
# create a wxNotebook instance and store it in self._obj; pass 'self' as the argument
# i.e., we pass this instance of Notebook as the 'sibling' argument (the wxNotebook 'self' is implicit)
##self._obj = wxNotebook(self)
for index, item in enumerate(self._children):
item.initObj();
self._pages.append(item._obj);
self._obj.AddPage(self._pages[index], item._name);
self.NBSizer = wx.BoxSizer();
self.NBSizer.Add(self._obj, 1, wx.EXPAND)
self._parent._obj.SetSizer(self.NBSizer)
Notebook._register.append(self)
    def customBehavior(self):
        pass
    # I think this has to be incorporated in the wxNotebook class, rather than here;
    def OnPageChanging(self, event):
        oldPage = event.GetOldSelection()
        newPage = event.GetSelection()
        self.customBehavior()
class wxPanel(wx.Panel):
def __init__(self,sibling):
wx.Panel.__init__(self,parent=sibling._parent._obj);
self._needsSizer = True;
for obj in sibling._children:
if obj._typeName == "Notebook":
self._needsSizer = False;
break
if self._needsSizer:
self.grid = wx.GridBagSizer(hgap=5,vgap=5);
self.SetSizer(self.grid);
# call the init methods of the objects, which then places wxWidget objects in the self._widgets variable for
# each Widget class instance
# a panel holding a notebook will never have a widget - its a dummy panel
# if it does, this is where an error will be thrown!
for child in sibling._children:
if child._typeName == "Widget":
child.initObj(self);
self.grid.Add(child._obj, pos=child._pos, span=child._span, flag=child._gridFlags)
# if the base child widget object is a label, it won't have a function
if ((child._function is not None) and (child._wxEvt is not None)):
self.Bind(child._wxEvt,child._function,child._obj)
if child._label is not None:
# we know that this will be a label;
child._labelObj = wx.StaticText(self,label=child._label)
self.grid.Add(child._labelObj,child._labelPos, child._labelSpan)
if (child._hasSlave):
self.Bind(child._wxEvt, child.masterFunction, child._obj)
# some objects are initially hidden; here, we hide them.
if (child._initHide):
child._obj.Hide()
if (child._label is not None):
child._labelObj.Hide()
self.Layout()
# in this class, we collate all the information we'll need to make a well-defined wx.Panel object
class Panel:
# what do we require from the user to instantiate a base panel object?
# make an iterable list of panel instances; make sure methods only access this /after/
# the main frame has added all objects (i.e., at the end of th user's GUI script!)
_register = []
# all instances of this class have the _typeName = "Panel"
_typeName = "Panel"
def __init__(self, parent,**kwargs):
# a list of widget objects, from our widgets class, that identify this panel as their parent panel
# note that we do /not/ need more information, as this has the instanced objects; we can call their methods
# directly from here! Very convenient.
self._widgets = [];
# panel must have parent object on which it is displayed
self._parent = parent;
# a list of the instances that have this instance of the Panel class as their parent
self._children = []
parent._children.append(self);
# we use a name if this panel is a child of a Notebook object; in this case, the name is
# displayed atop the notebook
self._name = kwargs.get("name",None)
def initObj(self):
# we initialize the panel, which then refers to all of the panel's widgets' methods for their instantiation
self._obj = wxPanel(self);
# append this instance to the class register, so that we may iterate over the class instances if needed
Panel._register.append(self);
# iterate over self._children, and initialize objects that are /not/ of the type widget; these will
# be initialized in the wxPanel class!
for obj in self._children:
if (obj._typeName != "Widget"):
obj.initObj()
    def deleteWidget(self):
        pass
    def bindToFunction(self):
        # we may have already done this in the Widget class; it could be better that way.
        pass
#class wxWidget:
# def __init__(self,sibling):
# self._widget = None;
# if sibling._
class Widget:
_register = []
_typeName = "Widget"
# for all Widget objects, we need the parent object, widgetType, name, and position
def __init__(self,parent,widgetType,name,pos,**kwargs):
# note that we use **kwargs to pass in information that may be specific to certain type
# of widget; e.g., text widget vs button vs ... etc.
# **kwargs is a list of KeyWord ARGumentS (kwargs) of arbitrary length
# note that, by default, there is no label (and no label position <(int,int)> provided
#####################
# Required arguments, for all widget types
#####################
self._parent = parent; # parent object, typically an instance of Panel
self._widgetType = widgetType; # button, textwidget, label, etc.
self._name = name; #string
self._pos = pos; #tuple of coords: "(integer, integer)"
#####################
# Required arguments, for some widget types
#####################
# required for choice widgets
self._choices = kwargs.get('choices',None)
############################
# optional arguments
# we can specify a label (if so, must specify a position)
# the spans of the label and widget default to (1,1)
# if a widget can use an initial value (e.g., a text control), it defaults to an empty string
# if a widget is to be bound to a function, must specify this explicitly or bind to it later
############################
self._label = kwargs.get('label',None)
self._labelPos = kwargs.get('labelPos',None)
# default behavior of span is (1,1) if not specified
self._span = kwargs.get('span',(1,1))
self._labelSpan = kwargs.get('labelSpan',(1,1))
self._initValue = kwargs.get('value',"")
self._function = kwargs.get('function',None)
self._wxEvt = None
self._hasMaster = False; # default this to false; changed if the setMaster() function is called on self
self._hasSlave = False;
# these will be instantiated during the creation of the parent object
self._labelObj = None;
self._obj = None;
# Hide most objects at first; that way, they only show if they are told to show,
# and otherwise will hide when told to hide
        # implement this /after/ we have connected all the show/hide functionality
self._initHide = False;
# TODO: have the Panel's grid.Add() method use these flags when instantiating the widget
self._gridFlags = (wx.RESERVE_SPACE_EVEN_IF_HIDDEN | wx.EXPAND | wx.ALIGN_CENTER)
# append the object to the list of children in the parent instance
parent._children.append(self)
# the master widget - this is a /Widget/ instance
self._masters = []
# denotes messages from master that instruct self to Hide()
# these should be strings
self._hideWhen = []
# widgets to which self is master; note that this is set implicitly via setMaster, when
        # other widgets denote self as master
# this is a /Widget/ instance (not a wx object)
self._slaves = []
Widget._register.append(self); # append this instance to the class register
# allows the function to which the widget will be bound to be set after construction of the widget instance
# we allow the function to be defined according to whatever parameters the user inputs; no implicit self
def masterFunction(self,event):
# pass the value of this widget to slaved widgets
message = str(event.GetString())
for slave in self._slaves:
slave.evaluateMessage(message);
def evaluateMessage(self,message):
# this is used by the interface to loop over child widgets
# in the event that a chosen selection hides multiple levels of the parent-child hierarchy.
# continues until exhaustion
if message in self._hideWhen:
self._obj.Hide()
if (self._labelObj is not None):
self._labelObj.Hide()
self._parent._obj.Layout()
else:
self._obj.Show()
if (self._labelObj is not None):
self._labelObj.Show()
self._parent._obj.Layout()
def setMaster(self, master, hideWhen):
self._masters.append(master)
# assume hideWhen is in the form of an array
for instruction in hideWhen:
self._hideWhen.append(instruction)
# append self to master._slaves[]
master._slaves.append(self);
self._hasMaster = True;
if master._hasSlave == False:
master._hasSlave = True;
def setFunction(self,function):
self._function = function;
def setGridFlags(self,flags):
self._gridFlags = flags;
def setInitHide(self,boolean):
self._initHide = boolean;
# maybe the user wants to attach labels later; allow them to do so here
def setLabel(self,label,labelPos,**kwargs):
self._label = label;
self._labelPos = labelPos;
self._labelSpan = kwargs.get('labelSpan',(1,1))
# this is a bottom level object; it requires a parentInstance on initialization
def initObj(self,parentInstance):
# for each, initialize the wx object in self._obj, and inform the class what kind of wx event to
# expect in self._wxEvt
#self._obj = wxWidget(self)
if (self._widgetType == "text"):
self._obj = wx.TextCtrl(parentInstance,value=self._initValue,name=self._name)
self._wxEvt = wx.EVT_TEXT
# need to add all types of widgets here; remember to overload necessary parameters for each via kwargs.get()
elif (self._widgetType == "choice"):
#choicesList
if (self._choices is None):
raise ValueError('%s has no choices! Please specify choices for the choice widget.' %(self._name))
self._obj = wx.Choice(parentInstance,-1,choices=self._choices,name=self._name)
self._wxEvt = wx.EVT_CHOICE
# more types of widgets to be implemented
elif (self._widgetType == "button"):
if (self._name is None):
raise ValueError('%s has no name! The name of the button is displayed on the button, and \n\
is required!' %(self._name))
self._obj = wx.Button(parentInstance,label=self._name, name=self._name)
self._wxEvt = wx.EVT_BUTTON
elif (self._widgetType == "static"):
self._obj = wx.StaticText(parentInstance,label=self._name, name=self._name)
self._wxEvt = None
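
# ------------------------------------------------------------------------------
# Minimal usage sketch (illustrative only). The widget names, grid positions,
# and choice strings below are hypothetical examples of how an accompanying
# script might wire these classes together; they are not part of the template.
if __name__ == '__main__':
    app = wx.App(False)
    exampleFrame = Frame(None, 'Example GUI', (450, 300))
    examplePanel = Panel(exampleFrame)
    # a master choice widget and a text widget slaved to it
    mode = Widget(examplePanel, 'choice', 'mode', (0, 1),
                  choices=['Show name field', 'Hide name field'],
                  label='Mode:', labelPos=(0, 0))
    name = Widget(examplePanel, 'text', 'name', (1, 1),
                  label='Name:', labelPos=(1, 0))
    # hide the text widget (and its label) whenever the choice reads 'Hide name field'
    name.setMaster(mode, ['Hide name field'])
    # instantiate the wx objects, show the frame, and start the event loop
    exampleFrame.initObj()
    app.MainLoop()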
| gpl-3.0 | 2,376,403,117,219,915,300 | 42.541026 | 116 | 0.624227 | false | 4.275176 | false | false | false |
valtech-mooc/edx-platform | cms/djangoapps/contentstore/views/course.py | 7 | 75285 | """
Views related to operations on course objects
"""
from django.shortcuts import redirect
import json
import random
import string # pylint: disable=deprecated-module
import logging
from django.utils.translation import ugettext as _
import django.utils
from django.contrib.auth.decorators import login_required
from django.conf import settings
from django.views.decorators.http import require_http_methods, require_GET
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.http import HttpResponseBadRequest, HttpResponseNotFound, HttpResponse, Http404
from util.json_request import JsonResponse, JsonResponseBadRequest
from util.date_utils import get_default_time_display
from util.db import generate_int_id, MYSQL_MAX_INT
from edxmako.shortcuts import render_to_response
from xmodule.course_module import DEFAULT_START_DATE
from xmodule.error_module import ErrorDescriptor
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.courseware_index import CoursewareSearchIndexer, SearchIndexingError
from xmodule.contentstore.content import StaticContent
from xmodule.tabs import PDFTextbookTabs
from xmodule.partitions.partitions import UserPartition
from xmodule.modulestore import EdxJSONEncoder
from xmodule.modulestore.exceptions import ItemNotFoundError, DuplicateCourseError
from opaque_keys import InvalidKeyError
from opaque_keys.edx.locations import Location
from opaque_keys.edx.keys import CourseKey
from openedx.core.djangoapps.course_groups.partition_scheme import get_cohorted_user_partition
from django_future.csrf import ensure_csrf_cookie
from contentstore.course_info_model import get_course_updates, update_course_updates, delete_course_update
from contentstore.utils import (
add_instructor,
initialize_permissions,
get_lms_link_for_item,
add_extra_panel_tab,
remove_extra_panel_tab,
reverse_course_url,
reverse_library_url,
reverse_usage_url,
reverse_url,
remove_all_instructors,
)
from models.settings.course_details import CourseDetails, CourseSettingsEncoder
from models.settings.course_grading import CourseGradingModel
from models.settings.course_metadata import CourseMetadata
from util.json_request import expect_json
from util.string_utils import _has_non_ascii_characters
from student.auth import has_studio_write_access, has_studio_read_access
from .component import (
OPEN_ENDED_COMPONENT_TYPES,
NOTE_COMPONENT_TYPES,
ADVANCED_COMPONENT_POLICY_KEY,
SPLIT_TEST_COMPONENT_TYPE,
ADVANCED_COMPONENT_TYPES,
)
from contentstore.tasks import rerun_course
from contentstore.views.entrance_exam import (
create_entrance_exam,
update_entrance_exam,
delete_entrance_exam
)
from .library import LIBRARIES_ENABLED
from .item import create_xblock_info
from course_creators.views import get_course_creator_status, add_user_with_status_unrequested
from contentstore import utils
from student.roles import (
CourseInstructorRole, CourseStaffRole, CourseCreatorRole, GlobalStaff, UserBasedRole
)
from student import auth
from course_action_state.models import CourseRerunState, CourseRerunUIStateManager
from course_action_state.managers import CourseActionStateItemNotFoundError
from microsite_configuration import microsite
from xmodule.course_module import CourseFields
from xmodule.split_test_module import get_split_user_partitions
from student.auth import has_course_author_access
from util.milestones_helpers import (
set_prerequisite_courses,
is_valid_course_key
)
MINIMUM_GROUP_ID = 100
RANDOM_SCHEME = "random"
COHORT_SCHEME = "cohort"
# Note: the following content group configuration strings are not
# translated since they are not visible to users.
CONTENT_GROUP_CONFIGURATION_DESCRIPTION = 'The groups in this configuration can be mapped to cohort groups in the LMS.'
CONTENT_GROUP_CONFIGURATION_NAME = 'Content Group Configuration'
__all__ = ['course_info_handler', 'course_handler', 'course_listing',
'course_info_update_handler', 'course_search_index_handler',
'course_rerun_handler',
'settings_handler',
'grading_handler',
'advanced_settings_handler',
'course_notifications_handler',
'textbooks_list_handler', 'textbooks_detail_handler',
'group_configurations_list_handler', 'group_configurations_detail_handler']
log = logging.getLogger(__name__)
class AccessListFallback(Exception):
"""
An exception that is raised whenever we need to `fall back` to fetching *all* courses
available to a user, rather than using a shorter method (i.e. fetching by group)
"""
pass
def get_course_and_check_access(course_key, user, depth=0):
"""
Internal method used to calculate and return the locator and course module
for the view functions in this file.
"""
if not has_studio_read_access(user, course_key):
raise PermissionDenied()
course_module = modulestore().get_course(course_key, depth=depth)
return course_module
def reindex_course_and_check_access(course_key, user):
"""
Internal method used to restart indexing on a course.
"""
if not has_course_author_access(user, course_key):
raise PermissionDenied()
return CoursewareSearchIndexer.do_course_reindex(modulestore(), course_key)
@login_required
def course_notifications_handler(request, course_key_string=None, action_state_id=None):
"""
Handle incoming requests for notifications in a RESTful way.
    course_key_string and action_state_id must both be set; else an HttpResponseBadRequest is returned.
For each of these operations, the requesting user must have access to the course;
else a PermissionDenied error is returned.
GET
json: return json representing information about the notification (action, state, etc)
DELETE
        json: return json representing success or failure of dismissal/deletion of the notification
PUT
Raises a NotImplementedError.
POST
Raises a NotImplementedError.
"""
# ensure that we have a course and an action state
if not course_key_string or not action_state_id:
return HttpResponseBadRequest()
response_format = request.REQUEST.get('format', 'html')
course_key = CourseKey.from_string(course_key_string)
if response_format == 'json' or 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json'):
if not has_studio_write_access(request.user, course_key):
raise PermissionDenied()
if request.method == 'GET':
return _course_notifications_json_get(action_state_id)
elif request.method == 'DELETE':
# we assume any delete requests dismiss actions from the UI
return _dismiss_notification(request, action_state_id)
elif request.method == 'PUT':
raise NotImplementedError()
elif request.method == 'POST':
raise NotImplementedError()
else:
return HttpResponseBadRequest()
else:
return HttpResponseNotFound()
def _course_notifications_json_get(course_action_state_id):
"""
Return the action and the action state for the given id
"""
try:
action_state = CourseRerunState.objects.find_first(id=course_action_state_id)
except CourseActionStateItemNotFoundError:
return HttpResponseBadRequest()
action_state_info = {
'action': action_state.action,
'state': action_state.state,
'should_display': action_state.should_display
}
return JsonResponse(action_state_info)
def _dismiss_notification(request, course_action_state_id): # pylint: disable=unused-argument
"""
Update the display of the course notification
"""
try:
action_state = CourseRerunState.objects.find_first(id=course_action_state_id)
except CourseActionStateItemNotFoundError:
# Can't dismiss a notification that doesn't exist in the first place
return HttpResponseBadRequest()
if action_state.state == CourseRerunUIStateManager.State.FAILED:
# We remove all permissions for this course key at this time, since
# no further access is required to a course that failed to be created.
remove_all_instructors(action_state.course_key)
# The CourseRerunState is no longer needed by the UI; delete
action_state.delete()
return JsonResponse({'success': True})
# pylint: disable=unused-argument
@login_required
def course_handler(request, course_key_string=None):
"""
The restful handler for course specific requests.
It provides the course tree with the necessary information for identifying and labeling the parts. The root
will typically be a 'course' object but may not be especially as we support modules.
GET
html: return course listing page if not given a course id
html: return html page overview for the given course if given a course id
json: return json representing the course branch's index entry as well as dag w/ all of the children
replaced w/ json docs where each doc has {'_id': , 'display_name': , 'children': }
POST
json: create a course, return resulting json
descriptor (same as in GET course/...). Leaving off /branch/draft would imply create the course w/ default
branches. Cannot change the structure contents ('_id', 'display_name', 'children') but can change the
index entry.
PUT
json: update this course (index entry not xblock) such as repointing head, changing display name, org,
course, run. Return same json as above.
DELETE
json: delete this branch from this course (leaving off /branch/draft would imply delete the course)
"""
try:
response_format = request.REQUEST.get('format', 'html')
if response_format == 'json' or 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json'):
if request.method == 'GET':
course_key = CourseKey.from_string(course_key_string)
with modulestore().bulk_operations(course_key):
course_module = get_course_and_check_access(course_key, request.user, depth=None)
return JsonResponse(_course_outline_json(request, course_module))
elif request.method == 'POST': # not sure if this is only post. If one will have ids, it goes after access
return _create_or_rerun_course(request)
elif not has_studio_write_access(request.user, CourseKey.from_string(course_key_string)):
raise PermissionDenied()
elif request.method == 'PUT':
raise NotImplementedError()
elif request.method == 'DELETE':
raise NotImplementedError()
else:
return HttpResponseBadRequest()
elif request.method == 'GET': # assume html
if course_key_string is None:
return redirect(reverse("home"))
else:
return course_index(request, CourseKey.from_string(course_key_string))
else:
return HttpResponseNotFound()
except InvalidKeyError:
raise Http404
@login_required
@ensure_csrf_cookie
@require_http_methods(["GET"])
def course_rerun_handler(request, course_key_string):
"""
The restful handler for course reruns.
GET
html: return html page with form to rerun a course for the given course id
"""
# Only global staff (PMs) are able to rerun courses during the soft launch
if not GlobalStaff().has_user(request.user):
raise PermissionDenied()
course_key = CourseKey.from_string(course_key_string)
with modulestore().bulk_operations(course_key):
course_module = get_course_and_check_access(course_key, request.user, depth=3)
if request.method == 'GET':
return render_to_response('course-create-rerun.html', {
'source_course_key': course_key,
'display_name': course_module.display_name,
'user': request.user,
'course_creator_status': _get_course_creator_status(request.user),
'allow_unicode_course_id': settings.FEATURES.get('ALLOW_UNICODE_COURSE_ID', False)
})
@login_required
@ensure_csrf_cookie
@require_GET
def course_search_index_handler(request, course_key_string):
"""
The restful handler for course indexing.
GET
html: return status of indexing task
json: return status of indexing task
"""
# Only global staff (PMs) are able to index courses
if not GlobalStaff().has_user(request.user):
raise PermissionDenied()
course_key = CourseKey.from_string(course_key_string)
content_type = request.META.get('CONTENT_TYPE', None)
if content_type is None:
content_type = "application/json; charset=utf-8"
with modulestore().bulk_operations(course_key):
try:
reindex_course_and_check_access(course_key, request.user)
except SearchIndexingError as search_err:
return HttpResponse(json.dumps({
"user_message": search_err.error_list
}), content_type=content_type, status=500)
return HttpResponse(json.dumps({
"user_message": _("Course has been successfully reindexed.")
}), content_type=content_type, status=200)
def _course_outline_json(request, course_module):
"""
Returns a JSON representation of the course module and recursively all of its children.
"""
return create_xblock_info(
course_module,
include_child_info=True,
course_outline=True,
include_children_predicate=lambda xblock: not xblock.category == 'vertical'
)
def _accessible_courses_list(request):
"""
List all courses available to the logged in user by iterating through all the courses
"""
def course_filter(course):
"""
Filter out unusable and inaccessible courses
"""
if isinstance(course, ErrorDescriptor):
return False
# pylint: disable=fixme
# TODO remove this condition when templates purged from db
if course.location.course == 'templates':
return False
return has_studio_read_access(request.user, course.id)
courses = filter(course_filter, modulestore().get_courses())
in_process_course_actions = [
course for course in
CourseRerunState.objects.find_all(
exclude_args={'state': CourseRerunUIStateManager.State.SUCCEEDED}, should_display=True
)
if has_studio_read_access(request.user, course.course_key)
]
return courses, in_process_course_actions
def _accessible_courses_list_from_groups(request):
"""
List all courses available to the logged in user by reversing access group names
"""
courses_list = {}
in_process_course_actions = []
instructor_courses = UserBasedRole(request.user, CourseInstructorRole.ROLE).courses_with_role()
staff_courses = UserBasedRole(request.user, CourseStaffRole.ROLE).courses_with_role()
all_courses = instructor_courses | staff_courses
for course_access in all_courses:
course_key = course_access.course_id
if course_key is None:
# If the course_access does not have a course_id, it's an org-based role, so we fall back
raise AccessListFallback
if course_key not in courses_list:
# check for any course action state for this course
in_process_course_actions.extend(
CourseRerunState.objects.find_all(
exclude_args={'state': CourseRerunUIStateManager.State.SUCCEEDED},
should_display=True,
course_key=course_key,
)
)
# check for the course itself
try:
course = modulestore().get_course(course_key)
            except ItemNotFoundError:
                # If a user has access to a course that doesn't exist, skip it here
                # (otherwise `course` would be unbound or stale on this iteration)
                continue
if course is not None and not isinstance(course, ErrorDescriptor):
# ignore deleted or errored courses
courses_list[course_key] = course
return courses_list.values(), in_process_course_actions
def _accessible_libraries_list(user):
"""
List all libraries available to the logged in user by iterating through all libraries
"""
# No need to worry about ErrorDescriptors - split's get_libraries() never returns them.
return [lib for lib in modulestore().get_libraries() if has_studio_read_access(user, lib.location.library_key)]
@login_required
@ensure_csrf_cookie
def course_listing(request):
"""
List all courses available to the logged in user
"""
courses, in_process_course_actions = get_courses_accessible_to_user(request)
libraries = _accessible_libraries_list(request.user) if LIBRARIES_ENABLED else []
def format_in_process_course_view(uca):
"""
Return a dict of the data which the view requires for each unsucceeded course
"""
return {
'display_name': uca.display_name,
'course_key': unicode(uca.course_key),
'org': uca.course_key.org,
'number': uca.course_key.course,
'run': uca.course_key.run,
            'is_failed': uca.state == CourseRerunUIStateManager.State.FAILED,
            'is_in_progress': uca.state == CourseRerunUIStateManager.State.IN_PROGRESS,
'dismiss_link': reverse_course_url(
'course_notifications_handler',
uca.course_key,
kwargs={
'action_state_id': uca.id,
},
) if uca.state == CourseRerunUIStateManager.State.FAILED else ''
}
def format_library_for_view(library):
"""
Return a dict of the data which the view requires for each library
"""
return {
'display_name': library.display_name,
'library_key': unicode(library.location.library_key),
'url': reverse_library_url('library_handler', unicode(library.location.library_key)),
'org': library.display_org_with_default,
'number': library.display_number_with_default,
'can_edit': has_studio_write_access(request.user, library.location.library_key),
}
courses = _remove_in_process_courses(courses, in_process_course_actions)
in_process_course_actions = [format_in_process_course_view(uca) for uca in in_process_course_actions]
return render_to_response('index.html', {
'courses': courses,
'in_process_course_actions': in_process_course_actions,
'libraries_enabled': LIBRARIES_ENABLED,
'libraries': [format_library_for_view(lib) for lib in libraries],
'user': request.user,
'request_course_creator_url': reverse('contentstore.views.request_course_creator'),
'course_creator_status': _get_course_creator_status(request.user),
'rerun_creator_status': GlobalStaff().has_user(request.user),
'allow_unicode_course_id': settings.FEATURES.get('ALLOW_UNICODE_COURSE_ID', False),
'allow_course_reruns': settings.FEATURES.get('ALLOW_COURSE_RERUNS', True)
})
def _get_rerun_link_for_item(course_key):
""" Returns the rerun link for the given course key. """
return reverse_course_url('course_rerun_handler', course_key)
@login_required
@ensure_csrf_cookie
def course_index(request, course_key):
"""
Display an editable course overview.
org, course, name: Attributes of the Location for the item to edit
"""
# A depth of None implies the whole course. The course outline needs this in order to compute has_changes.
# A unit may not have a draft version, but one of its components could, and hence the unit itself has changes.
with modulestore().bulk_operations(course_key):
course_module = get_course_and_check_access(course_key, request.user, depth=None)
lms_link = get_lms_link_for_item(course_module.location)
reindex_link = None
if settings.FEATURES.get('ENABLE_COURSEWARE_INDEX', False):
reindex_link = "/course/{course_id}/search_reindex".format(course_id=unicode(course_key))
sections = course_module.get_children()
course_structure = _course_outline_json(request, course_module)
locator_to_show = request.REQUEST.get('show', None)
course_release_date = get_default_time_display(course_module.start) if course_module.start != DEFAULT_START_DATE else _("Unscheduled")
settings_url = reverse_course_url('settings_handler', course_key)
try:
current_action = CourseRerunState.objects.find_first(course_key=course_key, should_display=True)
except (ItemNotFoundError, CourseActionStateItemNotFoundError):
current_action = None
return render_to_response('course_outline.html', {
'context_course': course_module,
'lms_link': lms_link,
'sections': sections,
'course_structure': course_structure,
'initial_state': course_outline_initial_state(locator_to_show, course_structure) if locator_to_show else None,
'course_graders': json.dumps(
CourseGradingModel.fetch(course_key).graders
),
'rerun_notification_id': current_action.id if current_action else None,
'course_release_date': course_release_date,
'settings_url': settings_url,
'reindex_link': reindex_link,
'notification_dismiss_url': reverse_course_url(
'course_notifications_handler',
current_action.course_key,
kwargs={
'action_state_id': current_action.id,
},
) if current_action else None,
})
def get_courses_accessible_to_user(request):
"""
Try to get all courses by first reversing django groups and fallback to old method if it fails
Note: overhead of pymongo reads will increase if getting courses from django groups fails
"""
if GlobalStaff().has_user(request.user):
# user has global access so no need to get courses from django groups
courses, in_process_course_actions = _accessible_courses_list(request)
else:
try:
courses, in_process_course_actions = _accessible_courses_list_from_groups(request)
except AccessListFallback:
            # user has some old groups or there was an error getting courses from django groups
# so fallback to iterating through all courses
courses, in_process_course_actions = _accessible_courses_list(request)
return courses, in_process_course_actions
def _remove_in_process_courses(courses, in_process_course_actions):
"""
removes any in-process courses in courses list. in-process actually refers to courses
that are in the process of being generated for re-run
"""
def format_course_for_view(course):
"""
Return a dict of the data which the view requires for each course
"""
return {
'display_name': course.display_name,
'course_key': unicode(course.location.course_key),
'url': reverse_course_url('course_handler', course.id),
'lms_link': get_lms_link_for_item(course.location),
'rerun_link': _get_rerun_link_for_item(course.id),
'org': course.display_org_with_default,
'number': course.display_number_with_default,
'run': course.location.run
}
in_process_action_course_keys = [uca.course_key for uca in in_process_course_actions]
courses = [
format_course_for_view(c)
for c in courses
if not isinstance(c, ErrorDescriptor) and (c.id not in in_process_action_course_keys)
]
return courses
def course_outline_initial_state(locator_to_show, course_structure):
"""
Returns the desired initial state for the course outline view. If the 'show' request parameter
was provided, then the view's initial state will be to have the desired item fully expanded
and to scroll to see the new item.
"""
def find_xblock_info(xblock_info, locator):
"""
Finds the xblock info for the specified locator.
"""
if xblock_info['id'] == locator:
return xblock_info
children = xblock_info['child_info']['children'] if xblock_info.get('child_info', None) else None
if children:
for child_xblock_info in children:
result = find_xblock_info(child_xblock_info, locator)
if result:
return result
return None
def collect_all_locators(locators, xblock_info):
"""
Collect all the locators for an xblock and its children.
"""
locators.append(xblock_info['id'])
children = xblock_info['child_info']['children'] if xblock_info.get('child_info', None) else None
if children:
for child_xblock_info in children:
collect_all_locators(locators, child_xblock_info)
selected_xblock_info = find_xblock_info(course_structure, locator_to_show)
if not selected_xblock_info:
return None
expanded_locators = []
collect_all_locators(expanded_locators, selected_xblock_info)
return {
'locator_to_show': locator_to_show,
'expanded_locators': expanded_locators
}
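
# Illustrative return value of course_outline_initial_state (the usage-key
# strings below are hypothetical):
#   {'locator_to_show': 'block-v1:OrgX+CS101+2015_T1+type@vertical+block@unit1',
#    'expanded_locators': ['block-v1:OrgX+CS101+2015_T1+type@vertical+block@unit1',
#                          'block-v1:OrgX+CS101+2015_T1+type@html+block@intro']}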
@expect_json
def _create_or_rerun_course(request):
"""
To be called by requests that create a new destination course (i.e., create_new_course and rerun_course)
Returns the destination course_key and overriding fields for the new course.
Raises DuplicateCourseError and InvalidKeyError
"""
if not auth.has_access(request.user, CourseCreatorRole()):
raise PermissionDenied()
try:
org = request.json.get('org')
course = request.json.get('number', request.json.get('course'))
display_name = request.json.get('display_name')
# force the start date for reruns and allow us to override start via the client
start = request.json.get('start', CourseFields.start.default)
run = request.json.get('run')
# allow/disable unicode characters in course_id according to settings
if not settings.FEATURES.get('ALLOW_UNICODE_COURSE_ID'):
if _has_non_ascii_characters(org) or _has_non_ascii_characters(course) or _has_non_ascii_characters(run):
return JsonResponse(
{'error': _('Special characters not allowed in organization, course number, and course run.')},
status=400
)
fields = {'start': start}
if display_name is not None:
fields['display_name'] = display_name
if 'source_course_key' in request.json:
return _rerun_course(request, org, course, run, fields)
else:
return _create_new_course(request, org, course, run, fields)
except DuplicateCourseError:
return JsonResponse({
'ErrMsg': _(
'There is already a course defined with the same '
'organization and course number. Please '
'change either organization or course number to be unique.'
),
'OrgErrMsg': _(
'Please change either the organization or '
'course number so that it is unique.'),
'CourseErrMsg': _(
'Please change either the organization or '
'course number so that it is unique.'),
})
except InvalidKeyError as error:
return JsonResponse({
"ErrMsg": _("Unable to create course '{name}'.\n\n{err}").format(name=display_name, err=error.message)}
)
def _create_new_course(request, org, number, run, fields):
"""
Create a new course.
Returns the URL for the course overview page.
Raises DuplicateCourseError if the course already exists
"""
store_for_new_course = modulestore().default_modulestore.get_modulestore_type()
new_course = create_new_course_in_store(store_for_new_course, request.user, org, number, run, fields)
return JsonResponse({
'url': reverse_course_url('course_handler', new_course.id),
'course_key': unicode(new_course.id),
})
def create_new_course_in_store(store, user, org, number, run, fields):
"""
Create course in store w/ handling instructor enrollment, permissions, and defaulting the wiki slug.
Separated out b/c command line course creation uses this as well as the web interface.
"""
# Set a unique wiki_slug for newly created courses. To maintain active wiki_slugs for
# existing xml courses this cannot be changed in CourseDescriptor.
# # TODO get rid of defining wiki slug in this org/course/run specific way and reconcile
# w/ xmodule.course_module.CourseDescriptor.__init__
wiki_slug = u"{0}.{1}.{2}".format(org, number, run)
definition_data = {'wiki_slug': wiki_slug}
fields.update(definition_data)
with modulestore().default_store(store):
# Creating the course raises DuplicateCourseError if an existing course with this org/name is found
new_course = modulestore().create_course(
org,
number,
run,
user.id,
fields=fields,
)
# Make sure user has instructor and staff access to the new course
add_instructor(new_course.id, user, user)
# Initialize permissions for user in the new course
initialize_permissions(new_course.id, user)
return new_course
def _rerun_course(request, org, number, run, fields):
"""
Reruns an existing course.
Returns the URL for the course listing page.
"""
source_course_key = CourseKey.from_string(request.json.get('source_course_key'))
# verify user has access to the original course
if not has_studio_write_access(request.user, source_course_key):
raise PermissionDenied()
# create destination course key
store = modulestore()
with store.default_store('split'):
destination_course_key = store.make_course_key(org, number, run)
# verify org course and run don't already exist
if store.has_course(destination_course_key, ignore_case=True):
raise DuplicateCourseError(source_course_key, destination_course_key)
# Make sure user has instructor and staff access to the destination course
# so the user can see the updated status for that course
add_instructor(destination_course_key, request.user, request.user)
# Mark the action as initiated
CourseRerunState.objects.initiated(source_course_key, destination_course_key, request.user, fields['display_name'])
# Rerun the course as a new celery task
json_fields = json.dumps(fields, cls=EdxJSONEncoder)
rerun_course.delay(unicode(source_course_key), unicode(destination_course_key), request.user.id, json_fields)
# Return course listing page
return JsonResponse({
'url': reverse_url('course_handler'),
'destination_course_key': unicode(destination_course_key)
})
# pylint: disable=unused-argument
@login_required
@ensure_csrf_cookie
@require_http_methods(["GET"])
def course_info_handler(request, course_key_string):
"""
GET
html: return html for editing the course info handouts and updates.
"""
course_key = CourseKey.from_string(course_key_string)
with modulestore().bulk_operations(course_key):
course_module = get_course_and_check_access(course_key, request.user)
if 'text/html' in request.META.get('HTTP_ACCEPT', 'text/html'):
return render_to_response(
'course_info.html',
{
'context_course': course_module,
'updates_url': reverse_course_url('course_info_update_handler', course_key),
'handouts_locator': course_key.make_usage_key('course_info', 'handouts'),
'base_asset_url': StaticContent.get_base_url_path_for_course_assets(course_module.id)
}
)
else:
return HttpResponseBadRequest("Only supports html requests")
# pylint: disable=unused-argument
@login_required
@ensure_csrf_cookie
@require_http_methods(("GET", "POST", "PUT", "DELETE"))
@expect_json
def course_info_update_handler(request, course_key_string, provided_id=None):
"""
restful CRUD operations on course_info updates.
provided_id should be none if it's new (create) and index otherwise.
GET
json: return the course info update models
POST
json: create an update
PUT or DELETE
json: change an existing update
"""
if 'application/json' not in request.META.get('HTTP_ACCEPT', 'application/json'):
return HttpResponseBadRequest("Only supports json requests")
course_key = CourseKey.from_string(course_key_string)
usage_key = course_key.make_usage_key('course_info', 'updates')
if provided_id == '':
provided_id = None
# check that logged in user has permissions to this item (GET shouldn't require this level?)
if not has_studio_write_access(request.user, usage_key.course_key):
raise PermissionDenied()
if request.method == 'GET':
course_updates = get_course_updates(usage_key, provided_id, request.user.id)
if isinstance(course_updates, dict) and course_updates.get('error'):
return JsonResponse(course_updates, course_updates.get('status', 400))
else:
return JsonResponse(course_updates)
elif request.method == 'DELETE':
try:
return JsonResponse(delete_course_update(usage_key, request.json, provided_id, request.user))
        except Exception:
return HttpResponseBadRequest(
"Failed to delete",
content_type="text/plain"
)
# can be either and sometimes django is rewriting one to the other:
elif request.method in ('POST', 'PUT'):
try:
return JsonResponse(update_course_updates(usage_key, request.json, provided_id, request.user))
        except Exception:
return HttpResponseBadRequest(
"Failed to save",
content_type="text/plain"
)
@login_required
@ensure_csrf_cookie
@require_http_methods(("GET", "PUT", "POST"))
@expect_json
def settings_handler(request, course_key_string):
"""
Course settings for dates and about pages
GET
html: get the page
json: get the CourseDetails model
PUT
json: update the Course and About xblocks through the CourseDetails model
"""
course_key = CourseKey.from_string(course_key_string)
prerequisite_course_enabled = settings.FEATURES.get('ENABLE_PREREQUISITE_COURSES', False)
with modulestore().bulk_operations(course_key):
course_module = get_course_and_check_access(course_key, request.user)
if 'text/html' in request.META.get('HTTP_ACCEPT', '') and request.method == 'GET':
upload_asset_url = reverse_course_url('assets_handler', course_key)
# see if the ORG of this course can be attributed to a 'Microsite'. In that case, the
# course about page should be editable in Studio
about_page_editable = not microsite.get_value_for_org(
course_module.location.org,
'ENABLE_MKTG_SITE',
settings.FEATURES.get('ENABLE_MKTG_SITE', False)
)
short_description_editable = settings.FEATURES.get('EDITABLE_SHORT_DESCRIPTION', True)
settings_context = {
'context_course': course_module,
'course_locator': course_key,
'lms_link_for_about_page': utils.get_lms_link_for_about_page(course_key),
'course_image_url': utils.course_image_url(course_module),
'details_url': reverse_course_url('settings_handler', course_key),
'about_page_editable': about_page_editable,
'short_description_editable': short_description_editable,
'upload_asset_url': upload_asset_url,
'course_handler_url': reverse_course_url('course_handler', course_key),
}
if prerequisite_course_enabled:
courses, in_process_course_actions = get_courses_accessible_to_user(request)
# exclude current course from the list of available courses
courses = [course for course in courses if course.id != course_key]
if courses:
courses = _remove_in_process_courses(courses, in_process_course_actions)
settings_context.update({'possible_pre_requisite_courses': courses})
return render_to_response('settings.html', settings_context)
elif 'application/json' in request.META.get('HTTP_ACCEPT', ''):
if request.method == 'GET':
course_details = CourseDetails.fetch(course_key)
return JsonResponse(
course_details,
# encoder serializes dates, old locations, and instances
encoder=CourseSettingsEncoder
)
# For every other possible method type submitted by the caller...
else:
# if pre-requisite course feature is enabled set pre-requisite course
if prerequisite_course_enabled:
prerequisite_course_keys = request.json.get('pre_requisite_courses', [])
if prerequisite_course_keys:
if not all(is_valid_course_key(course_key) for course_key in prerequisite_course_keys):
return JsonResponseBadRequest({"error": _("Invalid prerequisite course key")})
set_prerequisite_courses(course_key, prerequisite_course_keys)
# If the entrance exams feature has been enabled, we'll need to check for some
# feature-specific settings and handle them accordingly
# We have to be careful that we're only executing the following logic if we actually
# need to create or delete an entrance exam from the specified course
if settings.FEATURES.get('ENTRANCE_EXAMS', False):
course_entrance_exam_present = course_module.entrance_exam_enabled
entrance_exam_enabled = request.json.get('entrance_exam_enabled', '') == 'true'
ee_min_score_pct = request.json.get('entrance_exam_minimum_score_pct', None)
# If the entrance exam box on the settings screen has been checked...
if entrance_exam_enabled:
# Load the default minimum score threshold from settings, then try to override it
entrance_exam_minimum_score_pct = float(settings.ENTRANCE_EXAM_MIN_SCORE_PCT)
if ee_min_score_pct:
entrance_exam_minimum_score_pct = float(ee_min_score_pct)
if entrance_exam_minimum_score_pct.is_integer():
entrance_exam_minimum_score_pct = entrance_exam_minimum_score_pct / 100
entrance_exam_minimum_score_pct = unicode(entrance_exam_minimum_score_pct)
# If there's already an entrance exam defined, we'll update the existing one
if course_entrance_exam_present:
exam_data = {
'entrance_exam_minimum_score_pct': entrance_exam_minimum_score_pct
}
update_entrance_exam(request, course_key, exam_data)
# If there's no entrance exam defined, we'll create a new one
else:
create_entrance_exam(request, course_key, entrance_exam_minimum_score_pct)
# If the entrance exam box on the settings screen has been unchecked,
# and the course has an entrance exam attached...
elif not entrance_exam_enabled and course_entrance_exam_present:
delete_entrance_exam(request, course_key)
# Perform the normal update workflow for the CourseDetails model
return JsonResponse(
CourseDetails.update_from_json(course_key, request.json, request.user),
encoder=CourseSettingsEncoder
)
@login_required
@ensure_csrf_cookie
@require_http_methods(("GET", "POST", "PUT", "DELETE"))
@expect_json
def grading_handler(request, course_key_string, grader_index=None):
"""
Course Grading policy configuration
GET
html: get the page
json no grader_index: get the CourseGrading model (graceperiod, cutoffs, and graders)
json w/ grader_index: get the specific grader
PUT
json no grader_index: update the Course through the CourseGrading model
json w/ grader_index: create or update the specific grader (create if index out of range)
"""
course_key = CourseKey.from_string(course_key_string)
with modulestore().bulk_operations(course_key):
course_module = get_course_and_check_access(course_key, request.user)
if 'text/html' in request.META.get('HTTP_ACCEPT', '') and request.method == 'GET':
course_details = CourseGradingModel.fetch(course_key)
return render_to_response('settings_graders.html', {
'context_course': course_module,
'course_locator': course_key,
'course_details': json.dumps(course_details, cls=CourseSettingsEncoder),
'grading_url': reverse_course_url('grading_handler', course_key),
})
elif 'application/json' in request.META.get('HTTP_ACCEPT', ''):
if request.method == 'GET':
if grader_index is None:
return JsonResponse(
CourseGradingModel.fetch(course_key),
# encoder serializes dates, old locations, and instances
encoder=CourseSettingsEncoder
)
else:
return JsonResponse(CourseGradingModel.fetch_grader(course_key, grader_index))
elif request.method in ('POST', 'PUT'): # post or put, doesn't matter.
# None implies update the whole model (cutoffs, graceperiod, and graders) not a specific grader
if grader_index is None:
return JsonResponse(
CourseGradingModel.update_from_json(course_key, request.json, request.user),
encoder=CourseSettingsEncoder
)
else:
return JsonResponse(
CourseGradingModel.update_grader_from_json(course_key, request.json, request.user)
)
elif request.method == "DELETE" and grader_index is not None:
CourseGradingModel.delete_grader(course_key, grader_index, request.user)
return JsonResponse()
# pylint: disable=invalid-name
def _add_tab(request, tab_type, course_module):
"""
Adds tab to the course.
"""
# Add tab to the course if needed
changed, new_tabs = add_extra_panel_tab(tab_type, course_module)
# If a tab has been added to the course, then send the
# metadata along to CourseMetadata.update_from_json
if changed:
course_module.tabs = new_tabs
request.json.update({'tabs': {'value': new_tabs}})
# Indicate that tabs should not be filtered out of
# the metadata
return True
return False
# pylint: disable=invalid-name
def _remove_tab(request, tab_type, course_module):
"""
Removes the tab from the course.
"""
changed, new_tabs = remove_extra_panel_tab(tab_type, course_module)
if changed:
course_module.tabs = new_tabs
request.json.update({'tabs': {'value': new_tabs}})
return True
return False
def is_advanced_component_present(request, advanced_components):
"""
Return True when one of `advanced_components` is present in the request.
raises TypeError
when request.ADVANCED_COMPONENT_POLICY_KEY is malformed (not iterable)
"""
if ADVANCED_COMPONENT_POLICY_KEY not in request.json:
return False
new_advanced_component_list = request.json[ADVANCED_COMPONENT_POLICY_KEY]['value']
    for ac_type in advanced_components:
        if ac_type in new_advanced_component_list and ac_type in ADVANCED_COMPONENT_TYPES:
            return True
    return False
def is_field_value_true(request, field_list):
"""
Return True when one of field values is set to True by request
"""
return any([request.json.get(field, {}).get('value') for field in field_list])
# pylint: disable=invalid-name
def _modify_tabs_to_components(request, course_module):
"""
Automatically adds/removes tabs if user indicated that they want
respective modules enabled in the course
Return True when tab configuration has been modified.
"""
tab_component_map = {
# 'tab_type': (check_function, list_of_checked_components_or_values),
# open ended tab by combinedopendended or peergrading module
'open_ended': (is_advanced_component_present, OPEN_ENDED_COMPONENT_TYPES),
# notes tab
'notes': (is_advanced_component_present, NOTE_COMPONENT_TYPES),
# student notes tab
'edxnotes': (is_field_value_true, ['edxnotes'])
}
tabs_changed = False
for tab_type in tab_component_map.keys():
check, component_types = tab_component_map[tab_type]
try:
tab_enabled = check(request, component_types)
except TypeError:
# user has failed to put iterable value into advanced component list.
# return immediately and let validation handle.
return
if tab_enabled:
# check passed, some of this component_types are present, adding tab
if _add_tab(request, tab_type, course_module):
# tab indeed was added, the change needs to propagate
tabs_changed = True
else:
# the tab should not be present (anymore)
if _remove_tab(request, tab_type, course_module):
# tab indeed was removed, the change needs to propagate
tabs_changed = True
return tabs_changed
@login_required
@ensure_csrf_cookie
@require_http_methods(("GET", "POST", "PUT"))
@expect_json
def advanced_settings_handler(request, course_key_string):
"""
Course settings configuration
GET
html: get the page
json: get the model
PUT, POST
json: update the Course's settings. The payload is a json rep of the
metadata dicts.
"""
course_key = CourseKey.from_string(course_key_string)
with modulestore().bulk_operations(course_key):
course_module = get_course_and_check_access(course_key, request.user)
if 'text/html' in request.META.get('HTTP_ACCEPT', '') and request.method == 'GET':
return render_to_response('settings_advanced.html', {
'context_course': course_module,
'advanced_dict': json.dumps(CourseMetadata.fetch(course_module)),
'advanced_settings_url': reverse_course_url('advanced_settings_handler', course_key)
})
elif 'application/json' in request.META.get('HTTP_ACCEPT', ''):
if request.method == 'GET':
return JsonResponse(CourseMetadata.fetch(course_module))
else:
try:
# do not process tabs unless they were modified according to course metadata
filter_tabs = not _modify_tabs_to_components(request, course_module)
# validate data formats and update
is_valid, errors, updated_data = CourseMetadata.validate_and_update_from_json(
course_module,
request.json,
filter_tabs=filter_tabs,
user=request.user,
)
if is_valid:
return JsonResponse(updated_data)
else:
return JsonResponseBadRequest(errors)
# Handle all errors that validation doesn't catch
except (TypeError, ValueError) as err:
return HttpResponseBadRequest(
django.utils.html.escape(err.message),
content_type="text/plain"
)
class TextbookValidationError(Exception):
"An error thrown when a textbook input is invalid"
pass
def validate_textbooks_json(text):
"""
    Validate the given text as representing a list of PDF textbooks
"""
try:
textbooks = json.loads(text)
except ValueError:
raise TextbookValidationError("invalid JSON")
if not isinstance(textbooks, (list, tuple)):
raise TextbookValidationError("must be JSON list")
for textbook in textbooks:
validate_textbook_json(textbook)
# check specified IDs for uniqueness
all_ids = [textbook["id"] for textbook in textbooks if "id" in textbook]
unique_ids = set(all_ids)
if len(all_ids) > len(unique_ids):
raise TextbookValidationError("IDs must be unique")
return textbooks
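# Illustrative example (hypothetical data): a payload accepted by
# validate_textbooks_json(). Each entry needs a "tab_title", and any "id"
# supplied must start with a digit and be unique across the list:
#   '[{"tab_title": "Algebra", "id": "1"}, {"tab_title": "Geometry"}]'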
def validate_textbook_json(textbook):
"""
    Validate the given textbook (dict or JSON string) as representing a single PDF textbook
"""
if isinstance(textbook, basestring):
try:
textbook = json.loads(textbook)
except ValueError:
raise TextbookValidationError("invalid JSON")
if not isinstance(textbook, dict):
raise TextbookValidationError("must be JSON object")
if not textbook.get("tab_title"):
raise TextbookValidationError("must have tab_title")
tid = unicode(textbook.get("id", ""))
if tid and not tid[0].isdigit():
raise TextbookValidationError("textbook ID must start with a digit")
return textbook
def assign_textbook_id(textbook, used_ids=()):
"""
Return an ID that can be assigned to a textbook
and doesn't match the used_ids
"""
tid = Location.clean(textbook["tab_title"])
if not tid[0].isdigit():
# stick a random digit in front
tid = random.choice(string.digits) + tid
while tid in used_ids:
        # append a random lowercase ASCII letter until the ID is unique
tid = tid + random.choice(string.ascii_lowercase)
return tid
@require_http_methods(("GET", "POST", "PUT"))
@login_required
@ensure_csrf_cookie
def textbooks_list_handler(request, course_key_string):
"""
A RESTful handler for textbook collections.
GET
html: return textbook list page (Backbone application)
json: return JSON representation of all textbooks in this course
POST
json: create a new textbook for this course
PUT
json: overwrite all textbooks in the course with the given list
"""
course_key = CourseKey.from_string(course_key_string)
store = modulestore()
with store.bulk_operations(course_key):
course = get_course_and_check_access(course_key, request.user)
if "application/json" not in request.META.get('HTTP_ACCEPT', 'text/html'):
# return HTML page
upload_asset_url = reverse_course_url('assets_handler', course_key)
textbook_url = reverse_course_url('textbooks_list_handler', course_key)
return render_to_response('textbooks.html', {
'context_course': course,
'textbooks': course.pdf_textbooks,
'upload_asset_url': upload_asset_url,
'textbook_url': textbook_url,
})
# from here on down, we know the client has requested JSON
if request.method == 'GET':
return JsonResponse(course.pdf_textbooks)
elif request.method == 'PUT':
try:
textbooks = validate_textbooks_json(request.body)
except TextbookValidationError as err:
return JsonResponse({"error": err.message}, status=400)
tids = set(t["id"] for t in textbooks if "id" in t)
for textbook in textbooks:
if "id" not in textbook:
tid = assign_textbook_id(textbook, tids)
textbook["id"] = tid
tids.add(tid)
if not any(tab['type'] == PDFTextbookTabs.type for tab in course.tabs):
course.tabs.append(PDFTextbookTabs())
course.pdf_textbooks = textbooks
store.update_item(course, request.user.id)
return JsonResponse(course.pdf_textbooks)
elif request.method == 'POST':
# create a new textbook for the course
try:
textbook = validate_textbook_json(request.body)
except TextbookValidationError as err:
return JsonResponse({"error": err.message}, status=400)
if not textbook.get("id"):
tids = set(t["id"] for t in course.pdf_textbooks if "id" in t)
textbook["id"] = assign_textbook_id(textbook, tids)
existing = course.pdf_textbooks
existing.append(textbook)
course.pdf_textbooks = existing
if not any(tab['type'] == PDFTextbookTabs.type for tab in course.tabs):
course.tabs.append(PDFTextbookTabs())
store.update_item(course, request.user.id)
resp = JsonResponse(textbook, status=201)
resp["Location"] = reverse_course_url(
'textbooks_detail_handler',
course.id,
kwargs={'textbook_id': textbook["id"]}
)
return resp
@login_required
@ensure_csrf_cookie
@require_http_methods(("GET", "POST", "PUT", "DELETE"))
def textbooks_detail_handler(request, course_key_string, textbook_id):
"""
JSON API endpoint for manipulating a textbook via its internal ID.
Used by the Backbone application.
GET
json: return JSON representation of textbook
POST or PUT
json: update textbook based on provided information
DELETE
json: remove textbook
"""
course_key = CourseKey.from_string(course_key_string)
store = modulestore()
with store.bulk_operations(course_key):
course_module = get_course_and_check_access(course_key, request.user)
matching_id = [tb for tb in course_module.pdf_textbooks
if unicode(tb.get("id")) == unicode(textbook_id)]
if matching_id:
textbook = matching_id[0]
else:
textbook = None
if request.method == 'GET':
if not textbook:
return JsonResponse(status=404)
return JsonResponse(textbook)
        elif request.method in ('POST', 'PUT'):
            # Django sometimes rewrites one of POST/PUT to the other, so accept either.
try:
new_textbook = validate_textbook_json(request.body)
except TextbookValidationError as err:
return JsonResponse({"error": err.message}, status=400)
new_textbook["id"] = textbook_id
if textbook:
i = course_module.pdf_textbooks.index(textbook)
new_textbooks = course_module.pdf_textbooks[0:i]
new_textbooks.append(new_textbook)
new_textbooks.extend(course_module.pdf_textbooks[i + 1:])
course_module.pdf_textbooks = new_textbooks
else:
course_module.pdf_textbooks.append(new_textbook)
store.update_item(course_module, request.user.id)
return JsonResponse(new_textbook, status=201)
elif request.method == 'DELETE':
if not textbook:
return JsonResponse(status=404)
i = course_module.pdf_textbooks.index(textbook)
remaining_textbooks = course_module.pdf_textbooks[0:i]
remaining_textbooks.extend(course_module.pdf_textbooks[i + 1:])
course_module.pdf_textbooks = remaining_textbooks
store.update_item(course_module, request.user.id)
return JsonResponse()
class GroupConfigurationsValidationError(Exception):
"""
An error thrown when a group configurations input is invalid.
"""
pass
class GroupConfiguration(object):
"""
Prepare Group Configuration for the course.
"""
def __init__(self, json_string, course, configuration_id=None):
"""
        Receive a group configuration as JSON (`json_string`), deserialize it,
        and validate it.
"""
self.configuration = GroupConfiguration.parse(json_string)
self.course = course
self.assign_id(configuration_id)
self.assign_group_ids()
self.validate()
@staticmethod
def parse(json_string):
"""
Deserialize given json that represents group configuration.
"""
try:
configuration = json.loads(json_string)
except ValueError:
raise GroupConfigurationsValidationError(_("invalid JSON"))
configuration["version"] = UserPartition.VERSION
return configuration
def validate(self):
"""
Validate group configuration representation.
"""
if not self.configuration.get("name"):
raise GroupConfigurationsValidationError(_("must have name of the configuration"))
if len(self.configuration.get('groups', [])) < 1:
raise GroupConfigurationsValidationError(_("must have at least one group"))
def assign_id(self, configuration_id=None):
"""
Assign id for the json representation of group configuration.
"""
if configuration_id:
self.configuration['id'] = int(configuration_id)
else:
self.configuration['id'] = generate_int_id(
MINIMUM_GROUP_ID, MYSQL_MAX_INT, GroupConfiguration.get_used_ids(self.course)
)
def assign_group_ids(self):
"""
Assign ids for the group_configuration's groups.
"""
used_ids = [g.id for p in self.course.user_partitions for g in p.groups]
# Assign ids to every group in configuration.
for group in self.configuration.get('groups', []):
if group.get('id') is None:
group["id"] = generate_int_id(MINIMUM_GROUP_ID, MYSQL_MAX_INT, used_ids)
used_ids.append(group["id"])
@staticmethod
def get_used_ids(course):
"""
        Return the set of IDs that are already in use.
"""
return set([p.id for p in course.user_partitions])
def get_user_partition(self):
"""
Get user partition for saving in course.
"""
return UserPartition.from_json(self.configuration)
@staticmethod
def _get_usage_info(course, unit, item, usage_info, group_id, scheme_name=None):
"""
Get usage info for unit/module.
"""
unit_url = reverse_usage_url(
'container_handler',
course.location.course_key.make_usage_key(unit.location.block_type, unit.location.name)
)
usage_dict = {'label': u"{} / {}".format(unit.display_name, item.display_name), 'url': unit_url}
if scheme_name == RANDOM_SCHEME:
validation_summary = item.general_validation_message()
usage_dict.update({'validation': validation_summary.to_json() if validation_summary else None})
usage_info[group_id].append(usage_dict)
return usage_info
@staticmethod
def get_content_experiment_usage_info(store, course):
"""
Get usage information for all Group Configurations currently referenced by a split_test instance.
"""
split_tests = store.get_items(course.id, qualifiers={'category': 'split_test'})
return GroupConfiguration._get_content_experiment_usage_info(store, course, split_tests)
@staticmethod
def get_split_test_partitions_with_usage(store, course):
"""
Returns json split_test group configurations updated with usage information.
"""
usage_info = GroupConfiguration.get_content_experiment_usage_info(store, course)
configurations = []
for partition in get_split_user_partitions(course.user_partitions):
configuration = partition.to_json()
configuration['usage'] = usage_info.get(partition.id, [])
configurations.append(configuration)
return configurations
@staticmethod
def _get_content_experiment_usage_info(store, course, split_tests):
"""
        Returns all unit names, their URLs and validation messages.
Returns:
{'user_partition_id':
[
{
'label': 'Unit 1 / Experiment 1',
'url': 'url_to_unit_1',
'validation': {'message': 'a validation message', 'type': 'warning'}
},
{
'label': 'Unit 2 / Experiment 2',
'url': 'url_to_unit_2',
'validation': {'message': 'another validation message', 'type': 'error'}
}
],
}
"""
usage_info = {}
for split_test in split_tests:
if split_test.user_partition_id not in usage_info:
usage_info[split_test.user_partition_id] = []
unit = split_test.get_parent()
if not unit:
log.warning("Unable to find parent for split_test %s", split_test.location)
continue
usage_info = GroupConfiguration._get_usage_info(
course=course,
unit=unit,
item=split_test,
usage_info=usage_info,
group_id=split_test.user_partition_id,
scheme_name=RANDOM_SCHEME
)
return usage_info
@staticmethod
def get_content_groups_usage_info(store, course):
"""
Get usage information for content groups.
"""
items = store.get_items(course.id, settings={'group_access': {'$exists': True}})
return GroupConfiguration._get_content_groups_usage_info(course, items)
@staticmethod
def _get_content_groups_usage_info(course, items):
"""
        Returns all unit names and their URLs.
Returns:
{'group_id':
[
{
'label': 'Unit 1 / Problem 1',
'url': 'url_to_unit_1'
},
{
'label': 'Unit 2 / Problem 2',
'url': 'url_to_unit_2'
}
],
}
"""
usage_info = {}
for item in items:
if hasattr(item, 'group_access') and item.group_access:
(__, group_ids), = item.group_access.items()
for group_id in group_ids:
if group_id not in usage_info:
usage_info[group_id] = []
unit = item.get_parent()
if not unit:
log.warning("Unable to find parent for component %s", item.location)
continue
usage_info = GroupConfiguration._get_usage_info(
course,
unit=unit,
item=item,
usage_info=usage_info,
group_id=group_id
)
return usage_info
@staticmethod
def update_usage_info(store, course, configuration):
"""
Update usage information for particular Group Configuration.
Returns json of particular group configuration updated with usage information.
"""
configuration_json = None
# Get all Experiments that use particular Group Configuration in course.
if configuration.scheme.name == RANDOM_SCHEME:
split_tests = store.get_items(
course.id,
category='split_test',
content={'user_partition_id': configuration.id}
)
configuration_json = configuration.to_json()
usage_information = GroupConfiguration._get_content_experiment_usage_info(store, course, split_tests)
configuration_json['usage'] = usage_information.get(configuration.id, [])
elif configuration.scheme.name == COHORT_SCHEME:
            # The scheme is "cohort", so report content group usage instead.
configuration_json = GroupConfiguration.update_content_group_usage_info(store, course, configuration)
return configuration_json
@staticmethod
def update_content_group_usage_info(store, course, configuration):
"""
Update usage information for particular Content Group Configuration.
Returns json of particular content group configuration updated with usage information.
"""
usage_info = GroupConfiguration.get_content_groups_usage_info(store, course)
content_group_configuration = configuration.to_json()
for group in content_group_configuration['groups']:
group['usage'] = usage_info.get(group['id'], [])
return content_group_configuration
@staticmethod
def get_or_create_content_group(store, course):
"""
Returns the first user partition from the course which uses the
CohortPartitionScheme, or generates one if no such partition is
found. The created partition is not saved to the course until
the client explicitly creates a group within the partition and
POSTs back.
"""
content_group_configuration = get_cohorted_user_partition(course.id)
if content_group_configuration is None:
content_group_configuration = UserPartition(
id=generate_int_id(MINIMUM_GROUP_ID, MYSQL_MAX_INT, GroupConfiguration.get_used_ids(course)),
name=CONTENT_GROUP_CONFIGURATION_NAME,
description=CONTENT_GROUP_CONFIGURATION_DESCRIPTION,
groups=[],
scheme_id=COHORT_SCHEME
)
return content_group_configuration.to_json()
content_group_configuration = GroupConfiguration.update_content_group_usage_info(
store,
course,
content_group_configuration
)
return content_group_configuration
def remove_content_or_experiment_group(request, store, course, configuration, group_configuration_id, group_id=None):
"""
Remove content group or experiment group configuration only if it's not in use.
"""
configuration_index = course.user_partitions.index(configuration)
if configuration.scheme.name == RANDOM_SCHEME:
usages = GroupConfiguration.get_content_experiment_usage_info(store, course)
used = int(group_configuration_id) in usages
if used:
return JsonResponse(
{"error": _("This group configuration is in use and cannot be deleted.")},
status=400
)
course.user_partitions.pop(configuration_index)
elif configuration.scheme.name == COHORT_SCHEME:
if not group_id:
return JsonResponse(status=404)
group_id = int(group_id)
usages = GroupConfiguration.get_content_groups_usage_info(store, course)
used = group_id in usages
if used:
return JsonResponse(
{"error": _("This content group is in use and cannot be deleted.")},
status=400
)
matching_groups = [group for group in configuration.groups if group.id == group_id]
if matching_groups:
group_index = configuration.groups.index(matching_groups[0])
configuration.groups.pop(group_index)
else:
return JsonResponse(status=404)
course.user_partitions[configuration_index] = configuration
store.update_item(course, request.user.id)
return JsonResponse(status=204)
@require_http_methods(("GET", "POST"))
@login_required
@ensure_csrf_cookie
def group_configurations_list_handler(request, course_key_string):
"""
A RESTful handler for Group Configurations
GET
html: return Group Configurations list page (Backbone application)
POST
json: create new group configuration
"""
course_key = CourseKey.from_string(course_key_string)
store = modulestore()
with store.bulk_operations(course_key):
course = get_course_and_check_access(course_key, request.user)
if 'text/html' in request.META.get('HTTP_ACCEPT', 'text/html'):
group_configuration_url = reverse_course_url('group_configurations_list_handler', course_key)
course_outline_url = reverse_course_url('course_handler', course_key)
should_show_experiment_groups = are_content_experiments_enabled(course)
if should_show_experiment_groups:
experiment_group_configurations = GroupConfiguration.get_split_test_partitions_with_usage(store, course)
else:
experiment_group_configurations = None
content_group_configuration = GroupConfiguration.get_or_create_content_group(store, course)
return render_to_response('group_configurations.html', {
'context_course': course,
'group_configuration_url': group_configuration_url,
'course_outline_url': course_outline_url,
'experiment_group_configurations': experiment_group_configurations,
'should_show_experiment_groups': should_show_experiment_groups,
'content_group_configuration': content_group_configuration
})
elif "application/json" in request.META.get('HTTP_ACCEPT'):
if request.method == 'POST':
# create a new group configuration for the course
try:
new_configuration = GroupConfiguration(request.body, course).get_user_partition()
except GroupConfigurationsValidationError as err:
return JsonResponse({"error": err.message}, status=400)
course.user_partitions.append(new_configuration)
response = JsonResponse(new_configuration.to_json(), status=201)
response["Location"] = reverse_course_url(
'group_configurations_detail_handler',
course.id,
kwargs={'group_configuration_id': new_configuration.id} # pylint: disable=no-member
)
store.update_item(course, request.user.id)
return response
else:
return HttpResponse(status=406)
@login_required
@ensure_csrf_cookie
@require_http_methods(("POST", "PUT", "DELETE"))
def group_configurations_detail_handler(request, course_key_string, group_configuration_id, group_id=None):
"""
JSON API endpoint for manipulating a group configuration via its internal ID.
Used by the Backbone application.
POST or PUT
json: update group configuration based on provided information
"""
course_key = CourseKey.from_string(course_key_string)
store = modulestore()
with store.bulk_operations(course_key):
course = get_course_and_check_access(course_key, request.user)
matching_id = [p for p in course.user_partitions
if unicode(p.id) == unicode(group_configuration_id)]
if matching_id:
configuration = matching_id[0]
else:
configuration = None
        if request.method in ('POST', 'PUT'):
            # Django sometimes rewrites one of POST/PUT to the other, so accept either.
try:
new_configuration = GroupConfiguration(request.body, course, group_configuration_id).get_user_partition()
except GroupConfigurationsValidationError as err:
return JsonResponse({"error": err.message}, status=400)
if configuration:
index = course.user_partitions.index(configuration)
course.user_partitions[index] = new_configuration
else:
course.user_partitions.append(new_configuration)
store.update_item(course, request.user.id)
configuration = GroupConfiguration.update_usage_info(store, course, new_configuration)
return JsonResponse(configuration, status=201)
elif request.method == "DELETE":
if not configuration:
return JsonResponse(status=404)
return remove_content_or_experiment_group(
request=request,
store=store,
course=course,
configuration=configuration,
group_configuration_id=group_configuration_id,
group_id=group_id
)
def are_content_experiments_enabled(course):
"""
Returns True if content experiments have been enabled for the course.
"""
return (
SPLIT_TEST_COMPONENT_TYPE in ADVANCED_COMPONENT_TYPES and
SPLIT_TEST_COMPONENT_TYPE in course.advanced_modules
)
def _get_course_creator_status(user):
"""
Helper method for returning the course creator status for a particular user,
taking into account the values of DISABLE_COURSE_CREATION and ENABLE_CREATOR_GROUP.
If the user passed in has not previously visited the index page, it will be
added with status 'unrequested' if the course creator group is in use.
"""
if user.is_staff:
course_creator_status = 'granted'
elif settings.FEATURES.get('DISABLE_COURSE_CREATION', False):
course_creator_status = 'disallowed_for_this_site'
elif settings.FEATURES.get('ENABLE_CREATOR_GROUP', False):
course_creator_status = get_course_creator_status(user)
if course_creator_status is None:
# User not grandfathered in as an existing user, has not previously visited the dashboard page.
# Add the user to the course creator admin table with status 'unrequested'.
add_user_with_status_unrequested(user)
course_creator_status = get_course_creator_status(user)
else:
course_creator_status = 'granted'
return course_creator_status
| agpl-3.0 | -2,271,639,474,986,374,400 | 40.732262 | 142 | 0.633274 | false | 4.346207 | true | false | false |
sahiljain/catapult | experimental/buildbot/download.py | 14 | 1194 | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import multiprocessing
import sys
import time
import traceback
import buildbot
POLL_INTERVAL = 600
BUILD_HISTORY_COUNT = 200
BUILD_RESULTS_COUNT = 50
def FetchLatestBuildResults(builder):
try:
builder.FetchRecentBuilds(BUILD_HISTORY_COUNT)
print 'Fetching results for', builder
for build in builder.LastBuilds(BUILD_RESULTS_COUNT):
for step in build.steps.itervalues():
step.results # pylint: disable=pointless-statement
except: # multiprocessing doesn't give useful stack traces, so print it here.
traceback.print_exc(file=sys.stderr)
print
raise
def main():
logging.getLogger().setLevel(logging.INFO)
builders = buildbot.Builders('chromium.perf')
process_pool = multiprocessing.Pool(4)
while True:
print 'Refreshing...'
buildbot.Update('chromium.perf', builders)
process_pool.map(FetchLatestBuildResults, builders.itervalues())
print 'Refreshed!'
time.sleep(POLL_INTERVAL)
if __name__ == '__main__':
main()
| bsd-3-clause | 7,621,562,518,442,912,000 | 23.875 | 80 | 0.728643 | false | 3.953642 | false | false | false |
kapilrastogi/Impala | tests/query_test/test_aggregation.py | 2 | 8508 | # Copyright (c) 2012 Cloudera, Inc. All rights reserved.
# Validates all aggregate functions across all datatypes
#
import logging
import pytest
from tests.common.test_vector import *
from tests.common.impala_test_suite import ImpalaTestSuite
from tests.common.test_dimensions import create_exec_option_dimension
from tests.common.test_dimensions import create_uncompressed_text_dimension
from tests.common.skip import SkipIfOldAggsJoins, SkipIfS3
from tests.util.test_file_parser import QueryTestSectionReader
agg_functions = ['sum', 'count', 'min', 'max', 'avg']
data_types = ['int', 'bool', 'double', 'bigint', 'tinyint',
'smallint', 'float', 'timestamp']
result_lut = {
# TODO: Add verification for other types
'sum-tinyint': 45000, 'avg-tinyint': 5, 'count-tinyint': 9000,
'min-tinyint': 1, 'max-tinyint': 9,
'sum-smallint': 495000, 'avg-smallint': 50, 'count-smallint': 9900,
'min-smallint': 1, 'max-smallint': 99,
'sum-int': 4995000, 'avg-int': 500, 'count-int': 9990,
'min-int': 1, 'max-int': 999,
'sum-bigint': 49950000, 'avg-bigint': 5000, 'count-bigint': 9990,
'min-bigint': 10, 'max-bigint': 9990,
}
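# Sanity check on the expected values above (illustrative): the tinyint figures
# are consistent with the non-null tinyint_col values 1..9 each occurring 1000
# times (count = 9000, sum = 45000, avg = 5); the other integer types follow
# the same pattern over wider value ranges.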
class TestAggregation(ImpalaTestSuite):
@classmethod
def get_workload(self):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestAggregation, cls).add_test_dimensions()
# Add two more dimensions
cls.TestMatrix.add_dimension(TestDimension('agg_func', *agg_functions))
cls.TestMatrix.add_dimension(TestDimension('data_type', *data_types))
cls.TestMatrix.add_constraint(lambda v: cls.is_valid_vector(v))
@classmethod
def is_valid_vector(cls, vector):
data_type, agg_func = vector.get_value('data_type'), vector.get_value('agg_func')
file_format = vector.get_value('table_format').file_format
if file_format not in ['parquet']: return False
if cls.exploration_strategy() == 'core':
# Reduce execution time when exploration strategy is 'core'
if vector.get_value('exec_option')['batch_size'] != 0: return False
# Avro doesn't have timestamp type
if file_format == 'avro' and data_type == 'timestamp':
return False
elif agg_func not in ['min', 'max', 'count'] and data_type == 'bool':
return False
elif agg_func == 'sum' and data_type == 'timestamp':
return False
return True
def test_aggregation(self, vector):
data_type, agg_func = (vector.get_value('data_type'), vector.get_value('agg_func'))
query = 'select %s(%s_col) from alltypesagg where day is not null' % (agg_func,
data_type)
result = self.execute_scalar(query, vector.get_value('exec_option'),
table_format=vector.get_value('table_format'))
if 'int' in data_type:
assert result_lut['%s-%s' % (agg_func, data_type)] == int(result)
    # Skip the DISTINCT variant for avg() over timestamp columns.
if vector.get_value('data_type') == 'timestamp' and\
vector.get_value('agg_func') == 'avg':
return
query = 'select %s(DISTINCT(%s_col)) from alltypesagg where day is not null' % (
agg_func, data_type)
result = self.execute_scalar(query, vector.get_value('exec_option'))
class TestAggregationQueries(ImpalaTestSuite):
"""Run the aggregation test suite, with codegen enabled and disabled, to exercise our
non-codegen code"""
@classmethod
def get_workload(self):
return 'functional-query'
@classmethod
def add_test_dimensions(cls):
super(TestAggregationQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[False, True]))
if cls.exploration_strategy() == 'core':
cls.TestMatrix.add_dimension(create_uncompressed_text_dimension(cls.get_workload()))
@SkipIfS3.insert
@pytest.mark.execute_serially
def test_non_codegen_tinyint_grouping(self, vector):
# Regression for IMPALA-901. The test includes an INSERT statement, so can only be run
# on INSERT-able formats - text only in this case, since the bug doesn't depend on the
# file format.
if vector.get_value('table_format').file_format == 'text' \
and vector.get_value('table_format').compression_codec == 'none':
self.run_test_case('QueryTest/aggregation_no_codegen_only', vector)
def test_aggregation(self, vector):
if vector.get_value('table_format').file_format == 'hbase':
pytest.xfail(reason="IMPALA-283 - select count(*) produces inconsistent results")
self.run_test_case('QueryTest/aggregation', vector)
def test_distinct(self, vector):
if vector.get_value('table_format').file_format == 'hbase':
pytest.xfail("HBase returns columns in alphabetical order for select distinct *, "
"making the result verication to fail.")
self.run_test_case('QueryTest/distinct', vector)
def test_group_concat(self, vector):
"""group_concat distinct tests
Required to run directly in python because the order in which results will be
merged at the final, single-node aggregation step is non-deterministic (if the
first phase is running on multiple nodes). Need to pull the result apart and
compare the actual items)"""
exec_option = vector.get_value('exec_option')
table_format = vector.get_value('table_format')
# Test group_concat distinct with other aggregate function and groupings.
# expected result is the row: 2010,'1, 2, 3, 4','1-2-3-4','2|3|1|4',40,4
query = """select year, group_concat(distinct string_col),
group_concat(distinct string_col, '-'), group_concat(distinct string_col, '|'),
count(string_col), count(distinct string_col)
from alltypesagg where int_col < 5 and year = 2010 group by year"""
result = self.execute_query(query, exec_option, table_format=table_format)
row = (result.data)[0].split("\t")
assert(len(row) == 6)
assert(row[0] == '2010')
delimiter = [', ', '-', '|']
for i in range(1, 4):
assert(set(row[i].split(delimiter[i-1])) == set(['1', '2', '3', '4']))
assert(row[4] == '40')
assert(row[5] == '4')
# Test group_concat distinct with arrow delimiter, with multiple rows
query = """select day, group_concat(distinct string_col, "->")
from (select * from alltypesagg where id % 100 = day order by id limit 99999) a
group by day order by day"""
result = self.execute_query(query, exec_option, table_format=table_format)
string_col = []
string_col.append(set(['1','101','201','301','401','501','601','701','801','901']))
string_col.append(set(['2','102','202','302','402','502','602','702','802','902']))
string_col.append(set(['3','103','203','303','403','503','603','703','803','903']))
string_col.append(set(['4','104','204','304','404','504','604','704','804','904']))
string_col.append(set(['5','105','205','305','405','505','605','705','805','905']))
string_col.append(set(['6','106','206','306','406','506','606','706','806','906']))
string_col.append(set(['7','107','207','307','407','507','607','707','807','907']))
string_col.append(set(['8','108','208','308','408','508','608','708','808','908']))
string_col.append(set(['9','109','209','309','409','509','609','709','809','909']))
string_col.append(set(['10','110','210','310','410','510','610','710','810','910']))
assert(len(result.data) == 10)
for i in range(10):
row = (result.data)[i].split("\t")
assert(len(row) == 2)
assert(row[0] == str(i+1))
assert(set(row[1].split("->")) == string_col[i])
# Test group_concat distinct with merge node
query = """select group_concat(distinct string_col, ' ') from alltypesagg
where int_col < 10"""
result = self.execute_query(query, exec_option, table_format=table_format)
assert(set((result.data)[0].split(" ")) == set(['1','2','3','4','5','6','7','8','9']))
class TestTPCHAggregationQueries(ImpalaTestSuite):
# Uses the TPC-H dataset in order to have larger aggregations.
@classmethod
def get_workload(cls):
return 'tpch'
@classmethod
def add_test_dimensions(cls):
super(TestTPCHAggregationQueries, cls).add_test_dimensions()
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['parquet'])
def test_tpch_aggregations(self, vector):
self.run_test_case('tpch-aggregations', vector)
@SkipIfOldAggsJoins.passthrough_preagg
def test_tpch_passthrough_aggregations(self, vector):
self.run_test_case('tpch-passthrough-aggregations', vector)
| apache-2.0 | -5,310,935,505,476,193,000 | 44.015873 | 90 | 0.658204 | false | 3.411387 | true | false | false |
haymeister/pico-python | examples/freqmeasure.py | 3 | 2279 | # -*- coding: utf-8 -*-
# Example by Colin O'Flynn
#
import math
import time
import inspect
import numpy as np
from picoscope import ps5000a
from matplotlib.mlab import find
class freqMeasure():
def __init__(self):
self.ps = ps5000a.PS5000a(connect=False)
def openScope(self):
self.ps.open()
self.ps.setChannel("A", coupling="DC", VRange=5.0, probeAttenuation=10)
self.ps.setChannel("B", enabled=False)
self.ps.setChannel("C", enabled=False)
self.ps.setChannel("D", enabled=False)
res = self.ps.setSamplingFrequency(1000E6, 50000)
self.sampleRate = res[0]
print "Sampling @ %f MHz, %d samples"%(res[0]/1E6, res[1])
#Use external trigger to mark when we sample
self.ps.setSimpleTrigger(trigSrc="External", threshold_V=0.150, timeout_ms=5000)
def closeScope(self):
self.ps.close()
def armMeasure(self):
self.ps.runBlock()
def freq_from_crossings(self, sig):
"""Estimate frequency by counting zero crossings"""
# From https://gist.github.com/endolith/255291:
fs = self.sampleRate
# Find all indices right before a rising-edge zero crossing
indices = find((sig[1:] >= 0) & (sig[:-1] < 0))
# More accurate, using linear interpolation to find intersample
# zero-crossings (Measures 1000.000129 Hz for 1000 Hz, for instance)
crossings = [i - sig[i] / (sig[i+1] - sig[i]) for i in indices]
# Some other interpolation based on neighboring points might be better. Spline, cubic, whatever
return fs / np.mean(np.diff(crossings))
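    # A rough sanity check (illustrative only, not part of the capture flow):
    #   fs = self.sampleRate
    #   t = np.arange(0, 0.01, 1.0 / fs)
    #   self.freq_from_crossings(np.sin(2 * np.pi * 1000.0 * t))  # reports ~1000.0 Hz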
def measure(self):
print "Waiting for trigger"
while(self.ps.isReady() == False): time.sleep(0.01)
print "Sampling Done"
data = self.ps.getDataV("A", 50000)
data = data - np.mean(data)
freq = self.freq_from_crossings(data)
print freq
if __name__ == "__main__":
fm = freqMeasure()
fm.openScope()
try:
while 1:
fm.armMeasure()
fm.measure()
except KeyboardInterrupt:
pass
fm.closeScope()
| bsd-2-clause | -4,605,082,882,781,879,300 | 30.652778 | 103 | 0.578324 | false | 3.699675 | false | false | false |
mjoblin/netdumplings | netdumplings/dumplingeater.py | 1 | 9914 | import asyncio
import json
import logging
import signal
from typing import Callable, List, Optional
import websockets
from .dumpling import Dumpling
from .exceptions import InvalidDumpling
from ._shared import ND_CLOSE_MSGS, HUB_HOST, HUB_OUT_PORT
class DumplingEater:
"""
Base helper class for Python-based dumpling eaters.
Connects to ``nd-hub`` and listens for any dumplings made by the provided
``chef_filter`` (or all chefs if ``chef_filter`` is ``None``). Can be
given ``async`` callables for any of the following events:
``on_connect(websocket_uri, websocket_obj)``
invoked when the connection to ``nd-hub`` is made
``on_dumpling(dumpling)``
invoked whenever a dumpling is emitted from ``nd-hub``
``on_connection_lost(e)``
invoked when the connection to ``nd-hub`` is closed
**The above callables must be** ``async def`` **methods**.
:param name: Name of the dumpling eater. Is ideally unique per eater.
:param hub: Address where ``nd-hub`` is sending dumplings from.
:param chef_filter: List of chef names whose dumplings this eater wants to
receive. ``None`` means get all chefs' dumplings.
:param on_connect: Called when connection to ``nd-hub`` is made. Is passed
two parameters: the ``nd-hub`` websocket URI (string) and websocket
object (:class:`websockets.client.WebSocketClientProtocol`).
:param on_dumpling: Called whenever a dumpling is received. Is passed the
dumpling as a Python dict.
:param on_connection_lost: Called when connection to ``nd-hub`` is lost. Is
passed the associated exception object.
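
    A minimal usage sketch (illustrative; ``print_dumpling`` is a hypothetical
    handler, and ``run()`` blocks until interrupted)::

        async def print_dumpling(dumpling):
            print(dumpling)

        DumplingEater(name='printer', on_dumpling=print_dumpling).run()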
"""
def __init__(
self,
name: str = 'nameless_eater',
            hub: str = '{}:{}'.format(HUB_HOST, HUB_OUT_PORT),
*,
chef_filter: Optional[List[str]] = None,
on_connect: Optional[Callable] = None,
on_dumpling: Optional[Callable] = None,
on_connection_lost: Optional[Callable] = None) -> None:
self.name = name
self.chef_filter = chef_filter
self.hub = hub
self.hub_ws = "ws://{0}".format(hub)
# Configure handlers. If we're not provided with handlers then we
# fall back on the default handlers or the handlers provided by a
# subclass.
self.on_connect = (
on_connect if on_connect is not None else self.on_connect
)
self.on_dumpling = (
on_dumpling if on_dumpling is not None else self.on_dumpling
)
self.on_connection_lost = (
on_connection_lost if on_connection_lost is not None
else self.on_connection_lost
)
self._was_connected = False
self._logger_name = "{}.{}".format(__name__, self.name)
self.logger = logging.getLogger(self._logger_name)
def __repr__(self):
def handler_string(attr):
# We can't use 'repr(self.handler)' for callables because it causes
# an infinite loop as the repr of the handler includes the repr of
# the handler (etc). So we replace handler reprs with
# '<callable: name>'.
return (
'<callable: {}>'.format(attr.__name__) if callable(attr)
else repr(attr)
)
return (
'{}('
'name={}, '
'hub={}, '
'chef_filter={}, '
'on_connect={}, '
'on_dumpling={}, '
'on_connection_lost={})'.format(
type(self).__name__,
repr(self.name),
repr(self.hub),
repr(self.chef_filter),
handler_string(self.on_connect),
handler_string(self.on_dumpling),
handler_string(self.on_connection_lost),
)
)
async def _grab_dumplings(self, dumpling_count=None):
"""
Receives all dumplings from the hub and looks for any dumplings which
were created by the chef(s) we're interested in. All those dumplings
are then passed to the on_dumpling handler (after being converted from
their JSON form back into a Dumpling instance).
:param dumpling_count: Number of dumplings to eat. ``None`` means eat
forever.
"""
dumplings_eaten = 0
websocket = await websockets.client.connect(self.hub_ws)
self._was_connected = True
self.logger.info("{0}: Connected to dumpling hub at {1}".format(
self.name, self.hub_ws))
try:
# Announce ourselves to the dumpling hub.
await websocket.send(json.dumps({'eater_name': self.name}))
if self.on_connect:
await self.on_connect(self.hub_ws, websocket)
while True:
# Eat a single dumpling.
dumpling_json = await websocket.recv()
# Create a Dumpling from the JSON received over the websocket.
# Note that invalid dumplings will probably be stripped out by
# the hub already.
try:
dumpling = Dumpling.from_json(dumpling_json)
except InvalidDumpling as e:
self.logger.error("{0}: Invalid dumpling: {1}".format(
self.name, e))
continue
self.logger.debug("{0}: Received dumpling from {1}".format(
self.name, dumpling.chef_name))
# Call the on_dumpling handler if this dumpling is from a
# chef that we've registered interest in.
if (self.chef_filter is None or
dumpling.chef_name in self.chef_filter):
self.logger.debug(
"{0}: Calling dumpling handler {1}".format(
self.name, self.on_dumpling))
dumplings_eaten += 1
await self.on_dumpling(dumpling)
# Stop eating dumplings if we've reached our threshold.
if dumpling_count is not None and \
dumplings_eaten >= dumpling_count:
await websocket.close(*ND_CLOSE_MSGS['eater_full'])
break
except asyncio.CancelledError:
self.logger.warning(
f"\n{self.name}: Connection to dumpling hub cancelled; "
f"closing..."
)
try:
await websocket.close(*ND_CLOSE_MSGS['conn_cancelled'])
except websockets.exceptions.InvalidState:
pass
except websockets.exceptions.ConnectionClosed as e:
self.logger.warning(
"{}: Lost connection to dumpling hub: {}".format(self.name, e)
)
if self.on_connection_lost:
await self.on_connection_lost(e)
@staticmethod
def _interrupt_handler():
"""
Signal handler. Cancels all running async tasks.
"""
tasks = asyncio.Task.all_tasks()
for task in tasks:
task.cancel()
def run(self, dumpling_count=None):
"""
Run the dumpling eater.
This will block until the desired ``dumpling_count`` is met.
:param dumpling_count: Number of dumplings to eat. ``None`` means eat
forever.
"""
self.logger.info("{0}: Running dumpling eater".format(self.name))
if not callable(self.on_dumpling):
self.logger.error(
"{0}: on_dumpling handler is not callable".format(self.name))
return
self.logger.debug("{0}: Looking for dumpling hub at {1}".format(
self.name, self.hub_ws))
self.logger.debug("{0}: Chefs: {1}".format(
self.name,
", ".join(self.chef_filter) if self.chef_filter else 'all')
)
try:
asyncio.run(self._grab_dumplings(dumpling_count))
except OSError as e:
self.logger.warning(
"{0}: There was a problem with the dumpling hub connection. "
"Is nd-hub available?".format(self.name))
self.logger.warning("{0}: {1}".format(self.name, e))
finally:
if self._was_connected:
self.logger.warning(
"{0}: Done eating dumplings.".format(self.name))
async def on_connect(self, websocket_uri, websocket_obj):
"""
Default on_connect handler.
This will be used if an ``on_connect`` handler is not provided during
instantiation, and if a handler is not provided by a DumplingEater
subclass.
        Only logs a warning-level log entry.
"""
self.logger.warning(
'{}: No on_connect handler specified; ignoring '
'connection.'.format(self.name)
)
async def on_dumpling(self, dumpling):
"""
Default on_dumpling handler.
This will be used if an ``on_dumpling`` handler is not provided during
instantiation, and if a handler is not provided by a DumplingEater
subclass.
Only logs an warning-level log entry.
"""
self.logger.warning(
'{}: No on_dumpling handler specified; ignoring '
'dumpling.'.format(self.name)
)
async def on_connection_lost(self, e):
"""
Default on_connection_lost handler.
This will be used if an ``on_connection_lost`` handler is not provided
during instantiation, and if a handler is not provided by a
DumplingEater subclass.
Only logs an warning-level log entry.
"""
self.logger.warning(
'{}: No on_connection_lost handler specified; ignoring '
'connection loss.'.format(self.name)
)
| mit | -3,905,771,031,394,722,000 | 35.718519 | 79 | 0.565463 | false | 4.278809 | false | false | false |
maas/maas | src/maasserver/tests/test_commands_configauth.py | 1 | 17093 | # Copyright 2018 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Test the configauth command."""
from contextlib import contextmanager
from datetime import datetime, timedelta
import json
import tempfile
import unittest
from django.contrib.sessions.models import Session
from django.core.management import call_command
from django.core.management.base import CommandError
from maasserver.management.commands import configauth
from maasserver.models import Config
from maasserver.models.rbacsync import RBAC_ACTION, RBACLastSync, RBACSync
from maasserver.rbac import FakeRBACUserClient
from maasserver.testing.testcase import MAASServerTestCase
class TestConfigAuthCommand(MAASServerTestCase):
def setUp(self):
super().setUp()
self.read_input = self.patch(configauth, "read_input")
self.read_input.return_value = ""
self.mock_print = self.patch(configauth, "print")
self.rbac_user_client = FakeRBACUserClient()
mock_client = self.patch(configauth, "RBACUserClient")
mock_client.return_value = self.rbac_user_client
@contextmanager
def agent_file(self):
with tempfile.NamedTemporaryFile(mode="w+") as agent_file:
config = {
"key": {"public": "public-key", "private": "private-key"},
"agents": [
{
"url": "http://example.com:1234",
"username": "user@admin",
}
],
}
json.dump(config, agent_file)
agent_file.flush()
yield agent_file.name
def printout(self):
prints = []
for call in self.mock_print.mock_calls:
_, output, _ = call
# Empty tuple if print is called with no text
output = output[0] if output else ""
prints.append(output)
return "\n".join(prints)
def test_configauth_changes_empty_string(self):
Config.objects.set_config(
"external_auth_url", "http://example.com/candid"
)
call_command("configauth", candid_agent_file="")
self.assertEqual("", Config.objects.get_config("external_auth_url"))
def test_configauth_changes_auth_prompt_default(self):
self.read_input.return_value = ""
call_command("configauth")
self.assertEqual("", Config.objects.get_config("rbac_url"))
self.assertEqual("", Config.objects.get_config("external_auth_url"))
def test_configauth_changes_auth_invalid_rbac_url(self):
self.assertRaises(
configauth.InvalidURLError,
call_command,
"configauth",
rbac_url="example.com",
)
def test_configauth_delete_sessions(self):
session = Session(
session_key="session_key",
expire_date=datetime.utcnow() + timedelta(days=1),
)
session.save()
call_command("configauth", rbac_url="")
self.assertFalse(Session.objects.all().exists())
def test_update_auth_details(self):
auth_details = configauth.AuthDetails()
with self.agent_file() as agent_file_name:
configauth.update_auth_details_from_agent_file(
agent_file_name, auth_details
)
self.assertEqual(auth_details.url, "http://example.com:1234")
self.assertEqual(auth_details.user, "user@admin")
self.assertEqual(auth_details.key, "private-key")
def test_configauth_interactive(self):
with self.agent_file() as agent_file_name:
self.read_input.side_effect = [
"",
agent_file_name,
"mydomain",
"admins",
]
call_command("configauth")
self.assertEqual("", Config.objects.get_config("rbac_url"))
self.assertEqual(
"http://example.com:1234",
Config.objects.get_config("external_auth_url"),
)
self.assertEqual(
"mydomain", Config.objects.get_config("external_auth_domain")
)
self.assertEqual(
"user@admin", Config.objects.get_config("external_auth_user")
)
self.assertEqual(
"private-key", Config.objects.get_config("external_auth_key")
)
self.assertEqual(
"admins", Config.objects.get_config("external_auth_admin_group")
)
def test_configauth_interactive_domain(self):
with self.agent_file() as agent_file_name:
self.read_input.return_value = "mydomain"
call_command(
"configauth", rbac_url="", candid_agent_file=agent_file_name
)
self.assertEqual(
"http://example.com:1234",
Config.objects.get_config("external_auth_url"),
)
self.assertEqual(
"mydomain", Config.objects.get_config("external_auth_domain")
)
self.assertEqual(
"user@admin", Config.objects.get_config("external_auth_user")
)
self.assertEqual(
"private-key", Config.objects.get_config("external_auth_key")
)
def test_configauth_interactive_domain_empty(self):
with self.agent_file() as agent_file_name:
self.read_input.return_value = ""
call_command(
"configauth", rbac_url="", candid_agent_file=agent_file_name
)
self.assertEqual(
"http://example.com:1234",
Config.objects.get_config("external_auth_url"),
)
self.assertEqual("", Config.objects.get_config("external_auth_domain"))
self.assertEqual(
"user@admin", Config.objects.get_config("external_auth_user")
)
self.assertEqual(
"private-key", Config.objects.get_config("external_auth_key")
)
def test_configauth_interactive_key(self):
with self.agent_file() as agent_file_name:
self.read_input.return_value = "private-key"
call_command(
"configauth",
rbac_url="",
candid_agent_file=agent_file_name,
candid_domain="mydomain",
)
self.assertEqual(
"http://example.com:1234",
Config.objects.get_config("external_auth_url"),
)
self.assertEqual(
"mydomain", Config.objects.get_config("external_auth_domain")
)
self.assertEqual(
"user@admin", Config.objects.get_config("external_auth_user")
)
self.assertEqual(
"private-key", Config.objects.get_config("external_auth_key")
)
def test_configauth_not_interactive(self):
with self.agent_file() as agent_file_name:
call_command(
"configauth",
rbac_url="",
candid_agent_file=agent_file_name,
candid_domain="mydomain",
candid_admin_group="admins",
)
self.assertEqual("", Config.objects.get_config("rbac_url"))
self.assertEqual(
"http://example.com:1234",
Config.objects.get_config("external_auth_url"),
)
self.assertEqual(
"mydomain", Config.objects.get_config("external_auth_domain")
)
self.assertEqual(
"user@admin", Config.objects.get_config("external_auth_user")
)
self.assertEqual(
"private-key", Config.objects.get_config("external_auth_key")
)
self.assertEqual(
"admins", Config.objects.get_config("external_auth_admin_group")
)
self.read_input.assert_not_called()
def test_configauth_agentfile_not_found(self):
error = self.assertRaises(
CommandError,
call_command,
"configauth",
rbac_url="",
candid_agent_file="/not/here",
)
self.assertEqual(
str(error), "[Errno 2] No such file or directory: '/not/here'"
)
def test_configauth_domain_none(self):
with self.agent_file() as agent_file_name:
call_command(
"configauth",
rbac_url="",
candid_agent_file=agent_file_name,
candid_domain="none",
)
self.assertEqual("", Config.objects.get_config("external_auth_domain"))
def test_configauth_json_empty(self):
call_command("configauth", json=True)
self.read_input.assert_not_called()
[print_call] = self.mock_print.mock_calls
_, [output], kwargs = print_call
self.assertEqual({}, kwargs)
self.assertEqual(
{
"external_auth_url": "",
"external_auth_domain": "",
"external_auth_user": "",
"external_auth_key": "",
"external_auth_admin_group": "",
"rbac_url": "",
},
json.loads(output),
)
def test_configauth_json_full(self):
Config.objects.set_config(
"external_auth_url", "http://candid.example.com/"
)
Config.objects.set_config("external_auth_domain", "mydomain")
Config.objects.set_config("external_auth_user", "maas")
Config.objects.set_config("external_auth_key", "secret maas key")
Config.objects.set_config("external_auth_admin_group", "admins")
Config.objects.set_config("rbac_url", "http://rbac.example.com/")
mock_print = self.patch(configauth, "print")
call_command("configauth", json=True)
self.read_input.assert_not_called()
[print_call] = mock_print.mock_calls
_, [output], kwargs = print_call
self.assertEqual({}, kwargs)
self.assertEqual(
{
"external_auth_url": "http://candid.example.com/",
"external_auth_domain": "mydomain",
"external_auth_user": "maas",
"external_auth_key": "secret maas key",
"external_auth_admin_group": "admins",
"rbac_url": "http://rbac.example.com/",
},
json.loads(output),
)
def test_configauth_rbac_with_name_existing(self):
self.rbac_user_client.services = [
{
"name": "mymaas",
"$uri": "/api/rbac/v1/service/4",
"pending": True,
"product": {"$ref" "/api/rbac/v1/product/2"},
}
]
call_command(
"configauth",
rbac_url="http://rbac.example.com",
rbac_service_name="mymaas",
)
self.read_input.assert_not_called()
self.assertEqual(
"http://rbac.example.com", Config.objects.get_config("rbac_url")
)
self.assertEqual(
self.rbac_user_client.registered_services,
["/api/rbac/v1/service/4"],
)
def test_configauth_rbac_with_name_create(self):
patch_prompt = self.patch(configauth, "prompt_for_choices")
patch_prompt.return_value = "yes"
call_command(
"configauth",
rbac_url="http://rbac.example.com",
rbac_service_name="maas",
)
patch_prompt.assert_called_once()
self.assertEqual(
"http://rbac.example.com", Config.objects.get_config("rbac_url")
)
self.assertEqual(
self.rbac_user_client.registered_services,
["/api/rbac/v1/service/4"],
)
def test_configauth_rbac_with_name_abort(self):
patch_prompt = self.patch(configauth, "prompt_for_choices")
patch_prompt.return_value = "no"
error = self.assertRaises(
CommandError,
call_command,
"configauth",
rbac_url="http://rbac.example.com",
rbac_service_name="maas",
)
self.assertEqual(str(error), "Registration with RBAC service canceled")
patch_prompt.assert_called_once()
self.assertEqual(Config.objects.get_config("rbac_url"), "")
self.assertEqual(self.rbac_user_client.registered_services, [])
def test_configauth_rbac_registration_list(self):
self.rbac_user_client.services = [
{
"name": "mymaas",
"$uri": "/api/rbac/v1/service/4",
"pending": False,
"product": {"$ref" "/api/rbac/v1/product/2"},
},
{
"name": "mymaas2",
"$uri": "/api/rbac/v1/service/12",
"pending": True,
"product": {"$ref" "/api/rbac/v1/product/2"},
},
]
# The index of the service to register is prompted
self.read_input.side_effect = ["2"]
call_command("configauth", rbac_url="http://rbac.example.com")
self.assertEqual(
"http://rbac.example.com", Config.objects.get_config("rbac_url")
)
self.assertEqual(
"http://auth.example.com",
Config.objects.get_config("external_auth_url"),
)
self.assertEqual(
"u-1", Config.objects.get_config("external_auth_user")
)
self.assertNotEqual("", Config.objects.get_config("external_auth_key"))
self.assertEqual("", Config.objects.get_config("external_auth_domain"))
self.assertEqual(
"", Config.objects.get_config("external_auth_admin_group")
)
prints = self.printout()
self.assertIn("1 - mymaas", prints)
self.assertIn("2 - mymaas2 (pending)", prints)
self.assertIn('Service "mymaas2" registered', prints)
def test_configauth_rbac_registration_invalid_index(self):
self.rbac_user_client.services = [
{
"name": "mymaas",
"$uri": "/api/rbac/v1/service/4",
"pending": True,
"product": {"$ref" "/api/rbac/v1/product/2"},
}
]
self.read_input.side_effect = ["2"]
error = self.assertRaises(
CommandError,
call_command,
"configauth",
rbac_url="http://rbac.example.com",
)
self.assertEqual(str(error), "Invalid index")
def test_configauth_rbac_no_registerable(self):
error = self.assertRaises(
CommandError,
call_command,
"configauth",
rbac_url="http://rbac.example.com",
)
self.assertEqual(
str(error),
"No registerable MAAS service on the specified RBAC server",
)
def test_configauth_rbac_url_none(self):
with self.agent_file() as agent_file_name:
call_command(
"configauth",
rbac_url="none",
candid_agent_file=agent_file_name,
candid_domain="domain",
candid_admin_group="admins",
)
self.read_input.assert_not_called()
self.assertEqual("", Config.objects.get_config("rbac_url"))
def test_configauth_rbac_url_none_clears_lastsync_and_sync(self):
RBACLastSync.objects.create(resource_type="resource-pool", sync_id=0)
RBACSync.objects.create(resource_type="")
call_command("configauth", rbac_url="none", candid_agent_file="none")
self.assertEqual("", Config.objects.get_config("rbac_url"))
self.assertFalse(RBACLastSync.objects.all().exists())
self.assertFalse(RBACSync.objects.all().exists())
def test_configauth_rbac_clears_lastsync_and_full_sync(self):
RBACLastSync.objects.create(resource_type="resource-pool", sync_id=0)
self.rbac_user_client.services = [
{
"name": "mymaas",
"$uri": "/api/rbac/v1/service/4",
"pending": True,
"product": {"$ref" "/api/rbac/v1/product/2"},
}
]
call_command(
"configauth",
rbac_url="http://rbac.example.com",
rbac_service_name="mymaas",
)
self.read_input.assert_not_called()
self.assertEqual(
"http://rbac.example.com", Config.objects.get_config("rbac_url")
)
self.assertFalse(RBACLastSync.objects.all().exists())
latest = RBACSync.objects.order_by("-id").first()
self.assertEqual(RBAC_ACTION.FULL, latest.action)
self.assertEqual("", latest.resource_type)
self.assertEqual("configauth command called", latest.source)
class TestIsValidUrl(unittest.TestCase):
def test_valid_schemes(self):
for scheme in ["http", "https"]:
url = "{}://example.com/candid".format(scheme)
self.assertTrue(configauth.is_valid_url(url))
def test_invalid_schemes(self):
for scheme in ["ftp", "git+ssh"]:
url = "{}://example.com/candid".format(scheme)
self.assertFalse(configauth.is_valid_url(url))
| agpl-3.0 | 6,869,180,678,300,343,000 | 36.239651 | 79 | 0.560463 | false | 3.924914 | true | false | false |
hackatbrown/2015.hackatbrown.org | hack-at-brown-2015/mentor.py | 1 | 8875 | import webapp2
from template import template
from google.appengine.ext import ndb
from google.appengine.api import datastore_errors
import models
from registration import Hacker
import logging
import json
from google.appengine.api import users
import datetime
maxRating = 5
minRating = 0
def ratingValidator(prop, value):
	# ndb property validators must return the (possibly clamped) value;
	# without the return, out-of-range ratings would be stored unmodified.
	if value > maxRating:
		value = maxRating
	if value < minRating:
		value = minRating
	return value
class MentorResponse(ndb.Model):
rating = ndb.IntegerProperty(default=None, validator=ratingValidator)
request = ndb.KeyProperty(kind='MentorRequest')
mentor = ndb.KeyProperty(kind='Mentor')
dispatched = ndb.DateTimeProperty(auto_now_add=True)
dispatcher = ndb.StringProperty(validator=models.stringValidator)
finished = ndb.DateTimeProperty()
def formatMentorResponse(mentorResponse):
mr = mentorResponse.get()
return {'mentor' : mr.mentor.urlsafe(), 'request' : mr.request.urlsafe(), 'id' : mr.key.urlsafe()}
#Anyone who will give help to a hacker.
class Mentor(ndb.Model):
phone = ndb.StringProperty(validator=models.phoneValidator, default=None)
email = ndb.StringProperty(validator=models.stringValidator, default=None)
name = ndb.StringProperty()
tags = ndb.StringProperty(validator=models.stringValidator, repeated=True)
role = ndb.TextProperty() # e.g. Oracle Engineer
availability = ndb.TextProperty()
details = ndb.TextProperty()
responded = ndb.KeyProperty(kind=MentorResponse, repeated=True)
#perhaps should be key property
assigned = ndb.BooleanProperty(default=False)
def getResponded(self):
return [key.get() for key in self.responded]
def computeAvg(self):
responded = self.getResponded()
ratedResponded = [x for x in responded if x.rating]
if len(ratedResponded) == 0:
return 3
else:
return (reduce(lambda x, y: x + y.rating, ratedResponded, 0) / len(ratedResponded))
def asDict(self, include_keys):
return {key: getattr(self, key, None) for key in include_keys}
def formatMentor(mentor):
md = mentor.asDict(Mentor._properties)
md['responded'] = len(mentor.responded)
md['id'] = mentor.key.urlsafe()
md['rating'] = mentor.computeAvg()
return md
class MentorRequest(ndb.Model):
requester = ndb.KeyProperty(default=None)
requester_phone = ndb.StringProperty(default=None, validator=models.stringValidator)
location = ndb.StringProperty(default=None)
created = ndb.DateTimeProperty(auto_now_add=True)
responses = ndb.KeyProperty(kind=MentorResponse, repeated=True)
issue = ndb.TextProperty(required=False)
tags = ndb.StringProperty(repeated=True)
status = ndb.StringProperty(choices=['solved', 'assigned', 'unassigned'], default='unassigned')
def asDict(self, include_keys):
d = {key: getattr(self, key, None) for key in include_keys}
return d
def formatRequest(mentorRequest):
mr = mentorRequest.asDict(['location', 'created', 'issue', 'tags', 'status'])
mr['created'] = pretty_date(mentorRequest.created)
mr['id'] = mentorRequest.key.urlsafe()
mr['responses'] = len(mentorRequest.responses)
mr['requester_phone'] = mentorRequest.requester_phone
mr['requester_name'] = mentorRequest.requester.get().name if mentorRequest.requester else None
return mr
class MentorRequestHandler(webapp2.RequestHandler):
def get(self):
self.response.write(template('mentor_request.html', {}))
def post(self):
hackers = Hacker.query(Hacker.phone_number == self.request.get('phone')).fetch(keys_only=True)
request = MentorRequest()
request.location = self.request.get('location')
request.issue = self.request.get('issue')
request.tags = self.request.get('tags').split(', ')
if len(hackers):
request.requester = hackers[0]
request.requester_phone = self.request.get('phone')
request.put()
self.redirect('/?dayof=1#mrc') # #mrc: mentor-request-confirm (we don't want that showing up in URLs)
class MentorSignupHandler(webapp2.RequestHandler):
def get(self):
self.response.write(template("mentor_signup.html"))
def post(self):
keys = ['name', 'role', 'email', 'phone', 'availability', 'tags', 'details']
try:
mentor = Mentor()
for key in keys:
val = self.request.get(key)
if key == 'tags':
val = [tag.strip().lower() for tag in val.split(',')]
setattr(mentor, key, val)
mentor.put()
first_name = mentor.name.split(' ')[0] if mentor.name else 'mentor'
self.response.write(template("mentor_signup.html", {"show_confirmation": True, "first_name": first_name}))
except datastore_errors.BadValueError as e:
print "MENTOR SIGNUP ERROR: {0}".format(e)
self.response.write(template("mentor_signup.html", {"error": "There's an invalid or missing field on your form!"}))
class DispatchHandler(webapp2.RequestHandler):
def get(self):
self.response.write(template("mentor_dispatch.html"))
def post(self):
data = json.loads(self.request.body)
request = ndb.Key(urlsafe=data['request']).get()
mentor = ndb.Key(urlsafe=data['mentor']).get()
response = MentorResponse()
response.dispatcher = users.get_current_user().email()
response.mentor = mentor.key
response.request = request.key
response.put()
mentor.responded.append(response.key)
mentor.assigned = True
request.responses.append(response.key)
request.status='assigned'
request.put()
mentor.put()
return self.response.write(json.dumps({'success' : True}))
class ResponseFinishedHandler(webapp2.RequestHandler):
def post(self):
data = json.loads(self.request.body)
response = ndb.Key(urlsafe=data['id']).get()
mentor = response.mentor.get()
request = response.request.get()
if data.get('rating'):
response.rating = int(data.get('rating'))
        request.status = data['status'] #could be 'solved' or 'unassigned'
response.finished = datetime.datetime.now()
mentor.assigned = False
response.put()
mentor.put()
request.put()
return self.response.write(json.dumps({'success' : True}))
class GetRequestsHandler(webapp2.RequestHandler):
def get(self):
requests = map(formatRequest, MentorRequest.query(MentorRequest.status == 'unassigned').order(MentorRequest.created).fetch())
return self.response.write(json.dumps({'requests' : requests}))
class GetAssignedHandler(webapp2.RequestHandler):
def get(self):
mentors = Mentor.query(Mentor.assigned == True).fetch()
mentors = map(formatMentor, mentors)
requests = MentorRequest.query(MentorRequest.status == 'assigned').fetch()
pairs = [r.responses[-1] for r in requests if len(r.responses) > 0]
pairs = map(formatMentorResponse, pairs)
requests = map(formatRequest, requests)
self.response.write(json.dumps({'assigned_mentors' : mentors, 'assigned_requests' : requests, 'pairs' : pairs}))
class ViewRequestHandler(webapp2.RequestHandler):
def get(self, id):
request = ndb.Key(urlsafe=id).get()
mentors = map(formatMentor, findMentorsForRequest(request))
return self.response.write(json.dumps({'request' : formatRequest(request), 'mentors' : mentors}))
def findMentorsForRequest(request):
tags = [t.lower() for t in request.tags]
mentors = Mentor.query(Mentor.assigned == False).fetch()
# Each mentor should be assessed based on:
# 1. # of tags matching that of request
# 2. # of previously completed tasks balanced with rating
# should return list of best mentors
#First sort by responded.
mentors.sort(key=lambda m: len(m.responded))
#Then sort by rating
mentors.sort(key=lambda m: m.computeAvg(), reverse=True)
#Finally sort by relevance of tags
mentors.sort(key=lambda m: len([t for t in m.tags if t.lower() in request.tags]), reverse=True)
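    # sort() is stable, so the last sort is the primary key: tag relevance first,
    # then average rating, then fewest previous assignments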
return mentors
def pretty_date(time=False):
"""
Get a datetime object or a int() Epoch timestamp and return a
pretty string like 'an hour ago', 'Yesterday', '3 months ago',
'just now', etc
"""
now = datetime.datetime.now()
if type(time) is int:
diff = now - datetime.datetime.fromtimestamp(time)
elif isinstance(time,datetime.datetime):
diff = now - time
elif not time:
diff = now - now
second_diff = diff.seconds
day_diff = diff.days
if day_diff < 0:
return ''
if day_diff == 0:
if second_diff < 10:
return "just now"
if second_diff < 60:
return str(second_diff) + " seconds ago"
if second_diff < 120:
return "a minute ago"
if second_diff < 3600:
return str(second_diff / 60) + " minutes ago"
if second_diff < 7200:
return "an hour ago"
if second_diff < 86400:
return str(second_diff / 3600) + " hours ago"
if day_diff == 1:
return "Yesterday"
if day_diff < 7:
return str(day_diff) + " days ago"
if day_diff < 31:
return str(day_diff / 7) + " weeks ago"
if day_diff < 365:
return str(day_diff / 30) + " months ago"
return str(day_diff / 365) + " years ago"
class MentorListHandler(webapp2.RequestHandler):
def get(self):
self.response.write(template("mentor_list.html", {"mentors": Mentor.query().fetch(limit=1000)}))
| mit | 6,380,494,734,375,262,000 | 32.745247 | 127 | 0.716394 | false | 3.128305 | false | false | false |
laurybueno/MoniBus | mapa/settings.py | 1 | 3940 | """
Django settings for mapa project.
Generated by 'django-admin startproject' using Django 1.11.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'l00f7@-+!--7$ah@n%--7sq_zkot66367zs(in+u3(=o9-j102'
# SECURITY WARNING: don't run with debug turned on in production!
if 'DEBUG' in os.environ:
DEBUG = os.getenv('DEBUG')
else:
DEBUG = False
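# NOTE: os.getenv() returns a string, so any non-empty DEBUG value
# (including the string "False") evaluates as truthy here.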
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.gis',
'django_filters',
'cronotacografo',
'rest_framework',
'rest_framework_gis',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mapa.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mapa.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'HOST': os.getenv('DB_HOST'),
'NAME': os.getenv('DB_NAME'),
'USER': os.getenv('DB_USER'),
'PASSWORD': os.getenv('DB_PASSWORD')
},
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
REST_FRAMEWORK = {
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
],
'DEFAULT_FILTER_BACKENDS': [
'django_filters.rest_framework.DjangoFilterBackend',
'rest_framework.filters.SearchFilter',
],
}
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/Sao_Paulo'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_ROOT = '/usr/src/app/static/'
STATIC_URL = '/static/'
# Allow bulk deletion in admin
DATA_UPLOAD_MAX_NUMBER_FIELDS = 9999999999999
| agpl-3.0 | 8,972,003,198,826,264,000 | 25.802721 | 91 | 0.678426 | false | 3.527305 | false | false | false |
erjac77/ansible-module-f5bigip | library/f5bigip_ltm_profile_mssql.py | 2 | 4968 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2016-2018, Eric Jacob <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: f5bigip_ltm_profile_mssql
short_description: BIG-IP ltm profile mssql module
description:
- Configures a profile to manage mssql(tds) database traffic.
version_added: "2.4"
author:
- "Gabriel Fortin (@GabrielFortin)"
options:
partition:
description:
- Displays the administrative partition within which the profile resides.
read_pool:
description:
- Specifies the pool of MS SQL database servers to which the system sends ready-only requests.
read_write_split_by_command:
description:
        - When enabled, the system decides which pool to send the client requests to based on the content of the message.
default: disabled
choices: ['disabled', 'enabled']
read_write_split_by_user:
description:
        - When enabled, the system decides which pool to send the client requests to based on the user name.
default: disabled
choices: ['disabled', 'enabled']
state:
description:
- Specifies the state of the component on the BIG-IP system.
default: present
choices: ['absent', 'present']
user_can_write_by_default:
description:
- Specifies whether users have write access by default.
default: true
choices: ['false', 'true']
user_list:
description:
        - Specifies the users who have read-only access to the MS SQL database if user-can-write-by-default is
          true, or the users who have write access to the MS SQL database if user-can-write-by-default is false.
write_persist_timer:
description:
        - Specifies the minimum time, in milliseconds, that the connection is persisted to the write pool after
          the connection switches to the write pool.
write_pool:
description:
- Specifies the pool of MS SQL database servers to which the system sends requests that are not read-only.
requirements:
- BIG-IP >= 12.0
- ansible-common-f5
- f5-sdk
'''
EXAMPLES = '''
- name: Create LTM Profile MSSQL
f5bigip_ltm_profile_mssql:
f5_hostname: 172.16.227.35
f5_username: admin
f5_password: admin
f5_port: 443
name: my_mssql_profile
partition: Common
state: present
delegate_to: localhost
'''
RETURN = ''' # '''
from ansible.module_utils.basic import AnsibleModule
from ansible_common_f5.base import F5_ACTIVATION_CHOICES
from ansible_common_f5.base import F5_NAMED_OBJ_ARGS
from ansible_common_f5.base import F5_PROVIDER_ARGS
from ansible_common_f5.bigip import F5BigIpNamedObject
class ModuleParams(object):
@property
def argument_spec(self):
argument_spec = dict(
read_pool=dict(type='str'),
read_write_split_by_command=dict(type='str', choices=F5_ACTIVATION_CHOICES),
read_write_split_by_user=dict(type='str', choices=F5_ACTIVATION_CHOICES),
user_can_write_by_default=dict(type='str', choices=['false', 'true']),
user_list=dict(type='list'),
write_persist_timer=dict(type='int'),
write_pool=dict(type='str')
)
argument_spec.update(F5_PROVIDER_ARGS)
argument_spec.update(F5_NAMED_OBJ_ARGS)
return argument_spec
@property
def supports_check_mode(self):
return True
class F5BigIpLtmProfileMssql(F5BigIpNamedObject):
def _set_crud_methods(self):
self._methods = {
'create': self._api.tm.ltm.profile.mssqls.mssql.create,
'read': self._api.tm.ltm.profile.mssqls.mssql.load,
'update': self._api.tm.ltm.profile.mssqls.mssql.update,
'delete': self._api.tm.ltm.profile.mssqls.mssql.delete,
'exists': self._api.tm.ltm.profile.mssqls.mssql.exists
}
def main():
params = ModuleParams()
module = AnsibleModule(argument_spec=params.argument_spec, supports_check_mode=params.supports_check_mode)
try:
obj = F5BigIpLtmProfileMssql(check_mode=module.check_mode, **module.params)
result = obj.flush()
module.exit_json(**result)
except Exception as exc:
module.fail_json(msg=str(exc))
if __name__ == '__main__':
main()
| apache-2.0 | 2,509,418,083,242,508,000 | 33.5 | 120 | 0.65942 | false | 3.746606 | false | false | false |
prisis/sublime-text-packages | Packages/Anaconda/anaconda_lib/decorators.py | 5 | 3180 | # -*- coding: utf8 -*-
# Copyright (C) 2013 - Oscar Campos <[email protected]>
# This program is Free Software see LICENSE file for details
"""
Anaconda decorators
"""
import os
import sys
import time
import pstats
import logging
import functools
try:
import cProfile
CPROFILE_AVAILABLE = True
except ImportError:
CPROFILE_AVAILABLE = False
try:
import sublime
from .helpers import get_settings, project_name
except ImportError:
# we just imported the file from jsonserver so we don't need get_settings
pass
def auto_project_switch(func):
"""Auto kill and start a new jsonserver on project switching
"""
@functools.wraps(func)
def wrapper(self, *args, **kwargs):
if not self.green_light:
return
view = sublime.active_window().active_view()
auto_project_switch = get_settings(view, 'auto_project_switch', False)
python_interpreter = get_settings(view, 'python_interpreter')
# expand ~/ in the python_interpreter path
python_interpreter = os.path.expanduser(python_interpreter)
# expand $shell vars in the python_interpreter path
python_interpreter = os.path.expandvars(python_interpreter)
if (
auto_project_switch and hasattr(self, 'project_name') and (
project_name() != self.project_name
or self.process.args[0] != python_interpreter)
):
            print('Project or interpreter switch detected...')
self.process.kill()
self.reconnecting = True
self.start()
else:
func(self, *args, **kwargs)
return wrapper
def timeit(logger):
"""Decorator for timeit timeit timeit
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
starttime = time.time()
result = func(*args, **kwargs)
endtime = time.time()
total = endtime - starttime
logger.debug(
'Func {} took {} secs'.format(func.__name__, total)
)
return result
return wrapper
return decorator
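# Illustrative usage (the logger name below is hypothetical):
#
#   logger = logging.getLogger('anaconda.timing')
#
#   @timeit(logger)
#   def heavy_operation(*args):
#       ...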
def profile(func):
"""Run the profiler in the given function
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
view = sublime.active_window().active_view()
if get_settings(view, 'anaconda_debug', False) == 'profiler':
if CPROFILE_AVAILABLE:
pr = cProfile.Profile()
pr.enable()
result = func(*args, **kwargs)
pr.disable()
ps = pstats.Stats(pr, stream=sys.stdout)
ps.sort_stats('time')
ps.print_stats(15)
else:
logging.error(
                    'cProfile doesn\'t seem to be importable on ST3 + {}, '
                    'sorry. You may want to use @timeit instead, so sorry, '
                    'really'.format(sys.platform)
)
result = func(*args, **kwargs)
else:
result = func(*args, **kwargs)
return result
return wrapper
| mit | -6,221,040,314,991,987,000 | 25.722689 | 78 | 0.568868 | false | 4.31479 | false | false | false |
vladsaveliev/TargQC | targqc/qualimap/report_parser.py | 1 | 6065 | from targqc.utilz.logger import warn
metric_names = [
'Reference size',
'Regions size/percentage of reference (on target)',
'Regions size/percentage of reference (on target) %',
'Coverage Mean',
'Coverage Mean (on target)',
'Coverage Standard Deviation',
'Coverage Standard Deviation (on target)',
'Reference size',
'Number of reads',
'Mapped reads',
'Mapped reads %',
'Unmapped reads',
'Unmapped reads %',
'Mapped reads (on target)',
'Mapped reads (on target) %',
'Mapped paired reads',
'Mapped paired reads %',
'Paired reads',
'Paired reads %',
'Duplicated reads (flagged)',
'Duplicated reads (flagged) %',
'Duplicated reads (flagged) (on target)',
'Duplicated reads (flagged) (on target) %',
'Read min length',
'Read max length',
'Read mean length',
'Mean Mapping Quality (on target)',
'Mismatches (on target)',
'Insertions (on target)',
'Deletions (on target)',
'Homopolymer indels (on target)',
'Mean Mapping Quality',
'Mismatches',
'Insertions',
'Deletions',
'Homopolymer indels',
]
ALLOWED_UNITS = ['%']
def parse_qualimap_sample_report(report_fpath):
value_by_metric = dict()
def __get_td_tag_contents(line):
## examples:
# <td class=column1>Paired reads</td>
# <td class=column2>80,244 / 99.89%</td>
crop_left = line.split('>')
if len(crop_left) < 2:
return None
crop_right = crop_left[1].split('<')
return crop_right[0].strip()
def __fill_record(metric_name, line):
val = __get_td_tag_contents(line)
val = val.replace(' ', '').replace(',', '')
try:
val = val.replace(b'\xc2\xa0', '')
except:
val = val.replace(b'\xc2\xa0'.decode(), '')
if metric_name == 'Read min/max/mean length': # special case
for metric_infix, value in zip(['min', 'max', 'mean'], val.split('/')):
value_by_metric['Read ' + metric_infix + ' length'] = value
else:
if metric_name not in metric_names:
# warn('Qualimap metric "' + metric_name + '" is not in allowed metric_names')
return
num_chars = []
unit_chars = []
i = 0
while i < len(val) and (val[i].isdigit() or val[i] in ['.']):
num_chars += val[i]
i += 1
while i < len(val):
unit_chars += val[i]
i += 1
val_num = ''.join(num_chars)
val_unit = ''.join(unit_chars)
if val_unit and val_unit in ALLOWED_UNITS:
# metric.unit = val_unit
pass
try:
val = int(val_num)
if val_unit == '%':
val = float(val) / 100
except ValueError:
try:
val = float(val_num)
if val_unit == '%':
val /= 100
except ValueError: # it is a string
val = val_num + val_unit
value_by_metric[metric_name] = val
if val_unit.startswith('/'): # for values like "80,220 / 99.86%"
meta_val = val_unit.replace('/', '').strip()
if '%' in meta_val:
try:
val = float(meta_val.replace('%', '')) / 100.0
except ValueError:
pass
                    else:
                        value_by_metric[metric_name + ' %'] = val
sections = [['start', 'Summary'],
['globals (on target)', 'Globals (inside of regions)'],
['globals', 'Globals'],
['coverage (on target)', 'Coverage (inside of regions)'],
['coverage', 'Coverage'],
['mq (on target)', 'Mapping Quality (inside of regions)'],
['mq', 'Mapping Quality'],
['mismatches and indels (on target)', 'Mismatches and indels (inside of regions)'],
['mismatches and indels', 'Mismatches and indels'],
['finish', 'Coverage across reference']] # plots are starting from this line
on_target_stats_suffix = ' (on target)'
coverage_stats_prefix = 'Coverage '
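    # Walk the HTML report line by line: section headers switch cur_section,
    # a <td class=column1> cell holds the metric name and the following
    # <td class=column2> cell holds its value.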
with open(report_fpath) as f:
cur_section = None
cur_metric_name = None
for line in f:
if 'mapped' in line.lower():
pass
if 'class=table-summary' in line:
cur_section = None
continue
if cur_section is None:
for name, pattern in sections:
if pattern in line:
cur_section = name
break
if cur_section is None:
continue
if cur_section == 'finish':
break
if line.find('class=column1') != -1:
cur_metric_name = __get_td_tag_contents(line)
if cur_section.endswith('(on target)'):
cur_metric_name += on_target_stats_suffix
if cur_section.startswith('coverage'):
cur_metric_name = coverage_stats_prefix + cur_metric_name
# if not metric_storage.get_metric(cur_metric_name): # special case for Duplication rate and Clipped reads (Qualimap v.1 and v.2 difference)
# if metric_storage.get_metric(cur_metric_name + on_target_stats_suffix): # extra 'on target' metrics
# cur_metric_name += on_target_stats_suffix
if cur_metric_name and line.find('class=column2') != -1:
__fill_record(cur_metric_name, line)
cur_metric_name = None
return value_by_metric
| gpl-3.0 | -6,310,326,805,800,163,000 | 35.981707 | 157 | 0.484419 | false | 4.241259 | false | false | false |
NSLS-II-XPD/ipython_ophyd | profile_collection/simulators/10-motors-dets-sim.py | 1 | 3925 | # the ttwotheta motor and detector
# test xpd sim of motor movement
import numpy as np
from ophyd.sim import SynSignal, motor1, motor2
from lmfit import Model, Parameter, Parameters
from lmfit.models import VoigtModel, LinearModel
from lmfit.lineshapes import voigt
class SynGaussPeaks(SynSignal):
"""
Evaluate a point on a peaks based on the value of a motor.
Parameters
----------
name : string
motor : Device
motor_field : string
    centers : iterable of numbers
        centers of the peaks
Imax : number
max intensity of peak
sigma : number, optional
Default is 1.
noise : {'poisson', 'uniform', None}, optional
Add noise to the gaussian peak.
noise_multiplier : float, optional
Only relevant for 'uniform' noise. Multiply the random amount of
noise by 'noise_multiplier'
random_state : numpy random state object, optional
np.random.RandomState(0), to generate random number with given seed
Example
-------
motor = SynAxis(name='motor')
    det = SynGaussPeaks('det', motor, 'motor', centers=[-1, 0, 1], Imax=1, sigma=1)
"""
def __init__(self, name, motor, motor_field, centers, Imax, sigma=1,
noise=None, noise_multiplier=1, random_state=None, offset=None,
**kwargs):
if noise not in ('poisson', 'uniform', None):
raise ValueError("noise must be one of 'poisson', 'uniform', None")
self._motor = motor
if random_state is None:
random_state = np.random
def func():
m = motor.read()[motor_field]['value']
v = m*0
for center in centers:
v += Imax * np.exp(-(m - center) ** 2 / (2 * sigma ** 2))
if offset is not None:
v += offset
if noise == 'poisson':
v += int(random_state.poisson(np.round(v), 1))
elif noise == 'uniform':
v += random_state.uniform(-1, 1) * noise_multiplier
return v
super().__init__(func=func, name=name, **kwargs)
D_SPACINGS = {'LaB6': np.array([4.15772, 2.94676, 2.40116]),
'Si': 5.43095 / np.array([np.sqrt(3), np.sqrt(8), np.sqrt(11), np.sqrt(27)]),
}
import numpy as np
#def gaussian(theta, center, width):
# return 1500 / (np.sqrt(2*np.pi) * width) * np.exp(-((theta - center) / width)**2 / 2)
# for the simulation
SIMULATED_D = "Si"
def intensity(theta, amplitude, width, wavelength):
result = np.clip(5 * np.random.randn(), 0, None) # Gaussian noise
for d in D_SPACINGS['Si']:
assert wavelength < 2 * d, \
"wavelength would result in illegal arg to arcsin"
try:
center = np.arcsin(wavelength / (2 * d))
except Exception:
print("DEAD"); center = 0
result += voigt(theta, amplitude, center, width)
result += voigt(-theta, amplitude, center, width)
return result
def current_intensity_peaks():
amplitude = 0.5
width = 0.004 # degrees
    wavelength = 12.398 / 66.4 # angstroms
two_theta = motor1.read()['motor1']['value'] # degrees
theta = np.deg2rad(two_theta / 2) # radians
return intensity(theta, amplitude, np.deg2rad(width), wavelength)
def current_intensity_dips():
amplitude = 0.5
width = 0.004 # degrees
    wavelength = 12.398 / 66.4 # angstroms
hw_theta = motor1.read()['motor1']['value'] # degrees
theta = np.deg2rad(hw_theta + 35.26) # radians
return -intensity(theta, amplitude, np.deg2rad(width), wavelength) + 10000
th_cal = motor1
sc = SynSignal(name="det", func=current_intensity_dips)
''' test sim motors
import bluesky.plan_stubs as bps
import bluesky.plans as bp
from bluesky.callbacks import LivePlot
def myplan():
yield from bps.abs_set(motor1, 0)
yield from bp.rel_scan([det_6peaks], motor1, -10, 10, 1000)
RE(myplan(), LivePlot('det_6peaks', 'motor1'))
'''
| bsd-2-clause | 3,955,795,154,368,246,300 | 32.836207 | 91 | 0.604331 | false | 3.526505 | false | false | false |
Lorquas/subscription-manager | src/rhsmlib/facts/cleanup.py | 2 | 2920 | from __future__ import print_function, division, absolute_import
# Copyright (c) 2016 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import logging
from rhsmlib.facts import collector
log = logging.getLogger(__name__)
class CleanupCollector(collector.FactsCollector):
no_uuid_platforms = ['powervm_lx86', 'xen-dom0', 'ibm_systemz']
def get_all(self):
cleanup_facts = {}
dmi_socket_info = self.replace_socket_count_with_dmi()
cleanup_facts.update(dmi_socket_info)
return cleanup_facts
def explain_lack_of_virt_uuid(self):
# No virt.uuid equiv is available for guests on these hypervisors
#virt_is_guest = self._collected_hw_info['virt.is_guest']
if not self._is_a_virt_host_type_with_virt_uuids():
log.debug("we don't sell virt uuids here")
def _is_a_virt_host_type_with_virt_uuids(self):
virt_host_type = self._collected_hw_info['virt.host_type']
for no_uuid_platform in self.no_uuid_platforms:
if virt_host_type.find(no_uuid_platform) > -1:
return False
return True
def replace_socket_count_with_dmi(self):
cleanup_info = {}
# cpu topology reporting on xen dom0 machines is wrong. So
# if we are a xen dom0, and we found socket info in dmiinfo,
# replace our normal cpu socket calculation with the dmiinfo one
# we have to do it after the virt data and cpu data collection
if 'virt.host_type' not in self._collected_hw_info:
return cleanup_info
if not self._host_is_xen_dom0():
return cleanup_info
if 'dmi.meta.cpu_socket_count' not in self._collected_hw_info:
return cleanup_info
# Alright, lets munge up cpu socket info based on the dmi info.
socket_count = int(self._collected_hw_info['dmi.meta.cpu_socket_count'])
cleanup_info['cpu.cpu_socket(s)'] = socket_count
if 'cpu.cpu(s)' not in self._collected_hw_info:
return cleanup_info
# And the cores per socket count as well
dmi_cpu_cores_per_cpu = int(self._collected_hw_info['cpu.cpu(s)']) // socket_count
cleanup_info['cpu.core(s)_per_socket'] = dmi_cpu_cores_per_cpu
return cleanup_info
def _host_is_xen_dom0(self):
return self._collected_hw_info['virt.host_type'].find('dom0') > -1
| gpl-2.0 | -7,391,271,463,684,545,000 | 38.459459 | 90 | 0.664384 | false | 3.587224 | false | false | false |
Azure/azure-sdk-for-python | sdk/formrecognizer/azure-ai-formrecognizer/samples/sample_recognize_identity_documents.py | 1 | 3765 | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: sample_recognize_identity_documents.py
DESCRIPTION:
This sample demonstrates how to recognize fields from an identity document.
See fields found on identity documents here:
https://aka.ms/formrecognizer/iddocumentfields
USAGE:
python sample_recognize_identity_documents.py
Set the environment variables with your own values before running the sample:
1) AZURE_FORM_RECOGNIZER_ENDPOINT - the endpoint to your Cognitive Services resource.
2) AZURE_FORM_RECOGNIZER_KEY - your Form Recognizer API key
"""
import os
class RecognizeIdDocumentsSample(object):
def recognize_identity_documents(self):
path_to_sample_forms = os.path.abspath(os.path.join(os.path.abspath(__file__),
"..", "./sample_forms/id_documents/license.jpg"))
# [START recognize_identity_documents]
from azure.core.credentials import AzureKeyCredential
from azure.ai.formrecognizer import FormRecognizerClient
endpoint = os.environ["AZURE_FORM_RECOGNIZER_ENDPOINT"]
key = os.environ["AZURE_FORM_RECOGNIZER_KEY"]
form_recognizer_client = FormRecognizerClient(
endpoint=endpoint, credential=AzureKeyCredential(key)
)
with open(path_to_sample_forms, "rb") as f:
poller = form_recognizer_client.begin_recognize_identity_documents(identity_document=f)
id_documents = poller.result()
for idx, id_document in enumerate(id_documents):
print("--------Recognizing ID document #{}--------".format(idx+1))
first_name = id_document.fields.get("FirstName")
if first_name:
print("First Name: {} has confidence: {}".format(first_name.value, first_name.confidence))
last_name = id_document.fields.get("LastName")
if last_name:
print("Last Name: {} has confidence: {}".format(last_name.value, last_name.confidence))
document_number = id_document.fields.get("DocumentNumber")
if document_number:
print("Document Number: {} has confidence: {}".format(document_number.value, document_number.confidence))
dob = id_document.fields.get("DateOfBirth")
if dob:
print("Date of Birth: {} has confidence: {}".format(dob.value, dob.confidence))
doe = id_document.fields.get("DateOfExpiration")
if doe:
print("Date of Expiration: {} has confidence: {}".format(doe.value, doe.confidence))
sex = id_document.fields.get("Sex")
if sex:
print("Sex: {} has confidence: {}".format(sex.value, sex.confidence))
address = id_document.fields.get("Address")
if address:
print("Address: {} has confidence: {}".format(address.value, address.confidence))
country_region = id_document.fields.get("CountryRegion")
if country_region:
print("Country/Region: {} has confidence: {}".format(country_region.value, country_region.confidence))
region = id_document.fields.get("Region")
if region:
print("Region: {} has confidence: {}".format(region.value, region.confidence))
# [END recognize_identity_documents]
if __name__ == '__main__':
sample = RecognizeIdDocumentsSample()
sample.recognize_identity_documents()
| mit | 8,544,715,683,666,290,000 | 44.914634 | 121 | 0.609296 | false | 4.230337 | false | false | false |
ministryofjustice/cla_backend | cla_backend/apps/checker/tests/api/test_case_api.py | 1 | 10480 | import datetime
import uuid
import mock
from django.utils import timezone
from django.core import mail
from rest_framework import status
from rest_framework.test import APITestCase
from cla_common.constants import CASE_SOURCE
from cla_eventlog.models import Log
from checker.serializers import CaseSerializer
from core.tests.mommy_utils import make_recipe
from core.tests.test_base import SimpleResourceAPIMixin
from legalaid.models import Case, PersonalDetails
from legalaid.tests.views.test_base import CLACheckerAuthBaseApiTestMixin
from call_centre.tests.test_utils import CallCentreFixedOperatingHours
class BaseCaseTestCase(
CLACheckerAuthBaseApiTestMixin, CallCentreFixedOperatingHours, SimpleResourceAPIMixin, APITestCase
):
LOOKUP_KEY = "reference"
API_URL_BASE_NAME = "case"
RESOURCE_RECIPE = "legalaid.case"
def make_resource(self):
return None
def assertCaseResponseKeys(self, response):
self.assertItemsEqual(
response.data.keys(),
[
"eligibility_check",
"personal_details",
"reference",
"requires_action_at",
"callback_window_type",
"adaptation_details",
"thirdparty_details",
],
)
def assertPersonalDetailsEqual(self, data, obj):
if data is None or obj is None:
self.assertEqual(data, obj)
else:
for prop in ["title", "full_name", "postcode", "street", "mobile_phone", "home_phone"]:
self.assertEqual(unicode(getattr(obj, prop)), data[prop])
def assertCaseEqual(self, data, case):
self.assertEqual(case.reference, data["reference"])
self.assertEqual(unicode(case.eligibility_check.reference), data["eligibility_check"])
self.assertPersonalDetailsEqual(data["personal_details"], case.personal_details)
self.assertEqual(Case.objects.count(), 1)
case = Case.objects.first()
self.assertEqual(case.source, CASE_SOURCE.WEB)
def get_personal_details_default_post_data(self):
return {
"title": "MR",
"full_name": "John Doe",
"postcode": "SW1H 9AJ",
"street": "102 Petty France",
"mobile_phone": "0123456789",
"home_phone": "9876543210",
}
class CaseTestCase(BaseCaseTestCase):
def test_methods_not_allowed(self):
"""
Ensure that we can't POST, PUT or DELETE
"""
# LIST
self._test_delete_not_allowed(self.list_url)
# CREATE
def test_create_no_data(self):
"""
CREATE should raise validation error when data is empty
"""
response = self.client.post(self.list_url, data={}, format="json")
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertItemsEqual(response.data.keys(), ["personal_details"])
self.assertEqual(Case.objects.count(), 0)
def test_create_with_data(self):
check = make_recipe("legalaid.eligibility_check")
data = {
"eligibility_check": unicode(check.reference),
"personal_details": self.get_personal_details_default_post_data(),
}
response = self.client.post(self.list_url, data=data, format="json")
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertCaseResponseKeys(response)
self.assertCaseEqual(
response.data,
Case(
reference=response.data["reference"],
eligibility_check=check,
personal_details=PersonalDetails(**data["personal_details"]),
),
)
# test that the Case is in the db and created by 'web' user
self.assertEqual(Case.objects.count(), 1)
case = Case.objects.first()
self.assertEqual(case.created_by.username, "web")
# test that the log is in the db and created by 'web' user
self.assertEqual(Log.objects.count(), 1)
log = Log.objects.first()
self.assertEqual(log.created_by.username, "web")
# no email sent
self.assertEquals(len(mail.outbox), 0)
def _test_method_in_error(self, method, url):
"""
Generic method called by 'create' and 'patch' to test against validation
errors.
"""
invalid_uuid = str(uuid.uuid4())
data = {
"eligibility_check": invalid_uuid,
"personal_details": {
"title": "1" * 21,
"full_name": None,
"postcode": "1" * 13,
"street": "1" * 256,
"mobile_phone": "1" * 21,
"home_phone": "1" * 21,
},
}
method_callable = getattr(self.client, method)
response = method_callable(url, data, format="json")
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
errors = response.data
self.assertItemsEqual(errors.keys(), ["eligibility_check", "personal_details"])
self.assertEqual(errors["eligibility_check"], [u"Object with reference=%s does not exist." % invalid_uuid])
self.assertItemsEqual(
errors["personal_details"],
[
{
"title": [u"Ensure this value has at most 20 characters (it has 21)."],
"postcode": [u"Ensure this value has at most 12 characters (it has 13)."],
"street": [u"Ensure this value has at most 255 characters (it has 256)."],
"mobile_phone": [u"Ensure this value has at most 20 characters (it has 21)."],
"home_phone": [u"Ensure this value has at most 20 characters (it has 21)."],
}
],
)
def test_create_in_error(self):
self._test_method_in_error("post", self.list_url)
def test_cannot_create_with_other_reference(self):
"""
Cannot create a case passing an eligibility check reference already assigned
to another case
"""
# create a different case
case = make_recipe("legalaid.case")
data = {
"eligibility_check": unicode(case.eligibility_check.reference),
"personal_details": self.get_personal_details_default_post_data(),
}
response = self.client.post(self.list_url, data=data, format="json")
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertDictEqual(
response.data, {"eligibility_check": [u"Case with this Eligibility check already exists."]}
)
def test_case_serializer_with_dupe_eligibility_check_reference(self):
case = make_recipe("legalaid.case")
data = {
u"eligibility_check": case.eligibility_check.reference,
u"personal_details": self.get_personal_details_default_post_data(),
}
serializer = CaseSerializer(data=data)
self.assertFalse(serializer.is_valid())
self.assertDictEqual(
serializer.errors, {"eligibility_check": [u"Case with this Eligibility check already exists."]}
)
class CallMeBackCaseTestCase(BaseCaseTestCase):
@property
def _default_dt(self):
if not hasattr(self, "__default_dt"):
self.__default_dt = datetime.datetime(2015, 3, 30, 10, 0, 0, 0).replace(tzinfo=timezone.utc)
return self.__default_dt
def test_create_with_callmeback(self):
self.assertEquals(len(mail.outbox), 0)
check = make_recipe("legalaid.eligibility_check")
data = {
"eligibility_check": unicode(check.reference),
"personal_details": self.get_personal_details_default_post_data(),
"requires_action_at": self._default_dt.isoformat(),
}
with mock.patch(
"cla_common.call_centre_availability.current_datetime",
return_value=datetime.datetime(2015, 3, 23, 10, 0, 0, 0),
):
response = self.client.post(self.list_url, data=data, format="json")
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertCaseResponseKeys(response)
case = Case.objects.first()
self.assertEqual(case.requires_action_at, self._default_dt)
self.assertEqual(case.callback_attempt, 1)
self.assertEqual(case.outcome_code, "CB1")
self.assertEqual(case.source, CASE_SOURCE.WEB)
self.assertEqual(case.log_set.count(), 2)
self.assertEqual(case.log_set.filter(code="CB1").count(), 1)
log = case.log_set.get(code="CB1")
self.assertEqual(
log.notes,
"Callback scheduled for %s - %s. "
% (
timezone.localtime(self._default_dt).strftime("%d/%m/%Y %H:%M"),
(timezone.localtime(self._default_dt) + datetime.timedelta(minutes=30)).strftime("%H:%M"),
),
)
_dt = timezone.localtime(self._default_dt)
expected_sla_72h = datetime.datetime(2015, 4, 7, 13, 30, 0, 0)
self.assertDictEqual(
log.context,
{
"requires_action_at": _dt.isoformat(),
"sla_120": (_dt + datetime.timedelta(minutes=120)).isoformat(),
"sla_480": (_dt + datetime.timedelta(hours=8)).isoformat(),
"sla_15": (_dt + datetime.timedelta(minutes=15)).isoformat(),
"sla_30": (_dt + datetime.timedelta(minutes=30)).isoformat(),
"sla_72h": timezone.make_aware(expected_sla_72h, _dt.tzinfo).isoformat(),
},
)
# checking email
self.assertEquals(len(mail.outbox), 1)
def test_create_should_ignore_outcome_code(self):
"""
Here only to check backward incompatibility
"""
check = make_recipe("legalaid.eligibility_check")
data = {
"eligibility_check": unicode(check.reference),
"personal_details": self.get_personal_details_default_post_data(),
"requires_action_at": self._default_dt.isoformat(),
"outcome_code": "TEST",
}
response = self.client.post(self.list_url, data=data, format="json")
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertCaseResponseKeys(response)
case = Case.objects.first()
self.assertNotEqual(case.outcome_code, "TEST")
| mit | -4,556,641,038,829,528,600 | 36.971014 | 115 | 0.600286 | false | 3.990861 | true | false | false |
bm424/diffraction-toybox | tests/test_tools.py | 1 | 1298 | from unittest import TestCase
import numpy as np
from toybox.tools import *
class TestCheckPoint(TestCase):
def test_valid_point(self):
point = (2, 3)
check_point(point)
def test_valid_point_with_intensity(self):
point = (2, 3, 1.)
check_point(point)
def test_check_3d_point(self):
point = (0., 1., 0., 1.)
with self.assertRaises(ValueError):
check_point(point)
def test_check_invalid_point(self):
point = (0., 'bad value')
with self.assertRaises(ValueError):
check_point(point)
class TestCheckPoints(TestCase):
def test_single_point(self):
point = ((2, 3, None),)
check_points(point)
class TestSortPoints(TestCase):
def test_sort_points(self):
points = np.array([
[1., 1.],
[2., 2.],
[1., 2.],
])
result = sort_points(points)
np.testing.assert_array_almost_equal(result, points)
def test_sort_points_with_zero(self):
points = np.array([
[1., 0.],
[0., 0.]
])
expected = np.array([
[0., 0.],
[1., 0.]
])
result = sort_points(points)
np.testing.assert_array_almost_equal(result, expected) | mit | -2,531,851,163,309,349,000 | 21.789474 | 62 | 0.534669 | false | 3.698006 | true | false | false |
ilostthegiraffe/munin | FreeNAS/HDD-Temp/get_sata_smart.py | 1 | 6396 | #!/usr/bin/env python
#
# Script to get SMART DATA from a FreeBSD based system (ie FreeNAS). Write to csv. Can be called from munin plugin SSH__FreeNAS_HDDTemp
#
'''Import SATA SMART DATA'''
#import handy commands
import os
import sys
import re
import time
import datetime
import csv
smart_device_table=[]
smart_devices = []
current_unix_time = int(time.time())
#just to keep it quiet in testing
def fnGetSmartData():
smart_device_info=[]
#get list of devices in system
smart_list_devices_raw=os.popen("smartctl --scan").read().split('\n')
#print smart_list_devices_raw
#
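    # a scan line typically looks like "/dev/ada0 -d atacam # /dev/ada0, ATA device";
    # keep the device node (field 0) plus a token from the trailing description (field 5)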
for index, line in enumerate(smart_list_devices_raw):
line=line.split(' ')
if index <len(smart_list_devices_raw)-1:
#print "index:", index, "line", line[0],line[5]
append_data = line[0],line[5]
smart_devices.append(append_data)
#get data for each device detected
#
#SMART 5 Reallocated_Sector_Count.
#SMART 187 Reported_Uncorrectable_Errors.
#SMART 188 Command_Timeout.
#SMART 197 Current_Pending_Sector_Count.
#SMART 198 Offline_Uncorrectable.
#get name, serial number etc
for slashdevid in smart_devices:
device_model = "-1"
device_serial = "-2"
device_health = "-3"
device_firmware_version = "-4"
device_capacity = "-5"
device_hrs = "-6"
device_tempC = "-7"
device_sscount = "-8"
device_5reallocate = "-05"
device_198OfflineUncorrectable = "-198"
device_187ReportedUncorrectableErrors = "-187"
device_188CommandTimeout = "-188"
device_197CurrentPendingSector = "-197"
#print "slash", slashdevid
#print "slash0", slashdevid[0]
slashid=slashdevid[0]
    #add in slashid rather than ada1 to make this work!
smart_device_data=os.popen("smartctl -a " + slashdevid[0] ).read().split('\n')
#print "raw", smart_device_data
#print "rawline:", smart_device_name
#scan through smart_device_data for name field
for index, item in enumerate(smart_device_data):
#print 'raw item', item
if "Device Model" in item:
device_model = item[18:]
#print "device_model", device_model
if "Firmware Version:" in item:
device_firmware_version = item[17:]
#print "device firmware:", device_firmware_version
if "Serial" in item:
device_serial = item[17:]
#print "device serial:", device_serial
if "SMART overall-health self-assessment" in item:
device_health = item.split (":")
device_health = device_health[1]
#print "Smart health", device_health
if "User Capacity" in item:
device_capacity = item.split("[")
device_capacity = device_capacity[1].replace("]", "")
#print "device_capacity", device_capacity
if "Power_On_Hours" in item:
device_hrs = item.split(" ")
device_hrs = device_hrs[43:][0]
#print "Power on hrs", device_hrs
if "4 Start_Stop_Count" in item:
device_sscount = item.split(" ")
device_sscount = device_sscount[-1]
#print "Start_stop_Count", device_sscount
            #THE FOLLOWING ARE KEY INDICATORS OF FAILURE (or are recorded because they can be)
#https://www.backblaze.com/blog/hard-drive-smart-stats/
#SMART 5 - Reallocated sector count
if "5 Reallocate" in item:
device_5reallocate = item.split(" ")
device_5reallocate = device_5reallocate[-1]
#print "Reallocated", device_5reallocate
#SMART 187 Reported Uncorrectable Errors
if item.startswith("187 Reported"):
device_187ReportedUncorrectableErrors = item.split(" ")
device_187ReportedUncorrectableErrors = device_187ReportedUncorrectableErrors[-1]
#print "Reported Uncorrectable errors (187):", device_187ReportedUncorrectableErrors
#SMART 188 Command Timeout
if item.startswith("188 Command"):
device_188CommandTimeout = item.split(" ")
device_188CommandTimeout = device_188CommandTimeout[-1]
#print "Command Timeout (188):", device_188CommandTimeout
#SMART 197 Current Pending Sector Count
if item.startswith("197 Current_Pending_Sector"):
device_197CurrentPendingSector = item.split(" ")
device_197CurrentPendingSector = device_197CurrentPendingSector[-1]
#print "Current Pending Sector (197):", device_197CurrentPendingSector
if "Temperature_Celsius" in item:
device_tempC = item.split("(")
device_tempC = device_tempC[0]
device_tempC = device_tempC.split(" ")
device_tempC = device_tempC[-2]
#print "Temperature_Celsius", device_tempC
if "198 Offline_Uncorrectable" in item:
device_198OfflineUncorrectable = item.split(" ")
device_198OfflineUncorrectable = device_198OfflineUncorrectable[-1]
#print "Offline_Uncorrectable (198)", device_198OfflineUncorrectable
# Need to think about device statistics - ie GP04.
# - Device bytes written (logical blocks written/read to TB)
# - Device io commands completed, Lifetime, per hr average.
append_data = slashdevid[0],device_model, device_serial, device_health, device_firmware_version, device_capacity, device_hrs, device_tempC, device_sscount, device_5reallocate, device_187ReportedUncorrectableErrors,device_188CommandTimeout,device_198OfflineUncorrectable,
smart_device_info.append(append_data)
return smart_device_info
def fnExportToCSV(smart_device_table,filename):
#export pool data
with open (filename, 'w') as csvfile:
writeout = csv.writer(csvfile, quoting=csv.QUOTE_NONE)
for line in smart_device_table:
writeout.writerow(line)
#Run the bits we need:
smart_device_table = fnGetSmartData()
fnExportToCSV(smart_device_table,"/root/temp/monitoring/smartsatadata.txt")
| gpl-3.0 | -3,456,092,692,468,660,000 | 35.135593 | 278 | 0.610381 | false | 3.881068 | false | false | false |
robertmuth/PyZwaver | example_tool.py | 1 | 7588 | #!/usr/bin/python3
# Copyright 2016 Robert Muth <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
Example command line tool for pairing/unpairing
"""
import argparse
# python import
import logging
import sys
import time
from typing import Tuple
from pyzwaver import zwave as z
from pyzwaver.command import StringifyCommand
from pyzwaver.command_translator import CommandTranslator
from pyzwaver.controller import Controller
from pyzwaver.driver import Driver, MakeSerialDevice
from pyzwaver.node import Nodeset
from pyzwaver.zmessage import ControllerPriority
XMIT_OPTIONS_NO_ROUTE = (z.TRANSMIT_OPTION_ACK |
z.TRANSMIT_OPTION_EXPLORE)
XMIT_OPTIONS = (z.TRANSMIT_OPTION_ACK |
z.TRANSMIT_OPTION_AUTO_ROUTE |
z.TRANSMIT_OPTION_EXPLORE)
XMIT_OPTIONS_SECURE = (z.TRANSMIT_OPTION_ACK |
z.TRANSMIT_OPTION_AUTO_ROUTE)
class TestListener(object):
"""
Demonstrates how to hook into the stream of messages
sent to the controller from other nodes
"""
def __init__(self):
pass
def put(self, n, ts, key, values):
name = "@NONE@"
if key[0] is not None:
name = StringifyCommand(key)
logging.warning("RECEIVED [%d]: %s - %s", n, name, values)
class NodeUpdateListener(object):
def put(self, n, _ts, key, values):
print("RECEIVED ", n, key, values)
def ControllerEventCallback(action, event, node):
print(action, event, node)
def InitController(args, update_routing=False) -> Tuple[Driver, Controller]:
logging.info("opening serial: [%s]", args.serial_port)
device = MakeSerialDevice(args.serial_port)
driver = Driver(device)
controller = Controller(driver, pairing_timeout_secs=args.pairing_timeout_sec)
controller.Initialize()
controller.WaitUntilInitialized()
if update_routing:
controller.UpdateRoutingInfo()
driver.WaitUntilAllPreviousMessagesHaveBeenHandled()
print(controller.StringBasic())
if update_routing:
print(controller.StringRoutes())
# print(controller.props.StringApis())
return driver, controller
def cmd_neighbor_update(args):
driver, controller = InitController(args)
for n in controller.nodes:
if n == controller.GetNodeId(): continue
if n in controller.failed_nodes: continue
controller.NeighborUpdate(n, ControllerEventCallback)
driver.WaitUntilAllPreviousMessagesHaveBeenHandled()
driver.Terminate()
def cmd_pair(args):
driver, controller = InitController(args)
controller.StopAddNodeToNetwork(ControllerEventCallback)
controller.AddNodeToNetwork(ControllerEventCallback)
controller.StopAddNodeToNetwork(ControllerEventCallback)
driver.Terminate()
def cmd_secure_pair(args):
# experimental - make sure you enable security in node.py
driver, controller = InitController(args)
translator = CommandTranslator(driver)
translator.AddListener(TestListener())
nodeset = Nodeset(translator, controller.GetNodeId())
controller.StopAddNodeToNetwork(ControllerEventCallback)
time.sleep(1.0)
controller.AddNodeToNetwork(ControllerEventCallback)
controller.StopAddNodeToNetwork(ControllerEventCallback)
time.sleep(5.0)
# driver.Terminate()
def cmd_unpair(args):
driver, controller = InitController(args)
controller.StopRemoveNodeFromNetwork(None)
time.sleep(1.0)
controller.RemoveNodeFromNetwork(ControllerEventCallback)
controller.StopRemoveNodeFromNetwork(None)
time.sleep(1.0)
driver.Terminate()
def cmd_hard_reset(args):
driver, controller = InitController(args)
controller.SetDefault()
driver.Terminate()
def cmd_controller_details(args):
driver, controller = InitController(args, True)
driver.Terminate()
def cmd_set_basic_multi(args):
driver, controller = InitController(args, True)
translator = CommandTranslator(driver)
logging.info("sending command to %s", args.node)
translator.SendMultiCommand(args.node,
z.Basic_Set,
{"level": args.level},
ControllerPriority(),
XMIT_OPTIONS
)
driver.Terminate()
def cmd_get_basic(args):
driver, controller = InitController(args, True)
translator = CommandTranslator(driver)
translator.AddListener(NodeUpdateListener())
for n in args.node:
translator.SendCommand(n,
z.Basic_Get,
{},
ControllerPriority(),
XMIT_OPTIONS)
time.sleep(2)
driver.Terminate()
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--verbosity", type=int, default=40,
help="increase output verbosity")
parser.add_argument("--pairing_timeout_sec", type=int, default=30,
help="(un)pairing timeout")
parser.add_argument("--serial_port", type=str, default="/dev/ttyUSB0",
help='The USB serial device representing the Z-Wave controller stick. '
                             'Common settings are: /dev/ttyUSB0, /dev/ttyACM0')
subparsers = parser.add_subparsers(help="sub-commands")
s = subparsers.add_parser("pair", help="Pair a Z-wave node")
s.set_defaults(func=cmd_pair)
s = subparsers.add_parser("secure_pair", help="Securely pair a Z-wave node")
s.set_defaults(func=cmd_secure_pair)
s = subparsers.add_parser("unpair", help="Unpair a Z-wave node")
s.set_defaults(func=cmd_unpair)
s = subparsers.add_parser("hard_reset", help="Factory reset Z-wave controller")
s.set_defaults(func=cmd_hard_reset)
s = subparsers.add_parser("controller_details", help="Show Z-wave controller details")
s.set_defaults(func=cmd_controller_details)
s = subparsers.add_parser("set_basic_multi", help="Send mutlicast BasicSet command")
s.set_defaults(func=cmd_set_basic_multi)
s.add_argument("--level", type=int, default=99, help="level to set")
s.add_argument('--node', type=int, nargs='+', help="dest node(s) - separate multiple nodes with spaces")
s = subparsers.add_parser("get_basic", help="Run BasicGet command")
s.set_defaults(func=cmd_get_basic)
s.add_argument('--node', type=int, nargs='+', help="dest node(s) - separate multiple nodes with spaces")
s = subparsers.add_parser("neighbor_update", help="Update Node Neighborhoods")
s.set_defaults(func=cmd_neighbor_update)
args = parser.parse_args()
logging.basicConfig(level=args.verbosity)
if "func" in args:
print(args)
args.func(args)
else:
# we should not reach here but there seems to be a bug
parser.error("No command specified - try -h option")
return 0
if __name__ == "__main__":
sys.exit(main())
| gpl-3.0 | -4,392,959,240,617,052,700 | 32.427313 | 108 | 0.675409 | false | 3.899281 | false | false | false |
OliverCole/ZeroNet | plugins/Sidebar/ZipStream.py | 1 | 1619 | import cStringIO as StringIO
import os
import zipfile
class ZipStream(file):
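    """Stream a ZIP archive of dir_path without building it all in memory.
    zipfile writes into this object (via write/tell/seek), while read() adds
    files on demand and returns whatever has accumulated in the internal buffer.
    """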
def __init__(self, dir_path):
self.dir_path = dir_path
self.pos = 0
self.buff_pos = 0
self.zf = zipfile.ZipFile(self, 'w', zipfile.ZIP_DEFLATED, allowZip64=True)
self.buff = StringIO.StringIO()
self.file_list = self.getFileList()
def getFileList(self):
for root, dirs, files in os.walk(self.dir_path):
for file in files:
file_path = root + "/" + file
relative_path = os.path.join(os.path.relpath(root, self.dir_path), file)
yield file_path, relative_path
self.zf.close()
def read(self, size=60 * 1024):
for file_path, relative_path in self.file_list:
self.zf.write(file_path, relative_path)
if self.buff.tell() >= size:
break
self.buff.seek(0)
back = self.buff.read()
self.buff.truncate(0)
self.buff.seek(0)
self.buff_pos += len(back)
return back
def write(self, data):
self.pos += len(data)
self.buff.write(data)
def tell(self):
return self.pos
def seek(self, pos, whence=0):
if pos >= self.buff_pos:
self.buff.seek(pos - self.buff_pos, whence)
self.pos = pos
def flush(self):
pass
if __name__ == "__main__":
zs = ZipStream(".")
out = open("out.zip", "wb")
while 1:
data = zs.read()
print("Write %s" % len(data))
if not data:
break
out.write(data)
out.close()
| gpl-2.0 | 3,281,613,208,173,759,000 | 26.440678 | 88 | 0.537369 | false | 3.558242 | false | false | false |
dopplershift/siphon | examples/ncss/NCSS_Timeseries_Examples.py | 1 | 3156 | # Copyright (c) 2013-2015 University Corporation for Atmospheric Research/Unidata.
# Distributed under the terms of the MIT License.
# SPDX-License-Identifier: MIT
"""
================
NCSS Time Series
================
Use Siphon to query the NetCDF Subset Service for a timeseries.
"""
from datetime import datetime, timedelta
import matplotlib.pyplot as plt
from netCDF4 import num2date
from siphon.catalog import TDSCatalog
###########################################
# First we construct a TDSCatalog instance pointing to our dataset of interest, in
# this case TDS' "Best" virtual dataset for the GFS global 0.5 degree collection of
# GRIB files. We see this catalog contains a single dataset.
best_gfs = TDSCatalog('http://thredds.ucar.edu/thredds/catalog/grib/NCEP/GFS/'
'Global_0p5deg/catalog.xml?dataset=grib/NCEP/GFS/Global_0p5deg/Best')
print(best_gfs.datasets)
###########################################
# We pull out this dataset and get the NCSS access point
best_ds = best_gfs.datasets[0]
ncss = best_ds.subset()
###########################################
# We can then use the `ncss` object to create a new query object, which
# facilitates asking for data from the server.
query = ncss.query()
###########################################
# We construct a query asking for data corresponding to latitude 40N and longitude 105W,
# for the next 7 days. We also ask for NetCDF version 4 data, for the variable
# 'Temperature_isobaric', at the vertical level of 100000 Pa (approximately surface).
# This request will return all times in the range for a single point. Note the string
# representation of the query is a properly encoded query string.
now = datetime.utcnow()
query.lonlat_point(-105, 40).vertical_level(100000).time_range(now, now + timedelta(days=7))
query.variables('Temperature_isobaric').accept('netcdf')
###########################################
# We now request data from the server using this query. The `NCSS` class handles parsing
# this NetCDF data (using the `netCDF4` module). If we print out the variable names, we
# see our requested variables, as well as a few others (more metadata information)
data = ncss.get_data(query)
list(data.variables.keys())
###########################################
# We'll pull out the temperature and time variables.
temp = data.variables['Temperature_isobaric']
time = data.variables['time']
###########################################
# The time values are in hours relative to the start of the entire model collection.
# Fortunately, the `netCDF4` module has a helper function to convert these numbers into
# Python `datetime` objects. We can see the first 5 element output by the function look
# reasonable.
time_vals = num2date(time[:].squeeze(), time.units)
print(time_vals[:5])
###########################################
# Now we can plot these up using matplotlib, which has ready-made support for `datetime`
# objects.
fig, ax = plt.subplots(1, 1, figsize=(9, 8))
ax.plot(time_vals, temp[:].squeeze(), 'r', linewidth=2)
ax.set_ylabel('{} ({})'.format(temp.standard_name, temp.units))
ax.set_xlabel('Forecast Time (UTC)')
ax.grid(True)
| mit | -1,750,322,844,628,799,500 | 42.232877 | 92 | 0.660963 | false | 3.816203 | false | false | false |
marioluan/abstract-data-types | python/src/trees/tree.py | 1 | 1116 | class Tree():
""" implementation of a tree """
def __init__(self, cargo, left=None, right=None):
""" create a tree """
# can be any type
self.cargo = cargo
# should be also tree nodes
self.left = left
self.right = right
def __str__(self):
""" representation of a tree: tree.cargo """
return str(self.cargo)
def getCargo(self):
""" return the cargo of the tree """
return self.cargo
def getLeft(self):
""" return the left node of the tree """
return self.left
def getRight(self):
""" return the right node of the tree """
return self.right
def setLeft(self, left):
""" set the left node of the tree """
self.left = left
def setRight(self, right):
""" set the right node of the tree """
self.right = right
def setCargo(self, cargo):
""" set the cargo of the tree """
self.cargo = cargo
@classmethod
def total(self, tree):
""" recursively sums the total cargos of a tree """
if tree == None: return 0
return tree.cargo + \
Tree.total(tree.left) + \
Tree.total(tree.right) | mit | -6,952,235,343,456,820,000 | 23.282609 | 55 | 0.594982 | false | 3.683168 | false | false | false |
davidzyx/PythonNotes | Part I/ch03_notes.py | 1 | 1042 | # ch03_notes.py
# Chapter 3 notes taken from Automate the Boring Stuff with Python (2015).pdf
# Created by Davidzz on 7/20/2016
# Functions:
# no parameters:
# define:
def hello():
print('Howdy!')
print('Howdy!!!')
print('Hello there.')
# call:
hello()
# with parameters:
# define:
def hello(name):
print('Hello ' + name + '!')
# call:
hello('David')
hello('Belinda')
# returns in functions
import random
def getRandomIntBetweenZeroAnd(num):
gen = random.randint(0, num)
return gen
for i in range(5):
string = str(getRandomIntBetweenZeroAnd(9))
print(string)
# null == None (N capitalized)
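# e.g. a function without an explicit return statement gives back None:
#   result = hello('David');  result is None  ->  True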
# printing to console
print('Pyt', end='')
print('hon')
print('Hello', 'world!')
print('a', 'a', 'a', 'a', sep = 'A')
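# -> aAaAaAa   (the sep string is printed between the arguments instead of a space)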
# global Statement
def a():
    global char
    char = 'aaaaa'
char = 'bbbbb'
a()
print(char)  # prints 'aaaaa' because a() rebinds the global name
# Exceptions
def spam(divideBy):
try:
return 42 / divideBy
except ZeroDivisionError:
print('Error: Invalid argument.')
print(spam(2))
print(spam(12))
print(spam(0))
print(spam(1))
| gpl-3.0 | -4,757,172,229,270,486,000 | 16.965517 | 77 | 0.642994 | false | 3.037901 | false | false | false |
projecthamster/experiments | hamster_sun.py | 1 | 5446 | #!/usr/bin/env python
# - coding: utf-8 -
# Copyright (C) 2010 Toms Bauģis <toms.baugis at gmail.com>
"""Base template"""
from gi.repository import Gtk as gtk
from lib import graphics
import math
import hamster.client
import datetime as dt
from collections import defaultdict
import itertools
class Chart(graphics.Sprite):
def __init__(self):
graphics.Sprite.__init__(self, interactive = False)
def do_stuff(self, years, categories):
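        # one day of the year maps to an angular step of 360/365 degrees
        # (converted to radians below), so a year of facts wraps once around the circle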
step = (360.0 / 365) * math.pi / 180.0
g = self.graphics
g.set_color("#999")
g.set_line_style(width = 1)
# em
colors = ["#009966", "#33cc00", "#9933cc", "#aaaaaa", "#ff9999", "#99cccc"]
colors.reverse()
# em contrast
colors = ["#00a05f", "#1ee100", "#a0a000", "#ffa000", "#a01ee1", "#a0a0a0", "#ffa0a0", "#a0e1e1"]
colors.reverse()
# tango light
colors = ["#fce94f", "#89e034", "#fcaf3e", "#729fcf", "#ad7fa8", "#e9b96e", "#ef2929", "#eeeeec", "#888a85"]
# tango medium
colors =["#edd400", "#73d216", "#f57900", "#3465a4", "#75507b", "#c17d11", "#cc0000", "#d3d7cf", "#555753"]
#colors = colors[1:]
#colors = ("#ff0000", "#00ff00", "#0000ff", "#aaa000")
hour_step = 15
spacing = 20
current_pixel = 1220
g.set_line_style(width = 1)
g.circle(0, 0, current_pixel - 2)
g.stroke("#fff", 0.2)
g.set_line_style(width=1)
for year in sorted(years.keys()):
for category in categories:
ring_height = hour_step * 3
for day, hours in years[year][category]:
year_day = day.isocalendar()[1] * 7 + day.weekday()
angle = year_day * step - math.pi / 2
distance = current_pixel
height = ring_height
#bar per category
g.move_to(math.cos(angle) * distance + 0,
math.sin(angle) * distance + 0)
g.line_to(math.cos(angle) * (distance + height),
math.sin(angle) * (distance + height))
g.line_to(math.cos(angle+step) * (distance + height),
math.sin(angle+step) * (distance + height))
g.line_to(math.cos(angle+step) * distance,
math.sin(angle+step) * distance)
g.close_path()
if years[year][category]:
current_pixel += ring_height + 7 + spacing
color = "#fff" #colors[categories.index(category)]
g.set_color(color)
g.fill()
current_pixel += spacing * 3
g.set_line_style(width = 4)
g.circle(0, 0, current_pixel - spacing * 2)
g.stroke("#fff", 0.5)
current_pixel += 3
class Scene(graphics.Scene):
def __init__(self):
graphics.Scene.__init__(self)
storage = hamster.client.Storage()
self.facts = storage.get_facts(dt.date(2009,1,1), dt.date(2009,12,31))
print len(self.facts)
self.day_counts = {}
categories = defaultdict(int)
self.years = {}
for year, facts in itertools.groupby(sorted(self.facts, key=lambda fact:fact.date), lambda fact:fact.date.year):
self.years[year] = defaultdict(list)
for category, category_facts in itertools.groupby(sorted(facts, key=lambda fact:fact.category), lambda fact:fact.category):
for day, day_facts in itertools.groupby(sorted(category_facts, key=lambda fact:fact.date), lambda fact:fact.date):
delta = dt.timedelta()
for fact in day_facts:
delta += fact.delta
delta = delta.seconds / 60 / 60 + delta.days * 24
self.years[year][category].append((day, delta))
categories[category] += 1
self.categories = categories.keys()
self.chart = Chart()
self.add_child(self.chart)
self.chart.do_stuff(self.years, self.categories)
self.connect("on-enter-frame", self.on_enter_frame)
self.connect("on-mouse-move", self.on_mouse_move)
#self.animate(self.chart, rotation=math.pi * 2, duration = 3)
def on_mouse_move(self, scene, event):
x, y = self.width / 2, self.height / 2
max_distance = math.sqrt((self.width / 2) ** 2 + (self.height / 2) ** 2)
distance = math.sqrt((x - event.x) ** 2 + (y - event.y) ** 2)
#self.chart.scale_x = 2 - 2 * (distance / float(max_distance))
#self.chart.scale_y = 2 - 2 * (distance / float(max_distance))
#self.redraw()
def on_enter_frame(self, scene, context):
g = graphics.Graphics(context)
g.fill_area(0, 0, self.width, self.height, "#20b6de")
self.chart.x = self.width / 2
self.chart.y = self.height / 2
self.chart.scale_x = 0.18
self.chart.scale_y = 0.18
class BasicWindow:
def __init__(self):
window = gtk.Window()
window.set_size_request(700, 600)
window.connect("delete_event", lambda *args: gtk.main_quit())
window.add(Scene())
window.show_all()
example = BasicWindow()
import signal
signal.signal(signal.SIGINT, signal.SIG_DFL) # gtk3 screws up ctrl+c
gtk.main()
| mit | -4,718,483,545,852,678,000 | 28.592391 | 135 | 0.537925 | false | 3.563482 | false | false | false |
robisen1/AndroidWifiCracker | services/autocrack/aircrack.py | 1 | 19705 | #!/usr/bin/python
import os, signal, sys
import commands
import subprocess
import random
import threading
import socket
from ConfigParser import ConfigParser
import time
#LOG_PATH = "/media/card/caps"
#WIRELESS_DEVICE = "wlan0"
#MONITOR_DEVICE = "mon0"
MONITOR_MODE_CMD = "airmon-ng start wlan0"
def get_ip_address(ifname):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
s.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', ifname[:15])
)[20:24])
def get_hw_address(ifname):
import fcntl, struct
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
info = fcntl.ioctl(s.fileno(), 0x8927, struct.pack('256s', ifname[:15]))
hwaddr = []
for char in info[18:24]:
hdigit = hex(ord(char))[2:]
if len(hdigit) == 1:
hdigit = "0%s" % hdigit
hwaddr.append(hdigit)
return hwaddr
def getCellInfo():
rawcells = commands.getoutput("iwlist %s scanning" % WIRELESS_DEVICE).split("Cell")
cells = {}
for celld in rawcells:
cell = {}
cell_data = celld.split('\n')
for field in cell_data:
field = field.strip()
vals = field.split(':')
#print vals
if "Address" in vals[0]:
if len(vals) < 7:
print vals
else:
cell["address"] = "%s:%s:%s:%s:%s:%s" % (vals[1], vals[2], vals[3], vals[4], vals[5], vals[6])
elif "Channel" in vals[0]:
cell["channel"] = vals[1]
elif "ESSID" in vals[0]:
cell["essid"] = vals[1].replace('"', '').upper()
elif "Quality" in vals[0]:
cell["quality"] = vals[0].split('=')[1]
if cell.has_key("essid"):
cells[cell["essid"]] = cell
return cells
def getCellByAddress(bssid, cells=None):
if cells is None:
cells = getCellInfo()
for cell in cells.values():
if cell["address"].lower() == bssid.lower():
return cell
return None
def isMonitorMode(monitor_device):
out = commands.getoutput("iwconfig %s" % monitor_device.name).lower()
if "mode:monitor" in out:
return True
return False
def setMonitorMode(wireless_device, monitor_device):
if isMonitorMode(monitor_device):
return True
print "setting device mode to 'Monitor'..."
commands.getoutput(MONITOR_MODE_CMD)
if isMonitorMode(monitor_device):
print "devine now in monitor mode"
return True
print "failed to get device into monitor mode"
return False
FAKE_AUTH_CMD = "aireplay-ng -1 0 -a %s -h 00:26:F2:B7:71:C2 mon0"
CRACK_CMD = "aircrack-ng /caps/%s.*.cap"
class NetworkInterface(object):
"""docstring for NetworkInterface"""
def __init__(self, name):
self.name = name
self.ip = None
self.mac = None
def getMac(self):
if self.mac is None:
self.mac = ":".join(get_hw_address(self.name))
return self.mac
def getDynamicIP(self, ip):
pass
def setIP(self, ip):
pass
def changeMac(self, mac):
# take interface down
# change interface mac
# bring interface back up
pass
def setKey(self, key):
pass
def setEssid(self, essid):
out = commands.getoutput("iwconfig %s essid %s" % (self.name, essid))
if len(out) < 2:
return True
return False
def setChannel(self, channel):
out = commands.getoutput("iwconfig %s channel %s" % (self.name, channel))
if len(out) < 2:
return True
return False
class AccessPoint(object):
"""represents a wireless network"""
def __init__(self, mac, log_path):
self.mac = mac
self.vulnerability = 0
self.cracked = False
self.crack_attempts = 0
self.first_seen = None
self.last_seen = None
self.age = 5000
self.channel = 0
self.speed = 0
self.privacy = "n/a"
self.cipher = None
self.auth = None
self.power = 0
self.beacons = 0
self.ivs = 0
self.lan = 0
self.ip = 0
self.id_length = 0
self.essid = "n/a"
self.key = None
self.stations = {}
def update(self, fields):
# BSSID, First time seen, Last time seen, channel, Speed, Privacy, Cipher, Authentication, Power, # beacons, # IV, LAN IP, ID-length, ESSID, Key
if self.first_seen is None:
self.first_seen = time.mktime(time.strptime(fields[1].strip(), "%Y-%m-%d %H:%M:%S"))
try:
self.last_seen = time.mktime(time.strptime(fields[2].strip(), "%Y-%m-%d %H:%M:%S"))
except:
self.last_seen = 0
self.age = time.time() - self.last_seen
self.channel = int(fields[3].strip())
self.speed = int(fields[4].strip())
self.privacy = fields[5].strip()
self.cipher = fields[6].strip()
self.auth = fields[7].strip()
self.power = int(fields[8].strip())
self.beacons = int(fields[9].strip())
self.ivs = int(fields[10].strip())
self.ip = fields[11].strip()
self.id_length = fields[12].strip()
self.essid = fields[13].strip()
if len(self.essid) == 0:
self.essid = "unknown"
#if self.key is None or len(self.key) < 2:
# self.key = fields[14].strip()
def asJSON(self):
d = {}
for k, i in self.__dict__.items():
if type(i) in [str, int, float]:
d[k] = i
d["stations"] = {}
for s in self.stations.values():
d["stations"][s.mac] = s.asJSON()
return d
def __str__(self):
return "ap('%s'): channel: '%s' privacy: '%s' cipher: '%s' auth: %s power: '%s' " % (self.essid,
self.channel, self.privacy, self.cipher, self.auth, self.power)
class Station(object):
"""docstring for Station"""
def __init__(self, mac):
self.mac = mac
self.first_seen = None
self.last_seen = None
self.power = 0
self.packets = 0
self.ap_mac = None
def update(self, fields):
self.first_seen = fields[1]
self.last_seen = fields[2]
self.power = fields[3]
self.packets = fields[4]
self.ap_mac = fields[5]
def asJSON(self):
d = {}
for k, i in self.__dict__.items():
if type(i) in [str, int, float]:
d[k] = i
return d
class AttackProperties(object):
"""info about the current attack"""
def __init__(self, monitor_device, inject_device, log_path):
self.monitor_device = monitor_device
self.inject_device = inject_device
self.log_path = log_path
self.log_prefix = log_path
self.aps = {}
self.historic_aps = {}
self.target = None
self.history_file = os.path.join(log_path, "crack-history.ini")
self.loadHistory()
def hasAP(self, ap_mac):
return self.aps.has_key(ap_mac) or self.historic_aps.has_key(ap_mac)
def getAP(self, ap_mac):
if self.aps.has_key(ap_mac):
return self.aps[ap_mac]
elif self.historic_aps.has_key(ap_mac):
return self.historic_aps[ap_mac]
return None
def getActiveAP(self, ap_mac):
if self.aps.has_key(ap_mac):
return self.aps[ap_mac]
if self.historic_aps.has_key(ap_mac):
ap = self.historic_aps[ap_mac]
self.aps[ap_mac] = ap
return ap
return None
def addActiveAP(self, ap):
if not self.aps.has_key(ap.mac):
self.aps[ap.mac] = ap
def clearActive(self):
self.aps.clear()
def loadHistory(self):
if os.path.exists(self.history_file):
config = ConfigParser()
config.read(self.history_file)
for section in config.sections():
ap_mac = section
if ap_mac != None:
self.historic_aps[ap_mac] = AccessPoint(ap_mac, self.log_path)
self.historic_aps[ap_mac].first_seen = config.get(section, "first_seen", None)
self.historic_aps[ap_mac].last_seen = config.get(section, "last_seen", None)
self.historic_aps[ap_mac].essid = config.get(section, "essid", None)
if config.has_option(section, "key"):
self.historic_aps[ap_mac].key = config.get(section, "key", None)
def saveHistory(self):
config = ConfigParser()
config.read(self.history_file)
for ap_mac in self.aps:
if not config.has_section(ap_mac):
config.add_section(ap_mac)
ap = self.aps[ap_mac]
config.set(ap_mac, "first_seen", ap.first_seen)
config.set(ap_mac, "last_seen", ap.last_seen)
config.set(ap_mac, "essid", ap.essid)
if ap.key != None:
config.set(ap_mac, "key", ap.key)
with open(self.history_file, 'w') as configfile:
config.write(configfile)
def setTarget(self, target):
self.target = target
if self.target != None:
self.log_prefix = os.path.join(self.log_path, target.essid.replace(' ', '_'))
else:
self.log_prefix = self.log_path
def parseMonitorLog(log_file, attack_props):
"""update our info from the log files"""
if not os.path.exists(log_file):
return
report = open(log_file, 'r')
lines = report.readlines()
#print lines
report.close()
readingStations = False
readingAps = False
for line in lines:
line = line.strip()
#print line
if not readingStations and not readingAps:
if line.startswith("BSSID"):
readingAps = True
continue
elif line.startswith("Station"):
readingStations = True
continue
elif readingAps:
if len(line) < 4:
readingAps =False
else:
fields = line.split(',')
#print fields
ap_mac = fields[0].strip()
if attack_props.hasAP(ap_mac):
ap = attack_props.getActiveAP(ap_mac)
else:
ap = AccessPoint(ap_mac, attack_props.log_path)
attack_props.addActiveAP(ap)
ap.update(fields)
elif readingStations and len(line) > 4:
fields = line.split(',')
station_mac = fields[0].strip()
ap_mac = fields[5].strip()
if attack_props.hasAP(ap_mac):
ap = attack_props.getAP(ap_mac)
if ap.stations.has_key(station_mac):
station = ap.stations[station_mac]
else:
station = Station(station_mac)
ap.stations[station_mac] = station
station.ap = station
station.update(fields)
class AirMonitor(object):
"""Monitors channels 1-12 for wireless networks"""
EXPLORE_COMMAND = "airodump-ng -o csv --ivs --write %s %s"
def __init__(self, attack_props, auto_start=False):
self.attack_props = attack_props
self.file_prefix = os.path.join(attack_props.log_path, "monitor")
self.monitor_log = self.file_prefix + "-01.csv"
self.process = None
self.aps = attack_props.aps
if auto_start:
self.start()
def isRunning(self):
try:
res = self.process != None and self.process.poll() is None
return res
except:
pass
return False
def start(self):
if self.process is None:
commands.getoutput("rm %s*" % self.file_prefix)
cmd = AirMonitor.EXPLORE_COMMAND % (self.file_prefix, self.attack_props.monitor_device.name)
self.FNULL = open('/dev/null', 'w')
self.process = subprocess.Popen(cmd, shell=True, stdout=self.FNULL, stderr=self.FNULL)
else:
raise Exception("AirMonitor already running")
def stop(self):
if self.process != None:
try:
self.process.kill()
commands.getoutput("kill -9 %s" % self.process.pid)
commands.getoutput("killall airodump-ng")
except:
pass
self.process = None
self.FNULL.close()
def update(self):
"""
self.attack_props.log_path + "-01.txt"
"""
parseMonitorLog(self.monitor_log, self.attack_props)
class AirCapture(AirMonitor):
"""Captures IVs into cap files for cracking WEPs"""
CAPTURE_COMMAND = "airodump-ng --channel %s --bssid %s --write %s %s"
def __init__(self, attack_props):
AirMonitor.__init__(self, attack_props, False)
self.file_prefix = attack_props.log_prefix
self.monitor_log = self.file_prefix + "-01.csv"
self.start()
def start(self):
commands.getoutput("rm %s*" % self.file_prefix)
cmd = AirCapture.CAPTURE_COMMAND % (self.attack_props.target.channel, self.attack_props.target.mac,
self.file_prefix, self.attack_props.monitor_device.name)
self.FNULL = open('/dev/null', 'w')
self.process = subprocess.Popen(cmd, shell=True, stdout=self.FNULL, stderr=self.FNULL)
class AirPlay(object):
"""Ability to inject packets into the wireless network we are attacking"""
ARP_INJECTION_CMD = "aireplay-ng -3 -b %s -h %s %s > %s-arp_inject.log"
DEAUTHENTICATE_CMD = "aireplay-ng --deauth %d -a %s -h %s"
FAKE_AUTHENTICATE_CMD = """aireplay-ng --fakeauth %d -e "%s" -a %s -h %s"""
def __init__(self, attack_props):
self.attack_props = attack_props
self.process = None
self.attack_props.monitor_device.setChannel(self.attack_props.target.channel)
def deauthenticate(self, count=1, target_mac=None):
"""Attempts to deauthenticate all stations or a target station"""
cmd = AirPlay.DEAUTHENTICATE_CMD % (count, self.attack_props.target.mac, self.attack_props.monitor_device.getMac())
if target_mac != None:
cmd += " -c %s" % target_mac
cmd += " %s" % self.attack_props.inject_device.name
lines = commands.getoutput(cmd).split('\n')
for line in lines:
if len(line) > 2:
if not "Waiting for beacon frame" in line: # spam
if not "No source MAC" in line: # spam
if not "Sending" in line: # spam
print "deauthentication erros: "
print "\n".join(lines)
return False
return True
def fakeAuthenticate(self, auth_delay=0, keep_alive_seconds=None, prga_file=None):
"""Fake authentication with AP"""
# setup the wireless card to be on the correct channel
# print "\tsetting channel: %d" % self.attack_props.target.channel
if not self.attack_props.monitor_device.setChannel(self.attack_props.target.channel):
print "failed to set correct channel for authentication"
return False
cmd = AirPlay.FAKE_AUTHENTICATE_CMD % (auth_delay, self.attack_props.target.essid,
self.attack_props.target.mac, self.attack_props.monitor_device.getMac())
# print cmd
if keep_alive_seconds != None:
cmd += " -q %i" % keep_alive_seconds
if prga_file != None:
cmd += " -y %s" % prga_file
cmd += " %s" % self.attack_props.monitor_device.name
lines = commands.getoutput(cmd).split('\n')
success = False
for line in lines:
if "Association successful" in line:
success = True
if "Authentication successful" in line:
success = True
elif "AP rejects open-system authentication" in line:
success = False
elif "Denied (Code 1) is WPA in use?" in line:
success = False
elif "doesn't match the specified MAC" in line:
success = False
elif "Attack was unsuccessful" in line:
success = False
# if not success:
# print lines
return success
def startArpInjection(self):
cmd = AirPlay.ARP_INJECTION_CMD % (self.attack_props.target.mac, self.attack_props.inject_device.getMac(),
self.attack_props.monitor_device.name, self.attack_props.log_prefix)
self.process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
def isRunning(self):
return self.process != None and self.process.poll() is None
def stop(self):
if self.process != None:
self.process.kill()
commands.getoutput("killall aireplay-ng")
self.process = None
class AirCracker(object):
"""runs a process that attempts to crack the network reading the captured packets"""
def __init__(self, attack_props, auto_start=True):
self.attack_props = attack_props
self.process = None
self.start()
def isRunning(self):
return self.process != None and self.process.poll() is None
def start(self, key_file=None, dictionary=None):
"""
        This command starts a cracker and returns the key if it's able
        to find it. An optional key_file can be used to specify a capture file,
        otherwise all capture files for the target will be used. If a dictionary
        is specified it will try to crack the key using it (WPA2). aircrack-ng
        will run quietly until the key is found (WEP/WPA) or the cross-reference
        with the dictionary fails.
        Dictionary can be a string of several dicts separated by ","
        Like: "dict1.txt,dictpro2.txt,others.txt"
"""
cmd = "aircrack-ng -q -b %s" % self.attack_props.target.mac # -q for quite to only output the key if found
if dictionary != None: # Use dictionary if one is specified
cmd += " -w %s" % dictionary
if key_file is None: # If keyfile is specified us it, else use standard path
cmd += " %s*.cap" % self.attack_props.log_prefix
else:
cmd += " ", Keyfile
self.process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
def stop(self):
if self.process != None:
self.process.kill()
commands.getoutput("killall aircrack-ng")
self.process = None
def checkResults(self):
if self.process is None:
return False
output = self.process.communicate()[0]
for line in output.split("\n"):
if not line == "":
if "KEY FOUND" in line: # Key found, lets call event KeyFound
words = line.split(" ")
#print words
self.attack_props.target.key = words[3]
self.attack_props.saveHistory()
return True
return False
| mit | 6,800,069,612,372,409,000 | 34.892532 | 152 | 0.547323 | false | 3.774904 | true | false | false |
kingdz2008/test | test/downloadAckunMusic.py | 1 | 6077 | #!/bin/env python
# coding=utf8
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_v1_5 as Cipher_pkcs1_v1_5
import base64
import requests
import json
import urllib
import time
import random
import datetime
import hashlib
# Get the Content-Type field from the response headers
def urlOpenGetHeaders(url):
req = urllib.request.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36')
page = urllib.request.urlopen(req)
html = page.getheader('Content-Type')
return html
# Fetch the page source of a url
def urlOpen(url):
req = urllib.request.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36')
if False:
proxies = ['110.73.8.151:8123', '110.73.10.78:8123', '36.249.193.19:8118']
proxy = random.choice(proxies)
proxy_support = urllib.request.ProxyHandler({'http':proxy})
opener = urllib.request.build_opener(proxy_support)
opener.addheaders = [('User-Agent','Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36')]
urllib.request.install_opener(opener)
page = urllib.request.urlopen(url)
else:
page = urllib.request.urlopen(req)
html = page.read()
return html
# Download to a local file given a name, extension and url
def download(title,post,url):
filename = title + "." +post
with open(filename, 'wb') as f:
music = urlOpen(url)
f.write(music)
def getPostStr(pKey, song_id):
rsaKey = RSA.importKey(pKey)
cipher = Cipher_pkcs1_v1_5.new(rsaKey)
encry = cipher.encrypt(song_id)
return base64.b64encode(encry)
# Get the actual url of the song
def getSongRealUrl(songidVal,timeVal,md5Val):
url = 'http://www.aekun.com/api/getMusicbyid/'
r = requests.post(url, {
'songid': songidVal,
't':timeVal,
'sign':md5Val
})
return r.content
# Write the needed data to a local file
def writeStrToFile(writeStr):
print(writeStr)
with open("downurl.txt","a",encoding="UTF-8") as f:
f.write(writeStr)
f.write("\n")
# Get the newest recommended song id
def getMaxSongs():
url = "http://www.aekun.com/new/"
html = urlOpen(url).decode('utf-8')
a = html.find('<tr musicid=') + 13
b = html.find('"',a)
result = int(html[a:b])
return result
# Get the largest song id fetched so far
def getNowSongId(songIdInt):
f = open("downurl.txt","r",encoding="UTF-8")
    lines = f.readlines() # read the whole file
for line in lines:
if line.find('|')!=-1:
line = line.split("|")
line = int(line[0])
if line > songIdInt:
songIdInt = line
return songIdInt
# Main routine for downloading songs
def downloadMusicMain():
    # load the public key (pKey)
f = open('public.pem')
pKey = f.read()
f.close()
songIdInt = 3509719
songIdInt = getNowSongId(songIdInt)
songIdInt = songIdInt + 1
maxSong = getMaxSongs()
print("start from:%s,end with:%s"%(songIdInt,maxSong))
# 3505251 |10 |2015084685 |▌▌Chillout ▌▌Losing Ground Michael FK & Groundfold -----3505251.mp3
while(False):
if songIdInt > maxSong:
break
time.sleep(10)
try:
urlOpen("http://www.aekun.com/song/" + str(songIdInt))
except ConnectionResetError:
print("Error occur")
songId = str(songIdInt).encode('utf-8')
print(songId)
songidVal = getPostStr(pKey, songId)
songidVal = songidVal.decode('utf-8')
t = time.time()
t = int(round(t * 1000))
timeVal = getPostStr(pKey,str(t).encode('utf-8'))
timeVal = timeVal.decode('utf-8')
m2 = hashlib.md5()
src = str(songIdInt) + "|" + str(t)
m2.update(src.encode("utf8"))
t = m2.hexdigest()
md5Val = getPostStr(pKey,str(t).encode('utf-8'))
md5Val = md5Val.decode('utf-8')
try:
print(songidVal)
print(timeVal)
print(md5Val)
ret = getSongRealUrl(songidVal,timeVal,md5Val)
except (ConnectionError , ConnectionResetError):
print("ConnectionError")
time.sleep(3)
continue
ret = ret.decode('utf-8')
#ret = '{"state":"success","message":"ok","action":null,"data":{"url":"http://us.aekun.com/upload/75AAB77BC2D16123F9F2E8B6C68FCB8E.mp3","song_name":"就算遇到挫折、受到嘲笑,也要勇敢的向前跑!","coll":0,"singername":"小哥","singerpic":"https://m4.aekun.com/user_l_5973822_20170513135220.png"}}'
print(ret)
ret = json.loads(ret)
print(ret)
status = ret['state']
if status != 'success':
print(status)
break
downUrl = ret['data']
if isinstance(downUrl,str):
if downUrl.strip() == '':
html = urlOpen("http://www.aekun.com/song/" + str(songIdInt)).decode('utf-8')
songIdInt = songIdInt + 1
continue
elif isinstance(downUrl,dict):
pass
else:
continue
downUrl = ret['data']['url']
if downUrl is None:
continue
if downUrl.strip() == "":
continue
post = downUrl[-3:]
post = post.lower()
if post != 'mp3' and post != 'm4a':
tmp = urlOpenGetHeaders(downUrl)
if tmp.find('mp3') != -1:
post = 'mp3'
songName = ret['data']['song_name']
writeStr = "%-10s|%-50s|%-5s|%s"%(songIdInt,songName,post,downUrl)
writeStrToFile(writeStr)
songIdInt = songIdInt + 1
now = datetime.datetime.now()
now = now.strftime('%Y-%m-%d %H:%M:%S')
writeStrToFile(str(now) + '\t\t\t' + str(maxSong))
if __name__ == '__main__':
downloadMusicMain()
| mit | -3,432,212,792,785,237,500 | 31.049451 | 278 | 0.588548 | false | 2.929684 | true | false | false |
tpudlik/sbf | performance/profile.py | 1 | 2000 | import argparse, cProfile, importlib, itertools, pstats, sys
from os import path
import numpy as np
# Path hack
sys.path.append( path.dirname( path.dirname( path.abspath(__file__) ) ) )
def profile(f, args):
"""Profile the sbf function f."""
z = 10**np.linspace(-3, 4, 1000)
n = np.arange(200)
zz, nn = np.meshgrid(z, n)
fnames = get_input_filenames(args)
cProfile.runctx("f(nn, zz)", globals(), locals(), fnames[0])
    # Random unit-modulus phases; note the 1j factor so the test arguments are
    # genuinely complex (without it the "complex" run would still use real z).
    phases = np.exp(2j*np.pi*np.random.rand(zz.shape[0], zz.shape[1]))
    zz = zz*phases
    cProfile.runctx("f(nn, zz)", globals(), locals(), fnames[1])
def get_input_filenames(args):
return ("{}_{}_real.pstats".format(args.sbf, args.algo),
"{}_{}_complex.pstats".format(args.sbf, args.algo))
def get_output_filenames(args):
return ("{}_{}_real.txt".format(args.sbf, args.algo),
"{}_{}_complex.txt".format(args.sbf, args.algo))
def print_stats(args):
f_ins = get_input_filenames(args)
f_outs = get_output_filenames(args)
for f_in, f_out in itertools.izip(f_ins, f_outs):
with open(f_out, "w") as f:
p = pstats.Stats(f_in, stream=f)
p.strip_dirs().sort_stats("cumulative").print_stats(50)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("sbf",
help="The spherical Bessel function to profile.",
choices=["jn", "yn", "h1n", "h2n", "i1n", "i2n", "kn"])
parser.add_argument("algo",
help="The implementation to profile.",
choices=["default", "bessel", "a_recur", "cai",
"power_series", "d_recur_miller",
"candidate"])
args = parser.parse_args()
m = importlib.import_module("algos.{}".format(args.algo))
f = getattr(m, "sph_{}".format(args.sbf))
profile(f, args)
print_stats(args)
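    # Example invocation (one possible run, from this directory):
    #   python profile.py jn default
    # which produces jn_default_real.pstats / jn_default_complex.pstats and
    # the matching .txt summaries via print_stats().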
| mit | 6,744,523,673,732,388,000 | 31.786885 | 79 | 0.566 | false | 3.189793 | false | false | false |
shollen/evennia | evennia/contrib/slow_exit.py | 8 | 4375 | """
Slow Exit typeclass
Contribution - Griatch 2014
This is an example of an Exit-type that delays its traversal. This
simulates slow movement, common in many different types of games. The
contrib also contains two commands, CmdSetSpeed and CmdStop for changing
the movement speed and abort an ongoing traversal, respectively.
To try out an exit of this type, you could connect two existing rooms
using something like this:
@open north:contrib.slow_exit.SlowExit = <destination>
Installation:
To make all new exits of this type, add the following line to your
settings:
BASE_EXIT_TYPECLASS = "contrib.slow_exit.SlowExit"
To get the ability to change your speed and abort your movement,
simply import and add CmdSetSpeed and CmdStop from this module to your
default cmdset (see tutorials on how to do this if you are unsure).
Notes:
This implementation is efficient but not persistent; so incomplete
movement will be lost in a server reload. This is acceptable for most
game types - to simulate longer travel times (more than the couple of
seconds assumed here), a more persistent variant using Scripts or the
TickerHandler might be better.
"""
from evennia import DefaultExit, utils, Command
MOVE_DELAY = {"stroll": 6,
"walk": 4,
"run": 2,
"sprint": 1}
class SlowExit(DefaultExit):
"""
This overloads the way moving happens.
"""
def at_traverse(self, traversing_object, target_location):
"""
Implements the actual traversal, using utils.delay to delay the move_to.
"""
# if the traverser has an Attribute move_speed, use that,
# otherwise default to "walk" speed
move_speed = traversing_object.db.move_speed or "walk"
move_delay = MOVE_DELAY.get(move_speed, 4)
def move_callback():
"This callback will be called by utils.delay after move_delay seconds."
source_location = traversing_object.location
if traversing_object.move_to(target_location):
self.at_after_traverse(traversing_object, source_location)
else:
if self.db.err_traverse:
# if exit has a better error message, let's use it.
self.caller.msg(self.db.err_traverse)
else:
# No shorthand error message. Call hook.
self.at_failed_traverse(traversing_object)
traversing_object.msg("You start moving %s at a %s." % (self.key, move_speed))
# create a delayed movement
deferred = utils.delay(move_delay, callback=move_callback)
# we store the deferred on the character, this will allow us
# to abort the movement. We must use an ndb here since
# deferreds cannot be pickled.
traversing_object.ndb.currently_moving = deferred
#
# set speed - command
#
SPEED_DESCS = {"stroll": "strolling",
"walk": "walking",
"run": "running",
"sprint": "sprinting"}
class CmdSetSpeed(Command):
"""
set your movement speed
Usage:
setspeed stroll|walk|run|sprint
This will set your movement speed, determining how long time
it takes to traverse exits. If no speed is set, 'walk' speed
is assumed.
"""
key = "setspeed"
def func(self):
"""
Simply sets an Attribute used by the SlowExit above.
"""
speed = self.args.lower().strip()
if speed not in SPEED_DESCS:
self.caller.msg("Usage: setspeed stroll|walk|run|sprint")
elif self.caller.db.move_speed == speed:
self.caller.msg("You are already %s." % SPEED_DESCS[speed])
else:
self.caller.db.move_speed = speed
self.caller.msg("You are now %s." % SPEED_DESCS[speed])
#
# stop moving - command
#
class CmdStop(Command):
"""
stop moving
Usage:
stop
Stops the current movement, if any.
"""
key = "stop"
def func(self):
"""
This is a very simple command, using the
stored deferred from the exit traversal above.
"""
currently_moving = self.caller.ndb.currently_moving
if currently_moving:
currently_moving.cancel()
self.caller.msg("You stop moving.")
else:
self.caller.msg("You are not moving.")
| bsd-3-clause | -6,332,526,666,602,655,000 | 29.594406 | 86 | 0.637943 | false | 3.937894 | false | false | false |
yaukwankiu/armor | start3.py | 1 | 3163 | #################
# codes for testing armor.patternMatching.pipeline and armor.patternMatching.algorithms
import time
import shutil
import os
time0 = time.time()
startTime =time.asctime()
from armor import defaultParameters as dp
from armor import misc
from armor import pattern, pattern2
from armor.patternMatching import pipeline as pp, algorithms
from armor.filter import filters
kongreyDSS = pattern2.kongreyDSS
hualien4 = misc.getFourCorners(dp.hualienCounty)
yilan4 = misc.getFourCorners(dp.yilanCounty)
kaohsiung4 = misc.getFourCorners(dp.kaohsiungCounty)
taipei4 = misc.getFourCorners(dp.taipeiCounty)
taitung4 = misc.getFourCorners(dp.taitungCounty)
regions = [{'name': "hualien", 'points': hualien4, 'weight': 0.25}, # equal weights: Yilan and Taipei are smaller but are visited by typhoons more
# {'name': "kaohsiung", 'points':kaohsiung4, 'weight':0.3}, #commented out: it's on the west coast and we want the window not to cross the central mountainous regions
{'name': "taipei", 'points':taipei4, 'weight':0.25},
{'name':"taitung", 'points':taitung4, 'weight':0.25},
{'name':"yilan", 'points':yilan4, 'weight':0.25}, # no need to add to 1
]
regionsString = "_".join([v['name']+str(round(v['weight'],2)) for v in regions])
outputFolder = dp.defaultRootFolder + "labReports/2014-03-07-filter-matching-scoring-pipeline/"+regionsString +'/'
### next up: work on the i/o so that i don't need to exit/re-enter ipython every time
# for loop added 18-03-2014
dss = kongreyDSS
obs = dss.obs
#obs.list = [v for v in obs if "00" in v.dataTime and v.dataTime>="20130828.0010"] # trim it down
obs.list = [v for v in obs if "00" in v.dataTime and (not ".00" in v.dataTime) and v.dataTime>="20130829.1800"] # trim it down
if not os.path.exists(outputFolder):
os.makedirs(outputFolder)
shutil.copyfile(dp.defaultRootFolder+"python/armor/start3.py", outputFolder+"start3.py")
for a in obs:
#obsTime="20130829.1800"
#kongreyDSS.load() # reload
dss.unload()
obsTime = a.dataTime
pp.pipeline(dss=kongreyDSS,
filteringAlgorithm = filters.gaussianFilter,
filteringAlgorithmArgs = {'sigma':5,
'stream_key': "wrfs" },
matchingAlgorithm = algorithms.nonstandardKernel,
matchingAlgorithmArgs = {'obsTime': obsTime, 'maxHourDiff':7,
'regions':regions,
'k' : 24, # steps of semi-lagrangian advections performed
'shiibaArgs':{'searchWindowWidth':11, 'searchWindowHeight':11, },
'outputFolder':outputFolder,
} ,
outputFolder=outputFolder,
toLoad=False,
#remarks= "Covariance used, rather than correlation: algorithms.py line 221: tempScore = a1.cov(w1)[0,1]",
remarks = "Correlation used"
)
print 'start time:', startTime
print 'total time spent:', time.time()-time0
| cc0-1.0 | 7,521,243,379,194,565,000 | 44.84058 | 179 | 0.625356 | false | 3.350636 | false | false | false |
Tefx/Brick | Brick/service/qing.py | 1 | 1976 | import gevent
from qingcloud.iaas import connect_to_zone
from Brick.sockserver import SockClient
from base import ServiceBase
class QingService(ServiceBase):
port = 42424
def __init__(self, s_id, conf,
api_keypath, zone, image, keypair, vxnets):
super(QingService, self).__init__(s_id, conf)
with open(api_keypath) as f:
self.api_id = f.readline().split()[1].strip("'")
self.api_key = f.readline().split()[1].strip("'")
self.zone = zone
self.image = image
self.keypair = keypair
self.instance_id = None
self.vxnets = vxnets
self.host = None
def wait_booting(self, conn):
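        # Poll every 3 seconds until the instance reports "running" and has a
        # private IP assigned, then return its description dict.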
ret = conn.describe_instances(instances=self.instance_id)
if ret["instance_set"][0]["status"] != "running":
gevent.sleep(3)
return self.wait_booting(conn)
elif not ret["instance_set"][0]["vxnets"][0]["private_ip"]:
gevent.sleep(3)
return self.wait_booting(conn)
else:
return ret["instance_set"][0]
def conn_puppet(self):
self.puppet = SockClient((self.host, self.port), keep_alive=False)
self.puppet.hire_worker()
def real_start(self):
conn = connect_to_zone(self.zone, self.api_id, self.api_key)
ret = conn.run_instances(image_id=self.image,
instance_type=self.conf,
login_mode="keypair",
login_keypair=self.keypair,
vxnets=[self.vxnets])
self.instance_id = ret["instances"]
ret = self.wait_booting(conn)
self.host = ret["vxnets"][0]["private_ip"]
self.conn_puppet()
def real_terminate(self):
self.puppet.fire_worker()
self.puppet.shutdown()
conn = connect_to_zone(self.zone, self.api_id, self.api_key)
conn.terminate_instances(self.instance_id)
| gpl-3.0 | -8,515,703,323,268,287,000 | 34.285714 | 74 | 0.564271 | false | 3.721281 | false | false | false |