from django.conf import settings
from django.conf.urls import patterns, url
from haystack.views import SearchView
from elections.forms import ElectionForm
from elections.views import ElectionsSearchByTagView, HomeView, ElectionDetailView,\
CandidateDetailView, SoulMateDetailView, FaceToFaceView, AreaDetailView, \
CandidateFlatPageDetailView, ElectionRankingView, QuestionsPerCandidateView
from sitemaps import *
from django.views.decorators.cache import cache_page
from elections.preguntales_views import MessageDetailView, ElectionAskCreateView, AnswerWebHook
media_root = getattr(settings, 'MEDIA_ROOT', '/')
new_answer_endpoint = r"^new_answer/%s/?$" % (settings.NEW_ANSWER_ENDPOINT)
sitemaps = {
'elections': ElectionsSitemap,
'candidates': CandidatesSitemap,
}
urlpatterns = patterns('',
    url(new_answer_endpoint, AnswerWebHook.as_view(), name='new_answer_endpoint'),
url(r'^/?$', cache_page(60 * settings.CACHE_MINUTES)(HomeView.as_view(template_name='elections/home.html')), name='home'),
url(r'^buscar/?$', SearchView(template='search.html',
form_class=ElectionForm), name='search'),
url(r'^busqueda_tags/?$', ElectionsSearchByTagView.as_view(), name='tags_search'),
url(r'^election/(?P<slug>[-\w]+)/?$',
cache_page(60 * settings.CACHE_MINUTES)(ElectionDetailView.as_view(template_name='elections/election_detail.html')),
name='election_view'),
url(r'^election/(?P<slug>[-\w]+)/questionary/?$',
cache_page(60 * settings.CACHE_MINUTES)(ElectionDetailView.as_view(template_name='elections/election_questionary.html')),
name='questionary_detail_view'),
#compare two candidates
url(r'^election/(?P<slug>[-\w]+)/face-to-face/(?P<slug_candidate_one>[-\w]+)/(?P<slug_candidate_two>[-\w]+)/?$',
cache_page(60 * settings.CACHE_MINUTES)(FaceToFaceView.as_view(template_name='elections/compare_candidates.html')),
name='face_to_face_two_candidates_detail_view'),
    # one candidate to compare
url(r'^election/(?P<slug>[-\w]+)/face-to-face/(?P<slug_candidate_one>[-\w]+)/?$',
cache_page(60 * settings.CACHE_MINUTES)(ElectionDetailView.as_view(template_name='elections/compare_candidates.html')),
name='face_to_face_one_candidate_detail_view'),
    # no candidate selected
url(r'^election/(?P<slug>[-\w]+)/face-to-face/?$',
cache_page(60 * settings.CACHE_MINUTES)(ElectionDetailView.as_view(template_name='elections/compare_candidates.html')),
name='face_to_face_no_candidate_detail_view'),
#soulmate
url(r'^election/(?P<slug>[-\w]+)/soul-mate/?$',
SoulMateDetailView.as_view(template_name='elections/soulmate_candidate.html'),
name='soul_mate_detail_view'),
# Preguntales
url(r'^election/(?P<election_slug>[-\w]+)/messages/(?P<pk>\d+)/?$',
MessageDetailView.as_view(template_name='elections/message_detail.html'),
name='message_detail'),
#ranking
url(r'^election/(?P<slug>[-\w]+)/ranking/?$',
cache_page(60 * settings.CACHE_MINUTES)(ElectionRankingView.as_view(template_name='elections/ranking_candidates.html')),
name='ranking_view'),
    url(r'^election/(?P<election_slug>[-\w]+)/(?P<slug>[-\w]+)/questions/?$',
QuestionsPerCandidateView.as_view(template_name='elections/questions_per_candidate.html'),
name='questions_per_candidate'
),
#ask
url(r'^election/(?P<slug>[-\w]+)/ask/?$',
ElectionAskCreateView.as_view(template_name='elections/ask_candidate.html'),
name='ask_detail_view'),
url(r'^election/(?P<election_slug>[-\w]+)/(?P<slug>[-\w]+)/?$',
cache_page(60 * settings.CACHE_MINUTES)(CandidateDetailView.as_view(template_name='elections/candidate_detail.html')),
name='candidate_detail_view'
),
# End Preguntales
url(r'^election/(?P<election_slug>[-\w]+)/(?P<slug>[-\w]+)/(?P<url>[-\w]+)/?$',
cache_page(60 * settings.CACHE_MINUTES)(CandidateFlatPageDetailView.as_view()),
name='candidate_flatpage'
),
url(r'^election/(?P<slug>[-\w]+)/extra_info.html$',
ElectionDetailView.as_view(template_name='elections/extra_info.html'),
name='election_extra_info'),
url(r'^area/(?P<slug>[-\w]+)/?$',
AreaDetailView.as_view(template_name='elections/area.html'),
name='area'),
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
)
urlpatterns += patterns('',
url(r'^cache/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': media_root})
)
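# Illustrative matches for the patterns above (hypothetical slugs):
#   /election/my-election/                 -> election_view
#   /election/my-election/soul-mate/       -> soul_mate_detail_view
#   /election/my-election/some-candidate/  -> candidate_detail_view
#   /cache/some/file.png                   -> served from MEDIA_ROOT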
|
If you have honeycomb shades, such as Duettes by Hunter Douglas, in some of your windows, try to catch a malfunction before it becomes a major problem.
Sometimes one or more of the cords in the pull-tassel will come undone. Stop using that shade until the cords are put back in place.
You could call the shop where you bought the shade to ask how to open the plastic tassel and put back the cords where they belong.
However, you might also be able to do it right then and there. Hold the tassel in your left hand. Pull the heavier, single cord and the small cap out of the tassel. Inside you will find the smaller cords held together by a clip. Reinstall the cords that came loose and reassemble the tassel.
If you do not put the loose cords back where they belong and you keep operating the shade, those loose cords will eventually retract back into the headrail and you will have a major repair on your hands.
If you still have the original pleated shades from the 1970s or 1980s such as Verosol, Veroline or Veropaque, they are probably in a dark color such as bronze. Broken cords can be replaced if the folds in the fabric still stack correctly (they should not pop out of line) and if the fabric itself is still strong.
But you may have to settle for white cords. With each passing year, it is harder to find darker color cords.
In most cases, the white cord is not objectionable. Most window-covering shops can replace cords without sending the shade back to the manufacturer.
Rule of thumb: the simpler the control mechanism, the easier the repair. Some spring-loaded controls can only be fixed by the manufacturer.
By the way, do not try to replace the cords in those pleated shades on your own. The overlapping panels in older shades will disengage and you will spend hours trying to get the shade reassembled. Trust the pros.
|
"""
Outputs the object class tree read from LDAPv3 schema
of a given server
Usage: schema_oc_tree.py [--html] [LDAP URL]
"""
import sys,getopt,ldap,ldap.schema
ldap.trace_level = 1
def PrintSchemaTree(schema,se_class,se_tree,se_oid,level):
"""ASCII text output for console"""
se_obj = schema.get_obj(se_class,se_oid)
if se_obj!=None:
print '| '*(level-1)+'+---'*(level>0), \
', '.join(se_obj.names), \
'(%s)' % se_obj.oid
for sub_se_oid in se_tree[se_oid]:
print '| '*(level+1)
PrintSchemaTree(schema,se_class,se_tree,sub_se_oid,level+1)
def HTMLSchemaTree(schema,se_class,se_tree,se_oid,level):
"""HTML output for browser"""
se_obj = schema.get_obj(se_class,se_oid)
if se_obj!=None:
print """
<dt><strong>%s (%s)</strong></dt>
<dd>
%s
""" % (', '.join(se_obj.names),se_obj.oid,se_obj.desc)
if se_tree[se_oid]:
print '<dl>'
for sub_se_oid in se_tree[se_oid]:
HTMLSchemaTree(schema,se_class,se_tree,sub_se_oid,level+1)
print '</dl>'
print '</dd>'
ldap.set_option(ldap.OPT_DEBUG_LEVEL,0)
ldap._trace_level = 0
subschemasubentry_dn,schema = ldap.schema.urlfetch(sys.argv[-1],ldap.trace_level)
if subschemasubentry_dn is None:
print 'No sub schema sub entry found!'
sys.exit(1)
try:
options,args=getopt.getopt(sys.argv[1:],'',['html'])
except getopt.error,e:
  print 'Error: %s\nUsage: schema_oc_tree.py [--html] [LDAP URL]' % e
  sys.exit(1)
html_output = options and options[0][0]=='--html'
oc_tree = schema.tree(ldap.schema.ObjectClass)
at_tree = schema.tree(ldap.schema.AttributeType)
#for k,v in oc_tree.items():
# print k,'->',v
#for k,v in at_tree.items():
# print k,'->',v
if html_output:
print """<html>
<head>
<title>Object class tree</title>
</head>
<body bgcolor="#ffffff">
<h1>Object class tree</h1>
<dl>
"""
HTMLSchemaTree(schema,ldap.schema.ObjectClass,oc_tree,'2.5.6.0',0)
print """</dl>
<h1>Attribute type tree</h1>
<dl>
"""
for a in schema.listall(ldap.schema.AttributeType):
if at_tree[a]:
HTMLSchemaTree(schema,ldap.schema.AttributeType,at_tree,a,0)
print
print """</dl>
</body>
</html>
"""
else:
print '*** Object class tree ***\n'
print
PrintSchemaTree(schema,ldap.schema.ObjectClass,oc_tree,'2.5.6.0',0)
print '\n*** Attribute types tree ***\n'
PrintSchemaTree(schema,ldap.schema.AttributeType,at_tree,'_',0)
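# Example invocation (assumes an LDAP server reachable at the given URL):
#   python schema_oc_tree.py --html ldap://localhost:389/ > schema_tree.html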
|
Here’s our range of yummy ClearSpot Tofu products.
To keep our tofu at its best it should be refrigerated below 5C and is suitable for home freezing.
It's simply plain tofu and nothing more.
Naturally smoked over beechwood chippings in our smoking kiln with just a pinch of sea salt.
Our own little creation! These little patties are made with both our original and smoked tofu, blended with crispy seaweed.
New Organic Oriental Tofu, 100% organic soya beans infused with a blend of sweet chilli and aromatic spices. It's perfect for adding a little extra flavour to any meal occasion. Try it in stir-fry, soup, and salad.
New Organic Mexican Tofu, 100% organic soya beans infused with a blend of smoky paprika and aromatic spices. It's perfect for adding a little extra flavour to any meal occasion. Try it in fajitas, tacos, and chilli.
|
import datetime
import types
import json
import collections
import textwrap
import os
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
from django.views import generic
from django.http import HttpResponseRedirect, HttpResponse, JsonResponse, Http404
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.decorators import login_required
from member.models import Member, Client, Route
from django.core.urlresolvers import reverse_lazy
from django.contrib.admin.models import LogEntry, ADDITION, CHANGE
from django.contrib.contenttypes.models import ContentType
from django.db.models.functions import Lower
from .apps import DeliveryConfig
from sqlalchemy import func, or_, and_
import labels # package pylabels
from reportlab.graphics import shapes
from .models import Delivery
from .forms import DishIngredientsForm
from order.models import (
Order, component_group_sorting, SIZE_CHOICES_REGULAR, SIZE_CHOICES_LARGE)
from meal.models import (
COMPONENT_GROUP_CHOICES, COMPONENT_GROUP_CHOICES_MAIN_DISH,
Component, Ingredient,
Menu, Menu_component,
Component_ingredient)
from member.apps import db_sa_session
from datetime import date
from . import tsp
MEAL_LABELS_FILE = os.path.join(settings.BASE_DIR, "meallabels.pdf")
class Orderlist(generic.ListView):
    # Display all the orders for a given day
model = Delivery
template_name = 'review_orders.html'
context_object_name = 'orders'
def get_queryset(self):
queryset = Order.objects.get_orders_for_date()
return queryset
def get_context_data(self, **kwargs):
context = super(Orderlist, self).get_context_data(**kwargs)
context['orders_refresh_date'] = None
if LogEntry.objects.exists():
log = LogEntry.objects.latest('action_time')
context['orders_refresh_date'] = log
return context
class MealInformation(generic.View):
# Choose today's main dish and its ingredients
def get(self, request, **kwargs):
# Display today's main dish and its ingredients
date = datetime.date.today()
main_dishes = Component.objects.order_by(Lower('name')).filter(
component_group=COMPONENT_GROUP_CHOICES_MAIN_DISH)
if 'id' in kwargs:
# today's main dish has been chosen by user
main_dish = Component.objects.get(id=int(kwargs['id']))
# delete existing ingredients for the date + dish
Component_ingredient.objects.filter(
component=main_dish, date=date).delete()
else:
# see if a menu exists for today
menu_comps = Menu_component.objects.filter(
menu__date=date,
component__component_group=COMPONENT_GROUP_CHOICES_MAIN_DISH)
if menu_comps:
# main dish is known in today's menu
main_dish = menu_comps[0].component
else:
# take first main dish
main_dish = main_dishes[0]
# see if existing chosen ingredients for the dish
dish_ingredients = Component.get_day_ingredients(
main_dish.id, date)
if not dish_ingredients:
# get recipe ingredients for the dish
dish_ingredients = Component.get_recipe_ingredients(
main_dish.id)
form = DishIngredientsForm(
initial={
'maindish': main_dish.id,
'ingredients': dish_ingredients})
return render(
request,
'ingredients.html',
{'form': form,
'date': str(date)})
def post(self, request):
# Choose ingredients in today's main dish
# print("Pick Ingredients POST request=", request.POST) # For testing
date = datetime.date.today()
form = DishIngredientsForm(request.POST)
if '_restore' in request.POST:
# restore ingredients of main dish to those in recipe
if form.is_valid():
component = form.cleaned_data['maindish']
# delete existing ingredients for the date + dish
Component_ingredient.objects.filter(
component=component, date=date).delete()
return HttpResponseRedirect(
reverse_lazy("delivery:meal_id", args=[component.id]))
elif '_next' in request.POST:
# forward to kitchen count
if form.is_valid():
ingredients = form.cleaned_data['ingredients']
component = form.cleaned_data['maindish']
# delete existing ingredients for the date + dish
Component_ingredient.objects.filter(
component=component, date=date).delete()
# add revised ingredients for the date + dish
for ing in ingredients:
ci = Component_ingredient(
component=component,
ingredient=ing,
date=date)
ci.save()
# END FOR
# Create menu and its components for today
compnames = [component.name] # main dish
# take first sorted name of each other component group
for group, ignore in COMPONENT_GROUP_CHOICES:
if group != COMPONENT_GROUP_CHOICES_MAIN_DISH:
compnames.append(
Component.objects.order_by(Lower('name')).filter(
component_group=group)[0].name)
Menu.create_menu_and_components(date, compnames)
return HttpResponseRedirect(
reverse_lazy("delivery:kitchen_count"))
# END IF
# END IF
return render(
request,
'ingredients.html',
{'date': date,
'form': form})
class RoutesInformation(generic.ListView):
# Display all the route information for a given day
model = Delivery
template_name = "routes.html"
def get_context_data(self, **kwargs):
context = super(RoutesInformation, self).get_context_data(**kwargs)
context['routes'] = Route.objects.all()
return context
# Kitchen count report view, helper classes and functions
class KitchenCount(generic.View):
def get(self, request, **kwargs):
# Display kitchen count report for given delivery date
# or for today by default
if 'year' in kwargs and 'month' in kwargs and 'day' in kwargs:
date = datetime.date(
int(kwargs['year']), int(kwargs['month']), int(kwargs['day']))
else:
date = datetime.date.today()
kitchen_list = Order.get_kitchen_items(date)
component_lines, meal_lines = kcr_make_lines(kitchen_list, date)
num_labels = kcr_make_labels(kitchen_list)
# release session for SQLAlchemy TODO use signals instead
db_sa_session.remove()
return render(request, 'kitchen_count.html',
{'component_lines': component_lines,
'meal_lines': meal_lines,
'num_labels': num_labels})
class Component_line(types.SimpleNamespace):
# line to display component count summary
def __init__(self,
component_group='', rqty=0, lqty=0,
name='', ingredients=''):
self.__dict__.update(
{k: v for k, v in locals().items() if k != 'self'})
class Meal_line(types.SimpleNamespace):
# line to display client meal specifics
def __init__(self,
client='', rqty='', lqty='', comp_clash='',
ingr_clash='', preparation='', rest_comp='',
rest_ingr='', rest_item=''):
self.__dict__.update(
{k: v for k, v in locals().items() if k != 'self'})
def meal_line(v):
# factory for Meal_line
return Meal_line(
client=v.lastname + ', ' + v.firstname[0:2] + '.',
rqty=str(v.meal_qty) if v.meal_size == SIZE_CHOICES_REGULAR else '',
lqty=str(v.meal_qty) if v.meal_size == SIZE_CHOICES_LARGE else '',
comp_clash=', '.join(v.incompatible_components),
ingr_clash=', '.join(v.incompatible_ingredients),
preparation=', '.join(v.preparation),
rest_comp=', '.join(v.other_components),
rest_ingr=', '.join(v.other_ingredients),
rest_item=', '.join(v.restricted_items))
def kcr_cumulate(regular, large, meal):
# count cumulative meal quantities by size
if meal.meal_size == SIZE_CHOICES_REGULAR:
regular = regular + meal.meal_qty
else:
large = large + meal.meal_qty
return (regular, large)
def kcr_total_line(lines, label, regular, large):
# add line to display subtotal or total quantities by size
if regular or large:
lines.append(
Meal_line(client=label, rqty=str(regular), lqty=str(large)))
def kcr_make_lines(kitchen_list, date):
# generate all the lines for the kitchen count report
component_lines = {}
for k, item in kitchen_list.items():
for component_group, meal_component \
in item.meal_components.items():
component_lines.setdefault(
component_group,
Component_line(
component_group=component_group,
name=meal_component.name,
ingredients=", ".join(
[ing.name for ing in
Component.get_day_ingredients(
meal_component.id, date)])))
if (component_group == COMPONENT_GROUP_CHOICES_MAIN_DISH and
item.meal_size == SIZE_CHOICES_LARGE):
component_lines[component_group].lqty += \
meal_component.qty
else:
component_lines[component_group].rqty += \
meal_component.qty
# END FOR
# END FOR
items = component_lines.items()
if items:
component_lines_sorted = \
[component_lines[COMPONENT_GROUP_CHOICES_MAIN_DISH]]
component_lines_sorted.extend(
sorted([v for k, v in items if
k != COMPONENT_GROUP_CHOICES_MAIN_DISH],
key=lambda x: x.component_group))
else:
component_lines_sorted = []
meal_lines = []
rtotal, ltotal = (0, 0)
# part 1 Components clashes (and other columns)
rsubtotal, lsubtotal = (0, 0)
for v in sorted(
[val for val in kitchen_list.values() if
val.incompatible_components],
key=lambda x: x.lastname + x.firstname):
meal_lines.append(meal_line(v))
rsubtotal, lsubtotal = kcr_cumulate(rsubtotal, lsubtotal, v)
# END FOR
kcr_total_line(meal_lines, 'SUBTOTAL', rsubtotal, lsubtotal)
rtotal, ltotal = (rtotal + rsubtotal, ltotal + lsubtotal)
    # part 2 Ingredients clashes, no components clashes (and other columns)
rsubtotal, lsubtotal = (0, 0)
clients = iter(sorted(
[(ke, val) for ke, val in kitchen_list.items() if
(val.incompatible_ingredients and
not val.incompatible_components)],
key=lambda x: x[1].incompatible_ingredients))
k, v = next(clients, (0, 0))
while k > 0:
combination = v.incompatible_ingredients
meal_lines.append(meal_line(v))
rsubtotal, lsubtotal = kcr_cumulate(rsubtotal, lsubtotal, v)
k, v = next(clients, (0, 0))
if k == 0 or combination != v.incompatible_ingredients:
kcr_total_line(meal_lines, 'SUBTOTAL', rsubtotal, lsubtotal)
rtotal, ltotal = (rtotal + rsubtotal, ltotal + lsubtotal)
rsubtotal, lsubtotal = (0, 0)
# END WHILE
# part 3 No clashes but preparation (and other columns)
rsubtotal, lsubtotal = (0, 0)
for v in sorted(
[val for val in kitchen_list.values() if
(not val.incompatible_ingredients and
not val.incompatible_components and
val.preparation)],
key=lambda x: x.lastname + x.firstname):
meal_lines.append(meal_line(v))
rsubtotal, lsubtotal = kcr_cumulate(rsubtotal, lsubtotal, v)
# END FOR
kcr_total_line(meal_lines, 'SUBTOTAL', rsubtotal, lsubtotal)
rtotal, ltotal = (rtotal + rsubtotal, ltotal + lsubtotal)
kcr_total_line(meal_lines, 'TOTAL SPECIALS', rtotal, ltotal)
rsubtotal, lsubtotal = (0, 0)
# part 4 No clashes nor preparation but other restrictions (NOT PRINTED)
for v in sorted(
[val for val in kitchen_list.values() if
(not val.incompatible_ingredients and
not val.incompatible_components and
not val.preparation and
(val.other_components or
val.other_ingredients or
val.restricted_items))],
key=lambda x: x.lastname + x.firstname):
meal_lines.append(meal_line(v))
rsubtotal, lsubtotal = kcr_cumulate(rsubtotal, lsubtotal, v)
# END FOR
# part 5 All columns empty (NOT PRINTED)
for v in sorted(
[val for val in kitchen_list.values() if
(not val.incompatible_ingredients and
not val.incompatible_components and
not val.preparation and
not val.other_components and
not val.other_ingredients and
not val.restricted_items)],
key=lambda x: x.lastname + x.firstname):
meal_lines.append(meal_line(v))
rsubtotal, lsubtotal = kcr_cumulate(rsubtotal, lsubtotal, v)
# END FOR
kcr_total_line(meal_lines, 'SUBTOTAL', rsubtotal, lsubtotal)
return (component_lines_sorted, meal_lines)
def kcr_make_labels(kitchen_list):
# see https://github.com/bcbnz/pylabels
# dimensions are in millimeters; 1 inch = 25.4 mm
specs = labels.Specification(
sheet_width=8.5 * 25.4, sheet_height=11 * 25.4,
columns=2, rows=7,
label_width=4 * 25.4, label_height=1.33 * 25.4,
top_margin=20, bottom_margin=20,
corner_radius=2)
def draw_label(label, width, height, data):
# callback function
obj, j, qty = data
label.add(shapes.String(2, height * 0.8,
obj.lastname + ", " + obj.firstname[0:2] + ".",
fontName="Helvetica-Bold",
fontSize=12))
label.add(shapes.String(width-2, height * 0.8,
"{}".format(datetime.date.today().
strftime("%a, %b-%d")),
fontName="Helvetica",
fontSize=10,
textAnchor="end"))
if obj.meal_size == SIZE_CHOICES_LARGE:
label.add(shapes.String(2, height * 0.65,
"LARGE",
fontName="Helvetica",
fontSize=10))
if qty > 1:
label.add(shapes.String(width * 0.5, height * 0.65,
"(" + str(j) + " of " + str(qty) + ")",
fontName="Helvetica",
fontSize=10))
label.add(shapes.String(width-3, height * 0.65,
obj.routename,
fontName="Helvetica-Oblique",
fontSize=8,
textAnchor="end"))
        special = list(obj.preparation or [])  # copy so we do not mutate obj.preparation
special.extend(["No " + item for item in obj.incompatible_ingredients])
special.extend(["No " + item for item in obj.other_ingredients])
special.extend(["No " + item for item in obj.restricted_items])
special = textwrap.wrap(
' / '.join(special), width=68,
break_long_words=False, break_on_hyphens=False)
position = height * 0.45
for line in special:
label.add(shapes.String(2, position,
line,
fontName="Helvetica",
fontSize=9))
position -= 10
sheet = labels.Sheet(specs, draw_label, border=True)
# obj is a KitchenItem instance (see order/models.py)
for obj in sorted(
list(kitchen_list.values()),
key=lambda x: x.lastname + x.firstname):
qty = obj.meal_qty
for j in range(1, qty + 1):
sheet.add_label((obj, j, qty))
if sheet.label_count > 0:
sheet.save(MEAL_LABELS_FILE)
print("SousChef Printed {} meal label(s) on {} page(s)"
" into file {}".format(
sheet.label_count, sheet.page_count, MEAL_LABELS_FILE))
return sheet.label_count
# END Kitchen count report view, helper classes and functions
# Delivery route sheet view, helper classes and functions
class MealLabels(generic.View):
def get(self, request, **kwargs):
try:
f = open(MEAL_LABELS_FILE, "rb")
        except IOError:
raise Http404("File " + MEAL_LABELS_FILE + " does not exist")
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = \
'attachment; filename="labels{}.pdf"'. \
format(datetime.date.today().strftime("%Y%m%d"))
response.write(f.read())
f.close()
return response
class DeliveryRouteSheet(generic.View):
def get(self, request, **kwargs):
# Display today's delivery sheet for given route
route_id = int(kwargs['id'])
date = datetime.date.today()
route = Route.objects.get(id=route_id)
route_client_ids = route.get_client_sequence(date)
# print("delivery route sheet", "route_client_ids", route_client_ids)
route_list = Order.get_delivery_list(date, route_id)
route_list = sort_sequence_ids(route_list, route_client_ids)
# TODO sort route_list using sequence from leaflet
summary_lines, detail_lines = drs_make_lines(route_list, date)
return render(request, 'route_sheet.html',
{'route': route,
'summary_lines': summary_lines,
'detail_lines': detail_lines})
RouteSummaryLine = \
collections.namedtuple(
'RouteSummaryLine',
['component_group',
'rqty',
'lqty'])
def drs_make_lines(route_list, date):
# generate all the lines for the delivery route sheet
summary_lines = {}
for k, item in route_list.items():
# print("\nitem = ", item)
for delivery_item in item.delivery_items:
component_group = delivery_item.component_group
if component_group:
line = summary_lines.setdefault(
component_group,
RouteSummaryLine(
component_group,
rqty=0,
lqty=0))
# print("\nline", line)
if (component_group == COMPONENT_GROUP_CHOICES_MAIN_DISH and
delivery_item.size == SIZE_CHOICES_LARGE):
summary_lines[component_group] = \
line._replace(lqty=line.lqty +
delivery_item.total_quantity)
elif component_group != '':
summary_lines[component_group] = \
line._replace(rqty=line.rqty +
delivery_item.total_quantity)
# END IF
# END IF
# END FOR
# END FOR
# print("values before sort", summary_lines.values())
summary_lines_sorted = sorted(
summary_lines.values(),
key=component_group_sorting)
# print("values after sort", summary_lines_sorted)
return summary_lines_sorted, list(route_list.values())
def sort_sequence_ids(dic, seq):
# sort items in dictionary according to sequence of keys
# dic : dictionary for which some keys are not items in sequence
# seq : list of keys that may not all be entries in dic
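    # e.g. dic = {10: 'a', 20: 'b', 30: 'c'} and seq = [20, 99, 10]
    #      yield OrderedDict([(20, 'b'), (10, 'a'), (30, 'c')])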
# build an ordered dictionary from seq skipping keys not in dic
od = collections.OrderedDict()
if seq:
for k in seq:
if dic.get(k):
od[k] = None
# place all values in dic into ordered dict;
# keys not in seq will be at the end.
for k, val in dic.items():
od[k] = val
# print("sort_sequence_ids",
# "dic.items()", dic.items(),
# "seq", seq,
# "od.items()", od.items())
return od
# END Delivery route sheet view, helper classes and functions
def dailyOrders(request):
data = []
route_id = request.GET.get('route')
# Load all orders for the day
orders = Order.objects.get_orders_for_date()
for order in orders:
if order.client.route is not None:
if order.client.route.id == int(route_id):
waypoint = {
'id': order.client.member.id,
'latitude': order.client.member.address.latitude,
'longitude': order.client.member.address.longitude,
'distance': order.client.member.address.distance,
'member': "{} {}".format(
order.client.member.firstname,
order.client.member.lastname),
'address': order.client.member.address.street
}
# print("waypoint=", waypoint)
data.append(waypoint)
# Since the
# https://www.mapbox.com/api-documentation/#retrieve-a-duration-matrix
# endpoint is not yet available, we solve an approximation of the
# problem by assuming the world is flat and has no obstacles (2D
# Euclidean plane). This should still give good results.
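    # (That is, the tsp module is presumed to cost an edge roughly as
    #  sqrt((lat1 - lat2)**2 + (lon1 - lon2)**2) between two waypoints.)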
node_to_waypoint = {}
nodes = [tsp.Node(None, 45.516564, -73.575145)] # Santropol
for waypoint in data:
node = tsp.Node(waypoint['id'], float(waypoint['latitude']),
float(waypoint['longitude']))
node_to_waypoint[node] = waypoint
nodes.append(node)
nodes = tsp.solve(nodes)
data = []
for node in nodes:
# Guard against Santropol which is not in node_to_waypoint
if node in node_to_waypoint:
data.append(node_to_waypoint[node])
waypoints = {'waypoints': data}
return JsonResponse(waypoints, safe=False)
@csrf_exempt
def saveRoute(request):
# print("saveRoute1", "request", request, "request.body=", request.body)
data = json.loads(request.body.decode('utf-8'))
# print("saveRoute2", "data=", data)
member_ids = [member['id'] for member in data['members']]
route_id = data['route'][0]['id']
route_client_ids = \
[Client.objects.get(member__id=member_id).id
for member_id in member_ids]
# print("saveRoute3", "route_id=", route_id,
# "route_client_ids=", route_client_ids)
route = Route.objects.get(id=route_id)
route.set_client_sequence(datetime.date.today(), route_client_ids)
route.save()
    # TODO: print roadmap according to the list of members received
return JsonResponse('OK', safe=False)
def refreshOrders(request):
creation_date = date.today()
delivery_date = date.today()
last_refresh_date = datetime.datetime.now()
clients = Client.active.all()
Order.create_orders_on_defaults(creation_date, delivery_date, clients)
LogEntry.objects.log_action(
user_id=1, content_type_id=1,
object_id="", object_repr="Generation of order for " + str(
datetime.datetime.now().strftime('%Y-%m-%d %H:%M')),
action_flag=ADDITION,
)
return HttpResponseRedirect(reverse_lazy("delivery:order"))
|
The trafficking of Africans by Europeans began in the 1500s. From the time Africans were enslaved in their motherland, to the time of their arrival in the New World, they sought ways to rebel, to fight back and escape.
‘That calculations had been made, with all possible exactness, to determine which was … the more saving method of managing slaves: whether, to appoint them moderate work, plenty of provision, and such treatment as might enable them to protract their lives to old age? Or, by rigorously straining their strength to the utmost, with little relaxation, hard fare, and hard usage, to wear them out before they became useless and unable to do service, and then, to buy new ones, to fill up their places?’
|
#!/usr/bin/env python3
from Digraph import Digraph
class DepthFirstDirectedPaths:
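    # Runs depth-first search from the source vertex s, marking every vertex
    # reachable from s and recording edgeTo[w] = v for each tree edge v -> w.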
def __init__(self, G, s):
self.marked = [0]*G.V
self.edgeTo = [0]*G.V
self.s = s
self.validateVertex(s)
self.dfs(G, s)
def dfs(self, G, v):
self.marked[v] = 1
for w in G.getAdj(v):
if not self.marked[w]:
self.edgeTo[w] = v
self.dfs(G, w)
def validateVertex(self, v):
V = len(self.marked)
assert 0 <= v < V, f'vertex {v} is not between 0 and {V-1}'
def hasPathTo(self, v):
self.validateVertex(v)
return self.marked[v]
    def pathTo(self, v):
        self.validateVertex(v)
        if not self.hasPathTo(v):
            return None
        path = [v]
        while v != self.s:
            v = self.edgeTo[v]
            path.append(v)
        return path[::-1]
if __name__ == '__main__':
# The test case can be downloaded from here
# https://algs4.cs.princeton.edu/42digraph/tinyDG.txt
# https://algs4.cs.princeton.edu/42digraph/mediumDG.txt
# https://algs4.cs.princeton.edu/42digraph/largeDG.txt
import sys
G = Digraph(sys.argv[1])
print(G)
s = int(sys.argv[2])
dfs = DepthFirstDirectedPaths(G, s)
for v in range(G.V):
if dfs.hasPathTo(v):
print(f'{s} to {v}: {"-".join(map(str, dfs.pathTo(v)))}')
else:
print(f'{s} to {v}: not connected')
|
Our seas and coasts are an asset with rich and varied resources, living and non-living, which support livelihoods, provide a sense of place and identity and define cultures. Degradation of coral reef ecosystems, overfishing, increased resource extraction - the need for improved stewardship of coastal and marine resources is increasingly evident around the globe. But what does marine and coastal stewardship mean and how can we apply stewardship in these environments?
|
#!/usr/bin/python3
import logging
from operator import itemgetter
from timeit import default_timer as timer
import rdflib
from .abstract_instruction_set import AbstractInstructionSet
from readers import rdf
from writers import rule_set, pickler
from samplers import by_definition as sampler
from algorithms.semantic_rule_learning import generate_semantic_association_rules,\
generate_semantic_item_sets,\
generate_common_behaviour_sets,\
support_of,\
confidence_of
class PakbonLD(AbstractInstructionSet):
def __init__(self, time=""):
self.time = time
self.logger = logging.getLogger(__name__)
def print_header(self):
header = "PAKBON: Context ('Sporen') with 12 attributes"
print(header)
print('-' * len(header))
def load_dataset(self, abox, tbox):
"""
# pakbonLD SPARQL endpoint
endpoint = "http://pakbon-ld.spider.d2s.labs.vu.nl/sparql/"
# query
query_string = "" "
prefix pbont: <http://pakbon-ld.spider.d2s.labs.vu.nl/ont/>
prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
SELECT DISTINCT ?s ?p ?o
WHERE {
?s a pbont:SIKB0102S_Vondstcontext;
?p ?o.
FILTER (?p != rdf:type)
} LIMIT 1000"" "
# perform query and return a KnowledgeGraph instance
kg_i = rdf.query(query_string, endpoint)
"""
# read graphs
kg_i = rdf.read(local_path=abox)
kg_s = rdf.read(local_path=tbox)
# sample by pattern
pattern = (None,
rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_grondspoortype"),
None)
# define context
# spoor with vulling
context = [rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_grondspoortype"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P53i_is_former_or_current_location_of"),
(rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P89_falls_within"),
rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_contexttype")),
(rdflib.URIRef("http://purl.org/crmeh#EHP3i"),
rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_kleur")),
(rdflib.URIRef("http://purl.org/crmeh#EHP3i"),
rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_textuur")),
(rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P53i_is_former_or_current_location_of"),
rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_structuurtype")),
(rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_diepte"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P40_observed_dimension"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P90_has_value")),
(rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_diepte"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P40_observed_dimension"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P91_has_unit")),
(rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P140i_was_attributed_by"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P141_assigned"),
rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_beginperiode")),
(rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P140i_was_attributed_by"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P141_assigned"),
rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_eindperiode")),
(rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P53i_is_former_or_current_location_of"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P140i_was_attributed_by"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P141_assigned"),
rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_beginperiode")),
(rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P53i_is_former_or_current_location_of"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P140i_was_attributed_by"),
rdflib.URIRef("http://www.cidoc-crm.org/cidoc-crm/P141_assigned"),
rdflib.URIRef("http://pakbon-ld.spider.d2s.labs.vu.nl/ont/SIKB0102S_eindperiode"))]
kg_i_sampled = kg_i.sample(sampler, patterns=[pattern], context=context)
return (kg_i_sampled, kg_s)
def run_program(self, dataset, hyperparameters):
self.logger.info("Starting run\nParameters:\n{}".format(
"\n".join(["\t{}: {}".format(k,v) for k,v in hyperparameters.items()])))
kg_i, kg_s = dataset
# fit model
t0 = timer()
# generate semantic item sets from sampled graph
si_sets = generate_semantic_item_sets(kg_i)
# generate common behaviour sets
cbs_sets = generate_common_behaviour_sets(si_sets,
hyperparameters["similarity_threshold"],
hyperparameters["max_cbs_size"])
# generate semantic association rules
rules = generate_semantic_association_rules(kg_i,
kg_s,
cbs_sets,
hyperparameters["minimal_local_support"])
# calculate support and confidence, skip those not meeting minimum requirements
final_rule_set = []
for rule in rules:
support = support_of(kg_i, rule)
confidence = confidence_of(kg_i, rule)
if support >= hyperparameters["minimal_support"] and\
confidence >= hyperparameters["minimal_confidence"]:
final_rule_set.append((rule, support, confidence))
# sorting rules on both support and confidence
final_rule_set.sort(key=itemgetter(2, 1), reverse=True)
        # elapsed time
t1 = timer()
dt = t1 - t0
print(" Program completed in {:.3f} ms".format(dt))
print(" Found {} rules".format(len(final_rule_set)))
return final_rule_set
    def write_to_file(self, path="./of/latest", output=None):
        output = output if output is not None else []
        overwrite = False
print(" Writing output to {}...".format(path))
rule_set.pretty_write(output, path, overwrite)
pickler.write(output, path+".pickle", overwrite)
def run(self, abox, tbox, output_path):
self.print_header()
print(" {}\n".format(self.time))
hyperparameters = {}
hyperparameters["similarity_threshold"] = .8
hyperparameters["max_cbs_size"] = 4
hyperparameters["minimal_local_support"] = 0.0
hyperparameters["minimal_support"] = 0.0
hyperparameters["minimal_confidence"] = 0.0
print(" Importing Data Sets...")
dataset = self.load_dataset(abox, tbox)
print(" Initiated Pattern Learning...")
output = self.run_program(dataset, hyperparameters)
if len(output) > 0:
self.write_to_file(output_path, output)
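# A minimal usage sketch, assuming local A-Box/T-Box files at these
# (hypothetical) paths and writing results under "./of/latest":
#
#   pipeline = PakbonLD(time="2017-01-01T00:00")
#   pipeline.run(abox="./data/abox.nt", tbox="./data/tbox.nt",
#                output_path="./of/latest")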
|
Compassion and competition are two words that we don’t typically use in the same sentence. Compassion is the kingdom way; competition is the American way. As Americans, we celebrate winning at all costs. For us bigger is better. This is not God’s way. He calls us to sacrifice, service, and surrender. God rejoices in the laying down of one’s life. We often find ourselves trying to live in both of these realities. Yet, the longer we walk with Christ the more we feel the tension between the two.
The spirit of competition has worked itself into the place it should never exist: The Church. Now, don’t misunderstand me, I love a great ball game. Organized competition in sports and other arenas of life are fun, but when the same mindset spills over into the church it’s detrimental to the work of the kingdom. Think about it, competition puts us at odds with other people. It’s the process of trying to outshine another person. How can we genuinely serve others if we are constantly trying to surpass them?
The kingdom-life is a call to compassion, not competition. Compassion is a word that usually evokes positive feelings. We all like to consider ourselves compassionate people. However, compassion is not as natural of a phenomenon as we might think. The word “compassion” suggests: suffering with another, becoming weak, laying down one’s life. In fact, the bible teaches us to present ourselves as “living sacrifices.” The problem with living sacrifices is that they have the tendency to crawl off the altar, stand up, and start competing again.
There are two powerful narratives at work in our lives simultaneously. The first is the American narrative: a story of power, wealth, and prestige. The American story is one of competition: bigger, better, faster, and stronger. Americans don’t like limits. We like graphs in which all lines move up. This narrative tells us that limits are bad and boundaries are to be crossed. The American way is up.
The other narrative at work is called the Gospel. This is the way of Jesus embodied by his life, teaching, death, and resurrection. This narrative requires downward mobility. In Christ we don’t compete for a place in the kingdom; instead, God has chosen to be God-with-us. Jesus came down in an act of divine compassion. In turn, God calls us to reflect Christ in how we live and interact with one another. Subsequently, through us, the kingdom crashes into the world.
The way of Jesus is down. Jesus taught that the only way to find your life is to lose it. He said the first must be last… If you try and keep your life, you will actually end up losing everything. This teaching is woven throughout the Gospels. In fact, if you attempt to remove it, you would end up removing a large portion of what Jesus taught.
See for yourself: Blessed are the poor in spirit, those who mourn, the meek, the merciful, and the persecuted (Matt. 5:3-10). Don’t store up treasures on earth (Matt. 6:19-21). The one who seeks to save their life will lose it, but whoever loses their life for me will find it (Matt. 16:25). The last will be first and the first will be last (Matt. 20:16). If you want to be great, you have to become a servant (Matt. 20:26). The greatest among you will be the servant; the humble will be exalted and the exalted will be humbled (Matt. 23:11-12). Anyone who wants to be Jesus’ disciple must deny themselves, take up their cross daily, and follow him (Luke 9:23). If you cling to your life, you’ll lose it; if you let your life go, you’ll find it (Luke 17:33). We could go on, but I think you get the picture.
The Jesus narrative and the American narrative are worlds apart. The American story teaches us that life is about success. Our worth is connected to being bigger and better. Life is a competition and the one who dies with the most toys wins. If we are going to follow Jesus, we have to switch narratives because his story is one of compassion, humility, vulnerability, and servanthood.
The kingdom invites us to pour out our lives in certain assurance that life will never run out. The Christian life obligates itself to intentionally move downward. It’s counterintuitive; it’s not natural. Nonetheless, it’s the way of Christ and it is good. The kingdom narrative calls us to devote ourselves to a lifestyle of compassion as we learn to embody the good news of a better way. This is the tale of two tales: American and Kingdom. We’re born into one; we choose the other.
|
from functools import partial
import sys
import textwrap
from .vendor import six
from .context import Context
from .loader import Loader
from .parser import Parser, Context as ParserContext, Argument
from .executor import Executor
from .exceptions import Failure, CollectionNotFound, ParseError
from .util import debug, pty_size
from ._version import __version__
def task_name_to_key(x):
return (x.count('.'), x)
sort_names = partial(sorted, key=task_name_to_key)
indent_num = 2
indent = " " * indent_num
def print_help(tuples):
padding = 3
# Calculate column sizes: don't wrap flag specs, give what's left over
# to the descriptions.
flag_width = max(len(x[0]) for x in tuples)
desc_width = pty_size()[0] - flag_width - indent_num - padding - 1
wrapper = textwrap.TextWrapper(width=desc_width)
for flag_spec, help_str in tuples:
# Wrap descriptions/help text
help_chunks = wrapper.wrap(help_str)
# Print flag spec + padding
flag_padding = flag_width - len(flag_spec)
spec = ''.join((
indent,
flag_spec,
flag_padding * ' ',
padding * ' '
))
# Print help text as needed
if help_chunks:
print(spec + help_chunks[0])
for chunk in help_chunks[1:]:
print((' ' * len(spec)) + chunk)
else:
print(spec)
print('')
def parse_gracefully(parser, argv):
"""
Run ``parser.parse_argv(argv)`` & gracefully handle ``ParseError``.
'Gracefully' meaning to print a useful human-facing error message instead
of a traceback; the program will still exit if an error is raised.
If no error is raised, returns the result of the ``parse_argv`` call.
"""
try:
return parser.parse_argv(argv)
except ParseError as e:
sys.exit(str(e))
def parse(argv, collection=None):
"""
Parse ``argv`` list-of-strings into useful core & per-task structures.
:returns:
Three-tuple of ``args`` (core, non-task `.Argument` objects), ``collection``
(compiled `.Collection` of tasks, using defaults or core arguments
affecting collection generation) and ``tasks`` (a list of
`~.parser.context.Context` objects representing the requested task
executions).
"""
# Initial/core parsing (core options can affect the rest of the parsing)
initial_context = ParserContext(args=(
# TODO: make '--collection' a list-building arg, not a string
Argument(
names=('collection', 'c'),
help="Specify collection name to load. May be given >1 time."
),
Argument(
names=('root', 'r'),
help="Change root directory used for finding task modules."
),
Argument(
names=('help', 'h'),
optional=True,
help="Show core or per-task help and exit."
),
Argument(
names=('version', 'V'),
kind=bool,
default=False,
help="Show version and exit."
),
Argument(
names=('list', 'l'),
kind=bool,
default=False,
help="List available tasks."
),
Argument(
names=('no-dedupe',),
kind=bool,
default=False,
help="Disable task deduplication."
),
Argument(
names=('echo', 'e'),
kind=bool,
default=False,
help="Echo executed commands before running.",
),
Argument(
names=('warn-only', 'w'),
kind=bool,
default=False,
help="Warn, instead of failing, when shell commands fail.",
),
Argument(
names=('pty', 'p'),
kind=bool,
default=False,
help="Use a pty when executing shell commands.",
),
Argument(
names=('hide', 'H'),
help="Set default value of run()'s 'hide' kwarg.",
)
))
    # 'core' will carry an .unparsed attribute with whatever was left over.
debug("Parsing initial context (core args)")
parser = Parser(initial=initial_context, ignore_unknown=True)
core = parse_gracefully(parser, argv)
debug("After core-args pass, leftover argv: %r" % (core.unparsed,))
args = core[0].args
# Print version & exit if necessary
if args.version.value:
print("Invoke %s" % __version__)
sys.exit(0)
# Core (no value given) --help output
# TODO: if this wants to display context sensitive help (e.g. a combo help
# and available tasks listing; or core flags modified by plugins/task
# modules) it will have to move farther down.
if args.help.value == True:
print("Usage: inv[oke] [--core-opts] task1 [--task1-opts] ... taskN [--taskN-opts]")
print("")
print("Core options:")
print_help(initial_context.help_tuples())
sys.exit(0)
# Load collection (default or specified) and parse leftovers
# (Skip loading if somebody gave us an explicit task collection.)
if not collection:
debug("No collection given, loading from %r" % args.root.value)
loader = Loader(root=args.root.value)
collection = loader.load_collection(args.collection.value)
parser = Parser(contexts=collection.to_contexts())
debug("Parsing actual tasks against collection %r" % collection)
tasks = parse_gracefully(parser, core.unparsed)
# Per-task help. Use the parser's contexts dict as that's the easiest way
# to obtain Context objects here - which are what help output needs.
name = args.help.value
if name in parser.contexts:
# Setup
ctx = parser.contexts[name]
tuples = ctx.help_tuples()
docstring = collection[name].__doc__
header = "Usage: inv[oke] [--core-opts] %s %%s[other tasks here ...]" % name
print(header % ("[--options] " if tuples else ""))
print("")
print("Docstring:")
if docstring:
# Really wish textwrap worked better for this.
doclines = docstring.lstrip().splitlines()
for line in doclines:
print(indent + textwrap.dedent(line))
# Print trailing blank line if docstring didn't end with one
if textwrap.dedent(doclines[-1]):
print("")
else:
print(indent + "none")
print("")
print("Options:")
if tuples:
print_help(tuples)
else:
print(indent + "none")
print("")
sys.exit(0)
# Print discovered tasks if necessary
if args.list.value:
print("Available tasks:\n")
# Sort in depth, then alpha, order
task_names = collection.task_names
names = sort_names(task_names.keys())
for primary in names:
aliases = sort_names(task_names[primary])
out = primary
if aliases:
out += " (%s)" % ', '.join(aliases)
print(" %s" % out)
print("")
sys.exit(0)
# Return to caller so they can handle the results
return args, collection, tasks
def derive_opts(args):
run = {}
if args['warn-only'].value:
run['warn'] = True
if args.pty.value:
run['pty'] = True
if args.hide.value:
run['hide'] = args.hide.value
if args.echo.value:
run['echo'] = True
return {'run': run}
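# For example, core args with --warn-only and --pty set map to
# {'run': {'warn': True, 'pty': True}}, which seeds the Context below.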
def dispatch(argv):
args, collection, tasks = parse(argv)
results = []
executor = Executor(collection, Context(**derive_opts(args)))
# Take action based on 'core' options and the 'tasks' found
for context in tasks:
kwargs = {}
for _, arg in six.iteritems(context.args):
# Use the arg obj's internal name - not what was necessarily given
# on the CLI. (E.g. --my-option vs --my_option for
# mytask(my_option=xxx) requires this.)
# TODO: store 'given' name somewhere in case somebody wants to see
# it when handling args.
kwargs[arg.name] = arg.value
try:
# TODO: allow swapping out of Executor subclasses based on core
# config options
results.append(executor.execute(
name=context.name,
kwargs=kwargs,
                dedupe=not args['no-dedupe'].value
))
except Failure as f:
sys.exit(f.result.exited)
return results
def main():
# Parse command line
argv = sys.argv[1:]
debug("Base argv from sys: %r" % (argv,))
dispatch(argv)
|
My husband likes pudding but has to watch his sugar intake, being a diabetic. So he enjoys the 12 cup Snack Pack Pudding Cups that are sugar free. There are vanilla and chocolate cups included in this snack pack, but my husband prefers the taste of the chocolate (as do I, because I sometimes will eat a cup). The chocolate pudding cups have 70 calories a cup with 3.5 fat grams, 0 cholesterol, 15 grams of total carbohydrate, 0 grams of sugars and 9 grams of sugar alcohol.
These pudding cups are made with real nonfat milk, and for being sugar free, the chocolate really doesn't taste bad at all. I happen to enjoy the chocolate flavor more than the vanilla when I have one, because the vanilla flavor is really not the best. My husband and I have checked Walmart to see if we can find a 12 pack of sugar free pudding that is all chocolate in flavor, but unfortunately, we haven't been able to find anything in the 12 pack size that is sugar free except the vanilla and chocolate combined.
The pudding is sweetened with Splenda, and each cup is 3.5 ounces. If you are looking for a sugar free chocolate snack which really does taste OK, you might want to try these pudding cups. You can also get the chocolate (or vanilla, if you prefer) in sizes smaller than a 12 pack.
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import core, workspace
import caffe2.python.hypothesis_test_util as hu
from hypothesis import given
from hypothesis import strategies as st
import numpy as np
import time
class TestTensorPackOps(hu.HypothesisTestCase):
def pack_segments_ref(self, return_presence_mask=False):
def pack_segments_ref(lengths, data):
arr = []
constant_values = 0
if data.dtype.char == 'S':
constant_values = ''
for idx in range(np.size(lengths)):
chunk = data[np.sum(lengths[:idx]):np.sum(lengths[:idx + 1])]
pad_length = np.max(lengths) - lengths[idx]
# ((0, pad_length), (0, 0)) says add pad_length rows of padding
# below chunk and 0 rows of padding elsewhere
arr.append(
np.pad(
chunk, ((0, pad_length), (0, 0)),
mode=str("constant"),
constant_values=constant_values
)
)
result = [arr]
if return_presence_mask:
presence_arr = []
for length in lengths:
pad_length = np.max(lengths) - length
presence_arr.append(
np.pad(
np.ones((length), dtype=np.bool), ((0, pad_length)),
mode=str("constant")
)
)
result.append(presence_arr)
return result
return pack_segments_ref
@given(
num_seq=st.integers(10, 500),
cell_size=st.integers(1, 10),
**hu.gcs
)
def test_pack_ops(self, num_seq, cell_size, gc, dc):
# create data
lengths = np.arange(num_seq, dtype=np.int32) + 1
num_cell = np.sum(lengths)
data = np.zeros(num_cell * cell_size, dtype=np.float32)
left = np.cumsum(np.arange(num_seq) * cell_size)
right = np.cumsum(lengths * cell_size)
for i in range(num_seq):
data[left[i]:right[i]] = i + 1.0
data.resize(num_cell, cell_size)
print("\nnum seq:{}, num cell: {}, cell size:{}\n".format(
num_seq, num_cell, cell_size)
+ "=" * 60
)
# run test
op = core.CreateOperator(
'PackSegments', ['l', 'd'], ['t'])
workspace.FeedBlob('l', lengths)
workspace.FeedBlob('d', data)
start = time.time()
self.assertReferenceChecks(
device_option=gc,
op=op,
inputs=[lengths, data],
reference=self.pack_segments_ref(),
)
end = time.time()
print("{} used time: {}".format(gc, end - start).replace('\n', ' '))
with core.DeviceScope(gc):
workspace.FeedBlob('l', lengths)
workspace.FeedBlob('d', data)
workspace.RunOperatorOnce(core.CreateOperator(
'PackSegments',
['l', 'd'],
['t'],
device_option=gc))
workspace.RunOperatorOnce(core.CreateOperator(
'UnpackSegments',
['l', 't'],
['newd'],
device_option=gc))
assert((workspace.FetchBlob('newd') == workspace.FetchBlob('d')).all())
@given(
**hu.gcs_cpu_only
)
def test_pack_ops_str(self, gc, dc):
# GPU does not support string. Test CPU implementation only.
workspace.FeedBlob('l', np.array([1, 2, 3], dtype=np.int64))
strs = np.array([
["a", "a"],
["b", "b"],
["bb", "bb"],
["c", "c"],
["cc", "cc"],
["ccc", "ccc"]],
dtype='|S')
workspace.FeedBlob('d', strs)
workspace.RunOperatorOnce(core.CreateOperator(
'PackSegments',
['l', 'd'],
['t'],
device_option=gc))
workspace.RunOperatorOnce(core.CreateOperator(
'UnpackSegments',
['l', 't'],
['newd'],
device_option=gc))
assert((workspace.FetchBlob('newd') == workspace.FetchBlob('d')).all())
def test_pad_minf(self):
workspace.FeedBlob('l', np.array([1, 2, 3], dtype=np.int32))
workspace.FeedBlob(
'd',
np.array([
[1.0, 1.1],
[2.0, 2.1],
[2.2, 2.2],
[3.0, 3.1],
[3.2, 3.3],
[3.4, 3.5]],
dtype=np.float32))
workspace.RunOperatorOnce(core.CreateOperator(
'PackSegments', ['l', 'd'], ['t'], pad_minf=True))
workspace.RunOperatorOnce(core.CreateOperator(
'Exp', ['t'], ['r']
))
result = workspace.FetchBlob('t')
assert(result[0, -1, 0] < -1000.0)
# The whole point of padding with -inf is that when we exponentiate it
# then it should be zero.
exponentiated = workspace.FetchBlob('r')
assert(exponentiated[0, -1, 0] == 0.0)
@given(**hu.gcs_cpu_only)
def test_presence_mask(self, gc, dc):
lengths = np.array([1, 2, 3], dtype=np.int32)
data = np.array(
[
[1.0, 1.0], [2.0, 2.0], [2.0, 2.0], [3.0, 3.0], [3.0, 3.0],
[3.0, 3.0]
],
dtype=np.float32
)
op = core.CreateOperator(
'PackSegments', ['l', 'd'], ['t', 'p'], return_presence_mask=True
)
workspace.FeedBlob('l', lengths)
workspace.FeedBlob('d', data)
inputs = [lengths, data]
self.assertReferenceChecks(
device_option=gc,
op=op,
inputs=inputs,
reference=self.pack_segments_ref(return_presence_mask=True),
)
op = core.CreateOperator(
'PackSegments', ['l', 'd'], ['t', 'p'], return_presence_mask=True
)
workspace.RunOperatorOnce(op)
output = workspace.FetchBlob('t')
expected_output_shape = (3, 3, 2)
self.assertEquals(output.shape, expected_output_shape)
presence_mask = workspace.FetchBlob('p')
expected_presence_mask = np.array(
[[True, False, False], [True, True, False], [True, True, True]],
dtype=np.bool
)
self.assertEqual(presence_mask.shape, expected_presence_mask.shape)
np.testing.assert_array_equal(presence_mask, expected_presence_mask)
def test_presence_mask_empty(self):
lengths = np.array([], dtype=np.int32)
data = np.array([], dtype=np.float32)
op = core.CreateOperator(
'PackSegments', ['l', 'd'], ['t', 'p'], return_presence_mask=True
)
workspace.FeedBlob('l', lengths)
workspace.FeedBlob('d', data)
workspace.RunOperatorOnce(op)
output = workspace.FetchBlob('p')
expected_output_shape = (0, 0)
self.assertEquals(output.shape, expected_output_shape)
@given(**hu.gcs_cpu_only)
def test_out_of_bounds(self, gc, dc):
# Copy pasted from test_pack_ops but with 3 changed to 4
lengths = np.array([1, 2, 4], dtype=np.int32)
data = np.array([
[1.0, 1.0],
[2.0, 2.0],
[2.0, 2.0],
[3.0, 3.0],
[3.0, 3.0],
[3.0, 3.0]], dtype=np.float32)
op = core.CreateOperator(
'PackSegments', ['l', 'd'], ['t'])
inputs = [lengths, data]
self.assertRunOpRaises(
device_option=gc,
op=op,
inputs=inputs,
exception=RuntimeError
)
@given(**hu.gcs_cpu_only)
def test_under_bounds(self, gc, dc):
# Copy pasted from test_pack_ops but with 3 changed to 2
lengths = np.array([1, 2, 2], dtype=np.int32)
data = np.array([
[1.0, 1.0],
[2.0, 2.0],
[2.0, 2.0],
[3.0, 3.0],
[3.0, 3.0],
[3.0, 3.0]], dtype=np.float32)
op = core.CreateOperator(
'PackSegments', ['l', 'd'], ['t'])
inputs = [lengths, data]
self.assertRunOpRaises(
device_option=gc,
op=op,
inputs=inputs,
exception=RuntimeError
)
if __name__ == "__main__":
import unittest
unittest.main()
|
Tom Ford is the genius who can turn his hand to anything and make it stylish, be it films, fashion or, what we are focusing on here, cologne.
There are plenty of names that have left a mark on the fragrance industry; Christian Dior and Saint Laurent are among the best known. But very few have made such an impact in so little time as this director, designer and, now, one of the most renowned men in the world.
His wardrobe is full of collections edited with grace, all of them distinctive and lavish, the kind that make any man’s personality glow.
Today, as I mentioned already, we are about to discuss some of the most successful cologne creations by this genius fashion statement maker. I’ll include the ten best Tom Ford colognes that have taken the world of men’s fashion by storm.
Don’t worry if you are not sure how to pick a cologne that matches your aura. I’ve written my thoughts below to help you out, including a simple-to-follow guide that should help you choose the right Tom Ford cologne for your persona.
Let’s jump into the world of fragrance!
Let me review, one by one, some of the best-smelling Tom Ford colognes the market has for you. Please make sure you understand what goes well with your lifestyle and choose accordingly.
I’ll also include some valuable information after these reviews to help you find your perfect scent. Not every fragrance is going to suit you, even if each of them is really good. That’s why you need a few rules for telling what matches your aura best.
For now, let’s take a trip through ten fascinating and lavish fragrances from Tom Ford that define a perfectly scented man.
The Tom Ford Noir Extreme is an excellent choice of fragrance that any man would love to have in his closet. It comes from the latest Noir Extreme series, a lineup that has earned plenty of hype for its brilliance.
The Noir Extreme eau de parfum comes in a 3.4-ounce bottle. That’s a good size both for people who wear it regularly and for those who save it for occasional use.
The fragrance is fantastic and works for both men and women. This unisex cologne blends a few versatile notes that go with any mood and any aura, which makes it a perfect pick.
Most men, though, will love wearing it in the evening, since the smell is somewhat sensual and perfect for dates and nights out. It’s the cherry on top for a perfectly dressed man.
The smell lasts for two to three hours, which is decent. People who want an all-day fragrance may be slightly disappointed, but the nice thing is that the scent never completely fades; instead, it lingers as a sweet, light trace.
A great smell for night out and dates.
Last for a good time period.
Unique warm but the spicy smell.
This 1.7-ounce spray from Tom Ford suits any skin type, and most men will appreciate the unique notes it carries. It's one of the best and longest-lasting fragrances you can own for making a striking impression.
It's a perfect fit for occasions where you expect to make some of your best memories. As a well-balanced blend, it suits any mood and atmosphere and leaves a lasting impression on the people around you.
The main ingredients of this designer fragrance are lemon, sweet orange, and ginger, with a bit of peppercorn added for variety. There are pipe tobacco and patchouli as well, which give the scent a serious, grown-up character.
A very masculine scent that works in any situation.
A flawless choice for big occasions and anniversaries.
A very nice combination of ingredients.
The size won't be enough if you're a regular user.
This 100 ml EDT spray from Tom Ford is a big bottle that makes a classy, luxurious statement and will go a long way. It's one of the most popular choices in men's grooming and personal care.
It comes from the Signature lineup, which is known for being budget-friendly as well as masculine in scent. The fragrance isn't so overpowering that it's obviously begging for attention.
The combination of essences such as mandarin orange, orange blossom, lemon leaf, violet, ginger, cedar, and grapefruit gives it a distinct smell that radiates class.
On first sniff, the fragrance reads as woody and citrusy. It's extremely appealing and perfect for a dinner look, and it's one of those colognes you can make your regular go-to and feel comfortable with in any occasion or situation.
Any cologne enthusiast should own this fantastic fragrance. It's a great complement to a sharp outfit and will feel a touch nostalgic for many men.
The price is affordable for most men.
It comes with nice packaging.
The formula is gentle on the skin.
It is suitable for any get-together or occasion.
The scent does not last a whole day.
Tom Ford Tobacco Vanille comes in a handy 50 ml size that suits men who only need a scent for occasional use. Its mix of essences is right for your special days and best memories.
This is one of the finest choices out there and will satisfy most men's taste. It's a classy fragrance that any fashion-conscious man would love to have in his collection.
It's a great pick that delivers a strong masculine scent suited to any mood or environment. The compliments will flood in, and that's certain to put a smile on your face.
If you're looking for a gift to impress your boss or the men in your life, it's definitely a pro choice. Men who carry an air of authority tend to have a soft spot for this kind of fragrance.
However, if you love light scents, this might not be the right choice for you. It's also not ideal for everyday use, since the cost is quite high. But for big deals and parties, it's the perfect scent to wear.
The best fragrance ever from Tom Ford, according to many users.
It's long-lasting and projects well.
An amazing combination of spiced vanilla with a bit of cedar.
Tom Ford Grey Vetiver is a 3.4-ounce fragrance spray that works for men of any taste. It's a simple yet sophisticated scent that keeps your look down to earth while still giving you a strong, striking presence in a crowd.
Grey Vetiver is a designer men's fragrance that's perfect for regular, casual wear; it's a great pick for everyday looks and the office.
Making a good impression matters in a meeting or a big presentation, and this scent is light enough not to distract people, giving off a subtle, cozy feel instead.
Moreover, the cologne is very long-lasting. It's clearly formulated with daily wear in mind, so when you put it on in the morning it doesn't simply vanish after a few hours.
Grey Vetiver also uses some very high-quality ingredients. The fragrance gets muskier over time, which I absolutely love, and it's a textbook choice for warm weather and fresh looks.
The ingredients are well balanced.
Gives a subtle but impressive feel.
It's a perfect pick for summertime and warm weather.
The size is generous enough for regular use.
Tom Ford Noir is a 3.4-ounce spray edition built around an ever-attractive vanilla note. It gives you a refreshing, soothing scent of a kind that's rare to find in a cologne.
The fragrance is a newer release from the hyped and popular Noir series, and it's one of my favorites because it gives every note equal emphasis.
It comes across as somewhat peppery, in a good way, giving you a natural feel that keeps you simple yet stunning in any gathering. It then shifts quickly into a light vanilla, one of the favorite base notes of most fragrance enthusiasts.
The most appealing thing about it is that it works with casual, showy, or any other kind of look. The clean fragrance also lasts a very good amount of time; it's never overbearing, but it has enough strength to leave a mark.
A simple but gorgeous scent to wear.
Very suitable for most occasions.
Long-lasting enough for all-day wear.
Gives your look a fresh, clean finish.
This Tom Ford Grey Vetiver is a small 1.7-ounce bottle you can wear with any season's look. The down-to-earth fragrance is woody and pleasant to smell, one of the best combinations of freshness and masculinity.
It has very good longevity and holds up well through a full day of wear. I never felt it lost its character; even at the very end of the day you still get a pleasant scent.
The genuine product comes at a very affordable price; at least compared with other Tom Ford colognes, this one is quite inexpensive.
It's one of those scents you could happily make your signature. If you're thinking about a gift for a man, this is a good idea.
Overall the scent is a bit mature, fresh, and totally distinctive, sure to catch people's attention.
A very attractive scent.
Reasonably priced for a designer cologne.
Perfect for occasional use.
Goes well with any season's outfit.
One of the best gift options.
Middle-aged men might not like the feel.
Tom Ford Tuscan Leather for men comes in a very convenient 1.7-ounce size. This luxurious, excellent-quality cologne gives you the feeling of being rich, and it belongs to Tom Ford's rare and unique Private Blend series.
It's a popular yet rare cologne to own. The price is the only factor to worry about; as we all know by now, the Private Blend series is quite expensive, and that's the only hitch.
However, if you can afford this beautiful fragrance, there's no doubt you'll be enjoying one of the best scents in the world.
It comes in very nice packaging, and since the price is a bit high you won't be using it every day, so one bottle is enough to last a good while.
A luxurious feel that elevates your presence.
An elegant pick from the Tom Ford collection.
The scent is exotic and beautiful.
Suitable for big events and the major moments in your life.
A rich leather feel with a nice sweet aroma.
Tom Ford Oud Wood is a 3.4-ounce cologne with a woody character. It's a perfect fit for men who love fresh, manly, earthy scents.
Oud Wood also comes from the rare and unique Private Blend collection, so it is quite pricey. But once you focus on the smell and the quality, the price feels like nothing. It's wonderful, the kind of scent that earns instant approval from people of class.
If you want to impress your boss or simply win over a man's heart, this could be your ticket. Gift it to him; spend the money and get a scent that's genuinely one of a kind in the fragrance industry.
The good news is that this cologne isn't limited to men. It's actually a unisex perfume that both can enjoy equally, so if you buy it for yourself, congratulations: your spouse is going to love you for it too.
Excellent fragrance quality with good ingredients.
An earthy, woody scent that is very charming.
Gives you a sense of confidence at big gatherings.
The packaging holds up well over a long period.
The price is a bit steep.
Not practical for everyday use.
The Tom Ford eau de toilette is one of the rare offerings from the designer that combines budget and quality. If you're short on budget but still want to enjoy a branded fragrance, here's one for you.
First of all, note that there's no tester included. It's also one of those colognes that is kind to the skin: it's neither harsh nor allergenic for most skin types, and most people will find it suits their condition.
The price doesn't come at the cost of quality; this is not like a cheap Axe-style spray that also smells cheap. The fragrance speaks of a well-known, classy man. It's that charming.
It comes with lighter notes and nice black-bottle packaging. The fragrance works well for everyday life and office looks, and even on a dinner date this scent will have your partner head over heels for you.
Really budget-friendly for any man.
Goes well with most occasions.
A perfect cologne for regular use.
Lasts quite well through the whole day.
Comes in a generous amount that goes a long way.
Not for men who are after strong scents.
There are some rules you should follow to pick the cologne that suits you. All Tom Ford scents smell amazing, no doubt about it, but not everything suits everyone, so you need to find the one that goes with your lifestyle and persona.
A man makes an impression on other people partly through his scent. Don't waste that first impression by choosing something that says nothing about you. Here are a few things that should help.
This is a fact you absolutely cannot ignore, and it's often said to have scientific backing: a man gets the most out of a cologne that matches his own body scent. Look for something that feels like an extension of your natural odor.
You know yourself best, and only you can decide whether a cologne's scent really works on you. Focus on the notes of a particular bottle that you actually like; don't buy one just because it sounds luxurious. You need something that balances your whole personality without feeling like overkill.
Some people will judge the scent you pick simply because you love challenging smells. Tom Ford has a huge collection built almost entirely on edgy, stand-out-from-the-crowd pieces.
So if you're going to pick one from his collection, be confident in your choice. Only if a lot of people have the same negative reaction to it should you consider switching to a different type.
3. Focus On Your Preferred Ingredients.
Not every cologne smells or feels the same, so figure out which notes you like better than others. Some men love woody, earthy scents, others find fruity notes more attractive, and there's also a big fan base for tropical, oceanic colognes.
Remember one thing: you'll be the one smelling your cologne the most, so be sure you like it.
I'm not saying Tom Ford will sell you junk in a pretty bottle. But the fact is, you simply can't buy a cologne based on what the bottle looks like. I've seen plenty of men decide a cologne suits them only because the bottle looks fancy.
A fancy-looking bottle might well contain a really nice scent, especially if it's from Tom Ford. But what if a scent that's beautiful to others doesn't match what you find attractive? This is a very common scenario, and trust me, it will happen to you if you keep judging a fragrance by its bottle and packaging.
First and foremost, make sure the scent works on you and is priced at something you can afford. Go through the basics and check them off one by one. Finally, if you have a few candidates in mind that fit, then choose whichever bottle or packaging you prefer.
Remember, a flashy bottle won't make it great; the scent should speak for you.
There are plenty of fake Tom Ford colognes out there, so be extra careful about where you buy. Plenty of customers complain about being deceived by a fake simply because they were too thrilled with a steep discount to notice the catch.
When you decide to buy a cologne from such a lavish brand, it makes no sense to hunt for bargain prices. They are going to cost a fair amount, but once you factor in the quality, the price is actually reasonable.
The bottom line is this: don't chase discounts; double-check the source you're buying from. I'd suggest Amazon for genuine Tom Ford colognes, and the best part about that route is that you can get a refund if there's a problem. That's what I call a safe purchase!
Colognes come in different bottle sizes, but most range from 1 to 3.4 oz. If you like to stick with the same fragrance for a long time, go for the bigger sizes; otherwise, choose a smaller bottle.
Another factor: if you're buying for the first time and have no idea how the cologne will smell on you, go for a mini size. That way your money is relatively safe. Smaller bottles also suit men who only wear fragrance occasionally.
Most colognes follow a three-part life cycle. Imagine a pyramid that evaporates: the top fades away first, and you gradually work down to the base. Each section holds certain scents, or notes, which blend together into what you actually smell.
Most fragrances have three distinct types of notes, known as top, medium, and base notes.
The top note is a light scent that hits your nose right after you apply the fragrance. It typically lasts from around fifteen minutes to two hours. Common top notes are powdery, fruity, marine, floral, citrus, and spicy.
Then comes the medium note, also known as the heart note. This is where the main character of the scent shows itself, developing right after the top note clears. Most medium notes last three to five hours after you spray.
Medium notes build on the top notes and include scents such as grass, stone, jasmine, clove, and some fruity elements.
The final layer is the base notes, the last part of the fragrance to develop. These are bolder scents that become more noticeable later in the day.
The base note is the foundation of your fragrance and determines how long it lasts on you, usually five to ten hours. Typical base notes include moss, vetiver, tar, leather, tobacco, sandalwood, vanilla, and musk.
To figure out the best Tom Ford cologne for your needs, I want to address two types of buyers: the fanatic of luxurious fragrance and the casual buyer. Readers come with different pocket depths, so to make things easy I'll split the catalogue into a few sections.
The first is the premium Private Blend collection, which is of course expensive. The next is the more accessible Signature series. After these two prominent collections there are three smaller lines as well.
This is Tom Ford's own experimental, lavish collection. It launched in 2007, and the designer has described it as a collection of genuine fragrances, totally different from and unconcerned with mainstream fragrance-making.
Fashion enthusiasts and renowned critics have called the collection challenging, thought-provoking, and slightly edgier. Most of these fragrances open with notes such as tobacco, leather, gardenia, black violet, and amber.
The Private Blends are reserved for the wilder experiments; for the wider market, the most successful colognes come from the Signature series, which is presented and honed for the general public.
Naturally, this series includes some of the most affordable Tom Ford colognes, but that doesn't mean all of them are cheap. You'll still find models such as Grey Vetiver and Noir Extreme that are quite expensive; the higher-priced ones stay in demand because they smell like bliss.
If I had to describe the Noir collection from Tom Ford, I'd call it "a perfect blend of evening-ready temptation." There are four colognes in this series, each very different yet charming; think of them as distant cousins sharing the same surname.
The four colognes have distinct personalities. One leans warm and powdery, one delivers a light citrusy punch, and the other two run from smoky and woody to earthy, rosy, and spicy. The last of them actually falls under the Private Blend collection, though.
Who doesn't like the classics? They're evergreen, the forever favorites of any fashionista. Tom Ford is a fan of classic and vintage fragrances, and that love shows in the Portofino lineup, where he offers simple colognes with an outstanding feel.
It's like the first love of any fragrance fanatic. The series holds colognes built from aromatic herbs, citrus fruits, and other light notes; these are the best Tom Ford colognes for summertime use, giving you a fresh but manly aura.
Very few scent lovers don't crave that pure earthy smell. Oud is a lineup that blends woody and earthy feels, and it's one of the most successful and heavily expanded series from Tom Ford. Men have a soft spot for woody scents because they give a bold, manly feel.
The series is so hyped now that it's one of the rare Tom Ford lines to come with its own ancillaries, including beard oil, shower gel, and body lotion. The fragrances range across floral, spicier and smoky wood, and salty marine notes.
These fragrances work in any season, no matter the occasion, and every man would love one of them to complete his perfume cabinet.
Whichever collection interests you, there's no denying that Tom Ford is the epitome of good taste in fragrance. The careful construction and polish make his colognes a flawless pick for any fashion-conscious man.
A fragrance or cologne won't last forever. Rapid heat fluctuations, the kind you often get in a bathroom, shorten its life by causing the fragrance molecules to break apart.
The same goes for sunlight, so don't store a bottle on a windowsill or anywhere near the sun. If you want to extend its life, find a dark, dry, cool place to keep it; the most suitable spot is your closet.
Tom Ford is one of the most prominent American fashion designers, and he has always created a wow factor in whatever he makes. The style and grace of his creations are unmatched, and the same goes for the cologne series he has launched over the years.
Smell is one of the most powerful human senses, so leaving an impression with a good scent on the people you meet every day really matters, for professional and personal reasons alike. Your partner will love it when you smell good, and it will make a mark on your colleagues and friends as well.
So wear a fragrance and express yourself with grace and style. Any of these ten Tom Ford colognes would be a fantastic choice; I can't announce a single winner, since I'm head over heels for all of them.
Just make sure what you wear fits the impression you want to make and keeps you memorable. Good luck!
|
# This class implements an interface for a single player to the euchred
# server. This class expects to be used as a base class for an actual
# player class that can be used with the peuchre program. The real
# player class is expected to be called Player, it needs to sub-class
# EuchrePlayer, and needs to implement the following methods:
#
# - decideOrderPass()
# - decideCallPass()
# - decideDrop()
# - decideDefend()
# - decidePlayLead()
# - decidePlayFollow()
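#
# As a rough, illustrative sketch only (not part of peuchre itself): a
# minimal Player sub-class consistent with the send* methods below might
# look like the commented example here. The always-pass and
# play-the-first-card choices are placeholder assumptions, not a real
# strategy, and the import path is a guess:
#
#   from euchreplayer import EuchrePlayer      # assumed module name
#
#   class Player(EuchrePlayer):
#       def decideOrderPass(self):
#           # never order the hole card up
#           return self.messageId['ORDERPASS']
#       def decideCallPass(self):
#           # never call trump; suit is ignored for a pass
#           return {'op': self.messageId['CALLPASS'], 'suit': None}
#       def decideDrop(self, hole):
#           # as dealer, just discard the first card in hand
#           return self.hand[0]
#       def decideDefend(self):
#           # never defend alone
#           return self.messageId['DEFENDPASS']
#       def decidePlayLead(self):
#           # lead the first card in hand
#           return self.hand[0]
#       def decidePlayFollow(self):
#           # follow with the first card in hand (not checked for legality)
#           return self.hand[0]
#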
import socket
import struct
import logging
import sys
import random
import string
import select
from logging import warning as warn, log, debug, info, error, critical
from card import Card
class EuchrePlayer:
# this is the dict that maps message ID to message name: we also generate
# a reverse mapping at the end
messageId = {
# sent by the client after connection, as well as the server's replies
'JOIN' : 123401 ,
'JOINDENY' : 123402 ,
'JOINACCEPT' : 123403 ,
# sent by the server to connected clients when the server is quitting */
'SERVERQUIT' : 123404 ,
# sent by the client to the server when the client is quitting */
'CLIENTQUIT' : 123405 ,
# sent if the server is full when the client tries to connect */
'DECLINE' : 123406 ,
# sent by the server when the client is about to be terminated */
'KICK' : 123407 ,
# the ID messages, request from client, responses from server */
'ID' : 123408 ,
'IDACCEPT' : 123409 ,
'IDDENY' : 123410 ,
# sent by the client when sending in a chat message, sent by the server
# when broadcasting the chat message
'CHAT' : 123411 ,
# sent by server to clients after game state change: provides all info
# needed by client to enter or resume game
'STATE' : 123412 ,
# sent as a request when the creator wants to kick another player */
'KICKPLAYER' : 123413 ,
'KICKDENY' : 123414 ,
# sent by a client setting options */
'OPTIONS' : 123415 ,
'OPTIONSDENY' : 123416 ,
# sent by the creator to start the game */
'START' : 123417 ,
'STARTDENY' : 123418 ,
# sent by the creator to end or reset the game and sent by the server
# to tell the clients the game is ending */
'END' : 123419 ,
'ENDDENY' : 123420 ,
# sent by client as responses to an order offer */
'ORDER' : 123421 ,
'ORDERALONE' : 123422 ,
'ORDERPASS' : 123423 ,
'ORDERDENY' : 123424 ,
# sent by client to indicate dropped card, and the deny message */
'DROP' : 123425 ,
'DROPDENY' : 123426 ,
# sent by client as responses to a call offer */
'CALL' : 123427 ,
'CALLALONE' : 123428 ,
'CALLPASS' : 123429 ,
'CALLDENY' : 123430 ,
# sent by client as responses to a defend offer */
'DEFEND' : 123431 ,
'DEFENDPASS' : 123432 ,
'DEFENDDENY' : 123433 ,
# sent by client as responses to a play offer */
'PLAY' : 123434 ,
'PLAYDENY' : 123435 ,
# flag messages sent by server */
'TRICKOVER' : 123436 ,
'HANDOVER' : 123437 ,
'GAMEOVER' : 123438 ,
'PLAYOFFER' : 123439 ,
'DEFENDOFFER' : 123440 ,
'CALLOFFER' : 123441 ,
'ORDEROFFER' : 123442 ,
'DROPOFFER' : 123443 ,
'DEAL' : 123444 ,
# these are the trailing bytes, to indicate the end of a message
'TAIL1' : 250 ,
'TAIL2' : 222 ,
}
# now generate the reverse mapping: thanks stack overflow!
#messageName = {v: k for k,v in messageId.items()}
messageName = {}
for k, v in messageId.items():
messageName[v] = k
###########################################################################
#
def __init__(self, **kwargs):
self.server = "0.0.0.0"
self.port = -1
self.playerhandle = -1
self.gamehandle = -1
self.team = -1
# this tracks the data from the most recent state information
self.state = {}
self.state[0] = {}
self.state[1] = {}
self.state[2] = {}
self.state[3] = {}
self.state['state'] = 0
# initialize scores and tricks to 0
self.state['usscore'] = 0
self.state['themscore'] = 0
self.state['ustricks'] = 0
self.state['themtricks'] = 0
# init orderer to -1
self.state['orderer'] = -1
# randomize that name!
self.name = ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(10))
# override the defaults if we were passed relevant arguments
if 'server' in kwargs:
self.server = kwargs['server']
if 'port' in kwargs:
self.port = kwargs['port']
if 'name' in kwargs:
self.name = kwargs['name']
# if we were passed a record object, save it
if 'record' in kwargs:
self.record = kwargs['record']
# we use this to ID hands in the log
self.gcount = 0
if 'gcount' in kwargs:
self.gcount = kwargs['gcount']
self.hcount = 0
self.tcount = 0
self.setId()
if 'lock' in kwargs:
self.lock = kwargs['lock']
###########################################################################
# This is a utility function to set the self.id string: it uses the
# game, hand, and trick count variables to compose a self.id string
# which is included in every log message, to make the log files easier
# to parse.
#
def setId(self):
self.id = \
"%s g%dh%dt%d : " % (self.name,self.gcount,self.hcount,self.tcount)
###########################################################################
# prints the score
#
def printScore(self):
info(self.id+"score us:%d them:%d" %
(self.state['usscore'],self.state['themscore']) )
###########################################################################
# print out the detailed state information
#
def status(self):
info("")
info(self.id+"My Status")
info(self.id+" server: " + self.server)
info(self.id+" port : " + str(self.port))
info("")
info(self.id+" Name : " + str(self.name))
info(self.id+" Player: " + str(self.playerhandle))
info(self.id+" Team : " + str(self.team))
info(self.id+" Game : " + str(self.gamehandle))
# just the game stuff
self.gameStatus()
# if we've got a hand state information, we should have all the
# player information, so print that
if 'hstate' in self.state:
self.playerStatus()
###########################################################################
# print out the game state information
#
def gameStatus(self):
info("")
info(self.id+"Game Status:")
info(self.id+" Score : %d vs %d"
% (self.state['usscore'],self.state['themscore']))
info(self.id+" Tricks: %d vs %d"
% (self.state['ustricks'],self.state['themtricks']))
info(self.id+" Game Started: %d" % (self.state['ingame']))
info(self.id+" Hand Status : %d" % (self.state['hstate']))
info(self.id+" options:")
info(self.id+" Can Defend Alone: %d"
% (self.state['defend']))
info(self.id+" Must Go Alone on Order: %d"
% (self.state['aloneonorder']))
info(self.id+" Screw the Dealer: %d"
% (self.state['screw']))
info(self.id+" Number of cards: %d (%s)"
% (self.state['numcards'], self.printHand(self.hand)) )
info(self.id+" Trump is Set: %d" % (self.state['trumpset']))
if not self.state['holein']:
info(self.id+" Hole Card: not dealt")
else:
info(self.id+" Hole Card: " + self.state['hole'])
###########################################################################
# print out all the player state info
#
def playerStatus(self):
for i in (0,1,2,3):
# skip this player if their state isn't joined
if self.state[i]['state'] != 2:
continue
# otherwise print all the info
info("")
info(self.id+"Player %d:" % (i))
info(self.id+" Name: %s" % (self.state[i]['name']))
info(self.id+" Team: %d" % (self.state[i]['team']))
info(self.id+" Dealer: %d" % (self.state[i]['dealer']))
info(self.id+" Ordered: %d" % (self.state[i]['ordered']))
info(self.id+" Passed: %d" % (self.state[i]['passed']))
info(self.id+" Made It: %d" % (self.state[i]['maker']))
info(self.id+" Alone: %d" % (self.state[i]['alone']))
info(self.id+" Lead: %d" % (self.state[i]['leader']))
info(self.id+" Creator: %d" % (self.state[i]['creator']))
info(self.id+" Offers:")
info(self.id+" Drop: %d" % (self.state[i]['dropoffer']))
info(self.id+" Order: %d"
% (self.state[i]['orderoffer']))
info(self.id+" Call: %d" % (self.state[i]['calloffer']))
info(self.id+" Play: %d" % (self.state[i]['playoffer']))
info(self.id+" Defend: %d"
% (self.state[i]['defendoffer']))
# if the player has a card in play, show it
if self.state[i]['cardinplay']:
info(self.id+" Card Played: " + self.state[i]['card'])
else:
info(self.id+" Card Played: none")
###########################################################################
# this routine will connect to the game server
#
def sendJoin(self):
# create the socket for connection to the server: we'll need this
# for use in the rest of the object
try:
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.s.connect((self.server,self.port))
except ConnectionRefusedError:
return False
# get the length of the name and use that length in the format string
namelen = len(self.name)
format = "!iiii" + str(namelen) + "sBB"
size = struct.calcsize(format)
# reduce the size by 4, to leave out the space needed for the
# leading size value
size = size - 4
# now generate a packed array of bytes for the message using that
# format string
message = struct.pack(format,
size,
self.messageId['JOIN'],
1,
len(self.name),
str.encode(self.name),
self.messageId['TAIL1'],
self.messageId['TAIL2'],
)
#self.printMessage(message)
self.s.send(message)
# set up a select with this socket
inputs = [ self.s ]
# wait for a message to come in
readable, writable, exceptional = select.select(inputs, [], inputs)
# we read a single int from the socket: this should represent the
# length of the entire message
(size,) = struct.unpack("!i",self.s.recv(4))
# read the specified number of bytes from the socket
bytes = self.s.recv(size)
#info(self.id+"len of bytes is " + str(len(bytes)))
# decode the message identifier
(id,) = struct.unpack_from("!i",bytes)
#info(self.id+"message is: %s (%d)" % (self.messageName[id],id))
# now we mung out a case switch on the message identifier
if ( id == self.messageId['JOINACCEPT'] ):
info(self.id+"join successful")
return self.parseJoinAccept(bytes)
elif ( id == self.messageId['JOINDENY'] ):
return self.parseJoinDeny(bytes)
elif ( id == self.messageId['DECLINE'] ):
return self.parseDecline(bytes)
else:
info(self.id+"unknown join response: %s (%d)" %
(self.messageName[id],id))
return self.badMessage(bytes)
###########################################################################
# this routine will send the start message to the game server
#
def sendStart(self):
# a start message looks like this:
# <msg> : <msglen> <START> <gh> <ph> <tail>
# prep the format string
format = "!iiiiBB"
size = struct.calcsize(format)
# reduce the size by 4, to leave out the space needed for the
# leading size value
size = size - 4
# now generate a packed array of bytes for the message using that
# format string
message = struct.pack(format,
size,
self.messageId['START'],
self.gamehandle,
self.playerhandle,
self.messageId['TAIL1'],
self.messageId['TAIL2'],
)
#self.printMessage(message)
self.s.send(message)
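###########################################################################
# Note: sendStart(), sendOrderPass() and sendDefend() all pack the same
# simple frame (<msglen> <ID> <gh> <ph> <tail>). As an illustrative sketch
# only (not used anywhere in this class), that shared framing could be
# factored into a helper such as:
#
#   def sendSimple(self, msgid):
#       fmt = "!iiiiBB"
#       size = struct.calcsize(fmt) - 4   # exclude the leading size field
#       self.s.send(struct.pack(fmt, size, msgid,
#                               self.gamehandle, self.playerhandle,
#                               self.messageId['TAIL1'],
#                               self.messageId['TAIL2']))
#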
###########################################################################
# this routine will send an order, order alone, or order pass message,
# based on what the player sub-class implementation of decideOrderPass()
# returns
#
def sendOrderPass(self):
# possible messages look like this:
# <msg> : <msglen> <ORDER> <gh> <ph> <tail>
# <msg> : <msglen> <ORDERALONE> <gh> <ph> <tail>
# <msg> : <msglen> <ORDERPASS> <gh> <ph> <tail>
# get the message we should send to the server: this should be one
# of ORDER, ORDERALONE, or ORDERPASS
message = self.decideOrderPass()
# prep the format string
format = "!iiiiBB"
size = struct.calcsize(format)
# reduce the size by 4, to leave out the space needed for the
# leading size value
size = size - 4
# now generate a packed array of bytes for the message using that
# format string
message = struct.pack(format,
size,
message,
self.gamehandle,
self.playerhandle,
self.messageId['TAIL1'],
self.messageId['TAIL2'],
)
#self.printMessage(message)
self.s.send(message)
###########################################################################
# this routine will send a call, call alone, or call pass message,
# based on what the player sub-class implementation of decideCallPass()
# returns
#
def sendCallPass(self):
# a call looks like this:
# <msg> : <msglen> <CALL> <gh> <ph> <suit> <tail>
# <msg> : <msglen> <CALLALONE> <gh> <ph> <suit> <tail>
# a call pass looks like this:
# <msg> : <msglen> <CALLPASS> <gh> <ph> <tail>
# get the message we should send to the server: this should be one
# of CALL, CALLALONE, or CALLPASS, and a suit (which will be
# None if the return is a CALLPASS)
result = self.decideCallPass()
op = result['op']
suit = result['suit']
# now generate a packed array of bytes for the message using that
# format string, depending on the message we're supposed to return
if op == self.messageId['CALL'] \
or op == self.messageId['CALLALONE']:
# prep the format string
format = "!iiiiiBB"
size = struct.calcsize(format)
# reduce the size by 4, to leave out the space needed for the
# leading size value
size = size - 4
message = struct.pack(format,
size,
op,
self.gamehandle,
self.playerhandle,
suit,
self.messageId['TAIL1'],
self.messageId['TAIL2'],
)
#self.printMessage(message)
self.s.send(message)
# now generate a packed array of bytes for the message using that
# format string, depending on the message we're supposed to return
if op == self.messageId['CALLPASS']:
format = "!iiiiBB"
size = struct.calcsize(format)
# reduce the size by 4, to leave out the space needed for the
# leading size value
size = size - 4
message = struct.pack(format,
size,
op,
self.gamehandle,
self.playerhandle,
self.messageId['TAIL1'],
self.messageId['TAIL2'],
)
#self.printMessage(message)
self.s.send(message)
###########################################################################
# this routine will drop a card, chosen by the player sub-class
# implementation of decideDrop(), in response to a drop offer
#
def sendDrop(self):
# a drop message looks like this:
# <msg> : <msglen> <DROP> <gh> <ph> <card> <tail>
# call decideDrop() which should return a card to drop
card = self.decideDrop(self.state['hole'])
# prep the format string
format = "!iiiiiiBB"
size = struct.calcsize(format)
# reduce the size by 4, to leave out the space needed for the
# leading size value
size = size - 4
# now generate a packed array of bytes for the message using that
# format string
message = struct.pack(format,
size,
self.messageId['DROP'],
self.gamehandle,
self.playerhandle,
card.value,
card.suit,
self.messageId['TAIL1'],
self.messageId['TAIL2'],
)
#self.printMessage(message)
self.s.send(message)
###########################################################################
# this routine will send a defend or defend pass message, based on what
# the player sub-class implementation of decideDefend() returns
#
def sendDefend(self):
# a defend message looks like this:
# <msg> : <msglen> <DEFEND|DEFENDPASS> <gh> <ph> <tail>
# call the decideDefend() routine to determine if we should
# defend alone or not
message = self.decideDefend()
# prep the format string
format = "!iiiiBB"
size = struct.calcsize(format)
# reduce the size by 4, to leave out the space needed for the
# leading size value
size = size - 4
# now generate a packed array of bytes for the message using that
# format string
message = struct.pack(format,
size,
message,
self.gamehandle,
self.playerhandle,
self.messageId['TAIL1'],
self.messageId['TAIL2'],
)
#self.printMessage(message)
self.s.send(message)
###########################################################################
# this routine will play a card
#
def sendPlay(self):
# a play message looks like this:
# <msg> : <msglen> <PLAY> <gh> <ph> <card> <tail>
# are we leading?
me = self.playerhandle
leader = self.state[me]['leader']
# if we're the leader, we can play anything
if leader:
self.sendPlayLead()
else:
self.sendPlayFollow()
###########################################################################
# This plays a card to lead a new trick: the card is chosen by the
# player sub-class implementation of decidePlayLead()
#
def sendPlayLead(self):
# call decidePlayLead() to determine what card we should play as
# a lead
card = self.decidePlayLead()
# remove the card from our hand
self.removeCard(card)
# prep the format string
format = "!iiiiiiBB"
size = struct.calcsize(format)
# reduce the size by 4, to leave out the space needed for the
# leading size value
size = size - 4
# now generate a packed array of bytes for the message using that
# format string
message = struct.pack(format,
size,
self.messageId['PLAY'],
self.gamehandle,
self.playerhandle,
card.value,
card.suit,
self.messageId['TAIL1'],
self.messageId['TAIL2'],
)
#info(self.id+"sending PLAY")
#self.printMessage(message)
self.s.send(message)
###########################################################################
# This plays a card to follow in the current trick: the card is chosen by
# the player sub-class implementation of decidePlayFollow(), which is
# assumed to return a valid card
#
def sendPlayFollow(self):
# call decidePlayFollow() to determine the card we should follow
# with: this assumes that the returned card is valid
card = self.decidePlayFollow()
# remove the card from our hand
self.removeCard(card)
# prep the format string
format = "!iiiiiiBB"
size = struct.calcsize(format)
# reduce the size by 4, to leave out the space needed for the
# leading size value
size = size - 4
# now generate a packed array of bytes for the message using that
# format string
message = struct.pack(format,
size,
self.messageId['PLAY'],
self.gamehandle,
self.playerhandle,
card.value,
card.suit,
self.messageId['TAIL1'],
self.messageId['TAIL2'],
)
#info(self.id+"sending PLAY")
#self.printMessage(message)
self.s.send(message)
###########################################################################
# this reads a message from the server socket, and processes it
#
def parseMessage(self):
# we read a single int from the socket: this should represent the
# length of the entire message
(size,) = struct.unpack("!i",self.s.recv(4))
# read the specified number of bytes from the socket
bytes = self.s.recv(size)
#info(self.id+"len of bytes is " + str(len(bytes)))
# decode the message identifier
(id,) = struct.unpack_from("!i",bytes)
#info(self.id+"message is: %s (%d)" % (self.messageName[id],id))
# now we mung out a case switch on the message identifier
if ( id == self.messageId['JOINACCEPT'] ):
return self.parseJoinAccept(bytes)
elif ( id == self.messageId['JOINDENY'] ):
return self.parseJoinDeny(bytes)
elif ( id == self.messageId['CHAT'] ):
return self.parseChat(bytes)
elif ( id == self.messageId['STATE'] ):
return self.parseState(bytes)
elif ( id == self.messageId['DEAL'] ):
return self.parseDeal(bytes)
elif ( id == self.messageId['STARTDENY'] ):
return self.parseStartDeny(bytes)
elif ( id == self.messageId['ORDEROFFER'] ):
return self.parseOrderOffer(bytes)
elif ( id == self.messageId['ORDERDENY'] ):
return self.parseOrderDeny(bytes)
elif ( id == self.messageId['CALLOFFER'] ):
return self.parseCallOffer(bytes)
elif ( id == self.messageId['CALLDENY'] ):
return self.parseCallDeny(bytes)
elif ( id == self.messageId['DROPOFFER'] ):
return self.parseDropOffer(bytes)
elif ( id == self.messageId['DROPDENY'] ):
return self.parseDropDeny(bytes)
elif ( id == self.messageId['DEFENDOFFER'] ):
return self.parseDefendOffer(bytes)
elif ( id == self.messageId['DEFENDDENY'] ):
return self.parseDefendDeny(bytes)
elif ( id == self.messageId['PLAYOFFER'] ):
return self.parsePlayOffer(bytes)
elif ( id == self.messageId['PLAYDENY'] ):
return self.parsePlayDeny(bytes)
elif ( id == self.messageId['TRICKOVER'] ):
return self.parseTrickOver(bytes)
elif ( id == self.messageId['HANDOVER'] ):
return self.parseHandOver(bytes)
elif ( id == self.messageId['GAMEOVER'] ):
return self.parseGameOver(bytes)
else:
info(self.id+"message is: %s (%d)" % (self.messageName[id],id))
return self.badMessage(bytes)
###########################################################################
# This routine parses a JOINACCEPT message
#
def parseJoinAccept(self, bytes):
#debug(self.id+"parsing JOINACCEPT")
#self.printMessage(bytes)
# the format of a JOINACCEPT message is:
# <msg> : <msglen> <JOINACCEPT> <gh> <ph> <team> <tail>
# where we've already read the msglen bytes
(msg, gh, ph, team, tail1, tail2) = struct.unpack("!iiiiBB",bytes)
# run some sanity checks
if tail1 != self.messageId['TAIL1'] or tail2 != self.messageId['TAIL2']:
error(self.id+"bad tail value in parseJoinAccept()")
return False
# ok, otherwise we carry on
self.gamehandle = gh
self.playerhandle = ph
self.team = team
return True
###########################################################################
# This routine parses a JOINDENY message
#
def parseJoinDeny(self, bytes):
#debug(self.id+"parsing JOINDENY")
#self.printMessage(bytes)
# the format of a JOINDENY message is:
# <msg> : <msglen> <JOINDENY> <string> <tail>
# where the string explains why it was denied
message = self.parseString(bytes[4:-2])
info(self.id+"join denied: " + message)
return(False)
###########################################################################
# This routine parses a DECLINE message
#
def parseDecline(self, bytes):
#debug(self.id+"parsing DECLINE")
#self.printMessage(bytes)
# the format of a DECLINE message is:
# <msg> : <msglen> <DECLINE> <string> <tail>
# where the string explains why it was denied
message = self.parseString(bytes[4:-2])
info(self.id+"join declined: " + message)
return(False)
###########################################################################
# This routine parses a CHAT message
#
def parseChat(self, bytes):
#debug(self.id+"parsing CHAT")
#self.printMessage(bytes)
# the format of a CHAT message is:
# <msg> : <msglen> <CHAT> <string> <tail>
# where we've already read the msglen bytes
# since the only content we have is the string, we slice the leading
# <CHAT> (ie. 4 bytes) off the bytes array and pass it to a
# specialized string parser
chat = self.parseString(bytes[4:-2])
# now we peel off the tail and make sure it's sane
(tail1,tail2) = struct.unpack("!BB",bytes[-2:])
# run some sanity checks
if tail1 != self.messageId['TAIL1'] or tail2 != self.messageId['TAIL2']:
error(self.id+"bad tail value in parseChat()")
return False
# ok, log the chat
#info(self.id+"" + chat)
return True
###########################################################################
# This routine parses a string component of a message: it expects
# to be passed a bytes array beginning with the string length
#
def parseString(self, bytes):
#debug(self.id+"parsing string")
#self.printMessage(bytes)
# the format of a string is:
# <string> : <textlen> <text>
(len,) = struct.unpack_from("!i",bytes)
#info(self.id+"string len: " + str(len))
# now parse out the text of the string
format = "!"+str(len)+"s"
#info(self.id+"format is "+format)
(chat,) = struct.unpack_from(format,bytes[4:])
#info(self.id+"chat is: " + chat.decode("utf-8"))
return(chat.decode("utf-8"))
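###########################################################################
# For illustration only (not called anywhere in this class): the
# length-prefixed string layout parsed above is the same one sendJoin()
# produces for the player name, e.g.
#
#   name = b"alice"
#   packed = struct.pack("!i%ds" % len(name), len(name), name)
#   # self.parseString(packed) would return "alice"
#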
###########################################################################
# This routine parses a STATE message: it expects to be passed the
# message bytes beginning with the <STATE> identifier
#
def parseState(self, bytes):
#info(self.id+"parsing STATE")
#self.printMessage(bytes)
offset = 0
# the format of a state is:
# <msg> : <msglen> <STATE> <statedata> <tail>
# <statedata> : <playersdata> <gamedata> <cards>
# <playersdata> : <p1> <p2> <p3> <p4>
# <pN> : <pstate> <pdata>
# <pstate> : {0|1|2} # unconnected, connected, joined
# <pdata> : if <pstate> == joined
# <ph> <nmstring> <clstring> <hwstring> <osstring>
# <cmtstring> <team> <numcards> <creator> <ordered>
# <dealer> <alone> <defend> <leader> <maker>
# <playoffer> <orderoffer> <dropoffer> <calloffer>
# <defendoffer> <cardinplay> [<card>] <passed>
# else
# <NULL>
# <NULL> : # no data
# <team> : {-1|0|1} # no team, team 0, or team 1
# <creator>|<ordered>|<dealer>|<alone>|<defend>|<leader>|
# <maker>|<playoffer>|<orderoffer>|<dropoffer>|<calloffer>|
# <defendoffer>|<cardinplay>|<passed>
# : <boolean>
# <gamedata> : <ingame> <suspend> <holein> <hole> <trumpset>
# <trump> <tricks> <score> <options>
# <ingame> : <boolean>
# <hstate> : <0|1|2|3|4> # pregame,hole,trump,defend,play
# <suspend> : <boolean>
# <holein> : <boolean> # true if hole card
# <hole> : <card> # only packed if <holein> true
# <card> : <value> <suit>
# <value> : {2|3|4|5|6|7|8|9|10|11|12|13|14}
# <suit> : {0|1|2|3}
# <trumpset> : <boolean> # true if trump set
# <trump> : <suit> # only packed if <trumpset> true
# <tricks> : <tricks0> <tricks1>
# <tricks0> : # tricks for team 0
# <tricks1> : # tricks for team 1
# <score> : <team0> <team1>
# <team0> : # score of team 0
# <team1> : # score of team 1
# <options> : <defend> <aloneonorder> <screw>
# <defend>|<aloneonorder>|<screw> : <boolean>
# <cards> : <numcards> <card1> .. <cardN>
# <cardN> : <value> <suit>
# we pass a slice of the bytes array with the <STATE> removed;
# parseStatePlayer() will return the parsed length, which we'll
# then use to compose further slices to parse the game and cards
offset += self.parseStatePlayer(bytes[4:])
# next we parse the game state, for which we use the offset
# returned from the parseStatePlayer() routine to build a new
# slice of the bytes array
#info("")
offset += self.parseStateGame(bytes[4+offset:])
# next we parse the cards, which may number 0 if we haven't been
# dealt any yet
#info("")
offset += self.parseStateCards(bytes[4+offset:])
# check that we have a valid tail
(tail1,tail2) = struct.unpack("!BB",bytes[-2:])
if tail1 != self.messageId['TAIL1'] or tail2 != self.messageId['TAIL2']:
error(self.id+"bad tail value in parseState()")
return False
return True
###########################################################################
# This routine parses the player data of the <STATE> message
#
def parseStatePlayer(self, bytes):
#debug(self.id+"parsing player STATE")
offset = 0
#info("")
offset += self.parseStatePlayerN(bytes[offset:],0)
#info("")
offset += self.parseStatePlayerN(bytes[offset:],1)
#info("")
offset += self.parseStatePlayerN(bytes[offset:],2)
#info("")
offset += self.parseStatePlayerN(bytes[offset:],3)
return offset
###########################################################################
# This reads the N'th player state information
#
def parseStatePlayerN(self, bytes, n):
#debug(self.id+"parsing player STATE for player %d" % (n))
offset = 0
# The player data looks like this:
# <playersdata> : <p1> <p2> <p3> <p4>
# <pN> : <pstate> <pdata>
# <pstate> : {0|1|2} # unconnected, connected, joined
# <pdata> : if <pstate> == joined
# <ph> <nmstring> <clstring> <hwstring> <osstring>
# <cmtstring> <team> <numcards> <creator> <ordered>
# <dealer> <alone> <defend> <leader> <maker>
# <playoffer> <orderoffer> <dropoffer> <calloffer>
# <defendoffer> <cardinplay> [<card>] <passed>
# else
# <NULL>
# <NULL> : # no data
# <team> : {-1|0|1} # no team, team 0, or team 1
# <creator>|<ordered>|<dealer>|<alone>|<defend>|<leader>|<maker>
# <playoffer>|<orderoffer>|<dropoffer>|<calloffer>|<defendoffer>
# <cardinplay> <passed>
# : <boolean>
#
# pull player N's state: 0 is unconnected, 1 is connected, 2 is joined;
# if the value is 2, there will be further player data
(self.state[n]['state'],) = struct.unpack_from("!i",bytes)
offset += 4 # track the offset into the bytes array
# if this is our state, promote it up
if n == self.playerhandle:
self.state['state'] = self.state[n]['state']
# if player state is 2 (ie. joined), then read the rest of the info
if self.state[n]['state'] == 2:
# get the player handle: not sure why I duped this, since the
# handle is implicit in the order, but anyway...
(ph,) = struct.unpack_from("!i",bytes[offset:])
offset += 4
# get the name
self.state[ph]['name'] = self.parseString(bytes[offset:])
offset += 4+len(self.state[ph]['name'])
#info(self.id+"player name is " + self.state[ph]['name'])
# get the client name
self.state[ph]['clientname'] = self.parseString(bytes[offset:])
offset += 4+len(self.state[ph]['clientname'])
# get the client hardware
self.state[ph]['hardware'] = self.parseString(bytes[offset:])
offset += 4+len(self.state[ph]['hardware'])
# get the OS
self.state[ph]['os'] = self.parseString(bytes[offset:])
offset += 4+len(self.state[ph]['os'])
# get the comment
self.state[ph]['comment'] = self.parseString(bytes[offset:])
offset += 4+len(self.state[ph]['comment'])
# get the team number
(self.state[ph]['team'],) = struct.unpack_from("!i",bytes[offset:])
offset += 4
# get the number of cards
(self.state[ph]['numcards'],) = \
struct.unpack_from("!i",bytes[offset:])
offset += 4
# get the creator boolean
(self.state[ph]['creator'],) = \
struct.unpack_from("!i",bytes[offset:])
if ph == self.playerhandle:
self.state['creator'] = self.state[ph]['creator']
offset += 4
# get the ordered boolean
(self.state[ph]['ordered'],) = \
struct.unpack_from("!i",bytes[offset:])
if self.state[ph]['ordered'] == 1:
self.state['orderer'] = ph
offset += 4
# get the dealer boolean
(self.state[ph]['dealer'],) = \
struct.unpack_from("!i",bytes[offset:])
if self.state[ph]['dealer'] == 1:
self.state['dealer'] = ph
offset += 4
# get the alone boolean
(self.state[ph]['alone'],) = struct.unpack_from("!i",bytes[offset:])
if self.state[ph]['alone'] == 1:
self.state['aloner'] = ph
offset += 4
# get the defend boolean
(self.state[ph]['defend'],) = \
struct.unpack_from("!i",bytes[offset:])
if self.state[ph]['defend'] == 1:
self.state['defender'] = ph
offset += 4
# get the leader boolean
(self.state[ph]['leader'],) = \
struct.unpack_from("!i",bytes[offset:])
if self.state[ph]['leader'] == 1:
self.state['leader'] = ph
offset += 4
# get the maker boolean
(self.state[ph]['maker'],) = struct.unpack_from("!i",bytes[offset:])
if self.state[ph]['maker'] == 1:
self.state['maker'] = ph
offset += 4
# get the playoffer boolean
(self.state[ph]['playoffer'],) = \
struct.unpack_from("!i",bytes[offset:])
offset += 4
# get the orderoffer boolean
(self.state[ph]['orderoffer'],) = \
struct.unpack_from("!i",bytes[offset:])
offset += 4
# get the dropoffer boolean
(self.state[ph]['dropoffer'],) = \
struct.unpack_from("!i",bytes[offset:])
offset += 4
# get the calloffer boolean
(self.state[ph]['calloffer'],) = \
struct.unpack_from("!i",bytes[offset:])
offset += 4
# get the defendoffer boolean
(self.state[ph]['defendoffer'],) = \
struct.unpack_from("!i",bytes[offset:])
offset += 4
# get the cardinplay boolean
(self.state[ph]['cardinplay'],) = \
struct.unpack_from("!i",bytes[offset:])
offset += 4
# if there is a card in play, read it
if self.state[ph]['cardinplay'] == 1:
(value,suit) = struct.unpack_from("!ii",bytes[offset:])
offset += 8
self.state[ph]['card'] = Card(value=value,suit=suit)
# get whether they've passed or not
(self.state[ph]['passed'],) = \
struct.unpack_from("!i",bytes[offset:])
offset += 4
return offset
###########################################################################
# This routine parses the game data of the <STATE> message
#
def parseStateGame(self, bytes):
#debug(self.id+"parsing game STATE")
#self.printMessage(bytes)
offset = 0
# The game data looks like this:
# <gamedata> : <ingame> <hstate> <suspend> <holein> <hole> <trumpset>
# <trump> <tricks> <score> <options>
# <ingame> : <boolean>
# <hstate> : <0|1|2|3|4> # pregame,hole,trump,defend,play
# <suspend> : <boolean>
# <holein> : <boolean> # true if hole card
# <hole> : <card> # only packed if <holein> true
# <card> : <value> <suit>
# <value> : {2|3|4|5|6|7|8|9|10|11|12|13|14}
# <suit> : {0|1|2|3}
# <trumpset> : <boolean> # true if trump set
# <trump> : <suit> # only packed if <trumpset> true
# <tricks> : <tricks0> <tricks1>
# <tricks0> : # tricks for team 0
# <tricks1> : # tricks for team 1
# <score> : <team0> <team1>
# <team0> : # score of team 0
# <team1> : # score of team 1
# <options> : <defend> <aloneonorder> <screw>
# <defend>|<aloneonorder>|<screw> : <boolean>
# get the ingame boolean
(self.state['ingame'],) = struct.unpack_from("!i",bytes[offset:])
offset += 4
# get the hand state: 0, 1, 2, 3, or 4, corresponding to a hand state
# of pregame (hands haven't been dealt yet), hole (hole card ordering
# is available), trump (arbitrary trump can be called), defend (defend
# alone is on offer), play (game is underway)
(self.state['hstate'],) = struct.unpack_from("!i",bytes[offset:])
offset += 4
# get the suspend state: this would be true only if the number of
# players drops below 4
(self.state['suspend'],) = struct.unpack_from("!i",bytes[offset:])
offset += 4
# get the hole card available state: this would be true if there is a
# a hole card on offer
(self.state['holein'],) = struct.unpack_from("!i",bytes[offset:])
offset += 4
# if there is a hole card on offer, read it
if self.state['holein'] == 1:
#info(self.id+"parsing hole card")
(value,suit) = struct.unpack_from("!ii",bytes[offset:])
self.state['hole'] = Card(value=value,suit=suit)
offset += 8
# read whether trump has been set
(self.state['trumpset'],) = struct.unpack_from("!i",bytes[offset:])
offset += 4
# if it has, read the trump suit
if self.state['trumpset'] == 1:
(self.state['trump'],) = struct.unpack_from("!i",bytes[offset:])
offset += 4
#info("")
#info(self.id+"trump is " + Card.suitName(self.state['trump']))
# and set the number of tricks for each team
(tricks0,tricks1) = struct.unpack_from("!ii",bytes[offset:])
offset += 8
# store the previous us and them tricks, so we can compute deltas
prevus = self.state['ustricks']
prevthem = self.state['themtricks']
# set the tricks as an "ustricks" and "themtricks", to make things
# easier to parse later
if self.team == 1:
self.state['ustricks'] = tricks0
self.state['themtricks'] = tricks1
elif self.team == 2:
self.state['ustricks'] = tricks1
self.state['themtricks'] = tricks0
# if the tricks have changed, compute the delta: either ustricks
# has changed, or themtricks, but can't (shouldn't) be both
if prevus != self.state['ustricks']:
self.state['trickdelta'] = self.state['ustricks'] - prevus
if prevthem != self.state['themtricks']:
self.state['trickdelta'] = -1*(self.state['themtricks'] - prevthem)
# similarly, parse the score values into usscore and themscore
(score0,score1) = struct.unpack_from("!ii",bytes[offset:])
offset += 8
# store the previous us and them scores, so we can compute deltas
prevus = self.state['usscore']
prevthem = self.state['themscore']
# set the scores as an "usscore" and "themscore", to make things
# easier to parse later
if self.team == 1:
self.state['usscore'] = score0
self.state['themscore'] = score1
elif self.team == 2:
self.state['usscore'] = score1
self.state['themscore'] = score0
# if the score has changed, compute the delta: either usscore
# has changed, or themscore, but can't (shouldn't) be both
if prevus != self.state['usscore']:
self.state['scoredelta'] = self.state['usscore'] - prevus
if prevthem != self.state['themscore']:
self.state['scoredelta'] = -1*(self.state['themscore'] - prevthem)
# and then read a bunch of options
(self.state['defend'],self.state['aloneonorder'],self.state['screw'],)\
= struct.unpack_from("!iii",bytes[offset:])
offset += 12
return offset
###########################################################################
# This reads the cards information in the state message
#
def parseStateCards(self, bytes):
#debug(self.id+"parsing cards STATE")
#self.printMessage(bytes)
offset = 0
# The cards data looks like this:
# <cards> : <numcards> <card1> .. <cardN>
# <cardN> : <value> <suit>
# get the number of cards to be read
(self.state['numcards'],) = struct.unpack_from("!i",bytes)
offset += 4
# if we have a non-zero number of cards, read them
self.hand = list([])
for i in range(self.state['numcards']):
(value,suit) = struct.unpack_from("!ii",bytes[offset:])
self.hand.append(Card(value=value,suit=suit))
offset += 8
return offset
###########################################################################
# This routine parses a DEAL message: this message is sent after cards
# for the deal are completed. The state structure for the player
# receiving the deal message should be fully populated
#
def parseDeal(self, bytes):
debug("")
debug(self.id+"parsing DEAL")
#self.printMessage(bytes)
# the format of a DEAL message is:
# <msg> : <msglen> <DEAL> <tail>
# it's really just a notification message, so check we have a valid
# tail and otherwise do nothing
# check we have a valid tail
(tail1, tail2) = struct.unpack("!BB",bytes[-2:])
if tail1 != self.messageId['TAIL1'] or tail2 != self.messageId['TAIL2']:
error(self.id+"bad tail value in parseDeal()")
return False
# at this point we've received and parsed the state message with
# our hand details in it: we will want to know our original hand
# later, to run stats on it, so we record the original hand now
self.originalHand = self.hand
return True
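    # The two-byte tail check above is repeated verbatim in every parse
    # routine below; a possible refactoring (hypothetical helper, not in the
    # original code) would be:
    #
    #   def checkTail(self, bytes, caller):
    #       (tail1, tail2) = struct.unpack("!BB", bytes[-2:])
    #       if tail1 != self.messageId['TAIL1'] or \
    #          tail2 != self.messageId['TAIL2']:
    #           error(self.id + "bad tail value in " + caller)
    #           return False
    #       return True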
###########################################################################
# This routine parses a STARTDENY message
#
def parseStartDeny(self, bytes):
#debug(self.id+"parsing STARTDENY")
#self.printMessage(bytes)
# the format of a STARTDENY message is:
# <msg> : <msglen> <STARTDENY> <string> <tail>
# where the string explains why it was denied
message = self.parseString(bytes[4:-2])
# check we have a valid tail
(tail1, tail2) = struct.unpack("!BB",bytes[-2:])
if tail1 != self.messageId['TAIL1'] or tail2 != self.messageId['TAIL2']:
error(self.id+"bad tail value in parseStartDeny()")
return False
info("")
info(self.id+"uh-oh, got a STARTDENY message: " + message)
return False
###########################################################################
# This routine parses an ORDEROFFER message
#
def parseOrderOffer(self, bytes):
debug(self.id+"parsing ORDEROFFER")
#self.printMessage(bytes)
# the format of an ORDEROFFER message is:
# <msg> : <msglen> <ORDEROFFER> <ph> <tail>
# it's really just a notification message, unless we're the <ph>
(msg, ph) = struct.unpack_from("!ii",bytes)
# check we have a valid tail
(tail1, tail2) = struct.unpack("!BB",bytes[-2:])
if tail1 != self.messageId['TAIL1'] or tail2 != self.messageId['TAIL2']:
error(self.id+"bad tail value in parseOrderOffer()")
return False
# if the person offered the order is us, call sendOrderPass()
if ph == self.playerhandle:
self.sendOrderPass()
return True
###########################################################################
# This routine parses a ORDERDENY message
#
def parseOrderDeny(self, bytes):
#debug(self.id+"parsing ORDERDENY")
#self.printMessage(bytes)
# the format of a ORDERDENY message is:
# <msg> : <msglen> <ORDERDENY> <string> <tail>
# where the string explains why it was denied
message = self.parseString(bytes[4:-2])
# check we have a valid tail
(tail1, tail2) = struct.unpack("!BB",bytes[-2:])
if tail1 != self.messageId['TAIL1'] or tail2 != self.messageId['TAIL2']:
error(self.id+"bad tail value in parseDefendDeny()")
return False
info("")
info(self.id+"uh-oh, got a ORDERDENY message: " + message)
return False
###########################################################################
# This routine parses a CALLOFFER message
#
def parseCallOffer(self, bytes):
debug(self.id+"parsing CALLOFFER")
#self.printMessage(bytes)
# the format of an CALLOFFER message is:
# <msg> : <msglen> <CALLOFFER> <ph> <tail>
# it's really just a notification message, unless we're the <ph>
(msg, ph) = struct.unpack_from("!ii",bytes)
# check we have a valid tail
(tail1, tail2) = struct.unpack("!BB",bytes[-2:])
if tail1 != self.messageId['TAIL1'] or tail2 != self.messageId['TAIL2']:
error(self.id+"bad tail value in parseOrderOffer()")
return False
# if the person offered the order is us, call sendOrderPass()
if ph == self.playerhandle:
self.sendCallPass()
return True
###########################################################################
# This routine parses a CALLDENY message
#
def parseCallDeny(self, bytes):
#debug(self.id+"parsing CALLDENY")
#self.printMessage(bytes)
# the format of a CALLDENY message is:
# <msg> : <msglen> <CALLDENY> <string> <tail>
# where the string explains why it was denied
message = self.parseString(bytes[4:-2])
# check we have a valid tail
(tail1, tail2) = struct.unpack("!BB",bytes[-2:])
if tail1 != self.messageId['TAIL1'] or tail2 != self.messageId['TAIL2']:
error(self.id+"bad tail value in parseDefendDeny()")
return False
info("")
info(self.id+"uh-oh, got a CALLDENY message: " + message)
return False
###########################################################################
# This routine parses a DROPOFFER message
#
def parseDropOffer(self, bytes):
#debug(self.id+"parsing DROPOFFER")
#self.printMessage(bytes)
# the format of an DROPOFFER message is:
# <msg> : <msglen> <DROPOFFER> <ph> <tail>
# it's really just a notification message, unless we're the <ph>
(msg, ph) = struct.unpack_from("!ii",bytes)
# check we have a valid tail
(tail1, tail2) = struct.unpack("!BB",bytes[-2:])
if tail1 != self.messageId['TAIL1'] or tail2 != self.messageId['TAIL2']:
error(self.id+"bad tail value in parseDropOffer()")
return False
# if the person offered the drop is us, call sendDrop()
if ph == self.playerhandle:
self.sendDrop()
return True
###########################################################################
# This routine parses a DROPDENY message
#
def parseDropDeny(self, bytes):
#debug(self.id+"parsing DROPDENY")
#self.printMessage(bytes)
# the format of a DROPDENY message is:
# <msg> : <msglen> <DROPDENY> <string> <tail>
# where the string explains why it was denied
message = self.parseString(bytes[4:-2])
# check we have a valid tail
(tail1, tail2) = struct.unpack("!BB",bytes[-2:])
if tail1 != self.messageId['TAIL1'] or tail2 != self.messageId['TAIL2']:
error(self.id+"bad tail value in parseDefendDeny()")
return False
info("")
info(self.id+"uh-oh, got a DROPDENY message: " + message)
return False
###########################################################################
# This routine parses a DEFENDOFFER message
#
def parseDefendOffer(self, bytes):
#debug(self.id+"parsing DEFENDOFFER")
#self.printMessage(bytes)
# the format of an DEFENDOFFER message is:
# <msg> : <msglen> <DEFENDOFFER> <ph> <tail>
# it's really just a notification message, unless we're the <ph>
(msg, ph) = struct.unpack_from("!ii",bytes)
# check we have a valid tail
(tail1, tail2) = struct.unpack("!BB",bytes[-2:])
if tail1 != self.messageId['TAIL1'] or tail2 != self.messageId['TAIL2']:
error(self.id+"bad tail value in parseDefendOffer()")
return False
# if the person offered the defend is us, call sendDefend()
if ph == self.playerhandle:
info(self.id+"declining defend alone")
self.sendDefend()
return True
###########################################################################
# This routine parses a DEFENDDENY message
#
def parseDefendDeny(self, bytes):
#debug(self.id+"parsing DEFENDDENY")
#self.printMessage(bytes)
# the format of a DEFENDDENY message is:
# <msg> : <msglen> <DEFENDDENY> <string> <tail>
# where the string explains why it was denied
message = self.parseString(bytes[4:-2])
# check we have a valid tail
(tail1, tail2) = struct.unpack("!BB",bytes[-2:])
if tail1 != self.messageId['TAIL1'] or tail2 != self.messageId['TAIL2']:
error(self.id+"bad tail value in parseDefendDeny()")
return False
info("")
info(self.id+"uh-oh, got a DEFENDDENY message: " + message)
return False
###########################################################################
# This routine parses a PLAYOFFER message
#
def parsePlayOffer(self, bytes):
#info(self.id+"parsing PLAYOFFER")
#self.printMessage(bytes)
# the format of an PLAYOFFER message is:
# <msg> : <msglen> <PLAYOFFER> <ph> <tail>
# it's really just a notification message, unless we're the <ph>
(msg, ph) = struct.unpack_from("!ii",bytes)
#info("")
#info(self.id+"got PLAYOFFER for %s" % (self.state[ph]['name']))
# check we have a valid tail
(tail1, tail2) = struct.unpack("!BB",bytes[-2:])
if tail1 != self.messageId['TAIL1'] or tail2 != self.messageId['TAIL2']:
error(self.id+"bad tail value in parseDropOffer()")
return False
# if the person offered the play is us, call sendPlay()
if ph == self.playerhandle:
self.sendPlay()
return True
###########################################################################
# This routine parses a PLAYDENY message
#
def parsePlayDeny(self, bytes):
#debug(self.id+"parsing PLAYDENY")
#self.printMessage(bytes)
# the format of a PLAYDENY message is:
# <msg> : <msglen> <PLAYDENY> <string> <tail>
# where the string explains why it was denied
message = self.parseString(bytes[4:-2])
# check we have a valid tail
(tail1, tail2) = struct.unpack("!BB",bytes[-2:])
if tail1 != self.messageId['TAIL1'] or tail2 != self.messageId['TAIL2']:
error(self.id+"bad tail value in parsePlayDeny()")
return False
info("")
info(self.id+"uh-oh, got a PLAYDENY message: " + message)
return False
###########################################################################
# This routine parses a TRICKOVER message
#
def parseTrickOver(self, bytes):
#debug(self.id+"parsing TRICKOVER")
#self.printMessage(bytes)
# the format of a TRICKOVER message is:
# <msg> : <msglen> <TRICKOVER> <tail>
# ie. it's just an alert, so no need to parse anything out of it
# check we have a valid tail
(tail1, tail2) = struct.unpack("!BB",bytes[-2:])
if tail1 != self.messageId['TAIL1'] or tail2 != self.messageId['TAIL2']:
error(self.id+"bad tail value in parseTrickOver()")
return False
# we don't want to clutter the log by reporting all instances
# of the trick over message, so we only print it for the maker
if self.playerhandle == self.state['maker']:
if self.state['trickdelta'] < 0: wl="lost"
elif self.state['trickdelta'] > 0: wl="won"
else: wl="bad bad bad"
info(self.id+"trick is over, we %s, now %d to %d"
% (wl,self.state['ustricks'],self.state['themtricks']))
# increment the trick counter for the id string
self.tcount += 1
self.setId()
return True
###########################################################################
# This routine parses a HANDOVER message
#
def parseHandOver(self, bytes):
#info("")
#info(self.id+"parsing HANDOVER")
#self.printMessage(bytes)
# the format of a HANDOVER message is:
# <msg> : <msglen> <HANDOVER> <tail>
# ie. it's just an alert, so no need to parse anything out of it
# check we have a valid tail
(tail1, tail2) = struct.unpack("!BB",bytes[-2:])
if tail1 != self.messageId['TAIL1'] or tail2 != self.messageId['TAIL2']:
error(self.id+"bad tail value in parseHandOver()")
return False
# if we were the maker, print some info and then record the score
# delta for this hand
if self.playerhandle == self.state['maker']:
info("")
info(self.id+"hand is over")
self.printScore()
info(self.id+"score delta: %d" % (self.state['scoredelta']))
info(self.id+"original hand: %s, trump: %s"
% (self.printHand(self.originalHand),
Card.suitName(self.state['trump'])))
# log our data in a thread-safe fashion
self.lock.acquire()
remap = ""
try:
remap = self.record.addHand(
self.originalHand, self.state['trump'],
self.state['scoredelta'], self)
finally:
self.lock.release()
# log the remapped hand: makes it easier to debug things later
info(self.id+"remapped hand: %s" % (remap))
# clear the orderer info
self.state['orderer'] = -1
# increment the hand and trick counters for the id string
self.hcount += 1
self.tcount = 0
self.setId()
return True
###########################################################################
# This routine parses a GAMEOVER message
#
def parseGameOver(self, bytes):
#info("")
#info(self.id+"parsing GAMEOVER")
#self.printMessage(bytes)
# the format of a GAMEOVER message is:
# <msg> : <msglen> <GAMEOVER> <tail>
# ie. it's just an alert, so no need to parse anything out of it
# check we have a valid tail
(tail1, tail2) = struct.unpack("!BB",bytes[-2:])
if tail1 != self.messageId['TAIL1'] or tail2 != self.messageId['TAIL2']:
error(self.id+"bad tail value in parseHandOver()")
return False
# we don't want to clutter the log by reporting all instances
        # of the game over message, so we only print it for the maker
if self.playerhandle == self.state['maker']:
info("")
info(self.id+"game is over")
self.printScore()
info("")
# log our data in a thread-safe fashion
self.lock.acquire()
try:
self.record.addGame()
finally:
self.lock.release()
# we set the new game, hand, and trick values: this is really
# mostly useless, since when the game is over, the player object
        # is going to be deleted, but I think it's useful to do this for
# completeness
self.gcount += 1
self.hcount = 0
self.tcount = 0
self.setId()
# return False to indicate this client is finished
return False
###########################################################################
# This routine parses a random bad message
#
def badMessage(self, bytes):
#debug(self.id+"parsing bad message")
#self.printMessage(bytes)
return False
###########################################################################
# this takes a byte array and displays it as a series of bytes, useful
# for decoding and debugging messages
#
def printMessage(self, message):
print()
print("decoded message:")
hex_string = "".join("%02x " % b for b in message)
print("hex: " + hex_string)
print()
###########################################################################
# this prints out all the cards in our hand
#
def printHand(self,hand):
string = ""
sep = ""
for i in hand:
            string += sep + str(i)
sep = " "
return string
###########################################################################
    # This determines the lead suit from the game state and returns the set
    # of cards from the player's hand which can legally follow it. If the
    # player has one or more cards of that suit, the returned set contains
    # those cards; if the player has no cards of that suit, then all of the
    # player's cards are playable.
    #
def followCards(self):
# begin by determining who the leader of the hand was
leader = -1
for i in (0,1,2,3):
if self.state[i]['leader'] == 1:
leader = i
        # set the trump and complementary suits
trumpsuit = self.state['trump']
compsuit = Card.suitComp(self.state['trump'])
# set the leadsuit to the suit of the lead card, unless the lead
# card is the left (ie. the J of compsuit), in which case set the
# leadsuit to trump
leadsuit = self.state[leader]['card'].suit
if leadsuit == compsuit and \
self.state[leader]['card'].value == Card.nameValue("J"):
leadsuit = trumpsuit
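        # e.g. assuming suitComp() maps a suit to its same-colour partner,
        # with hearts as trump a lead of the J of diamonds (the left bower)
        # is treated as a hearts lead rather than a diamonds lead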
# step through the player's hand: anything with the same suit
# gets added to the playable cards list
playable = list([])
for card in self.hand:
# put the suit and value of this card into temporary variables
csuit = card.suit
cvalue = card.value
# if the card value is a J and its suit is the compsuit (ie.
            # the complementary suit of trump), then rewrite the suit as
# trump
if cvalue == Card.nameValue("J") and csuit == compsuit:
csuit = trumpsuit
            # now if the possibly-remapped csuit value matches the lead
# suit, add the card to the playable hand
if csuit == leadsuit:
playable.append(card)
# if we have no playable cards by suit, then we can play anything
#info(self.id+"before playable cards: " + self.printHand(playable))
if len(playable) == 0:
playable = self.hand.copy()
# print the hand
info(self.id+"playable: " + self.printHand(playable)
+ ", lead: " + Card.suitName(leadsuit)
+ ", trump: " + Card.suitName(trumpsuit) )
# generate some stats for follow requirements in a thread-safe way
self.lock.acquire()
try:
self.record.addFollow(len(self.hand),len(playable))
finally:
self.lock.release()
return playable
###########################################################################
# This takes a card and removes it from the player's hand: we need to
# do it like this because sometimes we're working with a copy of the
# card (ie. when we're using playable sets to follow), so we need to
# remove by value and not by reference
#
def removeCard(self, card):
# get the card value and suit
value = card.value
suit = card.suit
        # loop across all cards in the hand and remove the first match;
        # use a separate loop variable so we don't shadow the argument,
        # and stop once we've removed it so we don't keep iterating over
        # a list we've just mutated
        for c in self.hand:
            if c.value == value and c.suit == suit:
                self.hand.remove(c)
                break
|
If you can hike only ONE SECTION of the PCT, then this one stretch, LE CONTE to LAKE TAHOE, is by far the best for high altitude, granite, peaks, passes, and total alpine lake bliss, with incredible mountain scenery. If time constraints force you to limit your big adventure trek, this portion covers some of the most beautiful high elevation terrain in the whole darn nation. Seriously. Better than a movie!
Services like groceries, laundromats, hotels, restaurants, and a post office are available in SLT (South Lake Tahoe). Postal service, food & lodging can be had at numerous locations along certain high country routes – like Vermillion Resort @ Lake Edison. The PCT guide books describe these handy trailheads with services in way more detail.
|
# -*- coding: utf-8 -*-
# Copyright 2016-2017 Pavel_M <[email protected]>,
# released under the GNU GPL version 3.
# This is a plugin for Zim-wiki program (zim-wiki.org) by Jaap Karssenberg.
import gtk
import pango
from zim.notebook import Path
from zim.gui.widgets import ScrolledWindow, Dialog, SingleClickTreeView
from zim.notebook.index.tags import TagsView
from .iconutils import render_icon, RESERVED_ICON_NAMES, ICONS
class TagsManagerDialog(Dialog):
'''
Tags Manager dialog to do some basic operations with
tags and to set icons for tags.
'''
def __init__(self, window, index, uistate):
Dialog.__init__(self, window, _('Tags Manager (IconTags plugin)'), # T: dialog title
buttons=gtk.BUTTONS_OK_CANCEL,
defaultwindowsize=(450, 400) )
        # Don't confuse with the attribute 'self.uistate',
        # which is already set by the Dialog base class.
self._window = window
self.plugin_uistate = uistate
self.show_pages_button = gtk.ToggleButton('Show Pages')
self.show_pages_button.connect('toggled', self.toggle_show_pages)
self.add_extra_button(self.show_pages_button)
self.treeview_tags = TagsManagerTagsView(index, self.plugin_uistate['Icons for Tags'])
self.treeview_pages = TagsManagerPagesView(index, window.ui)
self.scrolled_widget = ScrolledWindow(self.treeview_tags)
self.vbox.pack_start(self.scrolled_widget, True)
self.treeview_tags.connect('row-activated', self.get_tag)
# Enable left/right arrows to navigate between views.
self.treeview_tags.connect('key-release-event', self.toggle_view)
self.treeview_pages.connect('key-release-event', self.toggle_view)
# Update if tags change.
self.connectto_all(index.update_iter.tags, (
('tag-row-inserted', lambda *a: self.update()),
('tag-row-deleted', lambda *a: self.update())
))
self.show_all()
def toggle_view(self, treeview, event):
'''Change view by pressing Left/Right arrows on keyboard.'''
key = gtk.gdk.keyval_name(event.keyval)
if key == 'Right' and treeview == self.treeview_tags:
self.show_pages_button.set_active(True)
elif key == 'Left' and treeview == self.treeview_pages:
self.show_pages_button.set_active(False)
def get_tag(self, treeview, path, column):
'''Place the tag to the cursor position.'''
model = treeview.get_model()
tag = '@' + model.get_value(model.get_iter(path), treeview.TAG_COL)
self._window.pageview.view.get_buffer().insert_tag_at_cursor(tag)
def update(self):
'''Update both tags and pages trees.'''
self.treeview_tags.refill_model()
self.treeview_pages.refill_model(self.treeview_pages.current_tag)
def toggle_show_pages(self, button):
''' 'Show Pages' button is clicked.'''
for widget in self.scrolled_widget.get_children():
self.scrolled_widget.remove(widget)
model, iter = self.treeview_tags.get_selection().get_selected()
if button.get_active():
self.scrolled_widget.add(self.treeview_pages)
# Set values for 'self.treeview_pages'.
if iter:
selected_tag = model.get_value(iter, self.treeview_tags.TAG_COL)
self.treeview_pages.refill_model(selected_tag)
else:
self.scrolled_widget.add(self.treeview_tags)
# Scroll to tag in 'self.treeview_tags'.
if iter:
path = model.get_path(iter)
self.treeview_tags.scroll_to_cell(path)
self.show_all()
def do_response_ok(self, *a):
''' OK button is pressed.'''
self.plugin_uistate['Icons for Tags'] = self.treeview_tags.icons_for_tags
self.result = True
return True
class TagsManagerTagsView(SingleClickTreeView):
'''
Class to show tags with icons in a treeview.
Is used in Tags Manager Dialog.
'''
TAG_COL = 0 # column with tag name
ICON_COL = 1 # column with icon image
ICON_NAME = 2 # column to sort ICON_COL
N_PAGES_COL = 3 # column to show number of pages
def __init__(self, index, preferences):
self.index = index
# Icons corresponding to tags, prevent unnecessary changing.
self.icons_for_tags = preferences.copy()
self.model = gtk.ListStore(str, gtk.gdk.Pixbuf, str, int) # TAG_COL, ICON_COL, ICON_NAME, N_PAGES_COL
SingleClickTreeView.__init__(self, self.model)
cells = (('Tags', self.TAG_COL, True),
('Pages', self.N_PAGES_COL, False))
for name, col_id, expand in cells:
cell = gtk.CellRendererText()
cell.set_property('ellipsize', pango.ELLIPSIZE_END)
cell.set_property('cell-background', 'white')
col = gtk.TreeViewColumn(name, cell)
col.set_attributes(cell, text = col_id)
col.set_resizable(expand)
col.set_expand(expand)
col.set_sort_column_id(col_id)
self.append_column(col)
cell = gtk.CellRendererPixbuf()
cell.set_property('cell-background', 'white')
col = gtk.TreeViewColumn('Icon', cell)
col.set_attributes(cell, pixbuf = self.ICON_COL)
col.set_resizable(False)
col.set_expand(False)
col.set_sort_column_id(self.ICON_NAME)
self.append_column(col)
self.refill_model()
def row_activated(self, path, column):
if column.get_sort_column_id() != self.ICON_NAME:
return False
def set_icon(path, icon_name = None):
tag = self.model.get_value(self.model.get_iter(path), self.TAG_COL)
tag = unicode(tag) # to use with non latin characters
if icon_name:
self.icons_for_tags[tag] = icon_name
else:
self.icons_for_tags.pop(tag, None)
self.refill_model()
return True
menu = gtk.Menu()
item = gtk.MenuItem('None')
item.connect('activate', lambda item: set_icon(path))
menu.append(item)
icons = sorted([(a, render_icon(b)) for (a,b) in ICONS.iteritems()
if a not in RESERVED_ICON_NAMES])
for name, icon in icons:
image = gtk.Image()
image.set_from_pixbuf(icon)
item = gtk.ImageMenuItem(name)
item.set_use_underline(False)
item.set_image(image)
item.zim_icon_name = name
item.connect('activate', lambda item: set_icon(path, item.zim_icon_name))
menu.append(item)
menu.show_all()
menu.popup(None, None, None, 3, 0)
def refill_model(self):
'''Update model.'''
self.model.clear()
tagview = TagsView.new_from_index(self.index)
for tag in [a.name for a in tagview.list_all_tags()]:
if tag in self.icons_for_tags:
icon_name = self.icons_for_tags[tag]
rendered_icon = render_icon(ICONS[icon_name])
else:
icon_name, rendered_icon = None, None
self.model.append([tag, rendered_icon, icon_name,
tagview.n_list_pages(tag)])
# Sort tags by number of pages and then by names.
self.model.set_sort_column_id(self.TAG_COL, order = gtk.SORT_ASCENDING)
self.model.set_sort_column_id(self.N_PAGES_COL, order = gtk.SORT_DESCENDING)
class TagsManagerPagesView(SingleClickTreeView):
'''
Class to show pages for a selected tag.
Is used in Tags Manager Dialog.
'''
PAGE_COL = 0 # column with page name
TAGS_N_COL = 1 # column with number of tags for the page
TAGS_COL = 2 # column with all tags for the page
def __init__(self, index, ui):
self.tagview = TagsView.new_from_index(index)
self.ui = ui
self.current_tag = None
        self.model = gtk.ListStore(str, int, str) # PAGE_COL, TAGS_N_COL, TAGS_COL
SingleClickTreeView.__init__(self, self.model)
cells = (('Page', self.PAGE_COL, True),
('N', self.TAGS_N_COL, False),
('Tags', self.TAGS_COL, True))
for name, col_id, expand in cells:
cell = gtk.CellRendererText()
cell.set_property('ellipsize', pango.ELLIPSIZE_END)
cell.set_property('cell-background', 'white')
col = gtk.TreeViewColumn(name, cell)
col.set_attributes(cell, text = col_id)
col.set_resizable(expand)
col.set_expand(expand)
col.set_sort_column_id(col_id)
self.append_column(col)
self.connect('row-activated', lambda treeview, path, column:
self.row_activated(path, column))
self.refill_model()
def refill_model(self, tag = None):
'''Update model.'''
self.model.clear()
self.current_tag = tag
if tag:
tag = unicode(tag) # to use with non latin names
for page in self.tagview.list_pages(tag):
                # Put the current tag first and keep it out of the sorted remainder.
tags = [tag] + sorted([a.name for a in self.tagview.list_tags(page)
if a.name != tag])
self.model.append([page.name, len(tags), ', '.join(tags)])
# Sort pages by names.
self.model.set_sort_column_id(self.PAGE_COL, order = gtk.SORT_DESCENDING)
def row_activated(self, path, column):
'''Open page in the view.'''
name = self.model.get_value(self.model.get_iter(path), self.PAGE_COL)
self.ui.open_page(Path(name))
|
Delicious smores on a bright blue sock with yellow heels, toes and cuff.
Size 12-24 Months Fits Shoe Size 3-7.
Size 2-4 Years fits Shoe Size 6-11.
Size 4-7 Years fits Shoe Size 10-1 (Youth).
Size 7-10 Years fits Shoe Size 12-5 (Youth).
|
# Copyright 2020 Cloudbase Solutions Srl
# All Rights Reserved.
import itertools
def _format_opt(req, option, keys=None):
def transform(key, value):
if keys and key not in keys:
return
yield (key, value)
return dict(itertools.chain.from_iterable(
transform(k, v) for k, v in option.items()))
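# A minimal usage sketch (hypothetical option dict; 'req' is accepted for API
# symmetry but not used by the filter itself):
#
#   opt = {'name': 'pool1', 'platform': 'openstack', 'secret': 'x'}
#   _format_opt(None, opt, keys=['name', 'platform'])
#   # -> {'name': 'pool1', 'platform': 'openstack'}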
def destination_minion_pool_options_collection(req, destination_pool_options):
formatted_opts = [
_format_opt(req, opt) for opt in destination_pool_options]
return {'destination_minion_pool_options': formatted_opts}
def destination_options_collection(req, destination_options):
formatted_opts = [
_format_opt(req, opt) for opt in destination_options]
return {'destination_options': formatted_opts}
def source_minion_pool_options_collection(req, source_pool_options):
formatted_opts = [
_format_opt(req, opt) for opt in source_pool_options]
return {'source_minion_pool_options': formatted_opts}
def source_options_collection(req, source_options):
formatted_opts = [
_format_opt(req, opt) for opt in source_options]
return {'source_options': formatted_opts}
|
PasturePro rope clips are recommended for horse fencing and will work for up to 1/4" rope or coated wire. Available in black and white.
The combination of PasturePro rope clips and line posts creates a much safer alternative to traditional t-posts. Available in black.
PasturePro rope clips are made to withstand the elements and are attached with two small stainless steel screws.
|
#!/usr/bin/env python
# -*- coding: latin-1; py-indent-offset:4 -*-
################################################################################
#
# This file is part of BfPy
#
# BfPy is a Python library to communicate with the Betfair Betting Exchange
# Copyright (C) 2010 Daniel Rodriguez (aka Daniel Rodriksson)
# Copyright (C) 2011 Sensible Odds Ltd.
#
# You can learn more and contact the author at:
#
# http://code.google.com/p/bfpy/
#
# BfPy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BfPy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BfPy. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
'''
BfPy wsdl variables holding the Betfair WSDL definitions
'''
#
# Variables containing the Betfair WSDL files
#
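# A minimal sketch of one way to consume an embedded WSDL string with a
# generic SOAP client (illustrative only; BfPy wires these WSDLs into its own
# service classes, and the temp-file/suds usage below is an assumption):
#
#   import tempfile
#   from suds.client import Client
#
#   with tempfile.NamedTemporaryFile(mode='w', suffix='.wsdl',
#                                    delete=False) as f:
#       f.write(BFGlobalService.lstrip())   # drop the leading newline
#       path = f.name
#   client = Client('file://' + path)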
BFGlobalService = '''
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright 2003-2004 The Sporting Exchange Limited. All rights reserved.
The presentation, distribution or other dissemination of the information contained herein by The Sporting Exchange Limited (Betfair) is not a license, either expressly or impliedly, to any intellectual property owned or controlled by Betfair.
Save as provided by statute and to the fullest extent permitted by law, the following provisions set out the entire liability of Betfair (including any liability for the acts and omissions of its employees, agents and sub-contractors) to the User in respect of the use of its WSDL file whether in contract, tort, statute, equity or otherwise:
(a) The User acknowledges and agrees that (except as expressly provided in this Agreement) the WSDL is provided "AS IS" without warranties of any kind (whether express or implied);
(b) All conditions, warranties, terms and undertakings (whether express or implied, statutory or otherwise relating to the delivery, performance, quality, uninterrupted use, fitness for purpose, occurrence or reliability of the WSDL are hereby excluded to the fullest extent permitted by law; and
(c) Betfair shall not be liable to the User for loss of profit (whether direct or indirect), loss of contracts or goodwill, lost advertising, loss of data or any type of special, indirect, consequential or economic loss (including loss or damage suffered by the User as a result of an action brought by a third party) even if such loss was reasonably foreseeable or Betfair had been advised of the possibility of the User incurring such loss.
No exclusion or limitation set out in this Agreement shall apply in the case of fraud or fraudulent concealment, death or personal injury resulting from the negligence of either party or any of its employees, agents or sub-contractors; and/or any breach of the obligations implied by (as appropriate) section 12 of the Sale of Goods Act 1979, section 2 of the Supply of Goods and Services Act 1982 or section 8 of the Supply of Goods (Implied Terms) Act 1973.
-->
<wsdl:definitions name="BFGlobalService"
targetNamespace="http://www.betfair.com/publicapi/v3/BFGlobalService/"
xmlns:types="http://www.betfair.com/publicapi/types/global/v3/"
xmlns:soap="http://schemas.xmlsoap.org/wsdl/soap/"
xmlns:tns="http://www.betfair.com/publicapi/v3/BFGlobalService/"
xmlns:wsdl="http://schemas.xmlsoap.org/wsdl/" xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<wsdl:types>
<xsd:schema targetNamespace="http://www.betfair.com/publicapi/types/global/v3/">
<xsd:import namespace="http://schemas.xmlsoap.org/soap/encoding/"/>
<xsd:complexType name="LoginResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="currency" nillable="true" type="xsd:string"/>
<xsd:element name="errorCode" type="types:LoginErrorEnum"/>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
<xsd:element name="validUntil" type="xsd:dateTime"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType abstract="true" name="APIResponse">
<xsd:sequence>
<xsd:element name="header" nillable="true" type="types:APIResponseHeader"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="APIResponseHeader">
<xsd:sequence>
<xsd:element name="errorCode" type="types:APIErrorEnum"/>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
<xsd:element name="sessionToken" nillable="true" type="xsd:string"/>
<xsd:element name="timestamp" type="xsd:dateTime"/>
</xsd:sequence>
</xsd:complexType>
<xsd:simpleType name="APIErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK"/>
<xsd:enumeration value="INTERNAL_ERROR"/>
<xsd:enumeration value="EXCEEDED_THROTTLE"/>
<xsd:enumeration value="USER_NOT_SUBSCRIBED_TO_PRODUCT"/>
<xsd:enumeration value="SUBSCRIPTION_INACTIVE_OR_SUSPENDED"/>
<xsd:enumeration value="VENDOR_SOFTWARE_INACTIVE"/>
<xsd:enumeration value="VENDOR_SOFTWARE_INVALID"/>
<xsd:enumeration value="SERVICE_NOT_AVAILABLE_IN_PRODUCT"/>
<xsd:enumeration value="NO_SESSION"/>
<xsd:enumeration value="TOO_MANY_REQUESTS"/>
<xsd:enumeration value="PRODUCT_REQUIRES_FUNDED_ACCOUNT"/>
<xsd:enumeration value="SERVICE_NOT_AVAILABLE_FOR_LOGIN_STATUS"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:simpleType name="LoginErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK"/>
<xsd:enumeration value="OK_MESSAGES"/>
<xsd:enumeration value="FAILED_MESSAGE"/>
<xsd:enumeration value="INVALID_USERNAME_OR_PASSWORD"/>
<xsd:enumeration value="USER_NOT_ACCOUNT_OWNER"/>
<xsd:enumeration value="INVALID_VENDOR_SOFTWARE_ID"/>
<xsd:enumeration value="INVALID_PRODUCT"/>
<xsd:enumeration value="INVALID_LOCATION"/>
<xsd:enumeration value="LOGIN_FAILED_ACCOUNT_LOCKED"/>
<xsd:enumeration value="ACCOUNT_SUSPENDED"/>
<xsd:enumeration value="T_AND_C_ACCEPTANCE_REQUIRED"/>
<xsd:enumeration value="POKER_T_AND_C_ACCEPTANCE_REQUIRED"/>
<xsd:enumeration value="LOGIN_REQUIRE_TERMS_AND_CONDITIONS_ACCEPTANCE"/>
<xsd:enumeration value="LOGIN_UNAUTHORIZED"/>
<xsd:enumeration value="ACCOUNT_CLOSED"/>
<xsd:enumeration value="LOGIN_RESTRICTED_LOCATION"/>
<xsd:enumeration value="API_ERROR"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="LoginReq">
<xsd:sequence>
<xsd:element name="ipAddress" nillable="false" type="xsd:string"/>
<xsd:element name="locationId" nillable="false" type="xsd:int"/>
<xsd:element name="password" nillable="false" type="xsd:string"/>
<xsd:element name="productId" nillable="false" type="xsd:int"/>
<xsd:element name="username" nillable="false" type="xsd:string"/>
<xsd:element name="vendorSoftwareId" nillable="false" type="xsd:int"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="RetrieveLIMBMessageReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest"/>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="RetrieveLIMBMessageResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="errorCode" type="types:RetrieveLIMBMessageErrorEnum"/>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
<xsd:element name="totalMessagesCount" nillable="false" type="xsd:int"/>
<xsd:element name="retrievePersonalMessage" type="types:RetrievePersonalLIMBMessage"/>
<xsd:element name="retrieveTCPrivacyPolicyChangeMessage" type="types:RetrieveTCPrivacyPolicyChangeLIMBMessage"/>
<xsd:element name="retrievePasswordChangeMessage" type="types:RetrievePasswordChangeLIMBMessage"/>
<xsd:element name="retrieveBirthDateCheckMessage" type="types:RetrieveBirthDateCheckLIMBMessage"/>
<xsd:element name="retrieveAddressCheckMessage" type="types:RetrieveAddressCheckLIMBMessage"/>
<xsd:element name="retrieveContactDetailsCheckMessage" type="types:RetrieveContactDetailsCheckLIMBMessage"/>
<xsd:element name="retrieveChatNameChangeMessage" type="types:RetrieveChatNameChangeLIMBMessage"/>
<xsd:element name="retrieveCardBillingAddressCheckItems" nillable="true" type="types:ArrayOfRetrieveCardBillingAddressCheckLIMBMessage"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:simpleType name="RetrieveLIMBMessageErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK"/>
<xsd:enumeration value="API_ERROR"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="RetrievePersonalLIMBMessage">
<xsd:sequence>
<xsd:element name="messageId" nillable="true" type="xsd:int"/>
<xsd:element name="enforceDate" nillable="true" type="xsd:dateTime"/>
<xsd:element name="indicator" nillable="false" type="xsd:boolean"/>
<xsd:element name="message" type="xsd:string"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="RetrieveTCPrivacyPolicyChangeLIMBMessage">
<xsd:sequence>
<xsd:element name="messageId" nillable="true" type="xsd:int"/>
<xsd:element name="enforceDate" nillable="true" type="xsd:dateTime"/>
<xsd:element name="indicator" nillable="false" type="xsd:boolean"/>
<xsd:element name="reasonForChange" type="xsd:string"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="RetrievePasswordChangeLIMBMessage">
<xsd:sequence>
<xsd:element name="messageId" nillable="true" type="xsd:int"/>
<xsd:element name="enforceDate" nillable="true" type="xsd:dateTime"/>
<xsd:element name="indicator" nillable="false" type="xsd:boolean"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="RetrieveBirthDateCheckLIMBMessage">
<xsd:sequence>
<xsd:element name="messageId" nillable="true" type="xsd:int"/>
<xsd:element name="enforceDate" nillable="true" type="xsd:dateTime"/>
<xsd:element name="indicator" nillable="false" type="xsd:boolean"/>
<xsd:element name="birthDate" nillable="true" type="xsd:dateTime"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="RetrieveAddressCheckLIMBMessage">
<xsd:sequence>
<xsd:element name="messageId" nillable="true" type="xsd:int"/>
<xsd:element name="enforceDate" nillable="true" type="xsd:dateTime"/>
<xsd:element name="indicator" nillable="false" type="xsd:boolean"/>
<xsd:element name="address1" type="xsd:string"/>
<xsd:element name="address2" nillable="true" type="xsd:string"/>
<xsd:element name="address3" nillable="true" type="xsd:string"/>
<xsd:element name="town" nillable="true" type="xsd:string"/>
<xsd:element name="county" nillable="true" type="xsd:string"/>
<xsd:element name="zipCode" nillable="true" type="xsd:string"/>
<xsd:element name="country" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="RetrieveContactDetailsCheckLIMBMessage">
<xsd:sequence>
<xsd:element name="messageId" nillable="true" type="xsd:int"/>
<xsd:element name="enforceDate" nillable="true" type="xsd:dateTime"/>
<xsd:element name="indicator" nillable="false" type="xsd:boolean"/>
<xsd:element name="homeTelephone" nillable="true" type="xsd:string"/>
<xsd:element name="workTelephone" nillable="true" type="xsd:string"/>
<xsd:element name="mobileTelephone" nillable="true" type="xsd:string"/>
<xsd:element name="emailAddress" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="RetrieveChatNameChangeLIMBMessage">
<xsd:sequence>
<xsd:element name="messageId" nillable="true" type="xsd:int"/>
<xsd:element name="enforceDate" nillable="true" type="xsd:dateTime"/>
<xsd:element name="indicator" nillable="false" type="xsd:boolean"/>
<xsd:element name="chatName" type="xsd:string"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="ArrayOfRetrieveCardBillingAddressCheckLIMBMessage">
<xsd:sequence>
<xsd:element form="qualified" maxOccurs="3" minOccurs="0"
name="retrieveCardBillingAddressCheckLIMBMessage" nillable="true" type="types:RetrieveCardBillingAddressCheckLIMBMessage"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="RetrieveCardBillingAddressCheckLIMBMessage">
<xsd:sequence>
<xsd:element name="messageId" nillable="true" type="xsd:int"/>
<xsd:element name="enforceDate" nillable="true" type="xsd:dateTime"/>
<xsd:element name="indicator" nillable="false" type="xsd:boolean"/>
<xsd:element name="nickName" type="xsd:string"/>
<xsd:element name="cardShortNumber" type="xsd:string"/>
<xsd:element name="address1" type="xsd:string"/>
<xsd:element name="address2" nillable="true" type="xsd:string"/>
<xsd:element name="address3" nillable="true" type="xsd:string"/>
<xsd:element name="town" nillable="true" type="xsd:string"/>
<xsd:element name="county" nillable="true" type="xsd:string"/>
<xsd:element name="zipCode" nillable="true" type="xsd:string"/>
<xsd:element name="country" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="SubmitLIMBMessageReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest">
<xsd:sequence>
<xsd:element name="password" nillable="false" type="xsd:string"/>
<xsd:element name="submitPersonalMessage" type="types:SubmitPersonalLIMBMessage"/>
<xsd:element name="submitTCPrivacyPolicyChangeMessage" type="types:SubmitTCPrivacyPolicyChangeLIMBMessage"/>
<xsd:element name="submitPasswordChangeMessage" type="types:SubmitPasswordChangeLIMBMessage"/>
<xsd:element name="submitBirthDateCheckMessage" type="types:SubmitBirthDateCheckLIMBMessage"/>
<xsd:element name="submitAddressCheckMessage" type="types:SubmitAddressCheckLIMBMessage"/>
<xsd:element name="submitContactDetailsCheckMessage" type="types:SubmitContactDetailsCheckLIMBMessage"/>
<xsd:element name="submitChatNameChangeMessage" type="types:SubmitChatNameChangeLIMBMessage"/>
<xsd:element name="submitCardBillingAddressCheckItems" nillable="true" type="types:ArrayOfSubmitCardBillingAddressCheckLIMBMessage"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="SubmitPersonalLIMBMessage">
<xsd:sequence>
<xsd:element name="messageId" nillable="true" type="xsd:int"/>
<xsd:element name="acknowledgment" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="SubmitTCPrivacyPolicyChangeLIMBMessage">
<xsd:sequence>
<xsd:element name="tCPrivacyPolicyChangeAcceptance" nillable="false" type="types:PrivacyPolicyChangeResponseEnum"/>
</xsd:sequence>
</xsd:complexType>
<xsd:simpleType name="PrivacyPolicyChangeResponseEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="NO_REPLY"/>
<xsd:enumeration value="ACCEPT"/>
<xsd:enumeration value="REJECT"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="SubmitPasswordChangeLIMBMessage">
<xsd:sequence>
<xsd:element name="messageId" nillable="true" type="xsd:int"/>
<xsd:element name="newPassword" nillable="true" type="xsd:string"/>
<xsd:element name="newPasswordRepeat" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="SubmitBirthDateCheckLIMBMessage">
<xsd:sequence>
<xsd:element name="messageId" nillable="true" type="xsd:int"/>
<xsd:element name="detailsCorrect" nillable="true" type="xsd:string"/>
<xsd:element name="correctBirthDate" nillable="true" type="xsd:dateTime"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="SubmitAddressCheckLIMBMessage">
<xsd:sequence>
<xsd:element name="messageId" nillable="true" type="xsd:int"/>
<xsd:element name="detailsCorrect" nillable="true" type="xsd:string"/>
<xsd:element name="newAddress1" nillable="true" type="xsd:string"/>
<xsd:element name="newAddress2" nillable="true" type="xsd:string"/>
<xsd:element name="newAddress3" nillable="true" type="xsd:string"/>
<xsd:element name="newTown" nillable="true" type="xsd:string"/>
<xsd:element name="newCounty" nillable="true" type="xsd:string"/>
<xsd:element name="newZipCode" nillable="true" type="xsd:string"/>
<xsd:element name="newCountry" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="SubmitContactDetailsCheckLIMBMessage">
<xsd:sequence>
<xsd:element name="messageId" nillable="true" type="xsd:int"/>
<xsd:element name="detailsCorrect" nillable="true" type="xsd:string"/>
<xsd:element name="newHomeTelephone" nillable="true" type="xsd:string"/>
<xsd:element name="newWorkTelephone" nillable="true" type="xsd:string"/>
<xsd:element name="newMobileTelephone" nillable="true" type="xsd:string"/>
<xsd:element name="newEmailAddress" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="SubmitChatNameChangeLIMBMessage">
<xsd:sequence>
<xsd:element name="messageId" nillable="true" type="xsd:int"/>
<xsd:element name="newChatName" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="ArrayOfSubmitCardBillingAddressCheckLIMBMessage">
<xsd:sequence>
<xsd:element form="qualified" maxOccurs="3" minOccurs="0"
name="submitCardBillingAddressCheckLIMBMessage" nillable="true" type="types:SubmitCardBillingAddressCheckLIMBMessage"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="SubmitCardBillingAddressCheckLIMBMessage">
<xsd:sequence>
<xsd:element name="messageId" nillable="true" type="xsd:int"/>
<xsd:element name="detailsCorrect" nillable="true" type="xsd:string"/>
<xsd:element name="nickName" type="xsd:string"/>
<xsd:element name="newAddress1" nillable="true" type="xsd:string"/>
<xsd:element name="newAddress2" nillable="true" type="xsd:string"/>
<xsd:element name="newAddress3" nillable="true" type="xsd:string"/>
<xsd:element name="newTown" nillable="true" type="xsd:string"/>
<xsd:element name="newCounty" nillable="true" type="xsd:string"/>
<xsd:element name="newZipCode" nillable="true" type="xsd:string"/>
<xsd:element name="newCountry" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="SubmitLIMBMessageResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="errorCode" type="types:SubmitLIMBMessageErrorEnum"/>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
<xsd:element name="validationErrors" nillable="true" type="types:ArrayOfLIMBValidationErrorsEnum"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:simpleType name="LIMBValidationErrorsEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="INVALID_DOB"/>
<xsd:enumeration value="INVALID_ADDRESS_LINE1"/>
<xsd:enumeration value="INVALID_ADDRESS_LINE2"/>
<xsd:enumeration value="INVALID_ADDRESS_LINE3"/>
<xsd:enumeration value="INVALID_CITY"/>
<xsd:enumeration value="INVALID_COUNTY_STATE"/>
<xsd:enumeration value="INVALID_COUNTRY_OF_RESIDENCE"/>
<xsd:enumeration value="INVALID_POSTCODE"/>
<xsd:enumeration value="INVALID_HOME_PHONE"/>
<xsd:enumeration value="INVALID_WORK_PHONE"/>
<xsd:enumeration value="INVALID_MOBILE_PHONE"/>
<xsd:enumeration value="INVALID_EMAIL"/>
<xsd:enumeration value="INVALID_PASSWORD"/>
<xsd:enumeration value="RESERVED_PASSWORD"/>
<xsd:enumeration value="INVALID_NEW_PASSWORD"/>
<xsd:enumeration value="INVALID_TC_VERSION"/>
<xsd:enumeration value="INVALID_PRIVICY_VERSION"/>
<xsd:enumeration value="INVALID_CHATNAME"/>
<xsd:enumeration value="CHATNAME_ALREADY_TAKEN"/>
<xsd:enumeration value="INVALID_CARD_BILLING_ADDRESS_LINE_1"/>
<xsd:enumeration value="INVALID_CARD_BILLING_ADDRESS_LINE_2"/>
<xsd:enumeration value="INVALID_CARD_BILLING_ADDRESS_LINE_3"/>
<xsd:enumeration value="INVALID_CARD_BILLING_CITY"/>
<xsd:enumeration value="INVALID_CARD_BILLING_COUNTY_STATE"/>
<xsd:enumeration value="INVALID_CARD_BILLING_ZIP_CODE"/>
<xsd:enumeration value="INVALID_CARD_BILLING_COUNTRY_OF_RESIDENCE"/>
<xsd:enumeration value="NO_SUCH_PERSONAL_MESSAGE"/>
<xsd:enumeration value="NO_SUCH_TC_PRIVACY_POLICY_MESSAGE"/>
<xsd:enumeration value="NO_SUCH_PASSWORD_CHANGE_MESSAGE"/>
<xsd:enumeration value="NO_SUCH_BIRTH_DATE_CHECK_MESSAGE"/>
<xsd:enumeration value="NO_SUCH_ADDRESS_CHECK_MESSAGE"/>
<xsd:enumeration value="NO_SUCH_CONTACT_DETAILS_CHECK_MESSAGE"/>
<xsd:enumeration value="NO_SUCH_CHATNAME_CHENGE_MESSAGE"/>
<xsd:enumeration value="NO_SUCH_CARD_BILLING_ADDRESS_CHECK_MESSAGE"/>
<xsd:enumeration value="INVALID_PERSONAL_MESSAGE_ACKNOWLEDGMENT"/>
<xsd:enumeration value="INVALID_TC_PRIVACY_POLICY_MESSAGE_ACKNOWLEDGMENT"/>
<xsd:enumeration value="INVALID_BIRTH_DATE_CHECK_MESSAGE"/>
<xsd:enumeration value="INVALID_ADDRESS_CHECK_MESSAGE"/>
<xsd:enumeration value="INVALID_CONTACT_DETAILS_CHECK_MESSAGE"/>
<xsd:enumeration value="INVALID_CARD_BILLING_ADDRESS_CHECK_MESSAGE"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="ArrayOfLIMBValidationErrorsEnum">
<xsd:sequence>
<xsd:element form="qualified" maxOccurs="unbounded" minOccurs="0"
name="LIMBValidationErrorsEnum" nillable="true" type="types:LIMBValidationErrorsEnum"/>
</xsd:sequence>
</xsd:complexType>
<xsd:simpleType name="SubmitLIMBMessageErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK"/>
<xsd:enumeration value="VALIDATION_ERRORS"/>
<xsd:enumeration value="INVALID_PASSWORD"/>
<xsd:enumeration value="API_ERROR"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:simpleType name="LogoutErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK" />
<xsd:enumeration value="API_ERROR" />
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="LogoutResp">
<xsd:complexContent mixed="false">
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string" />
<xsd:element name="errorCode" type="types:LogoutErrorEnum" />
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="LogoutReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest" />
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="KeepAliveResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="apiVersion" nillable="true" type="xsd:string"/>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="KeepAliveReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest"/>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType abstract="true" name="APIRequest">
<xsd:sequence>
<xsd:element name="header" nillable="true" type="types:APIRequestHeader"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="APIRequestHeader">
<xsd:sequence>
<xsd:element name="clientStamp" type="xsd:long"/>
<xsd:element name="sessionToken" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="GetEventsResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="errorCode" type="types:GetEventsErrorEnum"/>
<xsd:element name="eventItems" nillable="true" type="types:ArrayOfBFEvent"/>
<xsd:element name="eventParentId" nillable="false" type="xsd:int"/>
<xsd:element name="marketItems" nillable="true" type="types:ArrayOfMarketSummary"/>
<xsd:element name="couponLinks" nillable="true" type="types:ArrayOfCouponLinks"/>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:simpleType name="GetEventsErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK"/>
<xsd:enumeration value="INVALID_EVENT_ID"/>
<xsd:enumeration value="NO_RESULTS"/>
<xsd:enumeration value="INVALID_LOCALE_DEFAULTING_TO_ENGLISH"/>
<xsd:enumeration value="API_ERROR"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="BFEvent">
<xsd:sequence>
<xsd:element name="eventId" nillable="false" type="xsd:int"/>
<xsd:element name="eventName" nillable="true" type="xsd:string"/>
<xsd:element name="eventTypeId" nillable="false" type="xsd:int"/>
<xsd:element name="menuLevel" nillable="false" type="xsd:int"/>
<xsd:element name="orderIndex" nillable="false" type="xsd:int"/>
<xsd:element name="startTime" type="xsd:dateTime"/>
<xsd:element name="timezone" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="ArrayOfBFEvent">
<xsd:sequence>
<xsd:element form="qualified" maxOccurs="unbounded" minOccurs="0"
name="BFEvent" nillable="true" type="types:BFEvent"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="MarketSummary">
<xsd:sequence>
<xsd:element name="eventTypeId" nillable="false" type="xsd:int"/>
<xsd:element name="marketId" nillable="false" type="xsd:int"/>
<xsd:element name="marketName" nillable="true" type="xsd:string"/>
<xsd:element name="marketType" type="types:MarketTypeEnum"/>
<xsd:element name="marketTypeVariant" type="types:MarketTypeVariantEnum"/>
<xsd:element name="menuLevel" nillable="false" type="xsd:int"/>
<xsd:element name="orderIndex" nillable="false" type="xsd:int"/>
<xsd:element name="startTime" type="xsd:dateTime"/>
<xsd:element name="timezone" nillable="true" type="xsd:string"/>
<xsd:element name="venue" nillable="true" type="xsd:string"/>
<xsd:element name="betDelay" nillable="false" type="xsd:int"/>
<xsd:element name="numberOfWinners" nillable="false" type="xsd:int"/>
<xsd:element name="eventParentId" nillable="false" type="xsd:int"/>
<xsd:element name="exchangeId" nillable="false" type="xsd:int"/>
</xsd:sequence>
</xsd:complexType>
<xsd:simpleType name="MarketTypeEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="O"/>
<xsd:enumeration value="L"/>
<xsd:enumeration value="R"/>
<xsd:enumeration value="A"/>
<xsd:enumeration value="NOT_APPLICABLE"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:simpleType name="MarketTypeVariantEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="D"/><!-- default -->
<xsd:enumeration value="ASL"/><!-- asian single line -->
<xsd:enumeration value="ADL"/><!-- asian double line -->
<xsd:enumeration value="COUP"/><!-- coupon -->
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="ArrayOfMarketSummary">
<xsd:sequence>
<xsd:element form="qualified" maxOccurs="unbounded" minOccurs="0"
name="MarketSummary" nillable="true" type="types:MarketSummary"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="CouponLink">
<xsd:sequence>
<xsd:element name="couponId" nillable="false" type="xsd:int"/>
<xsd:element name="couponName" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="ArrayOfCouponLinks">
<xsd:sequence>
<xsd:element form="qualified" maxOccurs="unbounded" minOccurs="0"
name="CouponLink" nillable="true" type="types:CouponLink"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="GetEventsReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest">
<xsd:sequence>
<xsd:element name="eventParentId" nillable="false" type="xsd:int"/>
<xsd:element name="locale" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="GetEventTypesResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="eventTypeItems" nillable="true" type="types:ArrayOfEventType"/>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
<xsd:element name="errorCode" type="types:GetEventsErrorEnum"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="EventType">
<xsd:sequence>
<xsd:element name="id" nillable="false" type="xsd:int"/>
<xsd:element name="name" nillable="true" type="xsd:string"/>
<xsd:element name="nextMarketId" nillable="false" type="xsd:int"/>
<xsd:element name="exchangeId" nillable="false" type="xsd:int"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="ArrayOfEventType">
<xsd:sequence>
<xsd:element form="qualified" maxOccurs="unbounded" minOccurs="0"
name="EventType" nillable="true" type="types:EventType"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="GetEventTypesReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest">
<xsd:sequence>
<xsd:element name="locale" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:simpleType name="MarketStatusEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="ACTIVE"/>
<xsd:enumeration value="INACTIVE"/>
<xsd:enumeration value="CLOSED"/>
<xsd:enumeration value="SUSPENDED"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="Runner">
<xsd:sequence>
<xsd:element name="asianLineId" nillable="false" type="xsd:int"/>
<xsd:element name="handicap" nillable="false" type="xsd:double"/>
<xsd:element name="name" nillable="true" type="xsd:string"/>
<xsd:element name="selectionId" nillable="false" type="xsd:int"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="GetSubscriptionInfoResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
<xsd:element name="subscriptions" nillable="true" type="types:ArrayOfSubscription"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="Subscription">
<xsd:sequence>
<xsd:element name="billingAmount" nillable="false" type="xsd:double"/>
<xsd:element name="billingDate" type="xsd:dateTime"/>
<xsd:element name="billingPeriod" type="types:BillingPeriodEnum"/>
<xsd:element name="productId" nillable="false" type="xsd:int"/>
<xsd:element name="productName" nillable="true" type="xsd:string"/>
<xsd:element name="services" nillable="true" type="types:ArrayOfServiceCall"/>
<xsd:element name="setupCharge" nillable="false" type="xsd:double"/>
<xsd:element name="setupChargeActive" nillable="false" type="xsd:boolean"/>
<xsd:element name="status" type="types:SubscriptionStatusEnum"/>
<xsd:element name="subscribedDate" type="xsd:dateTime"/>
<xsd:element name="vatEnabled" nillable="false" type="xsd:boolean"/>
</xsd:sequence>
</xsd:complexType>
<xsd:simpleType name="BillingPeriodEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="WEEKLY"/>
<xsd:enumeration value="MONTHLY"/>
<xsd:enumeration value="QUARTERLY"/>
<xsd:enumeration value="ANNUALLY"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="ServiceCall">
<xsd:sequence>
<xsd:element name="maxUsages" nillable="false" type="xsd:int"/>
<xsd:element name="period" type="xsd:long"/>
<xsd:element name="periodExpiry" type="xsd:dateTime"/>
<xsd:element name="serviceType" type="types:ServiceEnum"/>
<xsd:element name="usageCount" nillable="false" type="xsd:int"/>
</xsd:sequence>
</xsd:complexType>
<xsd:simpleType name="ServiceEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="ADD_PAYMENT_CARD"/>
<xsd:enumeration value="DELETE_PAYMENT_CARD"/>
<xsd:enumeration value="GET_PAYMENT_CARD"/>
<xsd:enumeration value="UPDATE_PAYMENT_CARD"/>
<xsd:enumeration value="LOGIN"/>
<xsd:enumeration value="GET_BET"/>
<xsd:enumeration value="PLACE_BETS"/>
<xsd:enumeration value="WITHDRAW_TO_PAYMENT_CARD"/>
<xsd:enumeration value="EDIT_BETS"/>
<xsd:enumeration value="DEPOSIT_FROM_PAYMENT_CARD"/>
<xsd:enumeration value="CANCEL_BETS"/>
<xsd:enumeration value="DO_KEEP_ALIVE"/>
<xsd:enumeration value="GET_ACCOUNT_STATEMENT"/>
<xsd:enumeration value="LOAD_MARKET_PROFIT_LOSS"/>
<xsd:enumeration value="GET_CURRENT_BETS"/>
<xsd:enumeration value="LOAD_ACCOUNT_FUNDS"/>
<xsd:enumeration value="LOAD_BET_HISTORY"/>
<xsd:enumeration value="LOAD_DETAILED_AVAIL_MKT_DEPTH"/>
<xsd:enumeration value="GET_MARKET_TRADED_VOLUME"/>
<xsd:enumeration value="LOAD_EVENTS"/>
<xsd:enumeration value="LOAD_EVENT_TYPES"/>
<xsd:enumeration value="LOAD_MARKET"/>
<xsd:enumeration value="LOAD_MARKET_PRICES"/>
<xsd:enumeration value="LOAD_MARKET_PRICES_COMPRESSED"/>
<xsd:enumeration value="LOAD_SERVICE_ANNOUNCEMENTS"/>
<xsd:enumeration value="LOAD_SUBSCRIPTION_INFO"/>
<xsd:enumeration value="CREATE_ACCOUNT"/>
<xsd:enumeration value="CONVERT_CURRENCY"/>
<xsd:enumeration value="GET_CURRENCIES"/>
<xsd:enumeration value="FORGOT_PASSWORD"/>
<xsd:enumeration value="MODIFY_PASSWORD"/>
<xsd:enumeration value="VIEW_PROFILE"/>
<xsd:enumeration value="MODIFY_PROFILE"/>
<xsd:enumeration value="LOGOUT"/>
<xsd:enumeration value="RETRIEVE_LIMB_MESSAGE"/>
<xsd:enumeration value="SUBMIT_LIMB_MESSAGE"/>
<xsd:enumeration value="GET_MARGIN_MARKET_PRICES"/>
<xsd:enumeration value="GET_MARGIN_MARKET_PRICES_COMPRESSED"/>
<xsd:enumeration value="GENERATE_REGISTERED_MARGIN_PRICES"/>
<xsd:enumeration value="MARGINLOGIN"/>
<xsd:enumeration value="TRANSFER_FUNDS"/>
<xsd:enumeration value="ADD_VENDORSUBSCRIPTION"/>
<xsd:enumeration value="UPDATE_VENDORSUBSCRIPTION"/>
<xsd:enumeration value="CANCEL_VENDORSUBSCRIPTION"/>
<xsd:enumeration value="GET_VENDOR_USERS"/>
<xsd:enumeration value="GET_VENDORSUBSCRIPTION_INFO"/>
<xsd:enumeration value="GET_VENDOR_INFO"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="ArrayOfServiceCall">
<xsd:sequence>
<xsd:element form="qualified" maxOccurs="unbounded" minOccurs="0"
name="ServiceCall" nillable="true" type="types:ServiceCall"/>
</xsd:sequence>
</xsd:complexType>
<xsd:simpleType name="SubscriptionStatusEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="ACTIVE"/>
<xsd:enumeration value="INACTIVE"/>
<xsd:enumeration value="SUSPENDED"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="ArrayOfSubscription">
<xsd:sequence>
<xsd:element form="qualified" maxOccurs="unbounded" minOccurs="0"
name="Subscription" nillable="true" type="types:Subscription"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="GetSubscriptionInfoReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest"/>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="DepositFromPaymentCardResp">
<xsd:annotation>
<xsd:documentation>
Result of a DepositFromPaymentCardReq. If errorCode is set to CARD_AMOUNT_OUTSIDE_LIMIT then minAmount and maxAmount
will be set. If errorCode is set to DEPOSIT_LIMIT_EXCEEDED then maxAmount will be set.
</xsd:documentation>
</xsd:annotation>
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="errorCode" type="types:PaymentsErrorEnum"/>
<xsd:element name="fee" nillable="false" type="xsd:double"/>
<xsd:element name="maxAmount" nillable="false" type="xsd:double"/>
<xsd:element name="minAmount" nillable="false" type="xsd:double"/>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
<xsd:element name="netAmount" nillable="false" type="xsd:double"/>
<xsd:element name="transactionId" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
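<!--
Illustrative example (not part of the schema): a possible depositFromPaymentCard result
fragment where errorCode is CARD_AMOUNT_OUTSIDE_LIMIT, so minAmount and maxAmount are
populated as described in the annotation above. All values, the "res" namespace prefix,
and the omission of the inherited APIResponse header content are assumptions made for
documentation purposes only.

<res:Result xmlns:res="http://www.betfair.com/publicapi/types/global/v3/">
<res:errorCode>CARD_AMOUNT_OUTSIDE_LIMIT</res:errorCode>
<res:fee>0.0</res:fee>
<res:maxAmount>2500.0</res:maxAmount>
<res:minAmount>10.0</res:minAmount>
<res:minorErrorCode/>
<res:netAmount>0.0</res:netAmount>
<res:transactionId>0</res:transactionId>
</res:Result>
-->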
<xsd:simpleType name="PaymentsErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK"/>
<xsd:enumeration value="ACCOUNT_SUSPENDED"/>
<xsd:enumeration value="API_ERROR"/>
<xsd:enumeration value="CARD_AMOUNT_OUTSIDE_LIMIT"/>
<xsd:enumeration value="CARD_EXPIRED"/>
<xsd:enumeration value="CARD_LOCKED"/>
<xsd:enumeration value="CARD_NOT_FOUND"/>
<xsd:enumeration value="DEPOSIT_DECLINED"/>
<xsd:enumeration value="DEPOSIT_LIMIT_EXCEEDED"/>
<xsd:enumeration value="EXCEEDS_BALANCE"/>
<xsd:enumeration value="CARD_NOT_VALIDATED"/>
<xsd:enumeration value="INVALID_AMOUNT"/>
<xsd:enumeration value="INVALID_CARD_CV2"/>
<xsd:enumeration value="INVALID_CARD_DETAILS"/>
<xsd:enumeration value="INVALID_EXPIRY_DATE"/>
<xsd:enumeration value="INVALID_MASTERCARD"/>
<xsd:enumeration value="INVALID_PASSWORD"/>
<xsd:enumeration value="CFT_MAX_WITHDRAWAL_LIMIT"/>
<xsd:enumeration value="NEGATIVE_NET_DEPOSITS"/>
<xsd:enumeration value="NON_STERLING_TO_UK_MASTERCARD"/>
<xsd:enumeration value="NON_ZERO_NON_NEG_NET_DEPOSITS"/>
<xsd:enumeration value="UNAUTHORIZED"/>
<xsd:enumeration value="VISA_WITHDRAWAL_NOT_POSSIBLE"/>
<xsd:enumeration value="DUPLICATE_WITHDRAWAL"/>
<!-- The following four values were added for the new withdrawByBankTransfer operation. -->
<!-- Do not use them for other operations unless those operations are also new, because -->
<!-- some existing clients will not be aware of these new enum values. -->
<xsd:enumeration value="DEPOSITS_NOT_CLEARED"/>
<xsd:enumeration value="INVALID_BANK_ACCOUNT_DETAILS_FIELD"/>
<xsd:enumeration value="EXPRESS_TRANSFER_NOT_AVAILABLE"/>
<xsd:enumeration value="UNSUPPORTED_COUNTRY_FOR_BANK_TRANSFER"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="DepositFromPaymentCardReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest">
<xsd:sequence>
<xsd:element name="amount" nillable="false" type="xsd:double"/>
<xsd:element name="cardIdentifier" nillable="true" type="xsd:string"/>
<xsd:element name="cv2" nillable="true" type="xsd:string"/>
<xsd:element name="password" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="AddPaymentCardReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest">
<xsd:sequence>
<xsd:element name="cardNumber" type="xsd:string"/>
<xsd:element name="cardType" type="types:CardTypeEnum"/>
<xsd:element name="startDate" nillable="true" type="xsd:string"/>
<xsd:element name="expiryDate" type="xsd:string"/>
<xsd:element name="issueNumber" nillable="true" type="xsd:string"/>
<xsd:element name="billingName" type="xsd:string"/>
<xsd:element name="nickName" type="xsd:string"/>
<xsd:element name="password" type="xsd:string"/>
<xsd:element name="address1" type="xsd:string"/>
<xsd:element name="address2" nillable="true" type="xsd:string"/>
<xsd:element name="address3" nillable="true" type="xsd:string"/>
<xsd:element name="address4" nillable="true" type="xsd:string"/>
<xsd:element name="town" nillable="true" type="xsd:string"/>
<xsd:element name="county" nillable="true" type="xsd:string"/>
<xsd:element name="zipCode" nillable="true" type="xsd:string"/>
<xsd:element name="country" nillable="true" type="xsd:string"/>
<xsd:element name="cardStatus" type="types:PaymentCardStatusEnum"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="DeletePaymentCardReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest">
<xsd:sequence>
<xsd:element name="nickName" type="xsd:string"/>
<xsd:element name="password" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="GetPaymentCardReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest">
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="UpdatePaymentCardReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest">
<xsd:sequence>
<xsd:element name="nickName" type="xsd:string"/>
<xsd:element name="password" type="xsd:string"/>
<xsd:element name="expiryDate" nillable="true" type="xsd:string"/>
<xsd:element name="startDate" nillable="true" type="xsd:string"/>
<xsd:element name="issueNumber" nillable="true" type="xsd:string"/>
<xsd:element name="address1" nillable="true" type="xsd:string"/>
<xsd:element name="address2" nillable="true" type="xsd:string"/>
<xsd:element name="address3" nillable="true" type="xsd:string"/>
<xsd:element name="address4" nillable="true" type="xsd:string"/>
<xsd:element name="town" nillable="true" type="xsd:string"/>
<xsd:element name="county" nillable="true" type="xsd:string"/>
<xsd:element name="zipCode" nillable="true" type="xsd:string"/>
<xsd:element name="country" nillable="true" type="xsd:string"/>
<xsd:element name="cardStatus" type="types:PaymentCardStatusEnum"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:simpleType name="CardTypeEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="VISA"/>
<xsd:enumeration value="MASTERCARD"/>
<xsd:enumeration value="VISADELTA"/>
<xsd:enumeration value="SWITCH"/>
<xsd:enumeration value="SOLO"/>
<xsd:enumeration value="ELECTRON"/>
<xsd:enumeration value="LASER"/>
<xsd:enumeration value="MAESTRO"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="AddPaymentCardResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="errorCode" type="types:AddPaymentCardErrorEnum"/>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
<xsd:element name="paymentCard" type="types:PaymentCard"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:simpleType name="AddPaymentCardErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK"/>
<xsd:enumeration value="INVALID_CARD_DETAILS"/>
<xsd:enumeration value="INVALID_CARD_CV2"/>
<xsd:enumeration value="INVALID_PASSWORD"/>
<xsd:enumeration value="ACCOUNT_INACTIVE"/>
<xsd:enumeration value="UNAUTHORIZED"/>
<xsd:enumeration value="INVALID_EXPIRY_DATE"/>
<xsd:enumeration value="INVALID_START_DATE"/>
<xsd:enumeration value="INVALID_CARD_NUMBER"/>
<xsd:enumeration value="INVALID_ZIP_CODE"/>
<xsd:enumeration value="INVALID_COUNTRY_CODE"/>
<xsd:enumeration value="INVALID_BILLING_NAME"/>
<xsd:enumeration value="INVALID_CARD_ADDRESS"/>
<xsd:enumeration value="CARD_ALREADY_EXISTS"/>
<xsd:enumeration value="AGE_VERIFICATION_REQUIRED"/>
<xsd:enumeration value="NOT_FUNDED_WITH_FIRST_CARD"/>
<xsd:enumeration value="CARD_NOT_VALID_FOR_ACCOUNT_CURRENCY"/>
<xsd:enumeration value="INVALID_CARD_TYPE"/>
<xsd:enumeration value="MAXIMUM_NUMBER_OF_CARDS_REACHED"/>
<xsd:enumeration value="INVALID_ISSUE_NUMBER"/>
<xsd:enumeration value="API_ERROR"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:simpleType name="DeletePaymentCardErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK"/>
<xsd:enumeration value="INVALID_CARD_DETAILS"/>
<xsd:enumeration value="INVALID_PASSWORD"/>
<xsd:enumeration value="ACCOUNT_INACTIVE"/>
<xsd:enumeration value="UNAUTHORIZED"/>
<xsd:enumeration value="CARD_NOT_DELETED"/>
<xsd:enumeration value="API_ERROR"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="DeletePaymentCardResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="errorCode" type="types:DeletePaymentCardErrorEnum"/>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
<xsd:element name="nickName" type="xsd:string"/>
<xsd:element name="billingName" type="xsd:string"/>
<xsd:element name="cardShortNumber" type="xsd:string"/>
<xsd:element name="cardType" type="types:CardTypeEnum"/>
<xsd:element name="issuingCountry" nillable="true" type="xsd:string"/>
<xsd:element name="expiryDate" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="UpdatePaymentCardResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="errorCode" type="types:UpdatePaymentCardErrorEnum"/>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
<xsd:element name="nickName" type="xsd:string"/>
<xsd:element name="billingName" type="xsd:string"/>
<xsd:element name="cardType" type="types:CardTypeEnum"/>
<xsd:element name="expiryDate" type="xsd:string"/>
<xsd:element name="startDate" nillable="true" type="xsd:string"/>
<xsd:element name="address1" type="xsd:string"/>
<xsd:element name="address2" nillable="true" type="xsd:string"/>
<xsd:element name="address3" nillable="true" type="xsd:string"/>
<xsd:element name="address4" nillable="true" type="xsd:string"/>
<xsd:element name="zipCode" nillable="true" type="xsd:string"/>
<xsd:element name="country" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:simpleType name="UpdatePaymentCardErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK"/>
<xsd:enumeration value="INVALID_CARD_DETAILS"/>
<xsd:enumeration value="INVALID_PASSWORD"/>
<xsd:enumeration value="ACCOUNT_INACTIVE"/>
<xsd:enumeration value="UNAUTHORIZED"/>
<xsd:enumeration value="INVALID_COUNTRY_CODE"/>
<xsd:enumeration value="INVALID_CARD_ADDRESS"/>
<xsd:enumeration value="INVALID_EXPIRY_DATE"/>
<xsd:enumeration value="INVALID_START_DATE"/>
<xsd:enumeration value="INVALID_ZIP_CODE"/>
<xsd:enumeration value="INVALID_ISSUE_NUMBER"/>
<xsd:enumeration value="API_ERROR"/>
<xsd:enumeration value="CARD_NOT_FOUND"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="GetPaymentCardResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="errorCode" type="types:GetPaymentCardErrorEnum"/>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
<xsd:element name="paymentCardItems" nillable="true" type="types:ArrayOfPaymentCard"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:simpleType name="GetPaymentCardErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK"/>
<xsd:enumeration value="INVALID_PASSWORD"/>
<xsd:enumeration value="ACCOUNT_INACTIVE"/>
<xsd:enumeration value="UNAUTHORIZED"/>
<xsd:enumeration value="API_ERROR"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="PaymentCard">
<xsd:sequence>
<xsd:element name="nickName" type="xsd:string"/>
<xsd:element name="cardShortNumber" type="xsd:string"/>
<xsd:element name="expiryDate" type="xsd:string"/>
<xsd:element name="startDate" nillable="true" type="xsd:string"/>
<xsd:element name="issueNumber" nillable="true" type="xsd:string"/>
<xsd:element name="cardType" type="types:CardTypeEnum"/>
<xsd:element name="issuingCountryIso3" nillable="true" type="xsd:string"/>
<xsd:element name="totalDeposits" nillable="true" type="xsd:double"/>
<xsd:element name="totalWithdrawals" nillable="true" type="xsd:double"/>
<xsd:element name="netDeposits" nillable="true" type="xsd:double"/>
<xsd:element name="validationStatus" nillable="true" type="xsd:string"/>
<xsd:element name="billingName" type="xsd:string"/>
<xsd:element name="billingAddress1" nillable="true" type="xsd:string"/>
<xsd:element name="billingAddress2" nillable="true" type="xsd:string"/>
<xsd:element name="billingAddress3" nillable="true" type="xsd:string"/>
<xsd:element name="billingAddress4" nillable="true" type="xsd:string"/>
<xsd:element name="town" nillable="true" type="xsd:string"/>
<xsd:element name="county" nillable="true" type="xsd:string"/>
<xsd:element name="postcode" nillable="true" type="xsd:string"/>
<xsd:element name="billingCountryIso3" nillable="true" type="xsd:string"/>
<xsd:element name="cardStatus" type="types:PaymentCardStatusEnum"/>
</xsd:sequence>
</xsd:complexType>
<xsd:simpleType name="PaymentCardStatusEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="LOCKED"/>
<xsd:enumeration value="UNLOCKED"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="ArrayOfPaymentCard">
<xsd:sequence>
<xsd:element form="qualified" maxOccurs="unbounded" minOccurs="0"
name="PaymentCard" nillable="true" type="types:PaymentCard"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="WithdrawToPaymentCardResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="amountWithdrawn" nillable="false" type="xsd:double"/>
<xsd:element name="errorCode" type="types:PaymentsErrorEnum"/>
<xsd:element name="maxAmount" nillable="false" type="xsd:double"/>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="WithdrawToPaymentCardReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest">
<xsd:sequence>
<xsd:element name="amount" nillable="false" type="xsd:double"/>
<xsd:element name="cardIdentifier" nillable="true" type="xsd:string"/>
<xsd:element name="password" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="WithdrawByBankTransferReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest">
<xsd:sequence>
<xsd:element name="mode" nillable="false" type="types:WithdrawByBankTransferModeEnum"/>
<xsd:element name="amount" nillable="false" type="xsd:double"/>
<xsd:element name="bankAccountDetails" nillable="false"
type="types:BankAccountDetails"/>
<xsd:element name="expressTransfer" nillable="false" type="xsd:boolean"/>
<xsd:element name="password" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="WithdrawByBankTransferResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="errorCode" nillable="false" type="types:PaymentsErrorEnum"/>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
<xsd:element name="amountWithdrawn" nillable="false" type="xsd:double"/>
<xsd:element name="minAmount" nillable="false" type="xsd:double"/>
<xsd:element name="maxAmount" nillable="false" type="xsd:double"/>
<xsd:element name="amountAvailable" nillable="true" type="xsd:double"/>
<xsd:element name="transferFee" nillable="true" type="xsd:double"/>
<xsd:element name="expressTransferFee" nillable="true" type="xsd:double"/>
<xsd:element name="expressTransferAvailable" nillable="true" type="xsd:boolean"/>
<xsd:element name="lastBankAccountDetails" nillable="true"
type="types:BankAccountDetails"/>
<xsd:element name="requiredBankAccountDetailsFields" nillable="true"
type="types:ArrayOfBankAccountDetailsField"/>
<xsd:element name="transactionId" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
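<!--
Illustrative example (not part of the schema): a possible WithdrawByBankTransferResp
fragment returning one of the error codes noted in PaymentsErrorEnum as having been added
for this operation. The values shown are assumptions for documentation purposes only, and
the remaining response elements (amounts, fees, bank account details and the inherited
APIResponse header content) are left out for brevity.

<res:Result xmlns:res="http://www.betfair.com/publicapi/types/global/v3/">
<res:errorCode>EXPRESS_TRANSFER_NOT_AVAILABLE</res:errorCode>
<res:minorErrorCode/>
<res:amountWithdrawn>0.0</res:amountWithdrawn>
</res:Result>
-->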
<xsd:simpleType name="WithdrawByBankTransferModeEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="VALIDATE"/>
<xsd:enumeration value="EXECUTE"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="ArrayOfBankAccountDetailsField">
<xsd:sequence>
<xsd:element form="qualified" maxOccurs="unbounded" name="BankAccountDetailsField"
nillable="true" type="types:BankAccountDetailsField"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="BankAccountDetailsField">
<xsd:complexContent>
<xsd:extension base="types:AbstractField">
<xsd:sequence>
<xsd:element name="type" nillable="false" type="types:BankAccountDetailsFieldEnum"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:simpleType name="BankAccountDetailsFieldEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="PAYEE"/>
<xsd:enumeration value="BANK_LOCATION_ISO3"/>
<xsd:enumeration value="BANK_NAME"/>
<xsd:enumeration value="ACCOUNT_HOLDING_BRANCH"/>
<xsd:enumeration value="ACCOUNT_NUMBER"/>
<xsd:enumeration value="ACCOUNT_TYPE"/>
<xsd:enumeration value="BANK_CODE"/>
<xsd:enumeration value="SORT_CODE"/>
<xsd:enumeration value="BANK_KEY"/>
<xsd:enumeration value="BRANCH_CODE"/>
<xsd:enumeration value="ROUTING"/>
<xsd:enumeration value="BANK_BSB"/>
<xsd:enumeration value="BLZ_CODE"/>
<xsd:enumeration value="ABI_CAB"/>
<xsd:enumeration value="BANK_GIRO_CREDIT_NUMBER"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="AbstractField">
<xsd:sequence>
<xsd:element name="required" nillable="false" type="xsd:boolean"/>
<xsd:element name="readOnly" nillable="false" type="xsd:boolean"/>
<xsd:element name="size" nillable="false" type="xsd:int"/>
<xsd:element name="minLength" nillable="false" type="xsd:int"/>
<xsd:element name="maxLength" nillable="false" type="xsd:int"/>
<xsd:element name="regExp" nillable="false" type="xsd:string"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="BasicBankAccountDetails">
<xsd:sequence>
<xsd:element name="bankName" nillable="true" type="xsd:string"/>
<xsd:element name="accountHoldingBranch" nillable="true" type="xsd:string"/>
<xsd:element name="bankGiroCreditNumber" nillable="true" type="xsd:string"/>
<xsd:element name="accountNumber" nillable="true" type="xsd:string"/>
<xsd:element name="sortCode" nillable="true" type="xsd:string"/>
<xsd:element name="bankCode" nillable="true" type="xsd:string"/>
<xsd:element name="blzCode" nillable="true" type="xsd:string"/>
<xsd:element name="bankBsb" nillable="true" type="xsd:string"/>
<xsd:element name="branchCode" nillable="true" type="xsd:string"/>
<xsd:element name="bankLocationIso3" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="BankAccountDetails">
<xsd:complexContent>
<xsd:extension base="types:BasicBankAccountDetails">
<xsd:sequence>
<xsd:element name="payee" nillable="true" type="xsd:string"/>
<xsd:element name="accountType" nillable="false" type="types:BankAccountTypeEnum"/>
<xsd:element name="bankKey" nillable="true" type="xsd:string"/>
<xsd:element name="routing" nillable="true" type="xsd:string"/>
<xsd:element name="abiCab" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:simpleType name="BankAccountTypeEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="NotSpecified"/>
<xsd:enumeration value="CH"/>
<xsd:enumeration value="SA"/>
<xsd:enumeration value="TR"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="TransferFundsReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest">
<xsd:sequence>
<xsd:element name="sourceWalletId" nillable="false" type="xsd:int" />
<xsd:element name="targetWalletId" nillable="false" type="xsd:int" />
<xsd:element name="amount" nillable="false" type="xsd:double" />
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="TransferFundsResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="errorCode" nillable="false" type="types:TransferFundsErrorEnum" />
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string" />
<xsd:element name="monthlyDepositTotal" nillable="true" type="xsd:double" />
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:simpleType name="TransferFundsErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK" />
<xsd:enumeration value="INVALID_AMOUNT" />
<xsd:enumeration value="TRANSFER_FAILED"/>
<xsd:enumeration value="OVER_BALANCE"/>
<xsd:enumeration value="WALLETS_MUST_BE_DIFFERENT"/>
<xsd:enumeration value="SOURCE_WALLET_UNKNOWN" />
<xsd:enumeration value="SOURCE_WALLET_SUSPENDED" />
<xsd:enumeration value="SOURCE_WALLET_SUSPENDED_KYC" />
<xsd:enumeration value="SOURCE_WALLET_KYC_WITHDRAWAL" />
<xsd:enumeration value="SOURCE_WALLET_KYC_DEPOSIT_TOTAL" />
<xsd:enumeration value="SOURCE_WALLET_KYC_DEPOSIT_MONTH" />
<xsd:enumeration value="TARGET_WALLET_UNKNOWN" />
<xsd:enumeration value="TARGET_WALLET_SUSPENDED" />
<xsd:enumeration value="TARGET_WALLET_SUSPENDED_KYC" />
<xsd:enumeration value="TARGET_WALLET_KYC_WITHDRAWAL" />
<xsd:enumeration value="TARGET_WALLET_KYC_DEPOSIT_TOTAL" />
<xsd:enumeration value="TARGET_WALLET_KYC_DEPOSIT_MONTH" />
<xsd:enumeration value="API_ERROR" />
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="SelfExcludeReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest">
<xsd:sequence>
<xsd:element name="selfExclude" nillable="false" type="xsd:boolean"/>
<xsd:element name="password" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="SelfExcludeResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string" />
<xsd:element name="errorCode" type="types:SelfExcludeErrorEnum"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:simpleType name="SelfExcludeErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK"/>
<xsd:enumeration value="ACCOUNT_CLOSED"/>
<xsd:enumeration value="INVALID_PASSWORD"/>
<xsd:enumeration value="INVALID_SELF_EXCLUDE_VALUE"/>
<xsd:enumeration value="API_ERROR"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="ConvertCurrencyResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="convertedAmount" nillable="false" type="xsd:double"/>
<xsd:element name="errorCode" type="types:ConvertCurrencyErrorEnum"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:simpleType name="ConvertCurrencyErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK"/>
<xsd:enumeration value="INVALID_AMOUNT"/>
<xsd:enumeration value="INVALID_FROM_CURRENCY"/>
<xsd:enumeration value="INVALID_TO_CURRENCY"/>
<xsd:enumeration value="CANNOT_CONVERT"/>
<xsd:enumeration value="API_ERROR"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="ConvertCurrencyReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest">
<xsd:sequence>
<xsd:element name="amount" nillable="false" type="xsd:double"/>
<xsd:element name="fromCurrency" nillable="true" type="xsd:string"/>
<xsd:element name="toCurrency" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="GetCurrenciesResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="currencyItems" nillable="true" type="types:ArrayOfCurrency"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="Currency">
<xsd:sequence>
<xsd:element name="currencyCode" nillable="true" type="xsd:string"/>
<xsd:element name="rateGBP" nillable="false" type="xsd:double"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="ArrayOfCurrency">
<xsd:sequence>
<xsd:element form="qualified" maxOccurs="unbounded" minOccurs="0"
name="Currency" nillable="true" type="types:Currency"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="GetCurrenciesReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest"/>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="GetCurrenciesV2Resp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="currencyItems" nillable="true" type="types:ArrayOfCurrencyV2"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="CurrencyV2">
<xsd:complexContent>
<xsd:extension base="types:Currency">
<xsd:sequence>
<!-- Version 2 fields -->
<xsd:element name="minimumStake" nillable="true" type="xsd:double"/>
<xsd:element name="minimumRangeStake" nillable="true" type="xsd:double"/>
<xsd:element name="minimumBSPLayLiability" nillable="true" type="xsd:double"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="ArrayOfCurrencyV2">
<xsd:sequence>
<xsd:element form="qualified" maxOccurs="unbounded" minOccurs="0"
name="CurrencyV2" nillable="true" type="types:CurrencyV2"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="GetCurrenciesV2Req">
<xsd:complexContent>
<xsd:extension base="types:APIRequest"/>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="ViewReferAndEarnReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest"/>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="ViewReferAndEarnResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
<xsd:element name="errorCode" type="types:ViewReferAndEarnErrorEnum"/>
<xsd:element name="referAndEarnCode" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:simpleType name="ViewReferAndEarnErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK"/>
<xsd:enumeration value="NO_RESULTS"/>
<xsd:enumeration value="API_ERROR"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="ViewProfileReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest"/>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="ViewProfileResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
<xsd:element name="errorCode" type="types:ViewProfileErrorEnum"/>
<xsd:element name="title" type="types:TitleEnum"/>
<xsd:element name="firstName" nillable="true" type="xsd:string"/>
<xsd:element name="surname" nillable="true" type="xsd:string"/>
<xsd:element name="userName" nillable="true" type="xsd:string"/>
<xsd:element name="forumName" nillable="true" type="xsd:string"/>
<xsd:element name="address1" nillable="true" type="xsd:string"/>
<xsd:element name="address2" nillable="true" type="xsd:string"/>
<xsd:element name="address3" nillable="true" type="xsd:string"/>
<xsd:element name="townCity" nillable="true" type="xsd:string"/>
<xsd:element name="countyState" nillable="true" type="xsd:string"/>
<xsd:element name="postCode" nillable="true" type="xsd:string"/>
<xsd:element name="countryOfResidence" nillable="true" type="xsd:string"/>
<xsd:element name="homeTelephone" nillable="true" type="xsd:string"/>
<xsd:element name="workTelephone" nillable="true" type="xsd:string"/>
<xsd:element name="mobileTelephone" nillable="true" type="xsd:string"/>
<xsd:element name="emailAddress" nillable="true" type="xsd:string"/>
<xsd:element name="timeZone" nillable="true" type="xsd:string"/>
<xsd:element name="currency" nillable="true" type="xsd:string"/>
<xsd:element name="gamcareLimit" nillable="true" type="xsd:int"/>
<xsd:element name="gamcareFrequency" type="types:GamcareLimitFreqEnum"/>
<xsd:element name="gamcareLossLimit" nillable="true" type="xsd:int"/>
<xsd:element name="gamcareLossLimitFrequency" type="types:GamcareLimitFreqEnum"/>
<xsd:element name="gamcareUpdateDate" type="xsd:dateTime"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:simpleType name="ViewProfileV2ReqVersionEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="V1"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="ViewProfileV2Req">
<xsd:complexContent>
<xsd:extension base="types:APIRequest">
<xsd:sequence>
<xsd:element name="requestVersion" nillable="true" type="types:ViewProfileV2ReqVersionEnum"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="ViewProfileV2Resp">
<xsd:complexContent>
<xsd:extension base="types:ViewProfileResp">
<xsd:sequence>
<!-- Version 2 Fields -->
<xsd:element name="tAN" nillable="true" type="xsd:string"/>
<xsd:element name="referAndEarnCode" nillable="true" type="xsd:string"/>
<xsd:element name="earthportID" nillable="true" type="xsd:string"/>
<xsd:element name="kYCStatus" type="types:KYCStatusEnum"/>
<xsd:element name="nationalIdentifier" minOccurs="0" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:simpleType name="ViewProfileErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK"/>
<xsd:enumeration value="UNAUTHORIZED"/>
<xsd:enumeration value="API_ERROR"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="ModifyProfileReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest">
<xsd:sequence>
<xsd:element name="password" nillable="false" type="xsd:string"/>
<xsd:element name="address1" nillable="true" type="xsd:string"/>
<xsd:element name="address2" nillable="true" type="xsd:string"/>
<xsd:element name="address3" nillable="true" type="xsd:string"/>
<xsd:element name="townCity" nillable="true" type="xsd:string"/>
<xsd:element name="countyState" nillable="true" type="xsd:string"/>
<xsd:element name="postCode" nillable="true" type="xsd:string"/>
<xsd:element name="countryOfResidence" nillable="true" type="xsd:string"/>
<xsd:element name="homeTelephone" nillable="true" type="xsd:string"/>
<xsd:element name="workTelephone" nillable="true" type="xsd:string"/>
<xsd:element name="mobileTelephone" nillable="true" type="xsd:string"/>
<xsd:element name="emailAddress" nillable="true" type="xsd:string"/>
<xsd:element name="timeZone" nillable="true" type="xsd:string"/>
<xsd:element name="depositLimit" nillable="true" type="xsd:int"/>
<xsd:element name="depositLimitFrequency" nillable="true" type="types:GamcareLimitFreqEnum"/>
<xsd:element name="lossLimit" nillable="true" type="xsd:int"/>
<xsd:element name="lossLimitFrequency" nillable="true" type="types:GamcareLimitFreqEnum"/>
<xsd:element name="nationalIdentifier" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="ModifyProfileResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="errorCode" type="types:ModifyProfileErrorEnum"/>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
<xsd:element name="validationErrors"
nillable="true" type="types:ArrayOfValidationErrorsEnum"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:simpleType name="ModifyProfileErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK"/>
<xsd:enumeration value="VALIDATION_ERRORS"/>
<xsd:enumeration value="PROFILE_MODIFICATION_ERROR"/>
<xsd:enumeration value="UNAUTHORIZED"/>
<xsd:enumeration value="INVALID_PASSWORD"/>
<xsd:enumeration value="ACCOUNT_INACTIVE"/>
<xsd:enumeration value="API_ERROR"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="CreateAccountResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="accountId" nillable="false" type="xsd:int"/>
<xsd:element name="accountStatus" type="types:AccountStatusEnum"/>
<xsd:element name="errorCode" type="types:CreateAccountErrorEnum"/>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
<xsd:element name="tan" nillable="true" type="xsd:string"/>
<xsd:element name="userId" nillable="false" type="xsd:int"/>
<xsd:element name="validationErrors"
nillable="true" type="types:ArrayOfValidationErrorsEnum"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:simpleType name="AccountStatusEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="A"/>
<xsd:enumeration value="C"/>
<xsd:enumeration value="D"/>
<xsd:enumeration value="L"/>
<xsd:enumeration value="P"/>
<xsd:enumeration value="S"/>
<xsd:enumeration value="T"/>
<xsd:enumeration value="X"/>
<xsd:enumeration value="Z"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:simpleType name="CreateAccountErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK"/>
<xsd:enumeration value="VALIDATION_ERRORS"/>
<xsd:enumeration value="ACCOUNT_CREATION_ERROR"/>
<xsd:enumeration value="API_ERROR"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:simpleType name="ValidationErrorsEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="DUPLICATE_USERNAME"/>
<xsd:enumeration value="FUNDS_TRANSFER_CANCEL"/>
<xsd:enumeration value="FUNDS_TRANSFER_CURRENCY_MISMATCH"/>
<xsd:enumeration value="INCOMPLETE_DETAILS"/>
<xsd:enumeration value="INSUFFICIENT_FUNDS"/>
<xsd:enumeration value="INVALID_ACCOUNT_TYPE"/>
<xsd:enumeration value="INVALID_ADDRESS_LINE1"/>
<xsd:enumeration value="INVALID_ADDRESS_LINE2"/>
<xsd:enumeration value="INVALID_ADDRESS_LINE3"/>
<xsd:enumeration value="INVALID_ANSWER1"/>
<xsd:enumeration value="INVALID_ANSWER2"/>
<xsd:enumeration value="INVALID_BROWSER"/>
<xsd:enumeration value="INVALID_CITY"/>
<xsd:enumeration value="INVALID_COUNTRY_OF_RESIDENCE"/>
<xsd:enumeration value="INVALID_COUNTY_STATE"/>
<xsd:enumeration value="INVALID_CURRENCY"/>
<xsd:enumeration value="INVALID_DEPOSIT_LIMIT"/>
<xsd:enumeration value="INVALID_DEPOSIT_LIMIT_FREQUENCY"/>
<xsd:enumeration value="INVALID_DETAILS"/>
<xsd:enumeration value="INVALID_DOB"/>
<xsd:enumeration value="INVALID_EMAIL"/>
<xsd:enumeration value="INVALID_FIRSTNAME"/>
<xsd:enumeration value="INVALID_GENDER"/>
<xsd:enumeration value="INVALID_HOME_PHONE"/>
<xsd:enumeration value="INVALID_IP_ADDRESS"/>
<xsd:enumeration value="INVALID_LANGUAGE"/>
<xsd:enumeration value="INVALID_LOCALE"/>
<xsd:enumeration value="INVALID_LOSS_LIMIT"/>
<xsd:enumeration value="INVALID_LOSS_LIMIT_FREQUENCY"/>
<xsd:enumeration value="INVALID_MASTER_ID"/>
<xsd:enumeration value="INVALID_MOBILE_PHONE"/>
<xsd:enumeration value="INVALID_PARTNERID"/>
<xsd:enumeration value="INVALID_PASSWORD"/>
<xsd:enumeration value="INVALID_POSTCODE"/>
<xsd:enumeration value="INVALID_PRIVICY_VERSION"/>
<xsd:enumeration value="INVALID_PRODUCT_ID"/>
<xsd:enumeration value="INVALID_REFERRER_CODE"/>
<xsd:enumeration value="INVALID_REGION"/>
<xsd:enumeration value="INVALID_SECURITY_QUESTION1"/>
<xsd:enumeration value="INVALID_SECURITY_QUESTION2"/>
<xsd:enumeration value="INVALID_SUBPARTNERID"/>
<xsd:enumeration value="INVALID_SUPERPARTNERID"/>
<xsd:enumeration value="INVALID_SURNAME"/>
<xsd:enumeration value="INVALID_TC_VERSION"/>
<xsd:enumeration value="INVALID_TIMEZONE"/>
<xsd:enumeration value="INVALID_TITLE"/>
<xsd:enumeration value="INVALID_USERNAME"/>
<xsd:enumeration value="INVALID_WORK_PHONE"/>
<xsd:enumeration value="RESERVED_PASSWORD"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="ArrayOfValidationErrorsEnum">
<xsd:sequence>
<xsd:element form="qualified" maxOccurs="unbounded" minOccurs="0"
name="ValidationErrorsEnum" nillable="true" type="types:ValidationErrorsEnum"/>
</xsd:sequence>
</xsd:complexType>
<xsd:complexType name="CreateAccountReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest">
<xsd:sequence>
<xsd:element name="acceptedPrivicyPolicyVersion" nillable="false" type="xsd:int"/>
<xsd:element name="acceptedTermsAndConditionsVersion" nillable="false" type="xsd:int"/>
<xsd:element name="accountType" nillable="false" type="types:AccountTypeEnum"/>
<xsd:element name="address1" nillable="false" type="xsd:string"/>
<xsd:element name="address2" nillable="true" type="xsd:string"/>
<xsd:element name="address3" nillable="true" type="xsd:string"/>
<xsd:element name="answer1" nillable="false" type="xsd:string"/>
<xsd:element name="answer2" nillable="false" type="xsd:string"/>
<xsd:element name="browser" nillable="true" type="xsd:string"/>
<xsd:element name="countryOfResidence" nillable="true" type="xsd:string"/>
<xsd:element name="countyState" nillable="true" type="xsd:string"/>
<xsd:element name="currency" nillable="true" type="xsd:string"/>
<xsd:element name="dateOfBirth" nillable="false" type="xsd:dateTime"/>
<xsd:element name="depositLimit" nillable="false" type="xsd:double"/>
<xsd:element name="depositLimitFrequency" nillable="false" type="types:GamcareLimitFreqEnum"/>
<xsd:element name="emailAddress" nillable="false" type="xsd:string"/>
<xsd:element name="firstName" nillable="false" type="xsd:string"/>
<xsd:element name="gender" nillable="false" type="types:GenderEnum"/>
<xsd:element name="homeTelephone" nillable="false" type="xsd:string"/>
<xsd:element name="informProductsServices" nillable="false" type="xsd:boolean"/>
<xsd:element name="informSpecialOffers" nillable="false" type="xsd:boolean"/>
<xsd:element name="ipAddress" nillable="false" type="xsd:string"/>
<xsd:element name="locale" nillable="true" type="xsd:string"/>
<xsd:element name="lossLimit" nillable="false" type="xsd:double"/>
<xsd:element name="lossLimitFrequency" nillable="false" type="types:GamcareLimitFreqEnum"/>
<xsd:element name="manualAddress" nillable="false" type="xsd:boolean"/>
<xsd:element name="mobileTelephone" nillable="false" type="xsd:string"/>
<xsd:element name="partnerId" nillable="false" type="xsd:int"/>
<xsd:element name="password" nillable="true" type="xsd:string"/>
<xsd:element name="postCode" nillable="true" type="xsd:string"/>
<xsd:element name="preferredName" nillable="true" type="xsd:string"/>
<xsd:element name="productId" nillable="false" type="xsd:int"/>
<xsd:element name="question1" nillable="false" type="types:SecurityQuestion1Enum"/>
<xsd:element name="question2" nillable="false" type="types:SecurityQuestion2Enum"/>
<xsd:element name="referrerCode" nillable="true" type="xsd:string"/>
<xsd:element name="region" type="types:RegionEnum"/>
<xsd:element name="subPartnerId" nillable="false" type="xsd:int"/>
<xsd:element name="superPartnerId" nillable="false" type="xsd:int"/>
<xsd:element name="surname" nillable="false" type="xsd:string"/>
<xsd:element name="timeZone" nillable="true" type="xsd:string"/>
<xsd:element name="title" nillable="false" type="types:TitleEnum"/>
<xsd:element name="townCity" nillable="false" type="xsd:string"/>
<xsd:element name="username" nillable="true" type="xsd:string"/>
<xsd:element name="workTelephone" nillable="true" type="xsd:string"/>
<xsd:element name="nationalIdentifier" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:simpleType name="AccountTypeEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="STANDARD"/>
<xsd:enumeration value="MARGIN"/>
<xsd:enumeration value="TRADING"/>
<xsd:enumeration value="AGENT_CLIENT"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:simpleType name="GamcareLimitFreqEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="DAILY"/>
<xsd:enumeration value="WEEKLY"/>
<xsd:enumeration value="MONTHLY"/>
<xsd:enumeration value="YEARLY"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:simpleType name="GenderEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="M"/>
<xsd:enumeration value="F"/>
<xsd:enumeration value="UNKNOWN"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:simpleType name="SecurityQuestion1Enum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="SQ1A"/>
<xsd:enumeration value="SQ1B"/>
<xsd:enumeration value="SQ1C"/>
<xsd:enumeration value="SQ1D"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:simpleType name="SecurityQuestion2Enum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="SQ2A"/>
<xsd:enumeration value="SQ2B"/>
<xsd:enumeration value="SQ2C"/>
<xsd:enumeration value="SQ2D"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:simpleType name="RegionEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="ZAF"/>
<xsd:enumeration value="NA"/>
<xsd:enumeration value="NORD"/>
<xsd:enumeration value="GBR"/>
<xsd:enumeration value="IRL"/>
<xsd:enumeration value="AUS_NZL"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:simpleType name="TitleEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="Dr"/>
<xsd:enumeration value="Mr"/>
<xsd:enumeration value="Miss"/>
<xsd:enumeration value="Mrs"/>
<xsd:enumeration value="Ms"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:simpleType name="KYCStatusEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="DEFAULT"/>
<xsd:enumeration value="AGE_VERIFIED"/>
<xsd:enumeration value="KYC"/>
<xsd:enumeration value="KYC_NON_AUS"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:simpleType name="ForgotPasswordErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK"/>
<xsd:enumeration value="INVALID_USERNAME"/>
<xsd:enumeration value="INVALID_COUNTRY_OF_RESIDENCE"/>
<xsd:enumeration value="INVALID_EMAIL"/>
<xsd:enumeration value="INVALID_ANSWER"/>
<xsd:enumeration value="INVALID_PASSWORD"/>
<xsd:enumeration value="TOO_MANY_ATTEMPTS_ACCOUNT_SUSPENDED"/>
<xsd:enumeration value="API_ERROR"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:simpleType name="ModifyPasswordErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK"/>
<xsd:enumeration value="INVALID_PASSWORD"/>
<xsd:enumeration value="INVALID_NEW_PASSWORD"/>
<xsd:enumeration value="PASSWORDS_DONT_MATCH"/>
<xsd:enumeration value="API_ERROR"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:simpleType name="SetChatNameErrorEnum">
<xsd:restriction base="xsd:string">
<xsd:enumeration value="OK"/>
<xsd:enumeration value="INVALID_PASSWORD"/>
<xsd:enumeration value="ACCOUNT_SUSPENDED"/>
<xsd:enumeration value="ACCOUNT_NOT_FUNDED"/>
<xsd:enumeration value="CHAT_NAME_UNAVAILABLE"/>
<xsd:enumeration value="CANNOT_CHANGE_CHAT_NAME"/>
<xsd:enumeration value="API_ERROR"/>
</xsd:restriction>
</xsd:simpleType>
<xsd:complexType name="ForgotPasswordReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest">
<xsd:sequence>
<xsd:element name="username" type="xsd:string" nillable="false"/>
<xsd:element name="emailAddress" type="xsd:string" nillable="false"/>
<xsd:element name="countryOfResidence" type="xsd:string" nillable="false"/>
<xsd:element name="forgottenPasswordAnswer1" type="xsd:string" nillable="true"/>
<xsd:element name="forgottenPasswordAnswer2" type="xsd:string" nillable="true"/>
<xsd:element name="newPassword" type="xsd:string" nillable="true"/>
<xsd:element name="newPasswordRepeat" type="xsd:string" nillable="true"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="ForgotPasswordResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="errorCode" type="types:ForgotPasswordErrorEnum"/>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
<xsd:element name="question1" type="xsd:string" nillable="true"/>
<xsd:element name="question2" type="xsd:string" nillable="true"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="ModifyPasswordReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest">
<xsd:sequence>
<xsd:element name="password" type="xsd:string" nillable="false"/>
<xsd:element name="newPassword" type="xsd:string" nillable="false"/>
<xsd:element name="newPasswordRepeat" type="xsd:string" nillable="false"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="ModifyPasswordResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="errorCode" type="types:ModifyPasswordErrorEnum"/>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="SetChatNameReq">
<xsd:complexContent>
<xsd:extension base="types:APIRequest">
<xsd:sequence>
<xsd:element name="password" type="xsd:string" nillable="false"/>
<xsd:element name="chatName" type="xsd:string" nillable="false"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
<xsd:complexType name="SetChatNameResp">
<xsd:complexContent>
<xsd:extension base="types:APIResponse">
<xsd:sequence>
<xsd:element name="errorCode" type="types:SetChatNameErrorEnum"/>
<xsd:element name="minorErrorCode" nillable="true" type="xsd:string"/>
</xsd:sequence>
</xsd:extension>
</xsd:complexContent>
</xsd:complexType>
</xsd:schema>
<xsd:schema elementFormDefault="qualified" targetNamespace="http://www.betfair.com/publicapi/v3/BFGlobalService/">
<xsd:import namespace="http://www.betfair.com/publicapi/types/global/v3/"/>
<xsd:element name="login">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:LoginReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="loginResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:LoginResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="retrieveLIMBMessage">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:RetrieveLIMBMessageReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="retrieveLIMBMessageResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:RetrieveLIMBMessageResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="submitLIMBMessage">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:SubmitLIMBMessageReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="submitLIMBMessageResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:SubmitLIMBMessageResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="logout">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:LogoutReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="logoutResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="false" type="types:LogoutResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="keepAlive">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:KeepAliveReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="keepAliveResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:KeepAliveResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="getEvents">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:GetEventsReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="getEventsResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:GetEventsResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="getActiveEventTypes">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:GetEventTypesReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="getActiveEventTypesResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:GetEventTypesResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="getAllEventTypes">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:GetEventTypesReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="getAllEventTypesResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:GetEventTypesResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="getSubscriptionInfo">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:GetSubscriptionInfoReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="getSubscriptionInfoResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:GetSubscriptionInfoResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="depositFromPaymentCard">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:DepositFromPaymentCardReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="depositFromPaymentCardResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:DepositFromPaymentCardResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="addPaymentCard">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:AddPaymentCardReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="addPaymentCardResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:AddPaymentCardResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="deletePaymentCard">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:DeletePaymentCardReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="deletePaymentCardResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:DeletePaymentCardResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="updatePaymentCard">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:UpdatePaymentCardReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="updatePaymentCardResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:UpdatePaymentCardResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="getPaymentCard">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:GetPaymentCardReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="getPaymentCardResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:GetPaymentCardResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="withdrawToPaymentCard">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:WithdrawToPaymentCardReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="withdrawToPaymentCardResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:WithdrawToPaymentCardResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="selfExclude">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:SelfExcludeReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="selfExcludeResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:SelfExcludeResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="convertCurrency">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:ConvertCurrencyReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="convertCurrencyResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:ConvertCurrencyResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="getAllCurrencies">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:GetCurrenciesReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="getAllCurrenciesResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:GetCurrenciesResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="getAllCurrenciesV2">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:GetCurrenciesV2Req"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="getAllCurrenciesV2Response">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:GetCurrenciesV2Resp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="viewReferAndEarn">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:ViewReferAndEarnReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="viewReferAndEarnResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:ViewReferAndEarnResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="viewProfile">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:ViewProfileReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="viewProfileResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:ViewProfileResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="withdrawByBankTransfer">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:WithdrawByBankTransferReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="withdrawByBankTransferResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:WithdrawByBankTransferResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="viewProfileV2">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:ViewProfileV2Req"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="viewProfileV2Response">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:ViewProfileV2Resp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="modifyProfile">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:ModifyProfileReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="modifyProfileResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:ModifyProfileResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="createAccount">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:CreateAccountReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="createAccountResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:CreateAccountResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="forgotPassword">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:ForgotPasswordReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="forgotPasswordResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:ForgotPasswordResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="modifyPassword">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:ModifyPasswordReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="modifyPasswordResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:ModifyPasswordResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="setChatName">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:SetChatNameReq"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="setChatNameResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:SetChatNameResp"/>
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="transferFunds">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="request" type="types:TransferFundsReq" />
</xsd:sequence>
</xsd:complexType>
</xsd:element>
<xsd:element name="transferFundsResponse">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="Result" nillable="true" type="types:TransferFundsResp" />
</xsd:sequence>
</xsd:complexType>
</xsd:element>
</xsd:schema>
</wsdl:types>
<wsdl:message name="loginIn">
<wsdl:part element="tns:login" name="parameters"/>
</wsdl:message>
<wsdl:message name="loginOut">
<wsdl:part element="tns:loginResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="retrieveLIMBMessageIn">
<wsdl:part element="tns:retrieveLIMBMessage" name="parameters"/>
</wsdl:message>
<wsdl:message name="retrieveLIMBMessageOut">
<wsdl:part element="tns:retrieveLIMBMessageResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="submitLIMBMessageIn">
<wsdl:part element="tns:submitLIMBMessage" name="parameters"/>
</wsdl:message>
<wsdl:message name="submitLIMBMessageOut">
<wsdl:part element="tns:submitLIMBMessageResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="logoutIn">
<wsdl:part element="tns:logout" name="parameters"/>
</wsdl:message>
<wsdl:message name="logoutOut">
<wsdl:part element="tns:logoutResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="keepAliveIn">
<wsdl:part element="tns:keepAlive" name="parameters"/>
</wsdl:message>
<wsdl:message name="keepAliveOut">
<wsdl:part element="tns:keepAliveResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="getEventsIn">
<wsdl:part element="tns:getEvents" name="parameters"/>
</wsdl:message>
<wsdl:message name="getEventsOut">
<wsdl:part element="tns:getEventsResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="getActiveEventTypesIn">
<wsdl:part element="tns:getActiveEventTypes" name="parameters"/>
</wsdl:message>
<wsdl:message name="getActiveEventTypesOut">
<wsdl:part element="tns:getActiveEventTypesResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="getAllEventTypesIn">
<wsdl:part element="tns:getAllEventTypes" name="parameters"/>
</wsdl:message>
<wsdl:message name="getAllEventTypesOut">
<wsdl:part element="tns:getAllEventTypesResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="getSubscriptionInfoIn">
<wsdl:part element="tns:getSubscriptionInfo" name="parameters"/>
</wsdl:message>
<wsdl:message name="getSubscriptionInfoOut">
<wsdl:part element="tns:getSubscriptionInfoResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="depositFromPaymentCardIn">
<wsdl:part element="tns:depositFromPaymentCard" name="parameters"/>
</wsdl:message>
<wsdl:message name="depositFromPaymentCardOut">
<wsdl:part element="tns:depositFromPaymentCardResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="addPaymentCardIn">
<wsdl:part element="tns:addPaymentCard" name="parameters"/>
</wsdl:message>
<wsdl:message name="addPaymentCardOut">
<wsdl:part element="tns:addPaymentCardResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="deletePaymentCardIn">
<wsdl:part element="tns:deletePaymentCard" name="parameters"/>
</wsdl:message>
<wsdl:message name="deletePaymentCardOut">
<wsdl:part element="tns:deletePaymentCardResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="updatePaymentCardIn">
<wsdl:part element="tns:updatePaymentCard" name="parameters"/>
</wsdl:message>
<wsdl:message name="updatePaymentCardOut">
<wsdl:part element="tns:updatePaymentCardResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="getPaymentCardIn">
<wsdl:part element="tns:getPaymentCard" name="parameters"/>
</wsdl:message>
<wsdl:message name="getPaymentCardOut">
<wsdl:part element="tns:getPaymentCardResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="withdrawToPaymentCardIn">
<wsdl:part element="tns:withdrawToPaymentCard" name="parameters"/>
</wsdl:message>
<wsdl:message name="withdrawToPaymentCardOut">
<wsdl:part element="tns:withdrawToPaymentCardResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="selfExcludeIn">
<wsdl:part element="tns:selfExclude" name="parameters"/>
</wsdl:message>
<wsdl:message name="selfExcludeOut">
<wsdl:part element="tns:selfExcludeResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="convertCurrencyIn">
<wsdl:part element="tns:convertCurrency" name="parameters"/>
</wsdl:message>
<wsdl:message name="convertCurrencyOut">
<wsdl:part element="tns:convertCurrencyResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="getAllCurrenciesIn">
<wsdl:part element="tns:getAllCurrencies" name="parameters"/>
</wsdl:message>
<wsdl:message name="getAllCurrenciesOut">
<wsdl:part element="tns:getAllCurrenciesResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="getAllCurrenciesV2In">
<wsdl:part element="tns:getAllCurrenciesV2" name="parameters"/>
</wsdl:message>
<wsdl:message name="getAllCurrenciesV2Out">
<wsdl:part element="tns:getAllCurrenciesV2Response" name="parameters"/>
</wsdl:message>
<wsdl:message name="viewReferAndEarnIn">
<wsdl:part element="tns:viewReferAndEarn" name="parameters"/>
</wsdl:message>
<wsdl:message name="viewReferAndEarnOut">
<wsdl:part element="tns:viewReferAndEarnResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="viewProfileIn">
<wsdl:part element="tns:viewProfile" name="parameters"/>
</wsdl:message>
<wsdl:message name="viewProfileOut">
<wsdl:part element="tns:viewProfileResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="viewProfileV2In">
<wsdl:part element="tns:viewProfileV2" name="parameters"/>
</wsdl:message>
<wsdl:message name="viewProfileV2Out">
<wsdl:part element="tns:viewProfileV2Response" name="parameters"/>
</wsdl:message>
<wsdl:message name="modifyProfileIn">
<wsdl:part element="tns:modifyProfile" name="parameters"/>
</wsdl:message>
<wsdl:message name="modifyProfileOut">
<wsdl:part element="tns:modifyProfileResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="createAccountIn">
<wsdl:part element="tns:createAccount" name="parameters"/>
</wsdl:message>
<wsdl:message name="createAccountOut">
<wsdl:part element="tns:createAccountResponse" name="parameters"/>
</wsdl:message>
<wsdl:message name="forgotPasswordIn">
<wsdl:part name="parameters" element="tns:forgotPassword"/>
</wsdl:message>
<wsdl:message name="forgotPasswordOut">
<wsdl:part name="parameters" element="tns:forgotPasswordResponse"/>
</wsdl:message>
<wsdl:message name="modifyPasswordIn">
<wsdl:part name="parameters" element="tns:modifyPassword"/>
</wsdl:message>
<wsdl:message name="modifyPasswordOut">
<wsdl:part name="parameters" element="tns:modifyPasswordResponse"/>
</wsdl:message>
<wsdl:message name="withdrawByBankTransferIn">
<wsdl:part name="parameters" element="tns:withdrawByBankTransfer"/>
</wsdl:message>
<wsdl:message name="withdrawByBankTransferOut">
<wsdl:part name="parameters" element="tns:withdrawByBankTransferResponse"/>
</wsdl:message>
<wsdl:message name="setChatNameIn">
<wsdl:part name="parameters" element="tns:setChatName"/>
</wsdl:message>
<wsdl:message name="setChatNameOut">
<wsdl:part name="parameters" element="tns:setChatNameResponse"/>
</wsdl:message>
<wsdl:message name="transferFundsIn">
<wsdl:part name="parameters" element="tns:transferFunds"/>
</wsdl:message>
<wsdl:message name="transferFundsOut">
<wsdl:part name="parameters" element="tns:transferFundsResponse"/>
</wsdl:message>
<wsdl:portType name="BFGlobalService">
<wsdl:operation name="login">
<wsdl:input message="tns:loginIn" name="loginIn"/>
<wsdl:output message="tns:loginOut" name="loginOut"/>
</wsdl:operation>
<wsdl:operation name="retrieveLIMBMessage">
<wsdl:input message="tns:retrieveLIMBMessageIn" name="retrieveLIMBMessageIn"/>
<wsdl:output message="tns:retrieveLIMBMessageOut" name="retrieveLIMBMessageOut"/>
</wsdl:operation>
<wsdl:operation name="submitLIMBMessage">
<wsdl:input message="tns:submitLIMBMessageIn" name="submitLIMBMessageIn"/>
<wsdl:output message="tns:submitLIMBMessageOut" name="submitLIMBMessageOut"/>
</wsdl:operation>
<wsdl:operation name="logout">
<wsdl:input message="tns:logoutIn" name="logoutIn"/>
<wsdl:output message="tns:logoutOut" name="logoutOut"/>
</wsdl:operation>
<wsdl:operation name="keepAlive">
<wsdl:input message="tns:keepAliveIn" name="keepAliveIn"/>
<wsdl:output message="tns:keepAliveOut" name="keepAliveOut"/>
</wsdl:operation>
<wsdl:operation name="getEvents">
<wsdl:input message="tns:getEventsIn" name="getEventsIn"/>
<wsdl:output message="tns:getEventsOut" name="getEventsOut"/>
</wsdl:operation>
<wsdl:operation name="getActiveEventTypes">
<wsdl:input message="tns:getActiveEventTypesIn" name="getActiveEventTypesIn"/>
<wsdl:output message="tns:getActiveEventTypesOut" name="getActiveEventTypesOut"/>
</wsdl:operation>
<wsdl:operation name="getAllEventTypes">
<wsdl:input message="tns:getAllEventTypesIn" name="getAllEventTypesIn"/>
<wsdl:output message="tns:getAllEventTypesOut" name="getAllEventTypesOut"/>
</wsdl:operation>
<wsdl:operation name="getSubscriptionInfo">
<wsdl:input message="tns:getSubscriptionInfoIn" name="getSubscriptionInfoIn"/>
<wsdl:output message="tns:getSubscriptionInfoOut" name="getSubscriptionInfoOut"/>
</wsdl:operation>
<wsdl:operation name="depositFromPaymentCard">
<wsdl:input message="tns:depositFromPaymentCardIn" name="depositFromPaymentCardIn"/>
<wsdl:output message="tns:depositFromPaymentCardOut" name="depositFromPaymentCardOut"/>
</wsdl:operation>
<wsdl:operation name="addPaymentCard">
<wsdl:input message="tns:addPaymentCardIn" name="addPaymentCardIn"/>
<wsdl:output message="tns:addPaymentCardOut" name="addPaymentCardOut"/>
</wsdl:operation>
<wsdl:operation name="deletePaymentCard">
<wsdl:input message="tns:deletePaymentCardIn" name="deletePaymentCardIn"/>
<wsdl:output message="tns:deletePaymentCardOut" name="deletePaymentCardOut"/>
</wsdl:operation>
<wsdl:operation name="updatePaymentCard">
<wsdl:input message="tns:updatePaymentCardIn" name="updatePaymentCardIn"/>
<wsdl:output message="tns:updatePaymentCardOut" name="updatePaymentCardOut"/>
</wsdl:operation>
<wsdl:operation name="getPaymentCard">
<wsdl:input message="tns:getPaymentCardIn" name="getPaymentCardIn"/>
<wsdl:output message="tns:getPaymentCardOut" name="getPaymentCardOut"/>
</wsdl:operation>
<wsdl:operation name="withdrawToPaymentCard">
<wsdl:input message="tns:withdrawToPaymentCardIn" name="withdrawToPaymentCardIn"/>
<wsdl:output message="tns:withdrawToPaymentCardOut" name="withdrawToPaymentCardOut"/>
</wsdl:operation>
<wsdl:operation name="selfExclude">
<wsdl:input message="tns:selfExcludeIn" name="selfExcludeIn"/>
<wsdl:output message="tns:selfExcludeOut" name="selfExcludeOut"/>
</wsdl:operation>
<wsdl:operation name="convertCurrency">
<wsdl:input message="tns:convertCurrencyIn" name="convertCurrencyIn"/>
<wsdl:output message="tns:convertCurrencyOut" name="convertCurrencyOut"/>
</wsdl:operation>
<wsdl:operation name="getAllCurrencies">
<wsdl:input message="tns:getAllCurrenciesIn" name="getAllCurrenciesIn"/>
<wsdl:output message="tns:getAllCurrenciesOut" name="getAllCurrenciesOut"/>
</wsdl:operation>
<wsdl:operation name="getAllCurrenciesV2">
<wsdl:input message="tns:getAllCurrenciesV2In" name="getAllCurrenciesV2In"/>
<wsdl:output message="tns:getAllCurrenciesV2Out" name="getAllCurrenciesV2Out"/>
</wsdl:operation>
<wsdl:operation name="viewReferAndEarn">
<wsdl:input message="tns:viewReferAndEarnIn" name="viewReferAndEarnIn"/>
<wsdl:output message="tns:viewReferAndEarnOut" name="viewReferAndEarnOut"/>
</wsdl:operation>
<wsdl:operation name="viewProfile">
<wsdl:input message="tns:viewProfileIn" name="viewProfileIn"/>
<wsdl:output message="tns:viewProfileOut" name="viewProfileOut"/>
</wsdl:operation>
<wsdl:operation name="viewProfileV2">
<wsdl:input message="tns:viewProfileV2In" name="viewProfileV2In"/>
<wsdl:output message="tns:viewProfileV2Out" name="viewProfileV2Out"/>
</wsdl:operation>
<wsdl:operation name="modifyProfile">
<wsdl:input message="tns:modifyProfileIn" name="modifyProfileIn"/>
<wsdl:output message="tns:modifyProfileOut" name="modifyProfileOut"/>
</wsdl:operation>
<wsdl:operation name="createAccount">
<wsdl:input message="tns:createAccountIn" name="createAccountIn"/>
<wsdl:output message="tns:createAccountOut" name="createAccountOut"/>
</wsdl:operation>
<wsdl:operation name="forgotPassword">
<wsdl:input name="forgotPasswordIn" message="tns:forgotPasswordIn"/>
<wsdl:output name="forgotPasswordOut" message="tns:forgotPasswordOut"/>
</wsdl:operation>
<wsdl:operation name="modifyPassword">
<wsdl:input name="modifyPasswordIn" message="tns:modifyPasswordIn"/>
<wsdl:output name="modifyPasswordOut" message="tns:modifyPasswordOut"/>
</wsdl:operation>
<wsdl:operation name="withdrawByBankTransfer">
<wsdl:input name="withdrawByBankTransferIn" message="tns:withdrawByBankTransferIn"/>
<wsdl:output name="withdrawByBankTransferOut" message="tns:withdrawByBankTransferOut"/>
</wsdl:operation>
<wsdl:operation name="setChatName">
<wsdl:input name="setChatNameIn" message="tns:setChatNameIn"/>
<wsdl:output name="setChatNameOut" message="tns:setChatNameOut"/>
</wsdl:operation>
<wsdl:operation name="transferFunds">
<wsdl:input name="transferFundsIn" message="tns:transferFundsIn" />
<wsdl:output name="transferFundsOut" message="tns:transferFundsOut" />
</wsdl:operation>
</wsdl:portType>
<wsdl:binding name="BFGlobalService" type="tns:BFGlobalService">
<soap:binding style="document" transport="http://schemas.xmlsoap.org/soap/http"/>
<wsdl:operation name="login">
<soap:operation soapAction="login" style="document"/>
<wsdl:input name="loginIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="loginOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="retrieveLIMBMessage">
<soap:operation soapAction="retrieveLIMBMessage" style="document"/>
<wsdl:input name="retrieveLIMBMessageIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="retrieveLIMBMessageOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="submitLIMBMessage">
<soap:operation soapAction="submitLIMBMessage" style="document"/>
<wsdl:input name="submitLIMBMessageIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="submitLIMBMessageOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="logout">
<soap:operation soapAction="logout" style="document"/>
<wsdl:input name="logoutIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="logoutOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="keepAlive">
<soap:operation soapAction="keepAlive" style="document"/>
<wsdl:input name="keepAliveIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="keepAliveOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="getEvents">
<soap:operation soapAction="getEvents" style="document"/>
<wsdl:input name="getEventsIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="getEventsOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="getActiveEventTypes">
<soap:operation soapAction="getActiveEventTypes" style="document"/>
<wsdl:input name="getActiveEventTypesIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="getActiveEventTypesOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="getAllEventTypes">
<soap:operation soapAction="getAllEventTypes" style="document"/>
<wsdl:input name="getAllEventTypesIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="getAllEventTypesOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="getSubscriptionInfo">
<soap:operation soapAction="getSubscriptionInfo" style="document"/>
<wsdl:input name="getSubscriptionInfoIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="getSubscriptionInfoOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="depositFromPaymentCard">
<soap:operation soapAction="depositFromPaymentCard" style="document"/>
<wsdl:input name="depositFromPaymentCardIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="depositFromPaymentCardOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="addPaymentCard">
<soap:operation soapAction="addPaymentCard" style="document"/>
<wsdl:input name="addPaymentCardIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="addPaymentCardOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="deletePaymentCard">
<soap:operation soapAction="deletePaymentCard" style="document"/>
<wsdl:input name="deletePaymentCardIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="deletePaymentCardOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="updatePaymentCard">
<soap:operation soapAction="updatePaymentCard" style="document"/>
<wsdl:input name="updatePaymentCardIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="updatePaymentCardOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="getPaymentCard">
<soap:operation soapAction="getPaymentCard" style="document"/>
<wsdl:input name="getPaymentCardIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="getPaymentCardOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="withdrawToPaymentCard">
<soap:operation soapAction="withdrawToPaymentCard" style="document"/>
<wsdl:input name="withdrawToPaymentCardIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="withdrawToPaymentCardOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="selfExclude">
<soap:operation soapAction="selfExclude" style="document"/>
<wsdl:input name="selfExcludeIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="selfExcludeOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="convertCurrency">
<soap:operation soapAction="convertCurrency" style="document"/>
<wsdl:input name="convertCurrencyIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="convertCurrencyOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="getAllCurrencies">
<soap:operation soapAction="getAllCurrencies" style="document"/>
<wsdl:input name="getAllCurrenciesIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="getAllCurrenciesOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="getAllCurrenciesV2">
<soap:operation soapAction="getAllCurrenciesV2" style="document"/>
<wsdl:input name="getAllCurrenciesV2In">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="getAllCurrenciesV2Out">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="viewReferAndEarn">
<soap:operation soapAction="viewReferAndEarn" style="document"/>
<wsdl:input name="viewReferAndEarnIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="viewReferAndEarnOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="viewProfile">
<soap:operation soapAction="viewProfile" style="document"/>
<wsdl:input name="viewProfileIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="viewProfileOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="viewProfileV2">
<soap:operation soapAction="viewProfileV2" style="document"/>
<wsdl:input name="viewProfileV2In">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="viewProfileV2Out">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="modifyProfile">
<soap:operation soapAction="modifyProfile" style="document"/>
<wsdl:input name="modifyProfileIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="modifyProfileOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="createAccount">
<soap:operation soapAction="createAccount" style="document"/>
<wsdl:input name="createAccountIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="createAccountOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="forgotPassword">
<soap:operation soapAction="forgotPassword" style="document"/>
<wsdl:input name="forgotPasswordIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="forgotPasswordOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="modifyPassword">
<soap:operation soapAction="modifyPassword" style="document"/>
<wsdl:input name="modifyPasswordIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="modifyPasswordOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="withdrawByBankTransfer">
<soap:operation soapAction="withdrawByBankTransfer" style="document"/>
<wsdl:input name="withdrawByBankTransferIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="withdrawByBankTransferOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="setChatName">
<soap:operation soapAction="setChatName" style="document"/>
<wsdl:input name="setChatNameIn">
<soap:body use="literal"/>
</wsdl:input>
<wsdl:output name="setChatNameOut">
<soap:body use="literal"/>
</wsdl:output>
</wsdl:operation>
<wsdl:operation name="transferFunds">
<soap:operation soapAction="transferFunds" style="document" />
<wsdl:input name="transferFundsIn">
<soap:body use="literal" />
</wsdl:input>
<wsdl:output name="transferFundsOut">
<soap:body use="literal" />
</wsdl:output>
</wsdl:operation>
</wsdl:binding>
<wsdl:service name="BFGlobalService">
<wsdl:port binding="tns:BFGlobalService" name="BFGlobalService">
<soap:address location="https://api.betfair.com/global/v3/BFGlobalService"/>
</wsdl:port>
</wsdl:service>
</wsdl:definitions>
'''
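# A hedged sketch of consuming the BFGlobalService definitions above with a
# generic SOAP library (zeep is one option; the local file path below is
# hypothetical and assumes the WSDL string has been saved to disk).
from zeep import Client

client = Client("BFGlobalService.wsdl")  # hypothetical path to the saved WSDL

# Dump the services, bindings and operations parsed from the definitions above
# (login, keepAlive, getAllCurrencies, transferFunds, ...).
client.wsdl.dump()

# Individual calls then go through client.service.<operationName>(request=...);
# the request payloads are defined by the types schema and are not filled in here.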
|
Unidentified workers stripping the old roof off the Wanakena Presbyterian Church. The renovation was funded by church and community donations totaling about $35,000. 2004. Wanakena, NY. Photo courtesy of Allen Ditch.
|
# Seamless DVD Player
# Copyright (C) 2004-2005 Martin Soto <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
"""Command objects to control the playback pipeline.
Any virtual machine implementation must return instances of the classes
in this module."""
class PipelineCmd(object):
"""A generic command object.
Objects of this class, when invoked with a pipeline object as
parameter, call the method named by attribute `methodName` passing
it the parameters received by the object constructor."""
__slots__ = ('args',
'keywords')
def __init__(self, *args, **keywords):
self.args = args
self.keywords = keywords
methodName = None
def __call__(self, pipeline):
getattr(pipeline, self.methodName)(*self.args, **self.keywords)
class DoNothing(PipelineCmd):
"""A do-nothing command object."""
__slots__ = ()
def __call__(self, pipeline):
pass
class PlayVobu(PipelineCmd):
"""When constructed with parameter list `(domain, titleNr,
sectorNr)`, play the VOBU corresponding to domain `domain`, title
number `titleNr`, and sector number `sectorNr`."""
__slots__ = ()
methodName = 'playVobu'
class CancelVobu(PipelineCmd):
"""When constructed without parameters, cancel the effect of the
last `PlayVobu` operation. A new `PlayVobu` must be sent
afterwards in order for the pipeline to be able to resume
playback."""
__slots__ = ()
methodName = 'cancelVobu'
# Since accepting the playback of a VOBU is the default, `acceptVobu`
# is equivalent to doing nothing.
class AcceptVobu(DoNothing):
pass
ASPECT_RATIO_4_3 = 10
ASPECT_RATIO_16_9 = 11
class SetAspectRatio(PipelineCmd):
"""When constructed with parameter list `(aspectRatio)`, set the
aspect ratio to the one specified. `aspectRatio` must be one of
the `ASPECT_RATIO` constants in this module."""
__slots__ = ()
methodName = 'setAspectRatio'
class SetAudio(PipelineCmd):
"""When constructed with parameter list `(phys)`, set the physical
audio stream to 'phys'."""
__slots__ = ()
methodName = 'setAudio'
class SetSubpicture(PipelineCmd):
"""When constructed with parameter list `(phys, hide)`, set the
physical subpicture stream to `phys` and hide it if `hide` is
`True`."""
__slots__ = ()
methodName = 'setSubpicture'
class SetSubpictureClut(PipelineCmd):
"""When constructed with parameter list `(clut)`, set the
subpicture color lookup table to `clut`. `clut` is a 16-position
array."""
__slots__ = ()
methodName = 'setSubpictureClut'
class Highlight(PipelineCmd):
"""When constructed with parameter list `(area, button, palette)`,
highlight the specified area, corresponding to the specified
button number and using the specified palette."""
__slots__ = ()
methodName = 'highlight'
class ResetHighlight(PipelineCmd):
"""When constructed without parameters, clear (reset) the highlighted
area."""
__slots__ = ()
methodName = 'resetHighlight'
class StillFrame(PipelineCmd):
"""When constructed without parameter list `(seconds)', tell the
pipeline that a still frame was sent and should be displayed for
the specified number of seconds. If `seconds` is None the still
frame should remain displayed until an external event, like user
interaction, cancels it."""
__slots__ = ()
methodName = 'stillFrame'
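# A minimal sketch of how these command objects are meant to be consumed (the
# FakePipeline class and the driver loop below are hypothetical, not part of
# Seamless): a virtual machine yields instances of the classes above, and the
# playback side simply invokes each one with its pipeline object.
class FakePipeline(object):
    """Stand-in pipeline that only logs the calls it receives."""

    def playVobu(self, domain, titleNr, sectorNr):
        print('play VOBU %s %s %s' % (domain, titleNr, sectorNr))

    def setAudio(self, phys):
        print('audio stream %s' % phys)


def drive(pipeline, commands):
    # Each command object encapsulates exactly one pipeline method call.
    for cmd in commands:
        cmd(pipeline)


drive(FakePipeline(), [PlayVobu(0, 1, 1234), SetAudio(0), DoNothing()])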
|
Believe it or not, it’s been nearly four and a half years since Apple released the original MacBook Air. At the time, it was revolutionary in terms of its size and weight, but it also was slow, had little storage, had only a single USB port for expansion, and was very expensive—it started at $1799, and if you wanted solid-state storage, the price increased dramatically (by $999!). As Jason Snell wrote at the time, “laptop design has always been about compromise,” and the original Air required some painful compromises.
But that Air also gave us a glimpse at the future of Mac laptops: incredibly thin, blissfully light, and surprisingly sturdy, with reliable, fast, flash storage—attributes that have made their way into Apple’s Pro laptop line with the new MacBook Pro with Retina display. It’s safe to say that before long, all of Apple’s laptops will be direct descendants of the Air.
The Air itself has also evolved: The second iteration gained some speed, better video capabilities, and more storage. The third generation got faster and cheaper. In 2010, Apple gave the Air its biggest update by adding a second USB port, improving performance, standardizing on flash storage, lowering prices, and—in the biggest move of all—releasing a road-warrior-dream 11-inch model priced at just $999. The company turbocharged the Air last year by upgrading to Intel Core i5 and i7 processors and adding a Thunderbolt port.
In a few short years, the Air has gone from an expensive technology demonstration to a successful product firmly established as the heart of Apple’s laptop line. This year’s models improve the appeal of the Air by increasing performance, enhancing expansion capabilities, and lowering prices. In fact, the new models might just be making the MacBook Pro line a little nervous.
Through the Air’s now-six iterations, the laptop’s external design has remained essentially the same. Both the 11-inch and 13-inch Airs are thin wedges of unibody aluminum—just under 0.7 inch thick in the back and just over 0.1 inch thick in the front—though obviously with different footprints: The 13-inch model is 12.8 inches wide and 8.9 inches deep, while the 11-inch Air is just 11.8 inches wide and 7.6 inches deep. The 13-inch Air weighs just under 3 pounds, with the 11-inch version weighing in at just under 2.4 pounds.
Flip open the screen, and you reveal a full-size, backlit keyboard—even on the 11-inch model—that uses the same low-profile, flat keys as Apple’s current desktop keyboards. (The differences? The Air’s F-key row uses half-height keys compared to the desktop keyboards, and its bottom row of keys is ever-so-slightly shorter.) There’s also the large, Multi-Touch trackpad that’s ideal for using gestures in OS X—it remains the best trackpad I’ve used on any laptop—and an LED-backlit, widescreen, glossy display surrounded by an aluminum bezel.
Inside the case is a multi-cell, custom-fit—yes, and still non-user-replaceable—battery system that gives the Air line excellent battery life, despite its slim profile. The 11-inch Air has a claimed battery life of up to 5 hours, with the 13-inch Air boasting up to 7 hours. Apple’s battery estimates are based on “wirelessly browsing 25 popular websites with display brightness set to 50 percent.” We test using tasks designed to drain the battery more quickly, but our results show roughly four hours of battery life for the 11-inch Air (slightly better than the 2011 model’s battery life), and around five hours of use for the 13-inch Air (about the same as its predecessor).
There is one change to the Air’s enclosure, though: The built-in camera is now, in Apple’s parlance, a 720p FaceTime HD camera. In other words, the camera can record video at 1280 by 720 resolution. In my testing, the new camera still produces grainy images and video with less-than-optimal color accuracy, but it’s noticeably better than the camera on my 2010 MacBook Air.
The 2012 Air ships with OS X 10.7 (Lion)—including Lion Recovery—and iLife (iPhoto, iMovie, and GarageBand). Anyone who buys a 2012 Air is eligible for a free upgrade to OS X 10.8 (Mountain Lion).
The big changes this year are on the sides and the inside. Like the 2011 Air, the new model sports a Thunderbolt port for both high-speed peripherals and connecting external displays. (Fans of FireWire, take note: Apple has also announced a Thunderbolt to FireWire 800 adapter, although we’re still awaiting its debut.) But the Air’s two USB ports, one on each side, are now USB 3 versions, making the Air line the first—along with the Retina MacBook Pro—to support the new USB standard. Each USB 3 port gets its own bus, is capable of up to 5 Gbps of throughput, and is backward-compatible with USB 2.0 peripherals. We’re currently testing USB 3 performance and hope to have benchmark results soon, but USB 3 opens up the Air to a big market of inexpensive, decent-performance storage devices.
SD Card Reader: The right side of the 13-inch MacBook Air (left) has an SD Card slot, which is not found on the 11-inch model.
You’ll also find, on the left-hand edge, Apple’s MagSafe 2 power connector. The new connector is apparently electrically identical to the original MagSafe, but flatter and wider. This means the new Airs, along with the Retina MacBook Pro, ship with a new MagSafe 2 AC-power adapter. Apple sells the $10 MagSafe to MagSafe 2 Converter, a tiny adapter that lets you use older MagSafe power bricks with the new Airs and Retina MacBook Pro, but you can’t use the new MagSafe 2 power bricks with your older MagSafe-equipped laptops.
Oddly, the design of the new MagSafe 2 plug forces the cable to protrude directly out—at a 90-degree angle—from the plug. The original MagSafe plug had a similar design, but after many people had problems with the cable fraying where it entered the plug, in 2010 Apple switched to a lower-profile, L-shaped plug that didn’t fray as easily and was more difficult to accidentally knock loose. We’ll see how this new (old) design holds up over time.
The right side of the 13-inch Air continues to host an SD card reader. On both sizes, the left side sports a tiny microphone along with a 1/8-inch (3.5mm) headphone jack that also supports headphones with an Apple-style inline remote/microphone module.
The Air continues to support 802.11a/b/g/n wireless networking and Bluetooth 4.0.
Some of the new Air models’ biggest changes are found on the inside. For starters, the 2012 Airs use Intel’s third-generation Core processors (also known as Ivy Bridge) for better performance, making Apple one of the first vendors to adopt these chips in their non-workhorse lines. Specifically, the 11-inch Air uses a 1.7GHz dual-core Core i5 processor with 3MB of shared level–3 cache, while the 13-inch Air uses a 1.8GHz version; a 2.0GHz i7 processor with 4MB level–3 cache is available as a built-to-order option. Like the previous Core i5 and i7 processors (known as Sandy Bridge), Ivy Bridge CPUs include hyper-threading and Turbo Boost. Hyper-threading lets the CPU’s two cores be utilized by the OS as four. When only one core is needed for a task, Turbo Boost lets the chip shut down one of the two cores and bump up the clock speed of the other. Turbo Boost can reach 2.6GHz on the 1.7GHz i5 and 2.8GHz on the 1.8GHz i5. Turbo Boost on the 2.0GHz i7 CPU can reach 3.2GHz.
How We Tested: In Handbrake 0.9.5, we encoded a single chapter (to H.264 using the application's Normal settings) from a DVD that was previously ripped to the hard drive. We installed Parallels 6 and ran WorldBench 6’s Multitask test. In Cinebench, we recorded how long it took to render a scene with multiprocessors.
How We Tested: We ran Mathematica 8’s Evaluate Notebook Test.
Last year’s Airs offered big performance gains over their predecessors thanks to the switch from Core 2 Duo to Core i5 processors. While this year’s Airs don’t offer as big of a jump, our benchmarks show that the new Airs are roughly 15 to 21 percent faster in processor-intensive tasks (namely, our Cinebench CPU and MathematicaMark8 tests) than their immediate predecessors, and that’s the case for both the 13-inch and 11-inch models.
When it came to graphics performance, the 2011 Air was in some ways a step back compared to the 2010 model. Though both versions used an integrated graphics processor (GPU), the 2010 model’s Nvidia GeForce 320M performed significantly better than the 2011 Air’s Intel HD Graphics 3000 in our traditional benchmark tests, such as Cinebench’s OpenGL test and a Call of Duty demo. However, when we used apps that had been specifically optimized for Intel graphics, such as Valve’s Portal 2, the 2011 Air’s GPU slightly bested its predecessor’s.
For 2012, the Air line has been upgraded to an Intel HD Graphics 4000 GPU. While still an integrated GPU, Apple claims the 4000 is up to 60 percent faster on graphics-intensive tasks than last year’s 3000. In our testing the 2012 Airs were indeed around 60 percent faster in our Cinebench OpenGL test, though only 20 to 25 percent faster in our Portal test. So while Apple’s claims may not hold up across the board, it’s clear that the 2012 Airs gain significant graphics improvements and comfortably best all previous generations in this area.
How We Tested: In Cinebench, we ran that application’s OpenGL frames-per-second test. Using Steam and Steam for Mac, we created a self-running demo for Portal and recorded the frames-per-second rating.
There’s one additional graphics-related feature that Apple isn’t advertising: Once you update your 2012 MacBook Air with the MacBook Air (Mid 2012) Software Update 1.0, you can connect two external Thunderbolt displays for a total (including the built-in display) of three displays. This is similar to the four-display capability of the Retina MacBook Pro.
When it comes to RAM, all four MacBook Airs, including the $999 entry-level model, now ship with a minimum of 4GB, and you can upgrade any 2012 Air, at the time of purchase, to a whopping-for-an-Air 8GB. These days, 2GB just isn’t enough for anything but the most-basic usage, and since you can’t upgrade the Air’s RAM later, you’re stuck with what you initially buy. As someone who’s been using a 2010 MacBook Air with 2GB of RAM for a year and a half, I can tell you from personal experience: This is a welcome change. Memory is also a bit faster this year, jumping from 1333MHz to 1600MHz.
|
#!/usr/bin/python
########
#
# qb class item by JOD
# level 3
#
class item: # level3
def __init__(self, local_ndx):
self._local_ndx = local_ndx
# file stream
def write(self, file):
for i in range (0, len(self._nodesNumber)):
file.write(" " + str(self._nodesNumber[i]))
class line(item):
_nodesNumber = [-1,-1]
def __init__(self, local_ndx):
nodesNumber = [-1,-1]
self._nodesNumber = nodesNumber
item.__init__(self, local_ndx)
# content
def fill(self, cubeNodesNumber):
for i in range (0, len(self._nodesNumber)):
self._nodesNumber[i] = int(cubeNodesNumber[i])
class tri(item):
_nodesNumber = [-1,-1,-1]
def __init__(self, local_ndx):
nodesNumber = [-1,-1,-1]
self._nodesNumber = nodesNumber
item.__init__(self, local_ndx)
# content
def fill(self, cubeNodesNumber):
if(self._local_ndx == 0):
self._nodesNumber[0] = int(cubeNodesNumber[0])
self._nodesNumber[1] = int(cubeNodesNumber[1])
self._nodesNumber[2] = int(cubeNodesNumber[3])
else:
self._nodesNumber[0] = int(cubeNodesNumber[1])
self._nodesNumber[1] = int(cubeNodesNumber[2])
self._nodesNumber[2] = int(cubeNodesNumber[3])
class quad(item):
_nodesNumber = [-1,-1,-1,-1]
def __init__(self, local_ndx):
nodesNumber = [-1,-1,-1,-1]
self._nodesNumber = nodesNumber
item.__init__(self, local_ndx)
# content
def fill(self, cubeNodesNumber):
for i in range (0, len(self._nodesNumber)):
self._nodesNumber[i] = int(cubeNodesNumber[i])
class pris(item):
_nodesNumber = [-1,-1,-1,-1,-1,-1]
def __init__(self, local_ndx):
nodesNumber = [-1,-1,-1,-1,-1,-1]
self._nodesNumber = nodesNumber
item.__init__(self, local_ndx)
# content
def fill(self, cubeNodesNumber):
if(self._local_ndx == 0):
self._nodesNumber[0] = int(cubeNodesNumber[0])
self._nodesNumber[1] = int(cubeNodesNumber[1])
self._nodesNumber[2] = int(cubeNodesNumber[3])
self._nodesNumber[3] = int(cubeNodesNumber[4])
self._nodesNumber[4] = int(cubeNodesNumber[5])
self._nodesNumber[5] = int(cubeNodesNumber[7])
else:
self._nodesNumber[0] = int(cubeNodesNumber[1])
self._nodesNumber[1] = int(cubeNodesNumber[2])
self._nodesNumber[2] = int(cubeNodesNumber[3])
self._nodesNumber[3] = int(cubeNodesNumber[4])
self._nodesNumber[4] = int(cubeNodesNumber[5])
self._nodesNumber[5] = int(cubeNodesNumber[6])
class tet(item):
_nodesNumber = [-1,-1,-1,-1]
def __init__(self, local_ndx):
nodesNumber = [-1,-1,-1,-1]
self._nodesNumber = nodesNumber
item.__init__(self, local_ndx)
# content
def fill(self, cubeNodesNumber):
pass
class hexa(item):
_nodesNumber = [-1,-1,-1,-1,-1,-1,-1,-1]
def __init__(self, local_ndx):
nodesNumber = [-1,-1,-1,-1,-1,-1,-1,-1]
self._nodesNumber = nodesNumber
item.__init__(self, local_ndx)
# content
def fill(self, cubeNodesNumber):
for i in range (0, len(self._nodesNumber)):
self._nodesNumber[i] = int(cubeNodesNumber[i])
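# A minimal usage sketch (the node numbering and output file name are made up):
# each item is created with its local index inside the parent cube, filled from
# the cube's node numbers, and then writes its own node list to a stream.
if __name__ == "__main__":
    cube_nodes = [1, 2, 3, 4, 5, 6, 7, 8]  # hypothetical 8-node cube
    elements = [pris(0), pris(1)]  # the two prisms a cube is split into
    for element in elements:
        element.fill(cube_nodes)
    with open("elements.txt", "w") as out:  # hypothetical output file
        for element in elements:
            element.write(out)
            out.write("\n")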
|
I have my own opinion about draft dodgers, but I didn’t have to walk in their shoes, so it’s not my place to say one way or another. The fact is, we are all human beings and all in need of the same courtesies, unconditional love that we want for ourselves.
DNA test produces surprise and new family ties.
I was reading the paper this morning, [what else to do on an early morning at a hotel], and as I thumbed through the sports page, here was the “>continued from 1c” part to the title above. I went back to the page, 1c, and began to read why there would be DNA test results in the sports section. I was surprised to read the unfolding story that began in 1947. Earlier really, as far as life stories go. If a certain young man had not chosen a career in baseball, none of this story would even exist.
I don’t want to give away the story with its twists and turns, and surprises of its own; please read it for yourself. It’s about adoption and not having the desire to find birth parents, because the adoptive parents were exactly that, parents. It’s about a doctor stating, “You need to find out your family medical history.” It’s about a life that began, but ended before it could be born. It’s about lives connecting that didn’t even know each other existed. It’s about …an epic love letter. It’s about more than all of this.
And, it’s not the three you might assume.
Thanks much for following my blog – I’ll give yours a shout out soon.
|
import datetime
from unittest import mock
from urllib import parse
from django.test import TestCase, override_settings
from contentfiles.storage import MediaStorage, RemotePrivateStorage
class TestMediaStorage(TestCase):
def test_url(self):
storage = MediaStorage()
url = storage.url("test.txt")
self.assertEqual(url, "https://demo.contentfiles.net/media/test.txt")
def test_unicode_url(self):
storage = MediaStorage()
url = storage.url("Paris+&+Orléans.jpg")
self.assertEqual(url, "https://demo.contentfiles.net/media/Paris%2B%26%2BOrl%C3%A9ans.jpg")
@override_settings(CONTENTFILES_SSL=False)
def test_http_url(self):
storage = MediaStorage()
url = storage.url("test.txt")
self.assertEqual(url, "http://demo.contentfiles.net/media/test.txt")
@override_settings(CONTENTFILES_HOSTNAME="media.example.org")
def test_custom_hostname(self):
storage = MediaStorage()
url = storage.url("test.txt")
self.assertEqual(url, "https://media.example.org/media/test.txt")
@mock.patch("botocore.auth.HmacV1QueryAuth._get_date")
def test_private_storage(self, mock_get_date):
mock_get_date.return_value = "1234567890"
storage = RemotePrivateStorage()
storage.access_key = "AKIA1234567890ABCDEF"
storage.secret_key = "1234567890123456789012345678901234567890"
storage.bucket_name = "demo-bucket"
url = storage.url("test.txt")
parsed_url = parse.urlparse(url)
url_querystring = parse.parse_qs(parsed_url.query)
self.assertEqual(parsed_url.scheme, "https")
self.assertEqual(parsed_url.netloc, "demo-bucket.s3.amazonaws.com")
self.assertEqual(parsed_url.path, "/demo/test.txt")
self.assertDictEqual(
url_querystring,
{
"AWSAccessKeyId": ["AKIA1234567890ABCDEF"],
"Signature": ["nolnfqXilquat3YAccmhEyNk/IU="],
"Expires": ["1234567890"],
},
)
@override_settings(
CONTENTFILES_S3_REGION="eu-west-2",
CONTENTFILES_S3_ENDPOINT_URL="https://s3.dualstack.eu-west-2.amazonaws.com",
)
@mock.patch("botocore.auth.datetime")
def test_private_storage_aws4(self, mock_datetime):
mock_datetime.datetime.utcnow.return_value = datetime.datetime(2020, 1, 1, 12, 34, 56, 0)
storage = RemotePrivateStorage()
storage.access_key = "AKIA1234567890ABCDEF"
storage.secret_key = "1234567890123456789012345678901234567890"
storage.bucket_name = "demo-bucket"
url = storage.url("test.txt")
parsed_url = parse.urlparse(url)
url_querystring = parse.parse_qs(parsed_url.query)
self.assertEqual(parsed_url.scheme, "https")
self.assertEqual(parsed_url.netloc, "demo-bucket.s3.dualstack.eu-west-2.amazonaws.com")
self.assertEqual(parsed_url.path, "/demo/test.txt")
self.assertDictEqual(
url_querystring,
{
"X-Amz-Algorithm": ["AWS4-HMAC-SHA256"],
"X-Amz-Credential": ["AKIA1234567890ABCDEF/20200101/eu-west-2/s3/aws4_request"],
"X-Amz-Date": ["20200101T123456Z"],
"X-Amz-Expires": ["300"],
"X-Amz-Signature": [
"be39d90daf58c495bde25a607e20dbf2f75f4d01358a5bc93911a2733bd3da21"
],
"X-Amz-SignedHeaders": ["host"],
},
)
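# For context, a sketch of how MediaStorage might be attached to a model field
# in a project that uses this package (the model and field names below are
# hypothetical); the URLs it produces are the ones asserted in TestMediaStorage:
#
#     from django.db import models
#     from contentfiles.storage import MediaStorage
#
#     class Report(models.Model):
#         attachment = models.FileField(storage=MediaStorage())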
|
Austin Air is another company that makes a serious product. All Austin Air Purifiers are manufactured in the USA - an acknowledged sign of good build quality. The Austin Air Healthmate Air Purifier has a 360-degree intake, contains over 60 square feet of "true medical grade HEPA" filter and includes 15 pounds of activated carbon. The company claims its 4-stage filtering system removes dust, hair, dander, mold, chemicals, gases, odors, bacteria and viruses from the air. And Austin Air backs their product with a 5-year warranty - which includes the filter parts!
Austin Air Healthmate air purifiers use a 4-stage filtration process: Stage 1 is a large particle pre-filter - this removes the most easily visible particles, such as hair and dust; Stage 2 is a medium particle pre-filter - this removes small to medium size particles such as pollen and mold; Stage 3 features 15 pounds of an activated carbon / zeolite blend - this filters chemicals, gases and odors such as sulfuric acid and ammonia; finally, Stage 4 is 60 square feet of true medical grade HEPA - this filters bacteria, viruses and ultrafine particles.
The Austin Air Healthmate JR weighs 18 lbs and is 16" high x 11" x 11". The Healthmate Jr. moves 200 cubic feet of air per minute. It is certified to remove 99.97% of particles over the size of 0.3 microns and 95% of particles over 0.1 microns.
According to product reviewers (consumers), the Healthmate Jr. dramatically decreases dust in the home living environment and facilitates easier breathing. Another reviewer noted an "amazing" difference; another still said that the difference in air quality in a 440 square foot room was noticeable within an hour. The Austin units have three settings, the lowest of which is said by most to be very close to inaudible in ordinary use.
Austin Air was specified by FEMA and The Red Cross to address air purification after the tragic events of 9/11/01 in New York.
Austin's products do not use electrostatic / ionic air purification methods and thus they state that their air purifiers produce no ozone.
Austin Air's Hega Series is aimed at allergy sufferers and they claim it is the only unit on the market to feature HEGA filtering - High Efficiency Gas Absorption.
Austin Air also makes products designated for pet owners - and additionally, an intriguing air purifier product designed for babies' nurseries! In addition to cleaning the air, this machine emits a soothing "white noise" sound which apparently mimics sounds heard in the womb. White noise is a special combination of all frequencies and sounds a bit like a waterfall. White noise is widely believed to help infants sleep better and according to Austin's web site, this has now been clinically proven.
Austin Air has the largest air cleaner manufacturing facility in the world, makes everything in-house, and their products are chosen by leading doctors. Austin air purifiers are guaranteed for 5 years. Austin appear to us to be a good, solid, honest American company and one worth supporting!
|
"""Make a detweet module."""
import imaginations as imags
import markovify
import tweepy
import tweet_reader as tr
def detweet(tweeter):
"""Detweet at someone."""
keys = tr.key_access()
auth = tweepy.OAuthHandler(keys['consumer_key'], keys['consumer_secret'])
auth.set_access_token(keys['access_token'], keys['access_token_secret'])
api = tweepy.API(auth)
print(" authorization: complete")
profile = api.get_user(tweeter)
pic_add = profile.profile_image_url
pic = imags.get_twitpic(pic_add.replace("_normal", ""), tweeter)
pix = pic.load()
path = "./twits/imgs/{}/".format(tweeter)
print(" image: acquired")
data = tr.twit_reader(tweeter)
print(" tweets: mcgotten")
# print(" {} data".format(len(data)))
with open("./twits/corpses/{}_corpus.txt".format(tweeter), "w") as fp:
print(" corpse: opened")
for ii in xrange(len(data)):
for jj in xrange(len(data[ii]) if len(data[ii]) < 5000 else 5000):
# print("{}".format(data[ii][jj]["text"]))
twit = data[ii][jj]["text"] + "\n"
fp.write(twit.encode("utf-8"))
# if ii > len(data):
# break
print(" corpse: ready")
with open("./twits/corpses/{}_corpus.txt".format(tweeter), 'r') as cf:
corpus = cf.read()
model = markovify.Text(corpus, state_size=2)
print(" model: super")
tweet_len = 138 - len(tweeter)
sentence = ''
temp_sentence = model.make_short_sentence(tweet_len)
try:
sentence += '{} '.format(temp_sentence)
tweet_len -= len(temp_sentence)
except Exception:
sentence = model.make_sentence()
while len(sentence) < 100:
temp_sentence = model.make_short_sentence(tweet_len)
if temp_sentence is not None:
sentence += temp_sentence + ' '
tweet_len -= len(temp_sentence) + 1
print(" sentence: commuted")
sentence += "#{}".format(tweeter)
pic_name = "{}_dtm.jpg".format(tweeter)
xx = pic.size[0]
yy = pic.size[1]
temp_sentence = model.make_short_sentence(72)
while temp_sentence is None:
temp_sentence = model.make_short_sentence(72)
while len(temp_sentence) < 46:
temp_temp = model.make_short_sentence(60)
if temp_temp is not None:
temp_sentence += " " + temp_temp
l1 = temp_sentence[0:16]
l2 = temp_sentence[16:32]
l3 = temp_sentence[32:48]
imags.xorror(pix, xx, yy, sentence)
imags.shapes(pix, xx, yy, sentence,
(ord(sentence[66]) / 8) % len(sentence) + 1)
imags.wordler(pic, l1, l2, l3)
imags.scoots(pix, xx, yy, sentence)
imags.xorror(pix, xx, yy, sentence)
pic.save(path + pic_name)
print(" glitch: art")
sentence = sentence.replace("@", "#")
sentence = sentence.replace("&", "&")
sentence = sentence.encode('utf-8')
print (" " + sentence)
try:
api.update_with_media(path + pic_name, sentence)
except Exception:
"you're banned, idiot."
|
I live in the Land of Potholes. Is there any downside to keeping an extra 5 lbs PSI in my tires to try to avoid damaging the tires and rims from rough roads? Thanks.
|
"""
---------------
Julython Badges
---------------
This is where all the logic that drives the awarding of badges lives.
Badges consist of a counter, a metric, and badge meta info. The
badge meta data defines the look of the badge: the color, text,
icon, and popup text to display.
The counters and badge awards are stored in a large json blob for
each user. When either a new commit for the user is added or the
user profile is displayed, the counters are updated. After the counters
are updated the badges are iterated over to see if a new one was
added or if the user completed another badge.
Counters
---------
* Game(year) commit count, the count of the current game.
* Total commit count, the overall number of commits
* Game(year) language set, set of languages in the current game.
* Total language set, set of languages over all games.
Badge Example
-------------
Here is a sample badge::
class HundredCommits(Badge):
counter = 'game_commits'
title = 'Outstanding Commitment'
subtitle = '100+ Commits'
total = 100
icon = 'fa-plus-circle'
level = 'expert'
Example badge json blob::
{
'total_commits': 1232,
'total_projects': 34,
'game_commits': 120,
'game_days': 20,
'_current_comment': "current badges are calculated every time",
'badges': [
{
'title': 'Committed',
'subtitle': '100+ Commits',
'count': 200,
'total': 100,
'awarded': true,
'icon': "fa-trophy",
'level': "novice"
}
],
'_archived_comment': "This is the list of previous game awards",
'archived_badges': [
{
'title': 'Committed',
'badge_popup': '100+ Commits in Julython 2012',
'count': 200,
'total': 100,
'awarded': true,
'icon': "fa-trophy",
'level': "novice"
}
]
}
Badge Levels
------------
There are currently four levels, which are different colored icons.
* novice
* journeyman
* expert
* rockstar
"""
import re
from django.core.cache import cache
from july.game.models import Game
from july.people.models import UserBadge, Commit
# TODO(rmyers): copied from django 1.7 remove after we update to it
re_camel_case = re.compile(r'(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))')
def camel_case_to_dashes(value):
return re_camel_case.sub(r' \1', value).strip().lower().replace(' ', '_')
class Badge(object):
"""Base badge class"""
counter = None
total = 0
icon = None
title = ""
subtitle = ""
level = ""
def __init__(self, user_data):
self.user_data = user_data
self.count = self.user_data.get(self.counter)
@property
def awarded(self):
return self.count >= self.total
def to_dict(self):
return {
'title': self.title,
'subtitle': self.subtitle,
'icon': self.icon,
'total': self.total,
'count': self.count,
'level': self.level,
'awarded': self.awarded,
}
class Counter(object):
"""Base Counter Class"""
query = None
metric = None
def __init__(self, user, game=None):
self.user = user
self.game = game
@property
def name(self):
return camel_case_to_dashes(self.__class__.__name__)
@property
def cache_key(self):
return '%s-%s' % (self.name, self.user.pk)
def update(self, user_data):
"Update the user json with the count from the query"
cached = cache.get(self.cache_key)
if cached:
count_dict = cached
else:
count_dict = self.run_query()
cache.set(self.cache_key, count_dict, timeout=300)
user_data.update(count_dict)
def run_query(self):
"""Return the count for this query."""
q = getattr(self.user, self.query)
return {self.name: q.count()}
class GameCounter(Counter):
"""Counter for Game Related Counts
This provides a number of counters for a single game.
* game_commits (total number of commits in the game)
* game_days (number of days in the game the user committed)
"""
metric = 'game'
def run_query(self):
if self.game is None:
self.game = Game.active_or_latest()
# Commit.calender returns a list of objects for each day a user has
# commited along with the count during the day. So we can use this
# query to get the total and the number of days.
resp = Commit.calendar(self.game, user=self.user)
objects = resp['objects']
total = 0
for obj in objects:
total += obj.get('commit_count', 0)
return {
'game_commits': total,
'game_days': len(objects)
}
class TotalCommits(Counter):
query = 'commit_set'
metric = 'commits'
class TotalProjects(Counter):
query = 'projects'
metric = 'projects'
class FirstCommit(Badge):
counter = 'total_commits'
title = 'Welcome Aboard'
subtitle = 'Thanks for Joining'
total = 1
icon = "fa-heart"
level = "novice"
class TenCommits(Badge):
counter = 'game_commits'
title = 'A Healthy Start'
subtitle = '10+ Commits'
total = 10
icon = "fa-plus-circle"
level = "novice"
class ThirtyCommits(Badge):
counter = 'game_commits'
title = '1-a-Day Average'
subtitle = '31+ Commits'
total = 31
icon = "fa-plus-circle"
level = "journeyman"
class HundredCommits(Badge):
counter = 'game_commits'
title = 'Outstanding Commitment'
subtitle = '100+ Commits'
total = 100
icon = "fa-plus-circle"
level = "expert"
class ThousandCommits(Badge):
counter = 'game_commits'
title = 'Do You Sleep at All?'
subtitle = '1000+ Commits'
total = 1000
icon = "fa-plus-circle"
level = "rockstar"
class FiveProjects(Badge):
counter = 'total_projects'
title = 'Thanks for Sharing'
subtitle = '5+ Projects'
total = 5
icon = "fa-folder-o"
level = "novice"
class TenProjects(Badge):
counter = 'total_projects'
title = 'Nice Project List'
subtitle = '10+ Projects'
total = 10
icon = "fa-folder-o"
level = "journeyman"
class FiftyProjects(Badge):
counter = 'total_projects'
title = 'You Love Sharing'
subtitle = '50+ Projects'
total = 50
icon = "fa-folder-o"
level = "expert"
class HundredProjects(Badge):
counter = 'total_projects'
title = 'Wow just wow'
subtitle = '100+ Projects'
total = 100
icon = "fa-folder-o"
level = "rockstar"
class PlayedTheGame(Badge):
counter = 'game_commits'
title = 'Played in 2014'
subtitle = 'Everyone deserves a trophy!'
total = 1
icon = "fa-trophy"
level = "novice"
class OneWeekStreak(Badge):
counter = 'game_days'
title = 'Good Start'
subtitle = '7+ days'
total = 7
icon = 'fa-trophy'
level = "journeyman"
class TwoWeekStreak(Badge):
counter = 'game_days'
title = 'Keep it going'
subtitle = '14+ days'
total = 14
icon = 'fa-trophy'
level = "expert"
class EveryDay(Badge):
counter = 'game_days'
title = 'Excellent Commitment'
subtitle = 'Committing Everyday'
total = 31
icon = 'fa-trophy'
level = "rockstar"
BADGES = [
FirstCommit,
TenCommits,
ThirtyCommits,
HundredCommits,
ThousandCommits,
FiveProjects,
TenProjects,
FiftyProjects,
HundredProjects,
PlayedTheGame,
OneWeekStreak,
TwoWeekStreak,
EveryDay,
]
COUNTERS = [
GameCounter,
TotalCommits,
TotalProjects,
]
def update_user(user, game=None):
user_badge, created = UserBadge.objects.get_or_create(user=user)
user_data = user_badge.badges or {}
# Update all the counts in user_dict
for counter in COUNTERS:
c = counter(user, game=game)
c.update(user_data)
user_badges = []
for badge in BADGES:
b = badge(user_data)
user_badges.append(b.to_dict())
user_data['badges'] = user_badges
user_badge.badges = user_data
user_badge.save()
return user_data
|
Training Regime He trains twice a day.
When and where did you begin this sport? He began skating at primary school in 1998.
Reason for choosing this sport It was the only sport that he could take up in his village in Poland. "I loved competition and races."
|
"""
Sublime Text 3 plugin to update list of packages imported
in a Go (golang) source file (scope: source.go) using 'goimports'
(http://github.com/bradfitz/goimports)
Author: Hamid Ghadyani
URL: https://github.com/spamwax/goimports-sublime-text-3
"""
import sublime
import sublime_plugin
import os
import subprocess
import codecs
import tempfile
PLUGIN_FOLDER = os.path.dirname(os.path.realpath(__file__))
SETTINGS_FILE = "GoImports.sublime-settings"
SETTINGS_FILE = "GoImports.sublime-settings"
def plugin_loaded():
global s
s = sublime.load_settings(SETTINGS_FILE)
class GoImportsException(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class GoimportsrunCommand(sublime_plugin.TextCommand):
def run(self, edit):
global s
# check the scope and run only if view is a source.go
scope = self.view.scope_name(0).split(' ')
go_scope = False
for _v in scope:
if "source.go" in _v:
go_scope = True
break
if not go_scope:
return
# Get the path to goimports binary.
# you can install using:
# $ go get -u golang.org/x/tools/cmd/goimports
goimports_cmd = s.get("goimports_bin")
# Save current text into a buffer that we can pass as stdin to goimports
buf = buffer_text(self.view)
try:
# Run the 'goimports' command
cur_dir = os.path.dirname(self.view.file_name())
r = subprocess.Popen(goimports_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, shell=True,
cwd=cur_dir, stderr=subprocess.PIPE).communicate(input=buf)
if len(r[1]) != 0:
raise GoImportsException(r[1])
newtext = r[0].decode("utf-8")
if self.view.settings().get("ensure_newline_at_eof_on_save"):
if not newtext.endswith("\n"):
newtext += "\n"
# replace the content of the whole file
selection = sublime.Region(0, self.view.size())
self.view.replace(edit, selection, newtext)
except Exception:
import sys
exc = sys.exc_info()[1]
sublime.status_message(str(exc))
class OpenGoimportsSublimeSettings(sublime_plugin.TextCommand):
"""docstring for OpenGoimportsSublimeSettings"""
def run(self, edit):
open_goimports_sublime_settings(self.view.window())
class Goimportsrun(sublime_plugin.EventListener):
"""Will be executed just before saving"""
def on_pre_save(self, view):
if s.get("goimports_enabled",
view.settings().get("goimports_enabled", True)):
view.run_command("goimportsrun")
def buffer_text(view):
file_text = sublime.Region(0, view.size())
return view.substr(file_text).encode('utf-8')
def open_goimports_sublime_settings(window):
fn = os.path.join(PLUGIN_FOLDER, SETTINGS_FILE)
window.open_file(fn)
|
Introducing our new ultra shorts, designed to give an ultra-comfortable fit in our signature low-waist styles. They come with an inner pouch for additional support and comfort. Side splits give additional freedom of movement.
Perfect to lounge around in. Now in silky smooth satin.
The satin fabric is nice, a little thicker and less elastic than the classic nylon/spandex, and the inside pouch is comfortable. In my opinion, all shorts should have an inside pouch or at least a C sling!
These are the best shorts I have ever had. The quality of the shorts is so fine and smooth, which gives me a very comfortable feeling! Love them!
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Script demonstrating the use of the estrangement library to detect and
visualize temporal communities.
"""
__author__ = """\n""".join(['Vikas Kawadia ([email protected])',
'Sameet Sreenivasan <[email protected]>',
'Stephen Dabideen <[email protected]>'])
# Copyright (C) 2012 by
# Vikas Kawadia <[email protected]>
# Sameet Sreenivasan <[email protected]>
# Stephen Dabideen <[email protected]>
# All rights reserved.
import sys
import os
from Estrangement import estrangement
from Estrangement import plots
from Estrangement import options_parser
import multiprocessing
def detect_and_plot_temporal_communities():
""" Function to run simulations, based on a specified dataset, and created
tiled plots of the temporal communities.
Parameters can be specified at the command line, when calling this script.
Alternatively, a config file specifed at the command line can be used to set
the parameter. At the very minimum, a path to the data set must be specified.
Each experiment requires a name, which is used to create a folder to store the
results of the simulation. If the results already exist in the folder specified
by the experiment name, plots are created using these existing results and the
simulation is not run on subsequent calls to EstrangementDemo.py.
To run the simulation again, delete the experiment folder before running this script,
or use a different experiment name.
Examples
--------
>>> # To see all configurable parameters use the -h option
>>> EstrangementDemo.py -h
>>> # Configurable parameters can be specified at the command line
>>> EstrangementDemo.py --dataset_dir ./data --display_on True --exp_name my_experiment
>>> # A config file can be used, but it must be preceded by an '@'
>>> # Three config files are provided as examples; check that the path to the dataset is valid.
>>> EstrangementDemo.py @senate.conf
>>> EstrangementDemo.py @markovian.conf
>>> EstrangementDemo.py @realitymining.conf
"""
# use argparse to parse command-line arguments using optionsadder.py
opt = options_parser.parse_args()
# A dir is created, specified by the --exp_name argument in
# the current working directory to place all output from the experiment
if(not os.path.exists(opt.exp_name)):
os.mkdir(opt.exp_name)
expdir = os.path.abspath(opt.exp_name)
# set the values of delta to find communities for
deltas = opt.delta
datadir = os.path.abspath(opt.dataset_dir)
# we use the multiprocessing module to run computations for the different
# deltas in parallel.
process_dict = {}
for d in deltas:
output_dir = os.path.join(expdir, "task_delta_" + str(d))
if not os.path.exists(output_dir):
os.mkdir(output_dir)
results_filename = os.path.join(output_dir, "matched_labels.log")
if not os.path.exists(results_filename):
print("Detecting temporal communities for delta=%s"%d)
kwargs={'dataset_dir' : datadir,
'delta' : d,
'results_filename' : results_filename,
'minrepeats' : opt.minrepeats,
'increpeats' : opt.increpeats,
'write_stats': True,
}
os.chdir(output_dir)
process_dict[d] = multiprocessing.Process(target = estrangement.ECA, kwargs = kwargs)
process_dict[d].start()
else:
print("Seems like communities have already been computed for delta=%f; to recompute del dir %s"
%(d, output_dir))
for k in process_dict.keys():
process_dict[k].join()
print("\nDone computing all temporal communities, now producing some visualizations")
# dictionary to pass the output to the plot function
matched_labels_dict = {}
for d in deltas:
results_filename = os.path.join(expdir, "task_delta_" + str(d), "matched_labels.log")
with open(results_filename, 'r') as fr:
result = eval(fr.read())
matched_labels_dict[d] = result
os.chdir(expdir)
# plot the temporal communities
plots.plot_temporal_communities(matched_labels_dict)
os.chdir('..')
# to plot other parameters, set write_stats=True in estrangement.ECA()
# and use plots.plot_function(). For example,
# estrangement.plots.plot_function(['Estrangement'])
if __name__ == "__main__":
detect_and_plot_temporal_communities()
|
Online high school programs are helping students of all types earn diplomas. There are many online high schools. Learn more about them here.
Students and their families have different reasons for seeking a non-traditional classroom education.
Reasons reported by parents and students who choose internet-based high schools include the need for extra attention, the desire to escape the distractions of the traditional classroom, the need to get away from bullies, the wish to get a head start on higher education, and an effort to escape the monotony of classroom instruction.
Some families have unique situations. For instance, some students are undergoing treatments that keep them out of school for an extended time. Without an online option, those students would fall behind in their studies.
Whatever your circumstances, you should be pleased to know that you have options. The web has made it easier for everyone to get a quality education.
A high school diploma earned through an accredited online program is equivalent to one received through a traditional school, and colleges regard such diplomas as acceptable. College entrance exams will typically still be required, but that is the case at most universities for students who graduated from a private or public school. As long as the school is accredited, you can trust that the degree is reputable.
There are scammers, of course, and you should be skeptical of any school that tries to sell you a diploma. These scammers are commonly called diploma mills. If there are few requirements for earning the diploma, it is probably from a diploma mill, and the document you would receive is worthless.
Some online options are part of the U.S. public school system. Others are separate from the public schools and operate more like traditional private schools. GWU Online High School, Keystone and the K12 International Academy are three of the choices. Learn more about K12 below.
K12 International Academy offers courses for children of all ages. Students who have already completed some high school or middle school work can choose K12 to finish their education.
Enrolling in the academy is easy. Full-time and part-time students located all over the world are accepted. For full-time students, the Academy is their primary school. Part-time students may also attend traditional classes; they use K12 classes to supplement their education and prepare for college.
|
# coding: utf-8
"""
Sample for the pyautogui module.
About moving the mouse.
"""
import pyautogui as autogui
from trypython.common.commoncls import SampleBase
class Sample(SampleBase):
def exec(self):
# ---------------------------------------------------------------------
# The pyautogui module lets you perform GUI automation from Python
# ---------------------------------------------------------------------
# http://pyautogui.readthedocs.io/en/latest/cheatsheet.html
# ---------------------------------------------------------------------
# size() returns the main monitor's screen size, and moveTo() moves the
# mouse cursor to the specified position.
# ---------------------------------------------------------------------
screen_width, screen_height = autogui.size()
autogui.moveTo(100, 100, duration=2)
autogui.moveTo(screen_width - 100, 100, duration=1)
autogui.moveTo(screen_width - 100, screen_height - 100, duration=1)
autogui.moveTo(100, screen_height - 100, duration=2)
def go():
obj = Sample()
obj.exec()
|
Built by CNI, a strategic partner for ICC in Brazil, the ICC/CNI International Arbitration Hearing Center will facilitate the conduct of dispute resolution proceedings involving parties from Brazil and the wider Latin America region.
The Hearing Center was officially inaugurated on the occasion of the 7th ICC Brazilian Arbitration Day by ICC Court President Alexis Mourre, Chair of ICC Brazil Daniel Feffer and CNI President Robson Andrade.
According to recently released ICC Arbitration statistics from the International Court of Arbitration, Brazil ranked seventh (of 140 countries) in the global listing of new cases filed with the ICC Court in 2017. Nine cases are currently being managed by the ICC Court’s newly established case management team in Sao Paulo.
CNI President, Robson Braga de Andrade, said that the Centre would strengthen ties between Brazilian industry and ICC, and illustrated CNI’s commitment to improving the business environment of the country.
The inauguration of the Hearing Centre and ICC Brazil Arbitration Day took place as part of a number of ICC activities in Sao Paulo this week. Now in its seventh year, Brazil Arbitration Day brought together close to 300 practitioners to discuss issues and recent developments related to the current international arbitration landscape.
On 14 March, ICC held a training explaining the roles and tasks of a tribunal secretary and the benefits they can bring to proceedings. The week concludes on 16 March with a special training for State Entities.
|
from time import sleep
from datetime import datetime, timedelta
import random
from django.core.files.base import ContentFile
from django.core.files.storage import get_storage_class
from django.utils.translation import ugettext_lazy as _
from django.contrib.webdesign import lorem_ipsum
from server_status.conf import settings
from server_status.base import BaseServerStatusPlugin
from server_status.registry import plugins
from server_status import exceptions
class BaseFileSystemTest(BaseServerStatusPlugin):
_name = "Filesystems"
_group = "Storage"
storage = None
filename_pattern = 'health_check_storage_test/test-{}-{}.txt'
def get_storage(self):
if isinstance(self.storage, basestring):
return get_storage_class(self.storage)()
else:
return self.storage
def get_file_name(self):
return self.filename_pattern.format(datetime.now(),
random.randint(10000, 99999))
def get_file_content(self):
# select 64 random lorem ipsum words.
return lorem_ipsum.words(64)
def check_status(self):
try:
# write the file to the storage backend
storage = self.get_storage()
file_name = self.get_file_name()
file_content = self.get_file_content()
# save the file
file_name = storage.save(
file_name, ContentFile(content=file_content))
# read the file and compare
f = storage.open(file_name)
if not storage.exists(file_name):
raise exceptions.ServiceUnavailable(
code="error",
description=_("Filesystem is currently in a readonly state."))
if not f.read() == file_content:
raise exceptions.ServiceUnavailable(
code="error",
message=_("Filesystem is content doesn't match"))
# delete the file and make sure it is gone
storage.delete(file_name)
if storage.exists(file_name):
raise exceptions.ServiceUnavailable(
code="error",
message=_("File was not deleted"))
return True
except exceptions.ServiceUnavailable:
# let specific availability errors propagate unchanged
raise
except Exception:
raise exceptions.ServiceUnstable(
code="error",
message="unknown problems")
@plugins.register
class DefaultFileStorageHealthCheck(BaseFileSystemTest):
storage = settings.DEFAULT_FILE_STORAGE
|
Your Party is Sure to Be a Hit!
Your party is guaranteed to be a hit if you put “Nick’s Music By The Bay” in charge of the music. For many years now, I have been encouraging people to have fun and to dance. I always find the right sound for weddings, birthday parties or any event you have. My repertoire ranges from country and 60’s, 70’s and 80’s hits to old and new favorites. I mostly DJ around the Bay Area; however, I will be happy to go wherever your special event needs me to go.
If your party requires music videos, I can easily cover that too with an extensive collection of over 100,000 videos covering all types of music and periods from the 50’s thru the present, from disco to country and everything in between.
I am not just a DJ, I also am an experienced line dance instructor. I have contacts who can help you organize your entire event, no matter how large or small. I will be happy to meet with you to discuss your special plans so contact me at 925.388.6509.
Playing the best music for your party. It’s all about music.
Copyright © 2014 Nick's Music By The Bay. All rights reserved.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Autore: Cristian Consonni <[email protected]>
# Inspired by this gist by atomotic:
# https://gist.github.com/atomotic/7229203
#
# The code is released with an MIT license
# please see the LICENSE file for details.
import sys
import csv
import requests
import urlparse
import StringIO
import pickle
from produce_enwiki_titles import PICKLE_FILE
FIELDNAMES_WIKIMAP = ('LC_head', 'relation', 'enwiki')
FIELDNAMES_THES2LSCH = ('thes_id', 'relation', 'lc_head_id', 'wikidata')
LOCH_BASEURL = 'http://id.loc.gov/authorities/label/'
OUTFILE = 'thes2lcsh.map'
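# Workflow: load the pickled map of enwiki titles, parse one pipe-delimited
# mapping line passed in sys.argv[1] (comment lines starting with '#' are
# skipped), resolve the LC heading label via id.loc.gov to obtain its
# identifier, and append a (thes_id, relation, lc_head_id, wikidata) row
# to the output file.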
with open(PICKLE_FILE, 'r') as infile:
enwiki_titles = pickle.load(infile)
f = StringIO.StringIO(sys.argv[1])
csvin = csv.DictReader(
filter(lambda row: row[0]!='#', f),
FIELDNAMES_WIKIMAP,
delimiter='|'
)
wikimap = [line for line in csvin]
if len(wikimap) == 1:
line = wikimap[0]
print "Process line: ", line
elif len(wikimap) == 0:
print "Discard comments or empty lines", wikimap
exit(0)
else:
print "Error! Line too long: ", wikimap
exit(-1)
finalout = open(OUTFILE, 'a+')
writer = csv.DictWriter(finalout, FIELDNAMES_THES2LSCH)
enwiki = line['enwiki']
if enwiki in enwiki_titles:
resolv = enwiki_titles[enwiki]
req = requests.get(LOCH_BASEURL+line['LC_head'])
if req.ok:
urlpath = urlparse.urlparse(req.url).path.split('/')[-1]
lc_head_no = urlpath.replace('.html', '')
fields = (resolv['thes_id'].strip().strip('"'),
line['relation'],
lc_head_no,
resolv['wikidata']
)
diz = dict(zip(FIELDNAMES_THES2LSCH, fields))
print "Writing: ", diz
writer.writerow(diz)
else:
print "Error with request: ", line
finalout.close()
|
Fundraising Efforts throughout the year enhance your child's experience here at UDCNS!
Pretzel sales are two Fridays a month. The Pre-K classes take responsibility for these sales. The kids stand at the table in the entranceway. They collect the money and count the change with smiles on their faces! The money collected is used for extras throughout the year for the kids! Plus, it's a learning experience for the Pre-K children.
Scholastic Books has given us an amazing opportunity to encourage our children to read. There are two ways UDCNS participates in this wonderful program. We host two book fairs throughout the year. Secondly, you will see packets on the table each month with the books available.
If you order using our number MG2LQ, UDCNS will receive credit! We get books for our school library and our teachers!
KID STUFF HAS NEW COUPONS THIS YEAR!! We are very excited to offer this book to our families!
UDCNS receives 50% of each book sold! It is such a simple way to earn money for our school.
Giant A Plus Rewards is another easy way to earn money for our school. Our school number is 26532. Signing up is easy. The number becomes associated with the bonus card that you use each time you shop. A letter will be sent out with set-up instructions.
|
from __future__ import print_function
import os
import sys
import socket
import traceback
import struct
import cPickle
import base64
class SpecialLengths(object):
PYTHON_EXCEPTION_THROWN = -3
END_OF_STREAM = -4
NULL = -5
class DataType(object):
INT = 1
LONG = 2
FLOAT = 3
DOUBLE = 4
BLOB = 5
EXCEPTION = 6
class Serializer(object):
@staticmethod
def read_long(stream):
obj = stream.read(8)
if not obj:
raise EOFError
return struct.unpack("!q", obj)[0]
@staticmethod
def read_float(stream):
obj = stream.read(4)
if not obj:
raise EOFError
return struct.unpack("!f", obj)[0]
@staticmethod
def read_double(stream):
obj = stream.read(8)
if not obj:
raise EOFError
return struct.unpack("!d", obj)[0]
@staticmethod
def read_int(stream):
obj = stream.read(4)
if not obj:
raise EOFError
return struct.unpack("!i", obj)[0]
@staticmethod
def write_int(value, stream):
stream.write(struct.pack("!i", value))
@staticmethod
def write_float(value, stream):
stream.write(struct.pack("!f", value))
@staticmethod
def write_double(value, stream):
stream.write(struct.pack("!d", value))
@staticmethod
def write_long(value, stream):
stream.write(struct.pack("!q", value))
class PickleSerializer(Serializer):
@classmethod
def read_item(cls, stream, item_type, length):
obj = None
if item_type == DataType.INT:
obj = cls.read_int(stream)
elif item_type == DataType.LONG:
obj = cls.read_long(stream)
elif item_type == DataType.FLOAT:
obj = cls.read_float(stream)
elif item_type == DataType.DOUBLE:
obj = cls.read_double(stream)
elif item_type == DataType.BLOB:
obj = cls.loads(stream.read(length))
return obj
@classmethod
def read_tuple(cls, stream, tuplesize):
datalist = []
for _ in range(tuplesize):
# first element read type
element_type = cls.read_int(stream)
# Second read the length
length = cls.read_int(stream)
if length == SpecialLengths.NULL or length == 0:
datalist.append(0)
# length is > 0, read the item now
elif length > 0:
obj = cls.read_item(stream, element_type, length)
datalist.append(obj)
else:
raise ValueError("Invalid length for item.")
return datalist
@classmethod
def write_with_length(cls, obj, stream, output_type):
if output_type == DataType.INT:
cls.write_int(DataType.INT, stream)
cls.write_int(obj, stream)
elif output_type == DataType.LONG:
cls.write_int(DataType.LONG, stream)
cls.write_long(obj, stream)
elif output_type == DataType.FLOAT:
cls.write_int(DataType.FLOAT, stream)
cls.write_float(obj, stream)
elif output_type == DataType.DOUBLE:
cls.write_int(DataType.DOUBLE, stream)
cls.write_double(obj, stream)
elif output_type == DataType.BLOB:
cls.write_int(DataType.BLOB, stream)
cls.pickle_and_write(obj, stream)
elif output_type == DataType.EXCEPTION:
assert type(obj) is str
cls.write_int(len(obj), stream)
stream.write(obj)
@classmethod
def read_command(cls, stream):
length = cls.read_int(stream)
if length < 0:
raise ValueError("Command length cannot be less than zero.")
s = stream.read(length)
if len(s) < length:
raise EOFError
unenc = base64.urlsafe_b64decode(s)
return cls.loads(unenc)
@staticmethod
def dumps(obj):
protocol = 2
return cPickle.dumps(obj, protocol)
@staticmethod
def loads(obj):
return cPickle.loads(obj)
@classmethod
def pickle_and_write(cls, obj, stream):
serialized = cls.dumps(obj)
if serialized is None:
raise ValueError("Serialized value should not be None.")
elif len(serialized) > (1 << 31):
raise ValueError("Cannot serialize object larger than 2G.")
cls.write_int(len(serialized), stream)
stream.write(serialized)
def main(in_file, out_file):
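# Wire protocol handled below: first read a base64-encoded pickled function,
# then the tuple size, the output type and the flatmap flag; after that,
# batches of tuples arrive (each prefixed with a count) until END_OF_STREAM,
# and every result is written back prefixed with its data-type tag.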
pickle_serializer = PickleSerializer()
try:
func = pickle_serializer.read_command(in_file)
tuple_size = pickle_serializer.read_int(in_file)
output_type = pickle_serializer.read_int(in_file)
is_flatmap = pickle_serializer.read_int(in_file)
if tuple_size < 1:
raise ValueError("Size of tuple should not be less than 1.")
while True:
num_tuples = pickle_serializer.read_int(in_file)
if num_tuples == SpecialLengths.END_OF_STREAM:
break
tuple_list = []
for _ in range(num_tuples):
tuple_list.append(
pickle_serializer.read_tuple(in_file, tuple_size))
retval = func(tuple_list)
if is_flatmap:
count = len(retval)
pickle_serializer.write_int(count, out_file)
for i in range(count):
pickle_serializer.write_with_length(
retval[i], out_file, output_type)
else:
pickle_serializer.write_with_length(
retval, out_file, output_type)
out_file.flush()
except Exception:
try:
pickle_serializer.write_int(
SpecialLengths.PYTHON_EXCEPTION_THROWN, out_file)
pickle_serializer.write_with_length(traceback.format_exc().encode("utf-8"),
out_file, DataType.EXCEPTION)
print(traceback.format_exc(), file=sys.stderr)
except IOError:
# JVM closed the socket
print("IOError:\n{}".
format(traceback.format_exc()), file=sys.stderr)
except Exception:
print("Python worker process failed with exception:\n{}".
format(traceback.format_exc()), file=sys.stderr)
sys.exit(-1)
if __name__ == '__main__':
# Read a local port to connect to from stdin
port_number = int(sys.stdin.readline())
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(("127.0.0.1", port_number))
with os.fdopen(os.dup(sock.fileno()), "rb", 65536) as infile,\
os.fdopen(os.dup(sock.fileno()), "wb", 65536) as outfile:
main(infile, outfile)
|
There may be times when your garage door at home becomes unsafe to use, unresponsive or unsightly due to damage. It faces many challenges, from the hardships of extreme weather conditions and simple wear and tear, to the ever-present possibility of accidents. When damage occurs, all is not lost. Our team of professional garage door repair technicians is here to assist you. Contact us today to find out more about our services.
Door panels may receive damage due to a number of reasons, which can leave them looking unsightly while also lessening the effectiveness of your door’s security. Garage door panels can be replaced easily with the help of our technicians who are experienced in all areas of repairs on automated as well as manual garage doors.
Many of the components that are associated with the functionality and safety of your doors will be damaged with long-term use. Parts may become worn down, warped through pressure or worse, weakened substantially from rust or galvanic corrosion. When these parts fail, you must replace them as soon as possible. Contact us for details on our replacement and garage door adjustment services.
Whether you need your safety utilities checked, need a garage door spring replaced or need to have your opener and its components replaced, our team has extensive experience and training in all areas of repairs, maintenance and installations for garage doors and their various parts. We also work with products from industry leading manufacturers, which means that we can source the perfect parts for your needs, and will replace them with professional care.
If damage occurs to your garage door, it is often a priority to have it restored as soon as possible. If you are looking to have your door repaired by a trusted technician, or would like to learn more about our various garage door repair services, feel free to get into contact with a representative from Garage Door repair Newcastle today for further information.
|
#wag.v.01
#Body_movement
#This frame contains words for motions or actions an Agent performs using
#some part of his/her body. A number of words in this frame occur as blends
#with Communication, in which the action has an Addressee. For example,
#'Pat nodded at Kim.' These examples differ from Communication.Gesture in
#that no specific message need be expressed, as in 'She nodded to him to
#sit down.' Since this frame involves a particular type of motion, it
#contains the frame elements Source, Path, Goal and Area, which originate
#in the motion frame. All of these frame elements are generally expressed
#in PP Complements. 'The boy swung his legs from under the table.'
def applicability_condition(self,agent,Addressee=-1,Place=-1):
if not checkCapability(agent,self.id):
return FAILURE
return SUCCESS
def preparatory_spec(self,agent,Addressee=-1,Place=-1):
return SUCCESS
def execution_steps(self,agent,Addressee=-1,Place=-1):
return {'PRIMITIVE':('jiggle',{'agents':agent,'objects':(Addressee,Place)})}
def culmination_condition(self,agent,Addressee=-1,Place=-1):
if finishedAction(self.id):
return SUCCESS
return INCOMPLETE
|
President Donald Trump on Tuesday signed a bill that finalizes the seven-state drought contingency plan.
Legislation put forth by a powerful Tucson Democratic congressman that would make the Secretary of the Interior carry out the agreements made by the seven Colorado River states is expected to be voted on this afternoon.
A bill that could have derailed a seven-state Drought Contingency Plan that lawmakers passed last month was put on ice at the request of its sponsor, House Speaker Rusty Bowers, after a lengthy and contentious committee hearing.
Nearly three-in-four Arizona voters believe climate change is a serious problem, including a strong majority of Republicans, according to a new statewide poll of environmental issues.
Clayton Honyumptewa says the Powamuya ceremony will be observed this weekend as usual on parts of the Hopi reservation, but the planting that traditionally follows the ceremony might not come until May.
Gosar called the Green New Deal plan a “socialist fairy tale” at a U.S. House hearing on climate change Tuesday morning.
Rep. Raúl Grijalva is on board with young House Democrats pushing for a “Green New Deal” to combat climate change.
|
from test import *
import sqlite3
import matplotlib.pyplot as plt
resultsTable = [] #Table of number of nodes and packet transmitted(%)
#Generate a frequency table of (number of node in simulation, percentage of packets transmitted)
with sqlite3.connect("sample_db/grid.db") as conn:
cursor = conn.cursor()
exprRootName="grid_topo_node_"
for i in range(2, 9):
numNodes = i*i
amount_pkt = "select count(*) from readings where\
readings.experiment_id = '{exprName}{exprNo}'".format(exprName=exprRootName, exprNo=numNodes)
cursor.execute(amount_pkt)
noPkts = cursor.fetchall()[0][0]
maxExpect_pkts = "select experiments.expected_no_transmission_per_node \
* (experiments.no_nodes - 1) from experiments\
where experiments.experiment_id = '{exprName}{exprNo}'".format(exprName=exprRootName, exprNo=numNodes)
cursor.execute(maxExpect_pkts)
maxPkts = cursor.fetchall()[0][0]
resultsTable.append((numNodes, float(noPkts)/float(maxPkts)*100))
#Create plot
plt.plot(*zip(*resultsTable))
plt.title("Comparing WSN network size with %success rate\n of pkt transmission for grid topology")
plt.xlabel('Number of node in simulation')
plt.ylabel('% of Pkts successfully transmitted')
plt.grid(True)
plt.xlim(0, 80)
plt.xticks([x*x for x in range(2, 10)])
plt.show()
|
If you are here, I am deeply sorry for your loss. Whether you had to make the decision to release a pregnancy or a release occurred naturally, I am here to help guide you through the process. This is a delicate time. One that requires rest, love, gentleness and care. If you are currently experiencing or have experienced a pregnancy loss in the past there are ways to help your body, mind, and spirit recover. There is a natural grieving that takes place on all levels of your being.
Postpartum: This stage begins immediately after pregnancy release, whether you had a baby or not, and lasts about 6 weeks. Your body is going through many changes as you transition back to a non-pregnant state. Your hormone levels are dropping, your uterus is shrinking, your breasts may still be swollen and tender, and you may still be experiencing morning sickness.
This can also be a scary and unpredictable time. There is peace that comes with knowing your options and dealing with any circumstances that may arise.
The key to recovery is giving yourself the nourishment you need in all the ways. Womb stewardship is the art of learning how to care deeply for ourselves and honour the seasons of our lives that are joyful as well as painful. The process of welcoming healing is messy, beautiful, life-changing and can result in loving yourself exactly where you are.
It is my honour to hold space for your process.
This is but a chapter of your life, ever changing, always evolving.
These practices can be called upon anytime there is death in your life. If you have ever experienced pregnancy loss and feel there are lingering parts of yourself that need closure and healing, there are ways to bring yourself back to balance, joy and vitality. Time is not linear; healing takes place when you are ready.
|
# This file vendored from Autorelease
import os
import subprocess
try:
from configparser import ConfigParser, NoSectionError, NoOptionError
except ImportError:
# py2
from ConfigParser import ConfigParser, NoSectionError, NoOptionError
try:
from ._installed_version import _installed_version
from ._installed_version import _installed_git_hash
from ._installed_version import _version_setup_depth
except ImportError:
_installed_version = "Unknown"
_installed_git_hash = "Unknown"
_version_setup_depth = -1
def get_git_version():
"""
Return the git hash as a string.
Apparently someone got this from numpy's setup.py. It has since been
modified a few times.
"""
# Return the git revision as a string
# copied from numpy setup.py
def _minimal_ext_cmd(cmd):
# construct minimal environment
env = {}
for k in ['SYSTEMROOT', 'PATH']:
v = os.environ.get(k)
if v is not None:
env[k] = v
# LANGUAGE is used on win32
env['LANGUAGE'] = 'C'
env['LANG'] = 'C'
env['LC_ALL'] = 'C'
with open(os.devnull, 'w') as err_out:
out = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=err_out, # maybe debug later?
env=env).communicate()[0]
return out
try:
git_dir = os.path.dirname(os.path.realpath(__file__))
out = _minimal_ext_cmd(['git', '-C', git_dir, 'rev-parse', 'HEAD'])
GIT_REVISION = out.strip().decode('ascii')
except OSError:
GIT_REVISION = 'Unknown'
return GIT_REVISION
def _seek_parent_dirs_for_file(filename):
rel_directory = None
my_dir = os.path.dirname(os.path.abspath(__file__))
rel_directory_arr = []
while not rel_directory:
expected_dir = os.path.join(*rel_directory_arr) \
if rel_directory_arr else '.'
expected = os.path.join(expected_dir, filename)
if os.path.isfile(os.path.normpath(expected)):
rel_directory = expected_dir
else:
rel_directory_arr.append('..')
if len(rel_directory_arr) > len(my_dir.split(os.sep)):
rel_directory_arr = []
break
return rel_directory
def _find_rel_path_for_file(depth, filename):
rel_directory = None
if depth == 0:
rel_directory = '.'
elif depth >= 1:
rel_directory = os.sep.join(['..'] * depth)
else:
rel_directory = _seek_parent_dirs_for_file(filename)
if rel_directory:
return os.path.normpath(os.path.join(rel_directory, filename))
else:
return None
def get_setup_cfg(directory, filename="setup.cfg"):
"""Load the setup.cfg as a dict-of-dict.
Parameters
----------
directory : str
directory for setup.cfg, relative to cwd; default '.'
filename : str
filename for setup.cfg; default 'setup.cfg'
"""
if isinstance(directory, int):
rel_path = _find_rel_path_for_file(directory, filename)
start_dir = os.path.abspath(os.path.dirname(__file__))
setup_cfg = os.path.normpath(os.path.join(start_dir, rel_path))
else:
setup_cfg = os.path.join(directory, filename)
conf = None
if os.path.exists(setup_cfg):
conf = ConfigParser()
conf.read(setup_cfg)
return conf
def get_setup_version(default_version, directory, filename="setup.cfg"):
version = default_version
conf = get_setup_cfg(directory, filename)
try:
version = conf.get('metadata', 'version')
except (NoSectionError, NoOptionError):
pass # version (or metadata) not defined in setup.cfg
except AttributeError:
pass # no setup.cfg found (conf is None)
return version
short_version = get_setup_version(_installed_version,
directory=_version_setup_depth)
_git_version = get_git_version()
_is_repo = (_git_version != '' and _git_version != "Unknown")
if _is_repo:
git_hash = _git_version
full_version = short_version + "+g" + _git_version[:7]
version = full_version
else:
git_hash = "Unknown"
full_version = short_version + "+g" + _installed_git_hash[:7] + '.install'
version = short_version
|
Leatha was born on January 7, 1939 and passed away on Thursday, January 24, 2019.
Leatha was a resident of Portland, Indiana at the time of passing.
Survivors include: 1 daughter: Karen Smith - Portland, Indiana; 2 brothers: Samuel Anderson - Enfield, Connecticut, and Donnie Anderson - Ansonia, Ohio. A memorial service will be held at a later date at Williamson-Spencer and Penrod Funeral Home in Portland.
|
import numpy as np
from numpy.linalg import norm
from sim_problem import SimProblem, PDController, STR
class GPBow(SimProblem):
def __init__(self):
super(GPBow, self).__init__('urdf/BioloidGP/BioloidGP.URDF')
self.__init__simulation__()
desc = []
desc.append([('l_thigh', 1.0), ('r_thigh', 1.0), ])
desc.append([('l_shin', 1.0), ('r_shin', 1.0), ])
desc.append([('l_heel', 1.0), ('r_heel', 1.0), ])
self.desc = desc
self.dim = len(self.desc)
self.eval_counter = 0 # Well, increasing when simulated
self.params = None
def __init__simulation__(self):
self.init_state = self.skel().x
self.init_state[0] = -0.50 * 3.14
self.init_state[4] = 0.230
self.init_state[5] = 0.230
self.reset()
self.controller = PDController(self.skel(), 60, 3.0, 0.3)
self.controller.target = self.skel().q
def simulate(self, sample):
self.eval_counter += 1
self.reset()
self.set_params(sample)
while not self.terminated():
self.step()
# print 'result:', self.params, self.collect_result()
return self.collect_result()
def evaluate(self, result, task):
# Calculate the validity of COM
C = result['C']
lo = np.array([0.0, 0.10, 0.0])
hi = np.array([0.0, 0.15, 0.0])
w = task
C_hat = lo * (1 - w) + hi * w
weight = np.array([1.0, 1.0, 1.0]) * 2.0
obj = norm((C - C_hat) * weight) ** 2
# Calculate parameter penalty
params = result['params']
penalty = 0.0
if params is not None:
for i in range(self.dim):
v = params[i]
penalty += max(0.0, v - 1.0) ** 2
penalty += min(0.0, v - (-1.0)) ** 2
return obj + penalty
def set_random_params(self):
# self.set_params(0.45 + 0.1 * np.random.rand(self.dim))
# self.set_params(2.0 * (np.random.rand(self.dim) - 0.5))
# self.set_params([0.5, -1.0, 0.7])
self.set_params([0.5, -0.5, 0.1])
def set_params(self, x):
self.params = x
ndofs = self.skel().ndofs
q = np.array(self.init_state[:ndofs])
lo = np.array([-2.0] * ndofs)
hi = -lo
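# Map each parameter in x from [-1, 1] onto the dof's [lo, hi] range;
# a non-positive weight flips the direction of the mapping.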
for i, dofs in enumerate(self.desc):
v = (x[i] - (-1.0)) / 2.0 # Change to 0 - 1 scale
for (d, w) in dofs:
index = d if isinstance(d, int) else self.skel().dof_index(d)
vv = v if w > 0.0 else 1.0 - v
q[index] = lo[index] + (hi[index] - lo[index]) * vv
self.controller.target = q
def collect_result(self):
res = {}
res['C'] = self.skel().C
res['params'] = self.params
return res
def terminated(self):
return (self.world.t > 0.5)
def __str__(self):
res = self.collect_result()
status = ""
status += '[GPBow at %.4f' % self.world.t
# if self.params is not None:
# status += ' params = %s ' % self.params
for key, value in self.collect_result().iteritems():
if key == 'C':
status += ' %s : %s' % (key, STR(value, 3))
elif key == 'params':
status += ' %s : %s' % (key, STR(value, 4))
else:
status += ' %s : %s' % (key, value)
status += ' value = {'
tasks = np.linspace(0.0, 1.0, 6)
values = [self.evaluate(res, t) for t in tasks]
status += ' '.join(['%.4f' % v for v in values])
status += '}]'
return status
def __repr__(self):
return 'problems.GPBow()'
|
In their presentation posted at InfoQ systems and data architects Ben Stopford, Farzad Pezeshkpour and Mark Atwell show how RBS leveraged new technologies in their architectures while facing difficult challenges such as regulation, competition and tighter budgets. They also need to cope with stringent technical challenges including efficiency and scalability.
In high frequency trading an unavailable system causes significant costs, even if the unavailability only lasts for a second. As Stopford explains, a full garbage collection might lead to such a break which is why full GC needs to be avoided under all circumstances. More technical details about pauses in the CMS (Concurrent Mark Sweep) garbage collection being used can be found in Alexey Ragozin's blog.
Farzad Pezeshkpour talked about handling business risk in the financial domain. He described using multi-step Monte Carlo simulations, which require large amounts of storage and computation, to quantify that risk; those interested in the mathematical foundation may read an article on how to apply such simulations to credit risk.
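To make the multi-step idea concrete, here is a minimal, hypothetical sketch (not RBS's actual engine) of a Monte Carlo loop that estimates expected positive exposure of a toy position at each time step; the drift, volatility and position definition are invented placeholders.

import numpy as np

def expected_exposure(spot, drift, vol, steps, dt, n_paths, seed=0):
    """Estimate expected positive exposure per time step by simulation.

    Each path evolves a single risk factor with geometric Brownian motion
    and revalues a toy position (the risk factor minus its starting value)
    at every step; a real engine would revalue whole portfolios here.
    """
    rng = np.random.default_rng(seed)
    prices = np.full(n_paths, spot, dtype=float)
    exposures = []
    for _ in range(steps):
        shocks = rng.standard_normal(n_paths)
        prices *= np.exp((drift - 0.5 * vol ** 2) * dt + vol * np.sqrt(dt) * shocks)
        exposures.append(np.maximum(prices - spot, 0.0).mean())
    return exposures

if __name__ == "__main__":
    # 12 monthly steps and 100k paths -- a hint of why storage and compute add up
    print(expected_exposure(spot=100.0, drift=0.01, vol=0.2,
                            steps=12, dt=1.0 / 12, n_paths=100000))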
But how can IT handle the issue of storage volumes and efficient computations? A standard grid architecture, with application servers connected to directors and brokers which are themselves connected to the grid computers, was the base for further investigation. Such an environment must deal with terabytes of storage and requirements such as speed, robustness, scalability and interoperability with Linux and Windows servers, as well as the need for an adequate support infrastructure. Among the possible options are databases, storage appliances, and distributed filesystems. The software engineers decided to use the Hadoop HDFS distributed file system. In their architecture they collocated grid engines and HDFS nodes, using 10 Gigabit Ethernet for inter-node communication, after experimenting with various architecture alternatives.
As Pezeshkpour mentioned, there'll be further optimizations in the future with the innovations addressing all aspects, "methodologies, architecture & engineering."
The copying of data lies at the root of many of the bank’s problems. By supplying a single real-time view that all systems can interface with, we remove the need for reconciliation and promote the concept of truly shared services.
Two factors make this difficult to solve: throughput and low latency. The base element of RBS's ODC approach is Oracle Coherence, a Java-based in-memory data grid. RBS needed to add joins to recompose different slices of data and to handle object versioning. Stopford and his team introduced various patterns to add these joins while avoiding performance and memory penalties.
They implemented replication to increase performance while at the same time applying the Connected Replication Pattern, which only replicates data that is actually used. This strategy, which depends on tracking data usage, reduces the amount of data to be replicated to roughly one-tenth. ODC consists of two tiers: an in-memory database tier on top and a sharded in-memory database below. Together, both tiers enable RBS to meet their desired quality attributes.
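The pattern itself is simple to illustrate. Below is a toy, hypothetical Python sketch of the core idea (Coherence exposes this quite differently): track which keys are actually read and replicate only those.

class ConnectedReplicator(object):
    """Toy illustration of 'replicate only the data that is actually used'."""

    def __init__(self, primary_store):
        self.primary = primary_store   # authoritative key -> value map
        self.replica = {}              # local copy of the "connected" subset
        self.used_keys = set()         # usage tracking drives replication

    def read(self, key):
        self.used_keys.add(key)
        if key in self.replica:
            return self.replica[key]
        return self.primary[key]       # fall back to the primary store

    def replicate_connected(self):
        """Copy only the keys that readers have touched."""
        for key in self.used_keys:
            if key in self.primary:
                self.replica[key] = self.primary[key]

primary = {"trade:%d" % i: {"notional": i * 1000} for i in range(1000)}
replicator = ConnectedReplicator(primary)
replicator.read("trade:7")
replicator.replicate_connected()
print(len(replicator.replica))         # 1 -- unused data never gets copied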
Mark Atwell presented an approach to data virtualization that prevents the YAS (Yet Another System) syndrome by introducing a strategic DAL (Data Access Layer) instead. The RBS architects created a uniform layer that offers a unified database view. On top of this layer, virtual transformations adapt the data to what the accessing applications expect. As Atwell explains, the approach brings benefits such as improvements in distributed joins and query optimization, but it also has liabilities, such as "Pesky physics still get in the way!" and the necessity to create adapters for some "unusual" data sources.
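As a rough, hypothetical sketch of what such a layer can look like (the adapter names and field mappings below are invented, not RBS's), each source gets an adapter plus a virtual transformation that reshapes its rows into the unified view:

class TradesDbAdapter(object):
    """Pretend relational source with its own column names."""
    def fetch(self, query):
        return [{"trade_id": 1, "notional_usd": 1000000}]

class LegacyFeedAdapter(object):
    """An 'unusual' source that needs its own adapter."""
    def fetch(self, query):
        return [{"id": 2, "amount": 250000, "ccy": "USD"}]

class DataAccessLayer(object):
    """Single entry point presenting one unified view over many sources."""
    def __init__(self, adapters, transforms):
        self.adapters = adapters       # name -> source adapter
        self.transforms = transforms   # name -> virtual transformation

    def query(self, source, query=""):
        rows = self.adapters[source].fetch(query)
        return [self.transforms[source](row) for row in rows]

dal = DataAccessLayer(
    adapters={"trades": TradesDbAdapter(), "legacy": LegacyFeedAdapter()},
    transforms={
        "trades": lambda r: {"id": r["trade_id"], "notional": r["notional_usd"]},
        "legacy": lambda r: {"id": r["id"], "notional": r["amount"]},
    },
)
print(dal.query("trades") + dal.query("legacy"))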
As the RBS show cases illustrate, software engineering for large systems is not only about technology but also about how to leverage the right technologies efficiently and effectively within an appropriate architecture.
I'm wondering how an Erlang based system might have done here. Since GC happens per process in Erlang, is it possible that it can also be used to solve this problem?
Sure, it's a decent idea. Certainly a large number of firms stick with C/C++ for this very reason. However when the rest of the stack is Java the one language thing has many benefits and the GC problems are obviously solvable.
It'd be interesting to know if dispatching work to multiple consumers (or conflating to one) done in the Manhattan processor would be easier to solve in Erlang. I don't know enough to answer that one I'm afraid.
|
# coding=utf-8
from datetime import datetime
from itertools import chain
from wtforms import Form, DateTimeField, FieldList, FormField, HiddenField, TextField, validators
class MonthField(DateTimeField):
def __init__(self, label=None, validators=None, format='%Y-%m', **kwargs):
super(MonthField, self).__init__(label, validators, format, **kwargs)
def process_formdata(self, valueslist):
if not valueslist:
return
date_str = u' '.join(valueslist).strip().lower()
dt = None
formats = (self.format, '%Y/%m', '%m-%Y', '%m/%Y', '%b %Y', '%Y %b', '%B %Y', '%Y %B')
formats = chain(formats, (format.replace('%Y', '%y') for format in formats))
for format in formats:
try:
dt = datetime.strptime(date_str, format)
except ValueError:
pass
else:
break
if dt is None:
self.data = None
raise ValueError(u"Field should be in YYYY-MM format (such as “2012-01”).")
self.data = dt.date()
class WikiForm(Form):
reason = TextField(u'Notes (optional)', [validators.Length(max=140), validators.Optional()])
class MakerForm(WikiForm):
name = TextField(u'Name', [validators.Required(), validators.Length(max=100)])
avatar_url = TextField(u'Avatar URL', [validators.URL(require_tld=True), validators.Optional()],
description=u'Avatar images should display at 150×150 and 75×75 pixel sizes.')
html_url = TextField(u'Web URL', [validators.URL(require_tld=True), validators.Required()],
description=u"Web URLs should be the address of the person's main personal web site.")
class ParticipationForm(WikiForm):
role = TextField(u'Role', [validators.Required(), validators.Length(max=140)])
start_date = MonthField(u'Start month')
end_date = MonthField(u'End month', [validators.Optional()],
description=u'Enter months like “2012-01”. Leave the end month blank for current ongoing projects.')
class ProjectForm(WikiForm):
name = TextField(u'Name', [validators.Length(min=1, max=50), validators.Required()])
html_url = TextField(u'Web URL', [validators.URL(require_tld=True), validators.Required()],
description=u'Web URLs should be the address of a hosted web app or the official web site for a project of some other kind.')
description = TextField(u'Description', [validators.Length(max=140)])
avatar_url = TextField(u'Avatar URL', [validators.URL(require_tld=True), validators.Optional()],
description=u'Avatar images should display at 150×150 and 75×75 pixel sizes.')
class ProjectAddParticipationForm(ParticipationForm):
maker = TextField(u'Maker ID', [validators.Required()])
|
The various types of benefits offered to employees under an employee welfare scheme fall into two groups: benefits provided within the industrial establishment, known as intra-mural benefits, and benefits provided outside the industrial establishment, known as extra-mural benefits. These welfare services include canteens, rest and recreational facilities, and sanitary and medical facilities.
Social security is related to the high ideals of human dignity. According to the ILO, social security is the protection provided by society to its members, through a series of public measures, against the social and economic distress that would otherwise be caused by the stoppage or substantial reduction of earnings resulting from sickness, maternity, employment injury, old age and death.
A continuous and assured minimum wage or salary gives a sense of security to employees. The Payment of Wages Act, 1936, the Minimum Wages Act, 1948, and the Payment of Bonus Act, 1965, provide income security to employees.
It includes provisions for paying workers for days not worked.
Compensation benefits are paid to employees either under the Workmen's Compensation Act, 1948, or under a contractual obligation. Under the Act, compensation is paid to employees for injuries or accidents suffered during the course of their job performance. Under a contractual obligation, an employer is liable to pay an amount equivalent to three months' wages or salary.
Most large organizations provide health services to their employees free of cost, over and above the legal requirements, by setting up clinics, hospitals, dispensaries, etc.
|
"""The tests for the androidtv platform."""
import base64
import copy
import logging
from unittest.mock import patch
from androidtv.constants import APPS as ANDROIDTV_APPS
from androidtv.exceptions import LockNotAcquiredException
import pytest
from homeassistant.components.androidtv.media_player import (
ANDROIDTV_DOMAIN,
ATTR_COMMAND,
ATTR_DEVICE_PATH,
ATTR_LOCAL_PATH,
CONF_ADB_SERVER_IP,
CONF_ADBKEY,
CONF_APPS,
CONF_EXCLUDE_UNNAMED_APPS,
CONF_TURN_OFF_COMMAND,
CONF_TURN_ON_COMMAND,
KEYS,
SERVICE_ADB_COMMAND,
SERVICE_DOWNLOAD,
SERVICE_LEARN_SENDEVENT,
SERVICE_UPLOAD,
)
from homeassistant.components.media_player import (
ATTR_INPUT_SOURCE,
ATTR_MEDIA_VOLUME_LEVEL,
ATTR_MEDIA_VOLUME_MUTED,
DOMAIN,
SERVICE_MEDIA_NEXT_TRACK,
SERVICE_MEDIA_PAUSE,
SERVICE_MEDIA_PLAY,
SERVICE_MEDIA_PLAY_PAUSE,
SERVICE_MEDIA_PREVIOUS_TRACK,
SERVICE_MEDIA_STOP,
SERVICE_SELECT_SOURCE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
SERVICE_VOLUME_DOWN,
SERVICE_VOLUME_MUTE,
SERVICE_VOLUME_SET,
SERVICE_VOLUME_UP,
)
from homeassistant.components.websocket_api.const import TYPE_RESULT
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_DEVICE_CLASS,
CONF_HOST,
CONF_NAME,
CONF_PLATFORM,
EVENT_HOMEASSISTANT_STOP,
STATE_OFF,
STATE_PLAYING,
STATE_STANDBY,
STATE_UNAVAILABLE,
)
from homeassistant.setup import async_setup_component
from tests.components.androidtv import patchers
SHELL_RESPONSE_OFF = ""
SHELL_RESPONSE_STANDBY = "1"
# Android TV device with Python ADB implementation
CONFIG_ANDROIDTV_PYTHON_ADB = {
DOMAIN: {
CONF_PLATFORM: ANDROIDTV_DOMAIN,
CONF_HOST: "127.0.0.1",
CONF_NAME: "Android TV",
CONF_DEVICE_CLASS: "androidtv",
}
}
# Android TV device with ADB server
CONFIG_ANDROIDTV_ADB_SERVER = {
DOMAIN: {
CONF_PLATFORM: ANDROIDTV_DOMAIN,
CONF_HOST: "127.0.0.1",
CONF_NAME: "Android TV",
CONF_DEVICE_CLASS: "androidtv",
CONF_ADB_SERVER_IP: "127.0.0.1",
}
}
# Fire TV device with Python ADB implementation
CONFIG_FIRETV_PYTHON_ADB = {
DOMAIN: {
CONF_PLATFORM: ANDROIDTV_DOMAIN,
CONF_HOST: "127.0.0.1",
CONF_NAME: "Fire TV",
CONF_DEVICE_CLASS: "firetv",
}
}
# Fire TV device with ADB server
CONFIG_FIRETV_ADB_SERVER = {
DOMAIN: {
CONF_PLATFORM: ANDROIDTV_DOMAIN,
CONF_HOST: "127.0.0.1",
CONF_NAME: "Fire TV",
CONF_DEVICE_CLASS: "firetv",
CONF_ADB_SERVER_IP: "127.0.0.1",
}
}
def _setup(config):
"""Perform common setup tasks for the tests."""
if CONF_ADB_SERVER_IP not in config[DOMAIN]:
patch_key = "python"
else:
patch_key = "server"
if config[DOMAIN].get(CONF_DEVICE_CLASS) != "firetv":
entity_id = "media_player.android_tv"
else:
entity_id = "media_player.fire_tv"
return patch_key, entity_id
async def _test_reconnect(hass, caplog, config):
"""Test that the error and reconnection attempts are logged correctly.
"Handles device/service unavailable. Log a warning once when
unavailable, log once when reconnected."
https://developers.home-assistant.io/docs/en/integration_quality_scale_index.html
"""
patch_key, entity_id = _setup(config)
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[
patch_key
], patchers.PATCH_KEYGEN, patchers.PATCH_ANDROIDTV_OPEN, patchers.PATCH_SIGNER:
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_OFF
caplog.clear()
caplog.set_level(logging.WARNING)
with patchers.patch_connect(False)[patch_key], patchers.patch_shell(error=True)[
patch_key
], patchers.PATCH_ANDROIDTV_OPEN, patchers.PATCH_SIGNER:
for _ in range(5):
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_UNAVAILABLE
assert len(caplog.record_tuples) == 2
assert caplog.record_tuples[0][1] == logging.ERROR
assert caplog.record_tuples[1][1] == logging.WARNING
caplog.set_level(logging.DEBUG)
with patchers.patch_connect(True)[patch_key], patchers.patch_shell(
SHELL_RESPONSE_STANDBY
)[patch_key], patchers.PATCH_ANDROIDTV_OPEN, patchers.PATCH_SIGNER:
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_STANDBY
if patch_key == "python":
assert (
"ADB connection to 127.0.0.1:5555 successfully established"
in caplog.record_tuples[2]
)
else:
assert (
"ADB connection to 127.0.0.1:5555 via ADB server 127.0.0.1:5037 successfully established"
in caplog.record_tuples[2]
)
return True
async def _test_adb_shell_returns_none(hass, config):
"""Test the case that the ADB shell command returns `None`.
The state should be `None` and the device should be unavailable.
"""
patch_key, entity_id = _setup(config)
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[
patch_key
], patchers.PATCH_KEYGEN, patchers.PATCH_ANDROIDTV_OPEN, patchers.PATCH_SIGNER:
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is not None
assert state.state != STATE_UNAVAILABLE
with patchers.patch_shell(None)[patch_key], patchers.patch_shell(error=True)[
patch_key
], patchers.PATCH_ANDROIDTV_OPEN, patchers.PATCH_SIGNER:
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_UNAVAILABLE
return True
async def test_reconnect_androidtv_python_adb(hass, caplog):
"""Test that the error and reconnection attempts are logged correctly.
* Device type: Android TV
* ADB connection method: Python ADB implementation
"""
assert await _test_reconnect(hass, caplog, CONFIG_ANDROIDTV_PYTHON_ADB)
async def test_adb_shell_returns_none_androidtv_python_adb(hass):
"""Test the case that the ADB shell command returns `None`.
* Device type: Android TV
* ADB connection method: Python ADB implementation
"""
assert await _test_adb_shell_returns_none(hass, CONFIG_ANDROIDTV_PYTHON_ADB)
async def test_reconnect_firetv_python_adb(hass, caplog):
"""Test that the error and reconnection attempts are logged correctly.
* Device type: Fire TV
* ADB connection method: Python ADB implementation
"""
assert await _test_reconnect(hass, caplog, CONFIG_FIRETV_PYTHON_ADB)
async def test_adb_shell_returns_none_firetv_python_adb(hass):
"""Test the case that the ADB shell command returns `None`.
* Device type: Fire TV
* ADB connection method: Python ADB implementation
"""
assert await _test_adb_shell_returns_none(hass, CONFIG_FIRETV_PYTHON_ADB)
async def test_reconnect_androidtv_adb_server(hass, caplog):
"""Test that the error and reconnection attempts are logged correctly.
* Device type: Android TV
* ADB connection method: ADB server
"""
assert await _test_reconnect(hass, caplog, CONFIG_ANDROIDTV_ADB_SERVER)
async def test_adb_shell_returns_none_androidtv_adb_server(hass):
"""Test the case that the ADB shell command returns `None`.
* Device type: Android TV
* ADB connection method: ADB server
"""
assert await _test_adb_shell_returns_none(hass, CONFIG_ANDROIDTV_ADB_SERVER)
async def test_reconnect_firetv_adb_server(hass, caplog):
"""Test that the error and reconnection attempts are logged correctly.
* Device type: Fire TV
* ADB connection method: ADB server
"""
assert await _test_reconnect(hass, caplog, CONFIG_FIRETV_ADB_SERVER)
async def test_adb_shell_returns_none_firetv_adb_server(hass):
"""Test the case that the ADB shell command returns `None`.
* Device type: Fire TV
* ADB connection method: ADB server
"""
assert await _test_adb_shell_returns_none(hass, CONFIG_FIRETV_ADB_SERVER)
async def test_setup_with_adbkey(hass):
"""Test that setup succeeds when using an ADB key."""
config = copy.deepcopy(CONFIG_ANDROIDTV_PYTHON_ADB)
config[DOMAIN][CONF_ADBKEY] = hass.config.path("user_provided_adbkey")
patch_key, entity_id = _setup(config)
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[
patch_key
], patchers.PATCH_ANDROIDTV_OPEN, patchers.PATCH_SIGNER, patchers.PATCH_ISFILE, patchers.PATCH_ACCESS:
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_OFF
async def _test_sources(hass, config0):
"""Test that sources (i.e., apps) are handled correctly for Android TV and Fire TV devices."""
config = copy.deepcopy(config0)
config[DOMAIN][CONF_APPS] = {
"com.app.test1": "TEST 1",
"com.app.test3": None,
"com.app.test4": SHELL_RESPONSE_OFF,
}
patch_key, entity_id = _setup(config)
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_OFF
if config[DOMAIN].get(CONF_DEVICE_CLASS) != "firetv":
patch_update = patchers.patch_androidtv_update(
"playing",
"com.app.test1",
["com.app.test1", "com.app.test2", "com.app.test3", "com.app.test4"],
"hdmi",
False,
1,
"HW5",
)
else:
patch_update = patchers.patch_firetv_update(
"playing",
"com.app.test1",
["com.app.test1", "com.app.test2", "com.app.test3", "com.app.test4"],
"HW5",
)
with patch_update:
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_PLAYING
assert state.attributes["source"] == "TEST 1"
assert sorted(state.attributes["source_list"]) == ["TEST 1", "com.app.test2"]
if config[DOMAIN].get(CONF_DEVICE_CLASS) != "firetv":
patch_update = patchers.patch_androidtv_update(
"playing",
"com.app.test2",
["com.app.test2", "com.app.test1", "com.app.test3", "com.app.test4"],
"hdmi",
True,
0,
"HW5",
)
else:
patch_update = patchers.patch_firetv_update(
"playing",
"com.app.test2",
["com.app.test2", "com.app.test1", "com.app.test3", "com.app.test4"],
"HW5",
)
with patch_update:
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_PLAYING
assert state.attributes["source"] == "com.app.test2"
assert sorted(state.attributes["source_list"]) == ["TEST 1", "com.app.test2"]
return True
async def test_androidtv_sources(hass):
"""Test that sources (i.e., apps) are handled correctly for Android TV devices."""
assert await _test_sources(hass, CONFIG_ANDROIDTV_ADB_SERVER)
async def test_firetv_sources(hass):
"""Test that sources (i.e., apps) are handled correctly for Fire TV devices."""
assert await _test_sources(hass, CONFIG_FIRETV_ADB_SERVER)
async def _test_exclude_sources(hass, config0, expected_sources):
"""Test that sources (i.e., apps) are handled correctly when the `exclude_unnamed_apps` config parameter is provided."""
config = copy.deepcopy(config0)
config[DOMAIN][CONF_APPS] = {
"com.app.test1": "TEST 1",
"com.app.test3": None,
"com.app.test4": SHELL_RESPONSE_OFF,
}
patch_key, entity_id = _setup(config)
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_OFF
if config[DOMAIN].get(CONF_DEVICE_CLASS) != "firetv":
patch_update = patchers.patch_androidtv_update(
"playing",
"com.app.test1",
[
"com.app.test1",
"com.app.test2",
"com.app.test3",
"com.app.test4",
"com.app.test5",
],
"hdmi",
False,
1,
"HW5",
)
else:
patch_update = patchers.patch_firetv_update(
"playing",
"com.app.test1",
[
"com.app.test1",
"com.app.test2",
"com.app.test3",
"com.app.test4",
"com.app.test5",
],
"HW5",
)
with patch_update:
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_PLAYING
assert state.attributes["source"] == "TEST 1"
assert sorted(state.attributes["source_list"]) == expected_sources
return True
async def test_androidtv_exclude_sources(hass):
"""Test that sources (i.e., apps) are handled correctly for Android TV devices when the `exclude_unnamed_apps` config parameter is provided as true."""
config = copy.deepcopy(CONFIG_ANDROIDTV_ADB_SERVER)
config[DOMAIN][CONF_EXCLUDE_UNNAMED_APPS] = True
assert await _test_exclude_sources(hass, config, ["TEST 1"])
async def test_firetv_exclude_sources(hass):
"""Test that sources (i.e., apps) are handled correctly for Fire TV devices when the `exclude_unnamed_apps` config parameter is provided as true."""
config = copy.deepcopy(CONFIG_FIRETV_ADB_SERVER)
config[DOMAIN][CONF_EXCLUDE_UNNAMED_APPS] = True
assert await _test_exclude_sources(hass, config, ["TEST 1"])
async def _test_select_source(hass, config0, source, expected_arg, method_patch):
"""Test that the methods for launching and stopping apps are called correctly when selecting a source."""
config = copy.deepcopy(config0)
config[DOMAIN][CONF_APPS] = {
"com.app.test1": "TEST 1",
"com.app.test3": None,
"com.youtube.test": "YouTube",
}
patch_key, entity_id = _setup(config)
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_OFF
with method_patch as method_patch_:
await hass.services.async_call(
DOMAIN,
SERVICE_SELECT_SOURCE,
{ATTR_ENTITY_ID: entity_id, ATTR_INPUT_SOURCE: source},
blocking=True,
)
method_patch_.assert_called_with(expected_arg)
return True
async def test_androidtv_select_source_launch_app_id(hass):
"""Test that an app can be launched using its app ID."""
assert await _test_select_source(
hass,
CONFIG_ANDROIDTV_ADB_SERVER,
"com.app.test1",
"com.app.test1",
patchers.PATCH_LAUNCH_APP,
)
async def test_androidtv_select_source_launch_app_name(hass):
"""Test that an app can be launched using its friendly name."""
assert await _test_select_source(
hass,
CONFIG_ANDROIDTV_ADB_SERVER,
"TEST 1",
"com.app.test1",
patchers.PATCH_LAUNCH_APP,
)
async def test_androidtv_select_source_launch_app_id_no_name(hass):
"""Test that an app can be launched using its app ID when it has no friendly name."""
assert await _test_select_source(
hass,
CONFIG_ANDROIDTV_ADB_SERVER,
"com.app.test2",
"com.app.test2",
patchers.PATCH_LAUNCH_APP,
)
async def test_androidtv_select_source_launch_app_hidden(hass):
"""Test that an app can be launched using its app ID when it is hidden from the sources list."""
assert await _test_select_source(
hass,
CONFIG_ANDROIDTV_ADB_SERVER,
"com.app.test3",
"com.app.test3",
patchers.PATCH_LAUNCH_APP,
)
async def test_androidtv_select_source_overridden_app_name(hass):
"""Test that when an app name is overridden via the `apps` configuration parameter, the app is launched correctly."""
# Evidence that the default YouTube app ID will be overridden
assert "YouTube" in ANDROIDTV_APPS.values()
assert "com.youtube.test" not in ANDROIDTV_APPS
assert await _test_select_source(
hass,
CONFIG_ANDROIDTV_ADB_SERVER,
"YouTube",
"com.youtube.test",
patchers.PATCH_LAUNCH_APP,
)
async def test_androidtv_select_source_stop_app_id(hass):
"""Test that an app can be stopped using its app ID."""
assert await _test_select_source(
hass,
CONFIG_ANDROIDTV_ADB_SERVER,
"!com.app.test1",
"com.app.test1",
patchers.PATCH_STOP_APP,
)
async def test_androidtv_select_source_stop_app_name(hass):
"""Test that an app can be stopped using its friendly name."""
assert await _test_select_source(
hass,
CONFIG_ANDROIDTV_ADB_SERVER,
"!TEST 1",
"com.app.test1",
patchers.PATCH_STOP_APP,
)
async def test_androidtv_select_source_stop_app_id_no_name(hass):
"""Test that an app can be stopped using its app ID when it has no friendly name."""
assert await _test_select_source(
hass,
CONFIG_ANDROIDTV_ADB_SERVER,
"!com.app.test2",
"com.app.test2",
patchers.PATCH_STOP_APP,
)
async def test_androidtv_select_source_stop_app_hidden(hass):
"""Test that an app can be stopped using its app ID when it is hidden from the sources list."""
assert await _test_select_source(
hass,
CONFIG_ANDROIDTV_ADB_SERVER,
"!com.app.test3",
"com.app.test3",
patchers.PATCH_STOP_APP,
)
async def test_firetv_select_source_launch_app_id(hass):
"""Test that an app can be launched using its app ID."""
assert await _test_select_source(
hass,
CONFIG_FIRETV_ADB_SERVER,
"com.app.test1",
"com.app.test1",
patchers.PATCH_LAUNCH_APP,
)
async def test_firetv_select_source_launch_app_name(hass):
"""Test that an app can be launched using its friendly name."""
assert await _test_select_source(
hass,
CONFIG_FIRETV_ADB_SERVER,
"TEST 1",
"com.app.test1",
patchers.PATCH_LAUNCH_APP,
)
async def test_firetv_select_source_launch_app_id_no_name(hass):
"""Test that an app can be launched using its app ID when it has no friendly name."""
assert await _test_select_source(
hass,
CONFIG_FIRETV_ADB_SERVER,
"com.app.test2",
"com.app.test2",
patchers.PATCH_LAUNCH_APP,
)
async def test_firetv_select_source_launch_app_hidden(hass):
"""Test that an app can be launched using its app ID when it is hidden from the sources list."""
assert await _test_select_source(
hass,
CONFIG_FIRETV_ADB_SERVER,
"com.app.test3",
"com.app.test3",
patchers.PATCH_LAUNCH_APP,
)
async def test_firetv_select_source_stop_app_id(hass):
"""Test that an app can be stopped using its app ID."""
assert await _test_select_source(
hass,
CONFIG_FIRETV_ADB_SERVER,
"!com.app.test1",
"com.app.test1",
patchers.PATCH_STOP_APP,
)
async def test_firetv_select_source_stop_app_name(hass):
"""Test that an app can be stopped using its friendly name."""
assert await _test_select_source(
hass,
CONFIG_FIRETV_ADB_SERVER,
"!TEST 1",
"com.app.test1",
patchers.PATCH_STOP_APP,
)
async def test_firetv_select_source_stop_app_id_no_name(hass):
"""Test that an app can be stopped using its app ID when it has no friendly name."""
assert await _test_select_source(
hass,
CONFIG_FIRETV_ADB_SERVER,
"!com.app.test2",
"com.app.test2",
patchers.PATCH_STOP_APP,
)
async def test_firetv_select_source_stop_hidden(hass):
"""Test that an app can be stopped using its app ID when it is hidden from the sources list."""
assert await _test_select_source(
hass,
CONFIG_FIRETV_ADB_SERVER,
"!com.app.test3",
"com.app.test3",
patchers.PATCH_STOP_APP,
)
async def _test_setup_fail(hass, config):
"""Test that the entity is not created when the ADB connection is not established."""
patch_key, entity_id = _setup(config)
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(False)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[
patch_key
], patchers.PATCH_KEYGEN, patchers.PATCH_ANDROIDTV_OPEN, patchers.PATCH_SIGNER:
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is None
return True
async def test_setup_fail_androidtv(hass):
"""Test that the Android TV entity is not created when the ADB connection is not established."""
assert await _test_setup_fail(hass, CONFIG_ANDROIDTV_PYTHON_ADB)
async def test_setup_fail_firetv(hass):
"""Test that the Fire TV entity is not created when the ADB connection is not established."""
assert await _test_setup_fail(hass, CONFIG_FIRETV_PYTHON_ADB)
async def test_setup_two_devices(hass):
"""Test that two devices can be set up."""
config = {
DOMAIN: [
CONFIG_ANDROIDTV_ADB_SERVER[DOMAIN],
copy.deepcopy(CONFIG_FIRETV_ADB_SERVER[DOMAIN]),
]
}
config[DOMAIN][1][CONF_HOST] = "127.0.0.2"
patch_key = "server"
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()
for entity_id in ["media_player.android_tv", "media_player.fire_tv"]:
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_OFF
async def test_setup_same_device_twice(hass):
"""Test that setup succeeds with a duplicated config entry."""
patch_key, entity_id = _setup(CONFIG_ANDROIDTV_ADB_SERVER)
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await async_setup_component(hass, DOMAIN, CONFIG_ANDROIDTV_ADB_SERVER)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state is not None
assert hass.services.has_service(ANDROIDTV_DOMAIN, SERVICE_ADB_COMMAND)
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await async_setup_component(hass, DOMAIN, CONFIG_ANDROIDTV_ADB_SERVER)
await hass.async_block_till_done()
async def test_adb_command(hass):
"""Test sending a command via the `androidtv.adb_command` service."""
patch_key, entity_id = _setup(CONFIG_ANDROIDTV_ADB_SERVER)
command = "test command"
response = "test response"
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await async_setup_component(hass, DOMAIN, CONFIG_ANDROIDTV_ADB_SERVER)
await hass.async_block_till_done()
with patch(
"androidtv.basetv.basetv_async.BaseTVAsync.adb_shell", return_value=response
) as patch_shell:
await hass.services.async_call(
ANDROIDTV_DOMAIN,
SERVICE_ADB_COMMAND,
{ATTR_ENTITY_ID: entity_id, ATTR_COMMAND: command},
blocking=True,
)
patch_shell.assert_called_with(command)
state = hass.states.get(entity_id)
assert state is not None
assert state.attributes["adb_response"] == response
async def test_adb_command_unicode_decode_error(hass):
"""Test sending a command via the `androidtv.adb_command` service that raises a UnicodeDecodeError exception."""
patch_key, entity_id = _setup(CONFIG_ANDROIDTV_ADB_SERVER)
command = "test command"
response = b"test response"
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await async_setup_component(hass, DOMAIN, CONFIG_ANDROIDTV_ADB_SERVER)
await hass.async_block_till_done()
with patch(
"androidtv.basetv.basetv_async.BaseTVAsync.adb_shell",
side_effect=UnicodeDecodeError("utf-8", response, 0, len(response), "TEST"),
):
await hass.services.async_call(
ANDROIDTV_DOMAIN,
SERVICE_ADB_COMMAND,
{ATTR_ENTITY_ID: entity_id, ATTR_COMMAND: command},
blocking=True,
)
# patch_shell.assert_called_with(command)
state = hass.states.get(entity_id)
assert state is not None
assert state.attributes["adb_response"] is None
async def test_adb_command_key(hass):
"""Test sending a key command via the `androidtv.adb_command` service."""
patch_key = "server"
entity_id = "media_player.android_tv"
command = "HOME"
response = None
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await async_setup_component(hass, DOMAIN, CONFIG_ANDROIDTV_ADB_SERVER)
await hass.async_block_till_done()
with patch(
"androidtv.basetv.basetv_async.BaseTVAsync.adb_shell", return_value=response
) as patch_shell:
await hass.services.async_call(
ANDROIDTV_DOMAIN,
SERVICE_ADB_COMMAND,
{ATTR_ENTITY_ID: entity_id, ATTR_COMMAND: command},
blocking=True,
)
patch_shell.assert_called_with(f"input keyevent {KEYS[command]}")
state = hass.states.get(entity_id)
assert state is not None
assert state.attributes["adb_response"] is None
async def test_adb_command_get_properties(hass):
"""Test sending the "GET_PROPERTIES" command via the `androidtv.adb_command` service."""
patch_key = "server"
entity_id = "media_player.android_tv"
command = "GET_PROPERTIES"
response = {"test key": "test value"}
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await async_setup_component(hass, DOMAIN, CONFIG_ANDROIDTV_ADB_SERVER)
await hass.async_block_till_done()
with patch(
"androidtv.androidtv.androidtv_async.AndroidTVAsync.get_properties_dict",
return_value=response,
) as patch_get_props:
await hass.services.async_call(
ANDROIDTV_DOMAIN,
SERVICE_ADB_COMMAND,
{ATTR_ENTITY_ID: entity_id, ATTR_COMMAND: command},
blocking=True,
)
patch_get_props.assert_called()
state = hass.states.get(entity_id)
assert state is not None
assert state.attributes["adb_response"] == str(response)
async def test_learn_sendevent(hass):
"""Test the `androidtv.learn_sendevent` service."""
patch_key = "server"
entity_id = "media_player.android_tv"
response = "sendevent 1 2 3 4"
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await async_setup_component(hass, DOMAIN, CONFIG_ANDROIDTV_ADB_SERVER)
await hass.async_block_till_done()
with patch(
"androidtv.basetv.basetv_async.BaseTVAsync.learn_sendevent",
return_value=response,
) as patch_learn_sendevent:
await hass.services.async_call(
ANDROIDTV_DOMAIN,
SERVICE_LEARN_SENDEVENT,
{ATTR_ENTITY_ID: entity_id},
blocking=True,
)
patch_learn_sendevent.assert_called()
state = hass.states.get(entity_id)
assert state is not None
assert state.attributes["adb_response"] == response
async def test_update_lock_not_acquired(hass):
"""Test that the state does not get updated when a `LockNotAcquiredException` is raised."""
patch_key, entity_id = _setup(CONFIG_ANDROIDTV_ADB_SERVER)
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await async_setup_component(hass, DOMAIN, CONFIG_ANDROIDTV_ADB_SERVER)
await hass.async_block_till_done()
with patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_OFF
with patch(
"androidtv.androidtv.androidtv_async.AndroidTVAsync.update",
side_effect=LockNotAcquiredException,
), patchers.patch_shell(SHELL_RESPONSE_STANDBY)[patch_key]:
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_OFF
with patchers.patch_shell(SHELL_RESPONSE_STANDBY)[patch_key]:
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_STANDBY
async def test_download(hass):
"""Test the `androidtv.download` service."""
patch_key, entity_id = _setup(CONFIG_ANDROIDTV_ADB_SERVER)
device_path = "device/path"
local_path = "local/path"
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await async_setup_component(hass, DOMAIN, CONFIG_ANDROIDTV_ADB_SERVER)
await hass.async_block_till_done()
# Failed download because path is not whitelisted
with patch("androidtv.basetv.basetv_async.BaseTVAsync.adb_pull") as patch_pull:
await hass.services.async_call(
ANDROIDTV_DOMAIN,
SERVICE_DOWNLOAD,
{
ATTR_ENTITY_ID: entity_id,
ATTR_DEVICE_PATH: device_path,
ATTR_LOCAL_PATH: local_path,
},
blocking=True,
)
patch_pull.assert_not_called()
# Successful download
with patch(
"androidtv.basetv.basetv_async.BaseTVAsync.adb_pull"
) as patch_pull, patch.object(hass.config, "is_allowed_path", return_value=True):
await hass.services.async_call(
ANDROIDTV_DOMAIN,
SERVICE_DOWNLOAD,
{
ATTR_ENTITY_ID: entity_id,
ATTR_DEVICE_PATH: device_path,
ATTR_LOCAL_PATH: local_path,
},
blocking=True,
)
patch_pull.assert_called_with(local_path, device_path)
async def test_upload(hass):
"""Test the `androidtv.upload` service."""
patch_key, entity_id = _setup(CONFIG_ANDROIDTV_ADB_SERVER)
device_path = "device/path"
local_path = "local/path"
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await async_setup_component(hass, DOMAIN, CONFIG_ANDROIDTV_ADB_SERVER)
await hass.async_block_till_done()
# Failed upload because path is not whitelisted
with patch("androidtv.basetv.basetv_async.BaseTVAsync.adb_push") as patch_push:
await hass.services.async_call(
ANDROIDTV_DOMAIN,
SERVICE_UPLOAD,
{
ATTR_ENTITY_ID: entity_id,
ATTR_DEVICE_PATH: device_path,
ATTR_LOCAL_PATH: local_path,
},
blocking=True,
)
patch_push.assert_not_called()
# Successful upload
with patch(
"androidtv.basetv.basetv_async.BaseTVAsync.adb_push"
) as patch_push, patch.object(hass.config, "is_allowed_path", return_value=True):
await hass.services.async_call(
ANDROIDTV_DOMAIN,
SERVICE_UPLOAD,
{
ATTR_ENTITY_ID: entity_id,
ATTR_DEVICE_PATH: device_path,
ATTR_LOCAL_PATH: local_path,
},
blocking=True,
)
patch_push.assert_called_with(local_path, device_path)
async def test_androidtv_volume_set(hass):
"""Test setting the volume for an Android TV device."""
patch_key, entity_id = _setup(CONFIG_ANDROIDTV_ADB_SERVER)
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await async_setup_component(hass, DOMAIN, CONFIG_ANDROIDTV_ADB_SERVER)
await hass.async_block_till_done()
with patch(
"androidtv.basetv.basetv_async.BaseTVAsync.set_volume_level", return_value=0.5
) as patch_set_volume_level:
await hass.services.async_call(
DOMAIN,
SERVICE_VOLUME_SET,
{ATTR_ENTITY_ID: entity_id, ATTR_MEDIA_VOLUME_LEVEL: 0.5},
blocking=True,
)
patch_set_volume_level.assert_called_with(0.5)
async def test_get_image(hass, hass_ws_client):
"""Test taking a screen capture.
This is based on `test_get_image` in tests/components/media_player/test_init.py.
"""
patch_key, entity_id = _setup(CONFIG_ANDROIDTV_ADB_SERVER)
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await async_setup_component(hass, DOMAIN, CONFIG_ANDROIDTV_ADB_SERVER)
await hass.async_block_till_done()
with patchers.patch_shell("11")[patch_key]:
await hass.helpers.entity_component.async_update_entity(entity_id)
client = await hass_ws_client(hass)
with patch(
"androidtv.basetv.basetv_async.BaseTVAsync.adb_screencap", return_value=b"image"
):
await client.send_json(
{"id": 5, "type": "media_player_thumbnail", "entity_id": entity_id}
)
msg = await client.receive_json()
assert msg["id"] == 5
assert msg["type"] == TYPE_RESULT
assert msg["success"]
assert msg["result"]["content_type"] == "image/png"
assert msg["result"]["content"] == base64.b64encode(b"image").decode("utf-8")
with patch(
"androidtv.basetv.basetv_async.BaseTVAsync.adb_screencap",
side_effect=RuntimeError,
):
await client.send_json(
{"id": 6, "type": "media_player_thumbnail", "entity_id": entity_id}
)
msg = await client.receive_json()
# The device is unavailable, but getting the media image did not cause an exception
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_UNAVAILABLE
async def _test_service(
hass,
entity_id,
ha_service_name,
androidtv_method,
additional_service_data=None,
return_value=None,
):
"""Test generic Android TV media player entity service."""
service_data = {ATTR_ENTITY_ID: entity_id}
if additional_service_data:
service_data.update(additional_service_data)
androidtv_patch = (
"androidtv.androidtv_async.AndroidTVAsync"
if "android" in entity_id
else "firetv.firetv_async.FireTVAsync"
)
with patch(
f"androidtv.{androidtv_patch}.{androidtv_method}", return_value=return_value
) as service_call:
await hass.services.async_call(
DOMAIN,
ha_service_name,
service_data=service_data,
blocking=True,
)
assert service_call.called
async def test_services_androidtv(hass):
"""Test media player services for an Android TV device."""
patch_key, entity_id = _setup(CONFIG_ANDROIDTV_ADB_SERVER)
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[patch_key]:
with patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await async_setup_component(
hass, DOMAIN, CONFIG_ANDROIDTV_ADB_SERVER
)
await hass.async_block_till_done()
with patchers.patch_shell(SHELL_RESPONSE_STANDBY)[patch_key]:
await _test_service(
hass, entity_id, SERVICE_MEDIA_NEXT_TRACK, "media_next_track"
)
await _test_service(hass, entity_id, SERVICE_MEDIA_PAUSE, "media_pause")
await _test_service(hass, entity_id, SERVICE_MEDIA_PLAY, "media_play")
await _test_service(
hass, entity_id, SERVICE_MEDIA_PLAY_PAUSE, "media_play_pause"
)
await _test_service(
hass, entity_id, SERVICE_MEDIA_PREVIOUS_TRACK, "media_previous_track"
)
await _test_service(hass, entity_id, SERVICE_MEDIA_STOP, "media_stop")
await _test_service(hass, entity_id, SERVICE_TURN_OFF, "turn_off")
await _test_service(hass, entity_id, SERVICE_TURN_ON, "turn_on")
await _test_service(
hass, entity_id, SERVICE_VOLUME_DOWN, "volume_down", return_value=0.1
)
await _test_service(
hass,
entity_id,
SERVICE_VOLUME_MUTE,
"mute_volume",
{ATTR_MEDIA_VOLUME_MUTED: False},
)
await _test_service(
hass,
entity_id,
SERVICE_VOLUME_SET,
"set_volume_level",
{ATTR_MEDIA_VOLUME_LEVEL: 0.5},
0.5,
)
await _test_service(
hass, entity_id, SERVICE_VOLUME_UP, "volume_up", return_value=0.2
)
async def test_services_firetv(hass):
"""Test media player services for a Fire TV device."""
patch_key, entity_id = _setup(CONFIG_FIRETV_ADB_SERVER)
config = copy.deepcopy(CONFIG_FIRETV_ADB_SERVER)
config[DOMAIN][CONF_TURN_OFF_COMMAND] = "test off"
config[DOMAIN][CONF_TURN_ON_COMMAND] = "test on"
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[patch_key]:
with patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()
with patchers.patch_shell(SHELL_RESPONSE_STANDBY)[patch_key]:
await _test_service(hass, entity_id, SERVICE_MEDIA_STOP, "back")
await _test_service(hass, entity_id, SERVICE_TURN_OFF, "adb_shell")
await _test_service(hass, entity_id, SERVICE_TURN_ON, "adb_shell")
async def test_connection_closed_on_ha_stop(hass):
"""Test that the ADB socket connection is closed when HA stops."""
patch_key, entity_id = _setup(CONFIG_ANDROIDTV_ADB_SERVER)
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[patch_key]:
assert await async_setup_component(hass, DOMAIN, CONFIG_ANDROIDTV_ADB_SERVER)
await hass.async_block_till_done()
with patch(
"androidtv.androidtv.androidtv_async.AndroidTVAsync.adb_close"
) as adb_close:
hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP)
await hass.async_block_till_done()
assert adb_close.called
async def test_exception(hass):
"""Test that the ADB connection gets closed when there is an unforeseen exception.
HA will attempt to reconnect on the next update.
"""
patch_key, entity_id = _setup(CONFIG_ANDROIDTV_PYTHON_ADB)
with patchers.PATCH_ADB_DEVICE_TCP, patchers.patch_connect(True)[
patch_key
], patchers.patch_shell(SHELL_RESPONSE_OFF)[
patch_key
], patchers.PATCH_KEYGEN, patchers.PATCH_ANDROIDTV_OPEN, patchers.PATCH_SIGNER:
assert await async_setup_component(hass, DOMAIN, CONFIG_ANDROIDTV_PYTHON_ADB)
await hass.async_block_till_done()
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_OFF
    # When an unforeseen exception occurs, we close the ADB connection and raise the exception
with patchers.PATCH_ANDROIDTV_UPDATE_EXCEPTION, pytest.raises(Exception):
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_UNAVAILABLE
# On the next update, HA will reconnect to the device
await hass.helpers.entity_component.async_update_entity(entity_id)
state = hass.states.get(entity_id)
assert state is not None
assert state.state == STATE_OFF
|
Premier Prints Dimensions Printed Cotton Drapery Fabric in Felix Natural. This printed fabric is perfect for window treatments, decorative pillows, handbags, light duty upholstery applications and almost any craft project. This fabric has a soft workable feel yet is stable and durable.
|
#
# Milovision: A camera pose estimation programme
#
# Copyright (C) 2013 Joris Stork
# See LICENSE.txt
#
# argparse.py
"""
:synopsis: Parses command line arguments using the optparse library.
Note that we use the now deprecated optparse library to maintain
compatibility with the pydc1394 library. This application and the
pydc1394 library it uses should eventually be refactored to the
newer argparse library.
.. moduleauthor:: Joris Stork <[email protected]>
"""
__author__ = "Joris Stork"
from optparse import OptionParser
from pydc1394.cmdline import add_common_options
def run():
""" parses command line args; adds to options defined in pydc/cmdline.py """
usage = "usage: %prog [options] file"
parser = OptionParser(usage)
add_common_options(parser)
parser.add_option("-v", "--verbosity", dest="verbosity",
help="set stdout verbosity (0: critical, 1: error, 2: warning, 3: info, 4: debug)",
type="int")
parser.add_option("-n", "--modules", dest="nr_modules", default=1,
help="set number of pipeline stages to run (1: edge detection; 2: ellipse fitting; 3: pose-1; 4: identify markers; 5: pose-2; 6: register data), default is all",
type="int")
parser.add_option("-s", "--simulate", dest="simulate",
help="set simulation mode (-2: linear generated markers; -1: random generated markers; 0<:preset marker configurations by index nr)",
type="int")
parser.add_option("-w", "--windows", dest="windows",
help="set image display (0: off; 1: on [default])",
type="int")
parser.add_option("-d", "--disk", dest="disk",
help="load marker poses from disk (0: off [default]; 1: on)",
type="int")
parser.add_option("-t", "--simtime", dest="simtime",
help="number of seconds to run simulation (default: 60)",
type="int")
(options, args) = parser.parse_args()
    if options.verbosity is None:
        options.verbosity = 2
    if options.simulate is None:
        options.simulate = 0
return options, args
|
[ZD][Psycho] started a hub for PtokaX and LUA Discussions.
There aren't many people there at the moment; hope we can change that.
--/ Sticky time - Typhoon?
Re: Unofficial PtokaX/LUA Discussions Hub.
I'd second Madman on that one.
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""an app to monitor and report on replication lag in PG databases"""
from crontabber.base import BaseCronApp
from socorro.external.postgresql.dbapi2_util import (
execute_no_results,
execute_query_fetchall,
)
from crontabber.mixins import with_postgres_transactions
#==============================================================================
@with_postgres_transactions()
class LagLog(BaseCronApp):
app_name = 'LagLog'
app_version = '0.1'
app_description = __doc__
#--------------------------------------------------------------------------
insert_sql = (
"INSERT INTO lag_log (replica_name, moment, lag, master) "
"VALUES (%s, %s, %s, %s)"
)
each_server_sql = (
"SELECT NOW(), client_addr, sent_location, replay_location "
"FROM pg_stat_replication"
)
#--------------------------------------------------------------------------
@staticmethod
def xlog_transform(xlog):
logid, offset = xlog.split('/')
return int('ffffffff', 16) * int(logid, 16) + int(offset, 16)
#--------------------------------------------------------------------------
def run(self):
each_server = self.database_transaction_executor(
execute_query_fetchall,
self.each_server_sql
)
self.config.logger.debug(
'replication database servers: %s',
each_server
)
for now, client_addr, sent_location, replay_location in each_server:
sent_location = self.xlog_transform(sent_location)
replay_location = self.xlog_transform(replay_location)
lag = sent_location - replay_location
self.config.logger.debug(
'%s %s %s %s',
client_addr,
now,
lag,
self.config.database.database_name
)
self.database_transaction_executor(
execute_no_results,
self.insert_sql,
(client_addr, now, lag, self.config.database.database_name)
)
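# Quick sanity check of xlog_transform (illustrative values only; run it
# standalone, it is not part of the crontabber app itself): a PostgreSQL xlog
# position such as '16/B374D848' is "logid/offset" in hex, and flattening both
# positions to plain integers lets the lag be computed as a simple subtraction.
if __name__ == '__main__':  # pragma: no cover
    sent = LagLog.xlog_transform('16/B374D848')
    replay = LagLog.xlog_transform('16/B3748000')
    print('lag in bytes:', sent - replay)  # 22600; the logid parts cancel out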
|
For the last two years, Aaron Wolf has been studying crowdfunding for free software – specifically, sustained funding rather than one-time donations. His conclusion is that a new model of funding is needed, which he plans to implement with Snowdrift.coop, a new project that should launch in early 2015.
A music teacher by trade, Wolf became interested in free software because of his growing disillusion with Apple. “I remember feeling outraged when I learned that Apple’s iOS terms effectively censor GNU General Public License software,” he says. “Volunteers worked to provide valuable free resources to the world, and then Apple could just block users’ access in order to compel people to get only proprietary apps and see ads or pay.” By 2012, he had discovered GNU/Linux, “and the welcoming community far surpassed my limitations.” Feeling a need to repay the benefits he received from free software, he was soon writing documentation for the KXStudio music system and providing non-technical help for the task manager Task Coach.
At the same time, Wolf started to focus on the economics of free software. He proposed some of his developing ideas to Task Coach, but “the developers weren’t sure how to implement them.” Instead, they urged him to spend his time developing his ideas. His friend David Thomas also encouraged him, offering to help build the necessary infrastructure. Although initially reluctant, Wolf allowed himself to be convinced. He now devotes much of his time to the project.
The project takes its name from its structure, a cooperative, and its mission, to solve what Wolf calls the Snowdrift dilemma, a variation of the Prisoner’s dilemma.
The Snowdrift dilemma compares the funding of free software to clearing a road of snow by volunteer effort. The blocked road is analogous to the problems that free software often faces, such as less polished interfaces or a developer-centric approach.
Clearing the road can be done in three ways. If no one helps, the road remains blocked and nobody wins – the problems remain. If you get others to do the work without you, you get a clear road at no cost, while they get a clear road at a heavy cost that may disincline them from contributing again. By contrast, if everyone helps, everyone gets a clear road at a modest cost; the problem is solved, and everyone is reasonably likely to contribute another time.
Snowdrift.coop is designed as a way around this dilemma. Instead of making a one-time donation, donors become patrons on the site. Each donor makes the pledge: “I’ll chip in a sustaining share based on how many others are with me in supporting this project – I’ll do more if more people will help.” In this way, Wolf theorizes, individual contributions make a difference, and ongoing contributions remain relevant.
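As a rough illustration of how such a matching pledge might be computed, the sketch below uses a hypothetical linear rule; the per-patron base amount and the formula itself are assumptions for illustration, not Snowdrift.coop's actual mechanism.

def monthly_share(num_patrons, base_cents=1):
    # Hypothetical matching rule: each patron chips in base_cents for every
    # patron supporting the project, so every new supporter slightly raises
    # everyone's contribution and the project's total income.
    return num_patrons * base_cents

patrons = 100
per_patron = monthly_share(patrons)   # 100 cents, i.e. $1.00 per patron
project_total = patrons * per_patron  # 10,000 cents, i.e. $100.00 per month

Under a rule like this, a patron's own cost grows only modestly as support grows, while the project's total income grows much faster, which is the point of the pledge quoted above.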
The project is organized as a cooperative, with three types of members: those who work on the site, the teams for projects asking for funding, and the general community of patrons. Each category of member will be represented on the board, and decisions will be by consensus.
This model is based on an extensive study of other funding models, including a review of more than 700 crowdfunding and donation sites. According to Wolf, the problem with most crowdfunding sites is that they are good for one-off campaigns but can be costly and do not provide a sustainable solution. Moreover, they are mostly proprietary in their governance.
Asked about the current state of the project, Wolf replies: “Two years after initializing, we have a functioning test site; a first full draft of bylaws, many wiki pages of writings covering background research, explanations, and project plans, and a community of volunteers and supporters enthusiastic about making this happen.” In addition, the site is available for users to test with imaginary money.
Moreover, the project is still in the process of ensuring that its proposed handling of funds and its co-op structure are legally valid. Attracting members, finalizing user account settings, translations and other site tweaks remain to be done, and a launch fundraiser is also scheduled for the near future.
Most of all, the Snowdrift dilemma remains to be proved relevant to funding – to say nothing of the project’s solutions to it. However, with its patron pledge and cooperative structure, Snowdrift.coop might just be different enough to succeed. For anyone looking to fund or donate to free software, it’s definitely a site to keep an eye on.
|
from critiquebrainz.frontend.testing import FrontendTestCase
class ArtistViewsTestCase(FrontendTestCase):
def test_artist_page(self):
# Basic artist page should be available.
response = self.client.get("/artist/aef06569-098f-4218-a577-b413944d9493")
self.assert200(response)
self.assertIn("HAIM", response.data)
# Album tab
response = self.client.get("/artist/aef06569-098f-4218-a577-b413944d9493?release_type=album")
self.assert200(response)
self.assertIn("Days Are Gone", response.data)
# Singles tab
response = self.client.get("/artist/aef06569-098f-4218-a577-b413944d9493?release_type=single")
self.assert200(response)
self.assertIn("The Wire", response.data)
# EPs tab
response = self.client.get("/artist/aef06569-098f-4218-a577-b413944d9493?release_type=ep")
self.assert200(response)
self.assertIn("Forever", response.data)
# Broadcasts tab
response = self.client.get("/artist/aef06569-098f-4218-a577-b413944d9493?release_type=broadcast")
self.assert200(response)
# Other releases tab
response = self.client.get("/artist/aef06569-098f-4218-a577-b413944d9493?release_type=other")
self.assert200(response)
|
This 2019 Lincoln Nautilus Reserve is proudly offered by PUGMIRE LINCOLN INC. This Lincoln includes: Cargo Utility Package (cargo shade); Ruby Red Metallic Tinted Clearcoat; and a 2.7L twin-turbocharged V6 gasoline engine. *Note - For third party subscriptions or services, please contact the dealer for more information.* Want more room? Want more style? This Lincoln Nautilus is the vehicle for you. You deserve a vehicle designed for higher expectations. This Lincoln Nautilus delivers with a luxurious, well-appointed interior and world-class engineering. You can finally stop searching... You've found the one you've been looking for.
|
from cnrclient.display import print_package_info
from cnrclient.commands.command_base import CommandBase
class ShowCmd(CommandBase):
name = 'show'
help_message = "print the package manifest"
def __init__(self, options):
super(ShowCmd, self).__init__(options)
self.package = options.package
self.registry_host = options.registry_host
self.version = options.version
self.verbose = options.wide
self.media_type = options.media_type
self.result = None
@classmethod
def _add_arguments(cls, parser):
cls._add_registryhost_option(parser)
cls._add_packagename_option(parser)
cls._add_packageversion_option(parser)
cls._add_mediatype_option(parser, default=None, required=False)
parser.add_argument("-w", "--wide", help="Extend display informations",
action="store_true", default=False)
def _call(self):
client = self.RegistryClient(self.registry_host)
self.result = client.show_package(self.package, version=self.version,
media_type=self.media_type)
def _render_dict(self):
return self.result
def _render_console(self):
return "Info: %s\n\n" % self.package + print_package_info(self.result, self.verbose)
|
Looking to highlight your rental property? Do it on http://seerental.com. Quick, easy video tours of your rental help you weed through potential clients. Let them see it online before you have to go and show it.
|
import sched
import csv
from southwest import SouthwestCheckIn
import time
class MultipleSouthwestCheckIns(object):
"""
parses csv file and
schedules multiple check-ins
    @param filename - user information csv filename
@info - CSV File Headers:
first name, last name, code, mm/dd/yyyy, hh:mm (24 hr)
"""
def __init__(self, filename):
self.filename = filename
self.users_csv = None
self.users = []
self._assert()
self.scheduler = sched.scheduler(time.time, time.sleep)
self._parse_file()
def _parse_file(self):
try:
with open(self.filename, 'r+') as f:
self.users_csv = list(csv.reader(f, skipinitialspace = True))
                self.users = [SouthwestCheckIn(*user) for user in self.users_csv]
except IOError:
print ("IO Error. Check file and filename parameter")
def _schedule(self):
for i, user in enumerate(self.users):
seconds = user._get_seconds()
print ("{0} is scheduled to check-in in {1:.1f} seconds"
.format(user.name, seconds))
self.scheduler.enter(seconds, 1, user.check_in, ())
def _assert(self):
try:
f = open(self.filename, 'r')
except IOError as e:
            print(e)
exit(0)
try:
csv_reader = csv.reader(f, skipinitialspace = True)
except csv.Error as e:
            print(e)
exit(0)
finally:
f.close()
def run(self):
self._schedule()
self.scheduler.run()
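# Minimal usage sketch (assumes a local "users.csv" laid out as described in
# the class docstring: first name, last name, code, mm/dd/yyyy, hh:mm, 24 hr):
if __name__ == '__main__':
    checkins = MultipleSouthwestCheckIns('users.csv')
    checkins.run()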
|
Free shipping on purchases over 13,000원.
I got it. good. Recommended. buy.
Joe is well received at affordable prices. If you read the reviews and bought it, you will buy a big one.
I will order another product. It's a little tight, but it has good elasticity.
Recommended. It's nice. It's very warm.
It does not seem to be a problem in winter.
It is warmer than other products.
I bought it for the first time and it is better than the price.
I like it. I like it. It's warm.
Warm and good. It's a little short.
Delivery is fast and thick brushed, but it is good.
Fast shipping I will wear warm well.
It is not great yet, but it is not bad.
good! It is better to choose big size. Products are satisfied.
I am satisfied. Delivery is fast. Very good.
I will ship it to you after confirmation.
|
# TODO: consider using u/struct
from rfb.utils import bytes_to_int
def dispatch_msgs(self, msg):
# handle multiple messages
ptr = 0
while ptr < len(msg):
# ClientSetPixelFormat(self, bpp, depth, big, true, masks, shifts)
if msg[ptr] == 0:
# if ClientSetPixelFormat is received, post init
# over-rules ServerSetPixelFormat sent, during init
self.bpp = msg[ptr+4]
self.depth = msg[ptr+5]
self.big = msg[ptr+6] == 1
self.true = msg[ptr+7] == 1
self.masks = (
bytes_to_int( msg[ptr+8:ptr+10] ),
bytes_to_int( msg[ptr+10:ptr+12] ),
bytes_to_int( msg[ptr+12:ptr+14] ),
)
self.shifts = (
msg[ptr+14],
msg[ptr+15],
msg[ptr+16]
)
if hasattr(self, 'ClientSetPixelFormat'):
self.ClientSetPixelFormat(
self.bpp,
self.depth,
self.big,
self.true,
self.masks,
self.shifts
)
# Colourmap (not currently implemented):
# If this msg is recv'd from client svr colourmap sent during
# init is cleared, therefore svr must send again before sending
# any framebuffer updates
ptr += 20 # includes trailing padding
# ClientSetEncodings(self, encodings)
elif msg[ptr] == 2:
count = bytes_to_int( msg[ptr+2:ptr+4] )
encodings = [
bytes_to_int( msg[ptr+4+i : ptr+8+i] )
for i in range(0, count*4, 4)
]
# session encodings are sent/set by client post init
self.encodings = encodings
if hasattr(self, 'ClientSetEncodings'):
self.ClientSetEncodings(encodings)
ptr += 4 + (count*4)
# ClientFrameBufferUpdateRequest(self, incr, x, y, w, h)
elif msg[ptr] == 3:
if hasattr(self, 'ClientFrameBufferUpdateRequest'):
self.ClientFrameBufferUpdateRequest(
msg[ptr+1] == 1,
bytes_to_int( msg[ptr+2:ptr+4] ),
bytes_to_int( msg[ptr+4:ptr+6] ),
bytes_to_int( msg[ptr+6:ptr+8] ),
bytes_to_int( msg[ptr+8:ptr+10] )
)
ptr += 10
# ClientKeyEvent(self, down, key)
elif msg[ptr] == 4:
if hasattr(self, 'ClientKeyEvent'):
self.ClientKeyEvent(
msg[ptr+1] == 1,
bytes_to_int( msg[ptr+4:ptr+8] )
)
ptr += 8
# ClientPointerEvent(self, buttons, x, y)
elif msg[ptr] == 5:
if hasattr(self, 'ClientPointerEvent'):
self.ClientPointerEvent(
msg[ptr+1],
bytes_to_int( msg[ptr+2:ptr+4] ),
bytes_to_int( msg[ptr+4:ptr+6] )
)
ptr += 6
# ClientCutText(self, text)
elif msg[ptr] == 6:
            # ClientCutText wire format: type(1) + padding(3) + length(4) + text
            l = bytes_to_int( msg[ptr+4:ptr+8] )
            if hasattr(self, 'ClientCutText'):
                self.ClientCutText(
                    msg[ptr+8 : ptr+8+l]
                )
            ptr += 8 + l
elif msg[ptr] > 6:
if hasattr(self, 'ClientOtherMsg'):
# ClientOtherMsg must return len of 1st msg
                ptr += self.ClientOtherMsg(msg)
else:
# skip all messages
# ... no way to tell how long the msg is ...
ptr = len(msg)
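# Minimal usage sketch (hypothetical, for illustration only): dispatch_msgs
# expects to be bound to a session object that provides the optional handler
# callbacks it probes with hasattr(). The RFB wire format is big endian, so a
# 10-byte FramebufferUpdateRequest (message type 3) can be packed with struct
# and fed straight in. DemoSession is an assumption, not part of this package,
# and this also assumes bytes_to_int decodes big-endian values.
import struct

class DemoSession:
    dispatch_msgs = dispatch_msgs

    def ClientFrameBufferUpdateRequest(self, incr, x, y, w, h):
        print('update request:', incr, x, y, w, h)

if __name__ == '__main__':
    msg = struct.pack('>BBHHHH', 3, 1, 0, 0, 640, 480)
    DemoSession().dispatch_msgs(msg)  # -> update request: True 0 0 640 480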
|
Buy some art and help someone in need—that’s the mutually beneficial idea driving ArtLifting, a startup that recently raised $1.1 million and provides a platform for homeless and disabled artists to sell their art.
Liz Powers, who started Artlifting, has found paintings through art therapy sessions in homeless shelters, “including about 20 in the Boston area,” The Boston Globe reports. Powers says her business gives homeless and disabled artists the opportunity to earn cash and gain some confidence.
ArtLifting’s main website sells posters and case covers that start at $35 USD, but the website also has some original paintings ranging from a few hundred dollars to over five thousand. Fifty-five per cent of that money goes to the artist. So far, ArtLifting is representing 50 people, and five “have received housing and are no longer homeless,” according to TechCrunch.
Stephanie Wright, who curates the works, told the Harvard Crimson that “in deciding which pieces of art to accept onto the ArtLifting platform, I rigorously evaluate each piece based on my perception of its technical intricacy and saleability.” Art stands out not just for its aesthetics but also for the concept and the artist’s story. This is what makes the art unique: their stories of struggle are much like fingerprints.
Ten artists of the Boston-based startup were also selected to display their art at the Museum of Fine Arts in Boston.
ArtLifting says it is staying for-profit so that it is not tied to chasing grant money. With the $1.1 million seed round, which the creator of Tom's Shoes helped back, ArtLifting will expand to reach more potential artists.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright 2013 Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import bson
import cherrypy
import pymongo
import six
import uuid
from six import BytesIO
from girder import logger
from girder.models import getDbConnection
from girder.models.model_base import ValidationException
from hashlib import sha512
from . import hash_state
from .abstract_assetstore_adapter import AbstractAssetstoreAdapter
# 2MB chunks. Clients must not send any chunks that are smaller than this
# unless they are sending the final chunk.
CHUNK_SIZE = 2097152
class GridFsAssetstoreAdapter(AbstractAssetstoreAdapter):
"""
This assetstore type stores files within MongoDB using the GridFS data
model.
"""
@staticmethod
def validateInfo(doc):
"""
Validate the assetstore -- make sure we can connect to it and that the
necessary indexes are set up.
"""
if not doc.get('db', ''):
raise ValidationException('Database name must not be empty.', 'db')
if '.' in doc['db'] or ' ' in doc['db']:
raise ValidationException('Database name cannot contain spaces'
' or periods.', 'db')
chunkColl = getDbConnection(
doc.get('mongohost', None), doc.get('replicaset', None),
autoRetry=False, serverSelectionTimeoutMS=10000)[doc['db']].chunk
try:
chunkColl.create_index([
('uuid', pymongo.ASCENDING),
('n', pymongo.ASCENDING)
], unique=True)
except pymongo.errors.ServerSelectionTimeoutError as e:
raise ValidationException(
'Could not connect to the database: %s' % str(e))
return doc
@staticmethod
def fileIndexFields():
return ['sha512']
def __init__(self, assetstore):
"""
:param assetstore: The assetstore to act on.
"""
super(GridFsAssetstoreAdapter, self).__init__(assetstore)
try:
self.chunkColl = getDbConnection(
self.assetstore.get('mongohost', None),
self.assetstore.get('replicaset', None)
)[self.assetstore['db']].chunk
except pymongo.errors.ConnectionFailure:
logger.error('Failed to connect to GridFS assetstore %s',
self.assetstore['db'])
self.chunkColl = 'Failed to connect'
self.unavailable = True
return
except pymongo.errors.ConfigurationError:
logger.exception('Failed to configure GridFS assetstore %s',
self.assetstore['db'])
self.chunkColl = 'Failed to configure'
self.unavailable = True
return
def initUpload(self, upload):
"""
Creates a UUID that will be used to uniquely link each chunk to
"""
upload['chunkUuid'] = uuid.uuid4().hex
upload['sha512state'] = hash_state.serializeHex(sha512())
return upload
def uploadChunk(self, upload, chunk):
"""
Stores the uploaded chunk in fixed-sized pieces in the chunks
collection of this assetstore's database.
"""
# If we know the chunk size is too large or small, fail early.
self.checkUploadSize(upload, self.getChunkSize(chunk))
if isinstance(chunk, six.text_type):
chunk = chunk.encode('utf8')
if isinstance(chunk, six.binary_type):
chunk = BytesIO(chunk)
# Restore the internal state of the streaming SHA-512 checksum
checksum = hash_state.restoreHex(upload['sha512state'], 'sha512')
# This bit of code will only do anything if there is a discrepancy
# between the received count of the upload record and the length of
# the file stored as chunks in the database. This code simply updates
# the sha512 state with the difference before reading the bytes sent
# from the user.
if self.requestOffset(upload) > upload['received']:
cursor = self.chunkColl.find({
'uuid': upload['chunkUuid'],
'n': {'$gte': upload['received'] // CHUNK_SIZE}
}, projection=['data']).sort('n', pymongo.ASCENDING)
for result in cursor:
checksum.update(result['data'])
cursor = self.chunkColl.find({
'uuid': upload['chunkUuid']
}, projection=['n']).sort('n', pymongo.DESCENDING).limit(1)
if cursor.count(True) == 0:
n = 0
else:
n = cursor[0]['n'] + 1
size = 0
startingN = n
while not upload['received']+size > upload['size']:
data = chunk.read(CHUNK_SIZE)
if not data:
break
# If a timeout occurs while we are trying to load data, we might
# have succeeded, in which case we will get a DuplicateKeyError
# when it automatically retries. Therefore, log this error but
# don't stop.
try:
self.chunkColl.insert_one({
'n': n,
'uuid': upload['chunkUuid'],
'data': bson.binary.Binary(data)
})
except pymongo.errors.DuplicateKeyError:
logger.info('Received a DuplicateKeyError while uploading, '
'probably because we reconnected to the database '
'(chunk uuid %s part %d)', upload['chunkUuid'], n)
n += 1
size += len(data)
checksum.update(data)
chunk.close()
try:
self.checkUploadSize(upload, size)
except ValidationException:
# The user tried to upload too much or too little. Delete
# everything we added
            self.chunkColl.delete_many({
                'uuid': upload['chunkUuid'],
                'n': {'$gte': startingN}
            })
raise
# Persist the internal state of the checksum
upload['sha512state'] = hash_state.serializeHex(checksum)
upload['received'] += size
return upload
def requestOffset(self, upload):
"""
The offset will be the CHUNK_SIZE * total number of chunks in the
database for this file. We return the max of that and the received
count because in testing mode we are uploading chunks that are smaller
than the CHUNK_SIZE, which in practice will not work.
"""
cursor = self.chunkColl.find({
'uuid': upload['chunkUuid']
}, projection=['n']).sort('n', pymongo.DESCENDING).limit(1)
if cursor.count(True) == 0:
offset = 0
else:
offset = cursor[0]['n'] * CHUNK_SIZE
return max(offset, upload['received'])
def finalizeUpload(self, upload, file):
"""
Grab the final state of the checksum and set it on the file object,
and write the generated UUID into the file itself.
"""
hash = hash_state.restoreHex(upload['sha512state'],
'sha512').hexdigest()
file['sha512'] = hash
file['chunkUuid'] = upload['chunkUuid']
file['chunkSize'] = CHUNK_SIZE
return file
def downloadFile(self, file, offset=0, headers=True, endByte=None,
contentDisposition=None, **kwargs):
"""
Returns a generator function that will be used to stream the file from
the database to the response.
"""
if endByte is None or endByte > file['size']:
endByte = file['size']
if headers:
cherrypy.response.headers['Accept-Ranges'] = 'bytes'
self.setContentHeaders(file, offset, endByte, contentDisposition)
# If the file is empty, we stop here
if endByte - offset <= 0:
return lambda: ''
n = 0
chunkOffset = 0
# We must "seek" to the correct chunk index and local offset
if offset > 0:
n = offset // file['chunkSize']
chunkOffset = offset % file['chunkSize']
cursor = self.chunkColl.find({
'uuid': file['chunkUuid'],
'n': {'$gte': n}
}, projection=['data']).sort('n', pymongo.ASCENDING)
def stream():
co = chunkOffset # Can't assign to outer scope without "nonlocal"
position = offset
shouldBreak = False
for chunk in cursor:
chunkLen = len(chunk['data'])
if position + chunkLen > endByte:
chunkLen = endByte - position + co
shouldBreak = True
yield chunk['data'][co:chunkLen]
if shouldBreak:
break
position += chunkLen - co
if co > 0:
co = 0
return stream
def deleteFile(self, file):
"""
Delete all of the chunks in the collection that correspond to the
given file.
"""
q = {
'chunkUuid': file['chunkUuid'],
'assetstoreId': self.assetstore['_id']
}
matching = self.model('file').find(q, limit=2, projection=[])
if matching.count(True) == 1:
try:
self.chunkColl.delete_many({'uuid': file['chunkUuid']})
except pymongo.errors.AutoReconnect:
# we can't reach the database. Go ahead and return; a system
# check will be necessary to remove the abandoned file
pass
def cancelUpload(self, upload):
"""
Delete all of the chunks associated with a given upload.
"""
self.chunkColl.delete_many({'uuid': upload['chunkUuid']})
|
1. The parable of Franz Kafka, used by Hannah Arendt in the preface to Between Past and Future: Eight Exercises in Political Thought, Hannah Arendt (Penguin Books, 1996), 7.
2. Václav Havel (address to Joint Session of U.S. Congress, Washington, D.C., February 1990), 43.
Arendt describes the loss of meaning of the traditional key words of politics: justice, reason, responsibility, virtue, glory. Through a series of eight exercises, she shows how we can redistill once more the vital essence of these concepts.
|
""" Quick n Simple Image Folder, Tarfile based DataSet
Hacked together by / Copyright 2020 Ross Wightman
"""
import torch.utils.data as data
import os
import torch
import logging
from PIL import Image
from .parsers import create_parser
_logger = logging.getLogger(__name__)
_ERROR_RETRY = 50
class ImageDataset(data.Dataset):
def __init__(
self,
root,
parser=None,
class_map='',
load_bytes=False,
transform=None,
):
if parser is None or isinstance(parser, str):
parser = create_parser(parser or '', root=root, class_map=class_map)
self.parser = parser
self.load_bytes = load_bytes
self.transform = transform
self._consecutive_errors = 0
def __getitem__(self, index):
img, target = self.parser[index]
try:
img = img.read() if self.load_bytes else Image.open(img).convert('RGB')
except Exception as e:
_logger.warning(f'Skipped sample (index {index}, file {self.parser.filename(index)}). {str(e)}')
self._consecutive_errors += 1
if self._consecutive_errors < _ERROR_RETRY:
return self.__getitem__((index + 1) % len(self.parser))
else:
raise e
self._consecutive_errors = 0
if self.transform is not None:
img = self.transform(img)
if target is None:
target = torch.tensor(-1, dtype=torch.long)
return img, target
def __len__(self):
return len(self.parser)
def filename(self, index, basename=False, absolute=False):
return self.parser.filename(index, basename, absolute)
def filenames(self, basename=False, absolute=False):
return self.parser.filenames(basename, absolute)
class IterableImageDataset(data.IterableDataset):
def __init__(
self,
root,
parser=None,
split='train',
is_training=False,
batch_size=None,
class_map='',
load_bytes=False,
repeats=0,
transform=None,
):
assert parser is not None
if isinstance(parser, str):
self.parser = create_parser(
parser, root=root, split=split, is_training=is_training, batch_size=batch_size, repeats=repeats)
else:
self.parser = parser
self.transform = transform
self._consecutive_errors = 0
def __iter__(self):
for img, target in self.parser:
if self.transform is not None:
img = self.transform(img)
if target is None:
target = torch.tensor(-1, dtype=torch.long)
yield img, target
def __len__(self):
if hasattr(self.parser, '__len__'):
return len(self.parser)
else:
return 0
def filename(self, index, basename=False, absolute=False):
assert False, 'Filename lookup by index not supported, use filenames().'
def filenames(self, basename=False, absolute=False):
return self.parser.filenames(basename, absolute)
class AugMixDataset(torch.utils.data.Dataset):
"""Dataset wrapper to perform AugMix or other clean/augmentation mixes"""
def __init__(self, dataset, num_splits=2):
self.augmentation = None
self.normalize = None
self.dataset = dataset
if self.dataset.transform is not None:
self._set_transforms(self.dataset.transform)
self.num_splits = num_splits
def _set_transforms(self, x):
assert isinstance(x, (list, tuple)) and len(x) == 3, 'Expecting a tuple/list of 3 transforms'
self.dataset.transform = x[0]
self.augmentation = x[1]
self.normalize = x[2]
@property
def transform(self):
return self.dataset.transform
@transform.setter
def transform(self, x):
self._set_transforms(x)
def _normalize(self, x):
return x if self.normalize is None else self.normalize(x)
def __getitem__(self, i):
x, y = self.dataset[i] # all splits share the same dataset base transform
x_list = [self._normalize(x)] # first split only normalizes (this is the 'clean' split)
# run the full augmentation on the remaining splits
for _ in range(self.num_splits - 1):
x_list.append(self._normalize(self.augmentation(x)))
return tuple(x_list), y
def __len__(self):
return len(self.dataset)
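if __name__ == '__main__':
    # Minimal usage sketch (illustrative only, not part of the original module).
    # '/path/to/train' is a hypothetical image folder and the torchvision
    # transforms below are just examples; AugMixDataset expects its transform to
    # be a 3-tuple of (base, augmentation, normalize), so the first split stays
    # "clean" while the remaining splits are augmented before normalization.
    from torchvision import transforms
    base = transforms.Compose([
        transforms.RandomResizedCrop(224),
        transforms.RandomHorizontalFlip(),
    ])
    augment = transforms.ColorJitter(0.4, 0.4, 0.4)
    normalize = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),
    ])
    dataset = ImageDataset('/path/to/train')
    augmix = AugMixDataset(dataset, num_splits=2)
    augmix.transform = (base, augment, normalize)
    (clean, augmented), target = augmix[0]
    print(clean.shape, augmented.shape, target)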
|
When a juvenile is charged with robbery, it is scary for the individual charged and his or her parents. How will the charges affect the child's future? Will there be a lasting criminal record?
As you look for answers to these questions, it is important to understand your legal rights and to seek the help of an experienced juvenile defense lawyer.
At Groshek Law in Minneapolis, Minnesota, defense attorney Christa Groshek is experienced in both the adult and juvenile justice systems. She understands the unique needs of minors and how to protect their rights in juvenile court, including defending against attempts by prosecutors to seek extended juvenile jurisdiction (EJJ) or adult certification.
Robbery, especially armed robbery, is a serious charge that can result in severe penalties. Prosecutors can pursue EJJ or adult certification, which can result in adult charges and prison time.
Christa Groshek has experience filing the necessary motions in these cases to protect your child against being charged as an adult. She understands the resources and programs available to teens who have been charged with a crime, and she will work with the state and the judge to develop a program that is best suited to the needs of the minor.
Ultimately, our goal at Groshek Law PA is to do what it takes to give our clients the best chance for a successful future. To discuss your juvenile robbery case with a skilled criminal defense lawyer, contact Groshek Law today by calling 612-424-5829 or by sending us an email.
|
from __future__ import division
import numpy as np
import chainer
from chainer.functions import dropout
from chainer.functions import max_pooling_2d
from chainer.functions import relu
from chainer.functions import softmax
from chainer.initializers import constant
from chainer.initializers import normal
from chainer.links import Linear
from chainercv.links.connection.conv_2d_activ import Conv2DActiv
from chainercv.links.model.pickable_sequential_chain import \
PickableSequentialChain
from chainercv import utils
# RGB order
_imagenet_mean = np.array(
[123.68, 116.779, 103.939], dtype=np.float32)[:, np.newaxis, np.newaxis]
class VGG16(PickableSequentialChain):
"""VGG-16 Network.
This is a pickable sequential link.
    The network can choose output layers from the set of all
    intermediate layers.
The attribute :obj:`pick` is the names of the layers that are going
to be picked by :meth:`forward`.
The attribute :obj:`layer_names` is the names of all layers
that can be picked.
Examples:
>>> model = VGG16()
# By default, forward returns a probability score (after Softmax).
>>> prob = model(imgs)
>>> model.pick = 'conv5_3'
# This is layer conv5_3 (after ReLU).
>>> conv5_3 = model(imgs)
>>> model.pick = ['conv5_3', 'fc6']
>>> # These are layers conv5_3 (after ReLU) and fc6 (before ReLU).
>>> conv5_3, fc6 = model(imgs)
.. seealso::
:class:`chainercv.links.model.PickableSequentialChain`
When :obj:`pretrained_model` is the path of a pre-trained chainer model
serialized as a :obj:`.npz` file in the constructor, this chain model
automatically initializes all the parameters with it.
When a string in the prespecified set is provided, a pretrained model is
loaded from weights distributed on the Internet.
The list of pretrained models supported are as follows:
* :obj:`imagenet`: Loads weights trained with ImageNet and distributed \
at `Model Zoo \
<https://github.com/BVLC/caffe/wiki/Model-Zoo>`_.
Args:
n_class (int): The number of classes. If :obj:`None`,
the default values are used.
If a supported pretrained model is used,
the number of classes used to train the pretrained model
is used. Otherwise, the number of classes in ILSVRC 2012 dataset
is used.
pretrained_model (string): The destination of the pre-trained
chainer model serialized as a :obj:`.npz` file.
If this is one of the strings described
above, it automatically loads weights stored under a directory
:obj:`$CHAINER_DATASET_ROOT/pfnet/chainercv/models/`,
where :obj:`$CHAINER_DATASET_ROOT` is set as
:obj:`$HOME/.chainer/dataset` unless you specify another value
by modifying the environment variable.
mean (numpy.ndarray): A mean value. If :obj:`None`,
the default values are used.
If a supported pretrained model is used,
the mean value used to train the pretrained model is used.
Otherwise, the mean value calculated from ILSVRC 2012 dataset
is used.
initialW (callable): Initializer for the weights.
initial_bias (callable): Initializer for the biases.
"""
_models = {
'imagenet': {
'param': {'n_class': 1000, 'mean': _imagenet_mean},
'overwritable': ('mean',),
'url': 'https://chainercv-models.preferred.jp/'
'vgg16_imagenet_converted_2017_07_18.npz'
}
}
def __init__(self,
n_class=None, pretrained_model=None, mean=None,
initialW=None, initial_bias=None):
param, path = utils.prepare_pretrained_model(
{'n_class': n_class, 'mean': mean},
pretrained_model, self._models,
{'n_class': 1000, 'mean': _imagenet_mean})
self.mean = param['mean']
if initialW is None:
# Employ default initializers used in the original paper.
initialW = normal.Normal(0.01)
if pretrained_model:
# As a sampling process is time-consuming,
# we employ a zero initializer for faster computation.
initialW = constant.Zero()
kwargs = {'initialW': initialW, 'initial_bias': initial_bias}
super(VGG16, self).__init__()
with self.init_scope():
self.conv1_1 = Conv2DActiv(None, 64, 3, 1, 1, **kwargs)
self.conv1_2 = Conv2DActiv(None, 64, 3, 1, 1, **kwargs)
self.pool1 = _max_pooling_2d
self.conv2_1 = Conv2DActiv(None, 128, 3, 1, 1, **kwargs)
self.conv2_2 = Conv2DActiv(None, 128, 3, 1, 1, **kwargs)
self.pool2 = _max_pooling_2d
self.conv3_1 = Conv2DActiv(None, 256, 3, 1, 1, **kwargs)
self.conv3_2 = Conv2DActiv(None, 256, 3, 1, 1, **kwargs)
self.conv3_3 = Conv2DActiv(None, 256, 3, 1, 1, **kwargs)
self.pool3 = _max_pooling_2d
self.conv4_1 = Conv2DActiv(None, 512, 3, 1, 1, **kwargs)
self.conv4_2 = Conv2DActiv(None, 512, 3, 1, 1, **kwargs)
self.conv4_3 = Conv2DActiv(None, 512, 3, 1, 1, **kwargs)
self.pool4 = _max_pooling_2d
self.conv5_1 = Conv2DActiv(None, 512, 3, 1, 1, **kwargs)
self.conv5_2 = Conv2DActiv(None, 512, 3, 1, 1, **kwargs)
self.conv5_3 = Conv2DActiv(None, 512, 3, 1, 1, **kwargs)
self.pool5 = _max_pooling_2d
self.fc6 = Linear(None, 4096, **kwargs)
self.fc6_relu = relu
self.fc6_dropout = dropout
self.fc7 = Linear(None, 4096, **kwargs)
self.fc7_relu = relu
self.fc7_dropout = dropout
self.fc8 = Linear(None, param['n_class'], **kwargs)
self.prob = softmax
if path:
chainer.serializers.load_npz(path, self)
def _max_pooling_2d(x):
return max_pooling_2d(x, ksize=2)
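if __name__ == '__main__':
    # Minimal feature-extraction sketch (illustrative only, not part of the
    # original module). 'sample.jpg' is a hypothetical image path; the ImageNet
    # weights are downloaded on first use. Because only 'conv5_3' is picked,
    # the fully connected layers are never executed.
    model = VGG16(pretrained_model='imagenet')
    model.pick = 'conv5_3'
    img = utils.read_image('sample.jpg', color=True)  # CHW, RGB, float32
    img -= model.mean                                 # subtract the ImageNet mean
    with chainer.using_config('train', False):
        feat = model(img[np.newaxis])                 # add a batch dimension
    print(feat.shape)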
|
Compare an XML file to a master file to locate missing elements.
Support for Xml Tag Diff is available from the publisher's site.
Planetsofts.com didn't scan Xml Tag Diff for viruses, adware, spyware, or other badware. For safety reasons, we recommend that you always keep an updated antivirus installed on your PC when downloading and installing software from the web.
|
"""
Support for Verisure Smartplugs.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.verisure/
"""
import logging
from time import time
from homeassistant.components.verisure import HUB as hub
from homeassistant.components.verisure import CONF_SMARTPLUGS
from homeassistant.components.switch import SwitchDevice
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Verisure switch platform."""
if not int(hub.config.get(CONF_SMARTPLUGS, 1)):
return False
hub.update_overview()
switches = []
switches.extend([
VerisureSmartplug(device_label)
for device_label in hub.get('$.smartPlugs[*].deviceLabel')])
add_entities(switches)
class VerisureSmartplug(SwitchDevice):
"""Representation of a Verisure smartplug."""
def __init__(self, device_id):
"""Initialize the Verisure device."""
self._device_label = device_id
self._change_timestamp = 0
self._state = False
@property
def name(self):
"""Return the name or location of the smartplug."""
return hub.get_first(
"$.smartPlugs[?(@.deviceLabel == '%s')].area",
self._device_label)
@property
def is_on(self):
"""Return true if on."""
if time() - self._change_timestamp < 10:
return self._state
self._state = hub.get_first(
"$.smartPlugs[?(@.deviceLabel == '%s')].currentState",
self._device_label) == "ON"
return self._state
@property
def available(self):
"""Return True if entity is available."""
return hub.get_first(
"$.smartPlugs[?(@.deviceLabel == '%s')]",
self._device_label) is not None
def turn_on(self, **kwargs):
"""Set smartplug status on."""
hub.session.set_smartplug_state(self._device_label, True)
self._state = True
self._change_timestamp = time()
def turn_off(self, **kwargs):
"""Set smartplug status off."""
hub.session.set_smartplug_state(self._device_label, False)
self._state = False
self._change_timestamp = time()
# pylint: disable=no-self-use
def update(self):
"""Get the latest date of the smartplug."""
hub.update_overview()
|
We help companies maximize their online presence with a customized strategy to digital advertising and marketing. By connecting your business with a devoted strategist, we deliver seamless digital advertising and marketing strategies for companies that wish to be validated by present customers and discovered by new ones.
Jon-Mikel Bailey – Before co-founding Wood Street in 2002, Jon worked in sales, marketing and business development for technology and marketing companies. A popular speaker, he offers seminars on marketing, internet advertising, branding and web design to chambers of commerce, trade associations and colleges. He has a BFA in Photography from Frostburg State University and still shoots images for Wood Street clients.
Chris Ducker is famed as the author of the bestseller “Virtual Freedom”. A serial speaker and entrepreneur, Chris is originally from the United Kingdom but lived for 14 years in the Philippines. There he has founded quite a few businesses, such as a virtual assistant recruitment hub, a co-working space, and an outsourced call center facility. Ducker is a popular podcaster and he maintains his own business blog.
There is an added benefit to creating common blog content material: organic search results. We’ve saved this for last as a result of it’s the most labor intensive and it takes the longest to realize. You’ll have more capability, confidence, and hopefully more new sufferers, in the event you get some fast wins with other methods first. There is, however, great benefit to having the highest Small Business Administration spot for natural search. If you’ve achieved any research on search engine optimisation strategy in the last few years, it’s essential to notice that things have modified not too long ago as Google’s machine has gotten smarter.
I really feel for girls who do not have the support and steerage to realize the form of success, that I know, is completely inside their attain. You most certainly should discover it at MBK middle. It is a big mall with many choices. As well as different purchasing centers nearby. Without a correct funnel and thoroughly driving down individuals into the funnel, sales are just not going to happen because it used to.
While it can be intimidating to journey to China and attend the honest, it is well price your time. If you propose on going, then try my itinerary! In example, for those who stay on Sukhumvit 21 (Asoke area) there’s pier you may take a boat to bore which will drop you in the midst of market and some minutes walk to Bobe Tower. We guarantee increase in ROI and buyer acquisition for our clients with help of our proven methodologies.
Internet advertising, often known as Online Marketing, Web-Marketing, Search Engine Marketing (SEM) entails advertising and marketing each services and products on the internet.
Adobe’s UI is credited with becoming “more intuitive,” although the report nonetheless cited it as being “a bit daunting.” That tool also hit top scores in such areas as data ingestion, data repository, data analytics and reporting. One of the core practices of SEO, ensuring your ideal clients can find you, all comes down to knowing the right keywords and how to use them. Welcome to OMG SEO Melbourne, an offshoot of Online Marketing Gurus – one of Australia’s leading SEO agencies.
My personal favorite was when a client wanted me to get paid “commission off profit,” which might be roughly x amount. Commissions need to be on gross; profit is simply too easy to manipulate on paper. Raso and Homayunfard say starting their company wasn’t the difficult part; it was differentiating their offering from the masses that proved challenging. Not sure I’d go that far, although I’m getting some good traction. Always nice to get positive feedback. Keep up the good work!
On a median day greater than four thousand people from greater than 100 different international locations be a part of SFI Marketing Group which means one enrollment every 20 seconds. And that is just from ONE day!
AdMarula was based by Daniel Gross and Daniel Bernholc, a Swedish duo with backgrounds in system improvement and financing of early-stage tech companies. Amongst his earlier roles, Bernholc was formerly the CTO at TradeDoubler, one in all Europe’s largest and most profitable efficiency advertising networks, whereas Daniel Gross has gained intensive expertise in working with tech start-ups across Europe, the US and Israel.
Okay, now we’re talking seriously. Frank is considered one of the top web marketing gurus on the planet. Kern shares some information on his blog, but it appears that his magic lies in his subscription-based marketing program, which is killing it. I’m so thrilled I can still do this for you today, and make it really easy for you to get started right now with Ignite, one hundred percent risk free!
According to findings in a January 2001 Forrester report, “Online Advertising Eclipsed,” 83 percent of spending by 2003 will be pure cost-per-action deals or a hybrid of CPM and performance. This will be up from 62 percent in 2000. The Interactive Advertising Bureau (IAB) estimated pricing models for Internet advertising: hybrid deals accounted for 46% of spending, CPM or impression-based deals were at 44% and performance-based deals were at 10% (Q2 2000).
Their 1984-style management has resulted in sycophantic staff – yes-men with limited transferable skills. To the bemusement of the author, one of the newer developments sees the SEO/PPC staff moved away from tasks requiring soft skills. The second part of turning a prospect into a customer is persuasion. Leads become warm via marketing automation and lead nurturing… but sometimes they are sitting on the fence about buying your product. They need to be persuaded to buy your product. What to say about Pat Flynn? His story is among the greatest in the industry. He is what all of us dream about.
Integrating trending topics into your strategy is a great way to promote your marketing, as they have high search volumes and great conversion rates. You’ll be giving your customers precisely what they are interested in as it’s happening, allowing you to become the “go-to” source for the information they crave. With over 20 million accounts in the UK, over 140 million worldwide and a range of inexpensive products to suit your business needs, contact them today to get set up with one of the international leaders in online payments.
We additionally conduct employees coaching on ICT to get them acquainted with the operation and effective use of IT tools and other pc accessories to achieve optimal result. This is without doubt one of the very best fashionable advertising and marketing books at explaining how search, social and content material all work collectively and why they go collectively like peanut butter, jelly and bread in a peanut butter and jelly sandwich. New updates on Smart Traveller have upgraded warnings for Turkey with news Anzac Day commemorations might be a goal of terror attacks. Product categories include CPU coolers, perform panels, M/B sockets, graphic playing cards, sound cards, displays and gaming gear.
Waisberg is the go-to supply for marketers and website owners trying to perceive buyer behavior and what it takes to convert website visitors into loyal website consumers. Among his high suggestions: how retailers can combine Marketing Consultant offline transactions into Google Analytics to realize extra perception into customer behavior. As an added bonus, Waisberg can also be the Analytics Advocate for Google, which suggests his data comes straight from the supply.
Chiropractors have had to change along with the health and wellness trade as new technologies and analysis can be found, so that they have an easier time adapting to a endless provide of recent advertising tactics, consumer behaviors, and business fashions. The greatest marketing is comprehensive; gaining synergy from cellular, social, search engine marketing, and web site alerts, and working collectively to market your chiropractic follow.
“Training of affiliates is among the key success factors of a performance-based advertising campaign,” says Declan Dunn, author of Winning at the Affiliate Game. Make your partners profitable and you instantly benefit. If you are ordering products to be manufactured overseas, expect to place a pretty big minimum order. Every manufacturer will have their own MOQs (minimum order quantity), but don’t be embarrassed to ask before ordering. My recommendation, if you decide to travel all the way to Asia, is to attend both the Global Sources show and the Canton Fair during the same trip. Canton is only a 2 hour train ride away and it costs $30.
But digital advertising ‘gurus’ shouldn’t be followed blindly, as a real guru could be. They would not have the infallibility of spiritual leaders, for a lot of superb causes, and generally I get the impression that thought leaders, or gurus, have that position just because they are saying they do, and in a self-perpetuating situation, are created thus by the following and sharing of their teachings by others. We see a chunk by a ‘guru’, so we wish to share it to show we’re in the know and follow the lead of this particular person, and so they retain their place of authority because of all those beautiful hyperlinks and shares and mentions.
Want to get the best advertising insights? Many of them come from the consultants and companies that work with clients across a vast vary of industries to assist set up their shoppers model and enhance the visibility of their purchasers on-line presence. In this submit, I’m going to share some of the most helpful advertising company blogs that you would be able to comply with to get advice about what is working now in advertising and advertising.
Some ask whether or not a blog counts as social media. I can say our blog is intimately tied to our social network activity and we continue to obtain feedback on posts. So sure, a weblog could be very much a part of social media. Content is the Kingdom in spite of everything and weblog content material makes up much of what folks share on the social net. Companies like ours and our purchasers are additionally leveraging social networks to source content, so it is a dynamic cycle of creation and collaboration. This is what enables a really small team of entrepreneurs to scale high quality content material in a approach that is meaningful to customers.
If you’re excited about getting started with this revolutionary “new” marketing method, check out the list below. From training guides that provide you with the step-by-step actions needed to launch a content marketing campaign to the precise tools used in these processes, you will find all the resources needed to start improving your website results today!
Adam Proehl, Managing Partner of Minneapolis based NordicClick Interactive , is a seasoned on-line advertising and marketing government who has a passion for helping clients use online channels to grow their business. As a practical marketer, his skill to get at the coronary heart of the core business issues, establish measurable targets, and map the suitable on-line tactics to additional those aims Carrier Information has served his purchasers nicely. He presents continuously at industry events, together with Search Engine Strategies and Online Marketing Summit. In his class, Building the Ultimate Dashboard , Adam explains tips on how to give attention to the proper metrics, and create an actionable dashboard.
David Lewis is an early pioneer in Internet advertising, advertising automation, and CRM methods with over 22 years expertise advertising and promoting know-how options by way of direct gross sales channels, online, and distribution channels. In 2007, he founded DemandGen International, Inc a global consulting firm serving to companies deploy and utilize advertising automation and CRM programs. DemandGen has become the trusted advisor to the world’s main advertising and gross sales groups combining superior service, business process, and expertise expertise.
True specialists in online marketing. web optimization Missing Money, content material writing, social media and extra, TopRank Online Marketing has you covered.
We provide SEO for the rising corporations within the state of North Carolina. As North Carolina search engine marketing agency, we work with corporations in the following areas: Raleigh web optimization, Charlotte SEO, for the cities in North Carolina. From as little as US$ninety six a day you’ll be able to have fully licensed English talking Thai tour guide at your facet to elucidate, translate and assist you to perceive what you’re seeing and hearing during your stay.
There continues to be more to write about selecting a advertising technique and studying from different individuals’s expertise, and I hope I’ll cowl more about it in the future, however for now I’d like to go away you with a number of suggestions to help you make this article actionable and not simply philosophical. You can attain the market from the BTS- get off at the Saphan Taksin station. You’ll need to cross the river, nevertheless it’s easy with the Phraya Express Boat. Afterward, you’ll be able to take another ferry across to Klong San Pier.
Onsite optimization is a crucial component of web optimization Basically, this pertains to how effectively Google ranks your web site without external link constructing. In most circumstances, performance based mostly search engine optimisation packages will only require very basic onsite optimization and rely solely on these external hyperlinks. So the worth of your precise website from an onsite optimization perspective doesn’t increase.
Set measurable targets for the weblog in response to the business goals you’re trying to succeed in. Based on the type of weblog and your objectives, seek to understand your viewers and create an editorial schedule that feeds both Finance Calculator your communication needs and the interests of your readers. Ask others in your group for assist in making posts and socialize with different blogs in your area of interest. Use analytics to observe efficiency, make adjustments and measure outcomes.
Video is a strong device that enables you to talk to a large audience about your organization and products. When discerning prospects Business Ideas want more than product descriptions, video is the reply to your advertising challenges.
Ian Cleary is called the founding father of award-winning advertising tech weblog, RazorSocial. He has greater than 15 years’ value of experience within the tech trade and has made reducing-edge tech products obtainable to numerous industries through the years. Not solely does Ian speak on the most influential social media conferences throughout the world, leading online publications like Huffington Post, , and VentureBeat all submit his works.
The easy reality is that most firms should not correctly registered online, and this improper registration lowers their ranking in search results. Not at Simply Chiropractic. We have hired former Google search specialists to guantee that each franchise location shouldn’t be solely properly registered, but also optimized. We have also created an internet site format and content material in your location that’s optimized to correctly reply search queries. Additionally, we offer a blogging again-link that generates credibility on your location in the online community.
After finishing my degree in Information programs at Kingston University, I began working as a Technical support analyst at Sellerdeck and I now work as a Technical Support Coordinator. My position includes, offering 2nd line assist on the eCommerce software program and coordinating the assist Online Business Opportunities division which incorporates monitoring service as well as assist. Working at Sellerdeck allows me to increase my expertise, I am learning one thing new day-after-day!
Mike Hamilton: We have been monitoring a variety of changes with Google+ and Google My Business. It looks as if Google has finally given up on Google+ and conceded to Facebook. With that battle lost, it looks like Google is seeking to win the war when it comes to “Directory and Review Listings”. We have seen the addition of posting to your business listing, the ability to add an appointment link and, soon to come if not already here, Instant Chat along with a Q&A section.
When I joined SFI/Tripleclicks in 2013, I couldn’t help but feel my blood surging – I had found an automatic money-making program! I called my friends and relatives to ask them to join. After nearly 3 years, my enthusiasm had gradually disappeared. Here I share the truth about SFI and Tripleclicks with you through my personal experiences.
Using his webcast, e-commerce, e mail advertising and competitive pricing, he grew the Wine Library into a 60 million greenback business by 2005, 6 years after assuming control. In 2009 Gary and his brother AJ founded VaynerMedia. Using the techniques he pioneered with his household’s business, VaynerMedia provides social media and technique to Fortune 500 companies like GE Anheuser-Busch and PepsiCo. By 2016, VaynerMedia had grown to 600 workers and grossed one hundred million in revenue.
Carpet Care Marketing Gurus employs Google Analytics Certified marketers ready to bring the power of exhausting information to your carpet cleaning business. Prior to digital advertising and marketing, companies could not monitor their promoting finances past the examine they wrote to their advert agency. With the wonderful efficiency information gathered by Google Analytics, Carpet Care Marketing Gurus will track and report on each dollar of your advertising price range. Return on funding is not a question, it is a statistic.
Though Neil Patel is a very achieved digital marketer, I turn to him most for his unimaginable SEO strategies. He provides an enormous quantity of free training on and the QuickSprout blog , as well as the occasional paid course. What’s nice about Neil Patel’s search engine marketing content is that he uses solely above-board ways that can be utilized to construct a sustainable business.
From Organic search engine optimization and Social Media Marketing to guide technology services like PPC and Reputation Management, we offer a multitude of digital advertising options which are designed from the ground as much as deliver outcomes Marketing, be it rankings, net visitors, or revenue. Utilizing a white-hat method to search engine marketing, we specialize in remodeling an peculiar website into a rare money site.
|
"""
Copyright 2015-2016 @_rc0r <[email protected]>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import sqlite3 as lite
from afl_utils.AflPrettyPrint import *
class sqliteConnector:
def __init__(self, database_path, verbose=True):
self.database_path = database_path
self.dbcon = lite.connect(database_path, isolation_level='Exclusive')
self.dbcur = self.dbcon.cursor()
self.dbcur.execute('PRAGMA synchronous = 0')
# self.dbcur.execute('PRAGMA journal_mode = OFF')
self.verbose = verbose
def init_database(self, table, table_spec):
"""
Prepares a sqlite3 database for data set storage. If the file specified in database_path doesn't exist a new
sqlite3 database with table 'Data' will be created. Otherwise the existing database is used to store additional
data sets.
DO NOT USE WITH USER SUPPLIED `table` AND `table_spec` PARAMS!
!!! THIS METHOD IS *NOT* SQLi SAFE !!!
:param table: Name of the table to create.
:param table_spec: String containing the SQL table specification
:return: None
"""
table_data_exists = False
if os.path.isfile(self.database_path):
try:
self.dbcur.execute("SELECT Count(*) FROM {}".format(table))
if self.verbose:
print_warn("Using existing database to store results, %s entries in this database so far." %
str(self.dbcur.fetchone()[0]))
table_data_exists = True
except lite.OperationalError:
if self.verbose:
print_warn("Table \'{}\' not found in existing database!".format(table))
if not table_data_exists: # If the database doesn't exist, we'll create it.
if self.verbose:
print_ok("Creating new table \'{}\' in database \'{}\' to store data!".format(table, self.database_path))
self.dbcur.execute("CREATE TABLE `{}` ({})".format(table, table_spec))
def dataset_exists(self, table, dataset, compare_fields):
"""
Check if dataset was already submitted into database.
DO NOT USE WITH USER SUPPLIED `table`, `dataset` or `compare_fields` PARAMS!
!!! THIS METHOD IS *NOT* SQLi SAFE !!!
:param table: Name of table to perform the check on.
:param dataset: A dataset dict consisting of sample filename, sample classification
and classification description.
:param compare_fields: List containing field names that will be checked using logical AND operation.
:return: True if the data set is already present in database, False otherwise.
"""
# The nice thing about using the SQL DB is that I can just have it make
# a query to make a duplicate check. This can likely be done better but
# it's "good enough" for now.
output = False
# check sample by its name (we could check by hash to avoid dupes in the db)
single_compares = []
for compare_field in compare_fields:
single_compares.append("({} IS '{}')".format(compare_field, dataset[compare_field]))
qstring = "SELECT * FROM {} WHERE {}".format(table, " AND ".join(single_compares))
self.dbcur.execute(qstring)
if self.dbcur.fetchone() is not None: # We should only have to pull one.
output = True
return output
def insert_dataset(self, table, dataset):
"""
Insert a dataset into the database.
DO NOT USE WITH USER SUPPLIED `table` AND `table_spec` PARAMS!
!!! THIS METHOD IS *NOT* SQLi SAFE !!!
:param table: Name of the table to insert data into.
:param dataset: A dataset dict consisting of sample filename, sample classification and classification
description.
:return: None
"""
# Just a simple function to write the results to the database.
if len(dataset) <= 0:
return
field_names_string = ", ".join(["`{}`".format(k) for k in dataset.keys()])
field_values_string = ", ".join(["'{}'".format(v) for v in dataset.values()])
qstring = "INSERT INTO {} ({}) VALUES({})".format(table, field_names_string, field_values_string)
self.dbcur.execute(qstring)
def commit_close(self):
"""
Write database changes to disk and close cursor and connection.
:return: None
"""
self.dbcon.commit()
self.dbcur.close()
self.dbcon.close()
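if __name__ == '__main__':
    # Minimal usage sketch (illustrative only, not part of the original module).
    # 'results.db', the table name and the column layout are hypothetical. Note
    # that the class builds its SQL via string formatting, so the table spec and
    # dataset values must come from trusted input only.
    con = sqliteConnector('results.db', verbose=True)
    con.init_database('Data', 'Sample TEXT, Classification TEXT, Description TEXT')
    dataset = {
        'Sample': 'crash_000001',
        'Classification': 'EXPLOITABLE',
        'Description': 'SIGSEGV near the instruction pointer',
    }
    if not con.dataset_exists('Data', dataset, ['Sample']):
        con.insert_dataset('Data', dataset)
    con.commit_close()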
|
Tallers Domènech, an official distributor of Cat® lift trucks and warehouse equipment, is celebrating 50 years in the industry by supporting a strong local company in its efforts to return from bankruptcy. Gay Sutton explains.
The past half dozen years have been a minefield for businesses of all types, and the oil and gas sector has been particularly badly hit. Fluctuating exchange rates, falling raw material prices, a paucity of investment funding and shrinking customer orders have all added to the difficulties. And the problems have not only hit those with small profit margins, or those who have been living on the edge.
This story shows how even the successful and respected can go under. However, the tale has a silver lining, illustrating how help can come from an unexpected quarter, offering a supportive and ongoing relationship and a great deal of hope for the future.
The company that ran into difficulties was Spain’s Ros Roca Indox CryoEnergy, which had a market-leading product range and a strong customer and supplier base. On the surface things had been looking very rosy indeed. Employing 220 staff across three sites in Catalonia, the company held a leading position in the Spanish oil and gas market, supplying cryogenic tanks for the transportation, distribution and storage of fuels and liquefied gases. Its customer base embraced all of the major Spanish energy companies operating both nationally and internationally, including Gas Natural, Endesa and Repsol. In spite of this success, however, CryoEnergy went into liquidation in 2015.
Its difficulties had two root causes. Highly successful in Spain while times were good, the company had made the decision to expand into South America and had invested heavily in supplying to a variety of projects. Its experience and knowledge of South America was, however, very limited. Having borrowed significantly for this expansion, the changing global economic climate then took its toll. Sales on the global energy markets dropped away as the price of raw materials slumped. With mounting debts, shrinking profits and global uncertainty over the future of its international projects, the company was unable to service its debts and filed for bankruptcy.
Believing in the quality of its products, four former employees of Ros Roca Indox CryoEnergy brought together a package of €4 million in capital to purchase the company, one of its manufacturing sites and a range of assets. Then, with ten staff from CryoEnergy, they began operating under the new name Indox Energy Systems LS.
“We started operating the new company on 11th January 2016, with economic and financial support from a group of Catalan entrepreneurs, and also with the support of the former works council,” said Purchasing Manager Lluis Monturiol.
"Our company philosophy revolves around the belief that we have a duty to our customers."
"We are very grateful that, when they learnt about our situation, Tallers Domènech offered their assistance with servicing our Cat lift trucks which we inherited in the purchase of assets from the former company."
Indox is still a very new company. It has made a commitment to focus on the manufacturing of its best-known products and has learned from the tragedy of the previous management, intending to avoid accumulating excessive structural debts either now or in the future. It has made an explicit commitment to create jobs for the staff of the former company. Looking to the future, if growth follows the business plan it will ultimately make a significant contribution to the regional jobs market and the national economy.
The challenge now is to re-establish relationships with its client base. Previous customers are already interested and supportive of the plan, while some 95% of the previous company’s suppliers are offering favourable terms.
A family business, Tallers Domènech is celebrating its 50th Anniversary in 2016.
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Convert from pickle format 3 to pickle format 2 for use with Python 2.7
# Run this under Python 3
import sys
import gzip
import pickle
def main(argv):
if len(argv)<2:
print("usage: python pickle32.py file")
return -1
filename = argv[1]
with gzip.open(filename, mode='rb') as f:
unpickled = pickle.load(f)
with gzip.open(filename + '2', mode='wb') as d:
pickle.dump(unpickled, d, protocol=2)
if __name__ == '__main__':
main(sys.argv)
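# Example invocation (illustrative only; 'data.pkl.gz' is a hypothetical file).
# The converter writes its protocol-2 output next to the input, with a '2'
# appended to the file name:
#
#   $ python3 -c "import gzip, pickle; f = gzip.open('data.pkl.gz', 'wb'); pickle.dump({'a': 1}, f, protocol=3); f.close()"
#   $ python3 pickle32.py data.pkl.gz
#   $ python2 -c "import gzip, pickle; print(pickle.load(gzip.open('data.pkl.gz2', 'rb')))"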
|
Nordewoehrden, Germany is one of the most visited cities in the country and has good road conditions for self-driving. EasyRentCars provides our guests with the Best Price Guarantee policy and 24/7 online customer service. It has never been easier to search for a rental car that suits your needs and budget and to make a booking online with us. We have partnered with global and local car rental companies in more than 200 countries and are still constantly expanding our partnerships with quality car rental brands, to offer you the widest choice of car rental deals worldwide.
Service code: 8055364113 | © 2019 Easy Tour International Ltd. All rights reserved.
|
import logging
from fluxclient.utils.version import StrictVersion
from fluxclient.upnp.discover import UpnpDiscover
from .abstract_backend import UpnpError, UpnpException, NotSupportError
from .udp1_backend import UpnpUdp1Backend
from .ssl1_backend import UpnpSSL1Backend
__all__ = ["UpnpTask", "UpnpError", "UpnpException"]
BACKENDS = [
UpnpSSL1Backend,
UpnpUdp1Backend]
logger = logging.getLogger(__name__)
class UpnpTask(object):
"""UpnpTask provides some configuration methods for the device. When creating \
a UpnpTask instance, the argument **uuid** is required. If parameter \
**device_metadata** is not given, UpnpTask will use lookup_callback and \
lookup_timeout to create a UpnpDiscover instance and try to get metadata from \
network.
:param uuid.UUID uuid: Device uuid, set UUID(int=0) while trying to connect \
via ip address.
:param encrypt.KeyObject client_key: Client key to connect to device.
:param str ipaddr: IP Address of the machine.
:param dict device_metadata: This is an internal parameter, which is not \
        recommended to provide because it may have different definitions in \
different versions.
:param dict backend_options: More configuration for UpnpTask.
:param callable lookup_callback: Invoke repeatedly while looking for device.
:param float lookup_timeout: Raise an error if the program can not find the device in a limited time.
:raises UpnpError: For protocol or operation error.
:raises socket.error: For system defined socket error.
"""
name = None
uuid = None
serial = None
model_id = None
version = None
ipaddr = None
meta = None
_backend = None
def __init__(self, uuid, client_key, ipaddr=None, device_metadata=None,
remote_profile=None, backend_options={}, lookup_callback=None,
lookup_timeout=float("INF")):
self.uuid = uuid
self.ipaddr = ipaddr
self.client_key = client_key
self.backend_options = backend_options
if device_metadata:
if 'uuid' in device_metadata:
device_metadata.pop('uuid')
self.update_remote_profile(uuid, **device_metadata)
elif remote_profile:
self.update_remote_profile(uuid, **remote_profile)
else:
self.reload_remote_profile(lookup_callback, lookup_timeout)
self.initialize_backend()
def reload_remote_profile(self, lookup_callback=None,
lookup_timeout=float("INF")):
def on_discovered(instance, device, **kw):
self.update_remote_profile(**(device.to_old_dict()))
instance.stop()
if self.uuid.int:
d = UpnpDiscover(uuid=self.uuid)
else:
d = UpnpDiscover(device_ipaddr=self.ipaddr)
d.discover(on_discovered, lookup_callback, lookup_timeout)
def update_remote_profile(self, uuid, name, serial, model_id, version,
ipaddr, **meta):
if not self.uuid or self.uuid.int == 0:
self.uuid = uuid
self.name = name
self.serial = serial
self.model_id = model_id
self.version = StrictVersion(str(version))
self.ipaddr = ipaddr
self.device_meta = meta
def initialize_backend(self):
for klass in BACKENDS:
if klass.support_device(self.model_id, self.version):
self._backend = klass(self.client_key, self.uuid, self.version,
self.model_id, self.ipaddr,
self.device_meta, self.backend_options)
# TODO: debug information, remove after bugfix
logger.info("Backend %s selected", klass.__name__)
return
# TODO: debug information, remove after bugfix
logger.warn("Backend %s does not support device version `%s`",
klass.__name__, self.version)
raise NotSupportError(self.model_id, self.version)
def close(self):
"""Closes the upnp socket connection. After close(), any other method \
should not be called anymore."""
self._backend.close()
@property
def authorized(self):
"Indicates whether the connection has been authorized with a correct password or RSA key. If the connection is not authorized, you must \
call `authorize_with_password` first to authorize."
return self._backend.authorized
@property
def connected(self):
"""Indicates whether the upnp connection is connected with the device"""
return self._backend.connected
def authorize_with_password(self, password):
"""Authorizes via password, only use when the RSA key has not been trusted \
from device.
:param str password: Device password"""
if not self._backend.connected:
raise UpnpError("Disconnected")
if self._backend.authorized:
raise UpnpError("Already authorized")
self._backend.authorize_with_password(password)
def add_trust(self, label, key):
"""Adds a client_key to device trust list
:param str label: Key label will show for human only
        :param object key: A valid RSA key object or pem
:return: Key hash
:rtype: str"""
if isinstance(key, str):
pem = key
elif isinstance(key, bytes):
pem = key.decode("ascii")
else:
pem = key.public_key_pem.decode("ascii")
self._backend.add_trust(label, pem)
def list_trust(self):
"""Gets all trusted key in the device
:return: ((label, key hash), (label, key hash), ...)"""
return self._backend.list_trust()
def remove_trust(self, access_id):
"""Removes a trusted key
:param str access_id: Key hash which will be removed"""
return self._backend.remove_trust(access_id)
def rename(self, new_name):
"""Renames the device
:param str new_name: New device name"""
if not self._backend.connected:
raise UpnpError("Disconnected")
if not self._backend.authorized:
raise UpnpError("Authorize required")
self._backend.rename(new_name)
def modify_password(self, old_password, new_password, reset_acl=True):
"""Changes the device password, if **reset_acl** set to True, all other \
authorized user will be deauthorized.
:param str old_password: Old device password
:param str new_password: New device password
:param bool reset_acl: Clear authorized user list in device"""
if not self._backend.connected:
raise UpnpError("Disconnected")
if not self._backend.authorized:
raise UpnpError("Authorize required")
self._backend.modify_password(old_password, new_password, reset_acl)
def modify_network(self, **settings):
"""Modifies the device network, details will be revealed in future documentation."""
if not self._backend.connected:
raise UpnpError("Disconnected")
if not self._backend.authorized:
raise UpnpError("Authorize required")
self._backend.modify_network(**settings)
def get_wifi_list(self):
"""Gets wifi lists discovered from the device"""
if not self._backend.connected:
raise UpnpError("Disconnected")
if not self._backend.authorized:
raise UpnpError("Authorize required")
return self._backend.get_wifi_list()
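def _example_configure_device(client_key, ipaddr, password, new_name):
    # Minimal usage sketch (illustrative only, not part of the original module).
    # `client_key` must be an RSA key object accepted by the backends; how it is
    # created is outside the scope of this sketch. Passing UUID(int=0) together
    # with an IP address makes UpnpTask connect by address, as documented above.
    from uuid import UUID
    task = UpnpTask(UUID(int=0), client_key, ipaddr=ipaddr, lookup_timeout=30.0)
    try:
        if not task.authorized:
            task.authorize_with_password(password)
        task.rename(new_name)
    finally:
        task.close()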
|
1. What conditions make me eligible to obtain medical marijuana?
2. What is the first step to obtaining medical marijuana?
First, speak with your treating physician to determine if medical marijuana is appropriate for your condition. If your treating physician would like to register with the program, additional information can be found on the Department of Health's website Under: "Practitioner Education for the Medical Marijuana Program"
Yes. If the applicant for a registry identification card is under the age of eighteen (18) or a person who is otherwise incapable of consenting to medical treatment, the application must be submitted by an appropriate person over twenty-one (21) years of age. The applicant must designate at least one, and up to two, caregivers who must be among the following: (i) a parent or legal guardian of the certified patient; (ii) a person designated by a parent or legal guardian; or (iii) an appropriate person approved by the Department upon a sufficient showing that no parent or legal guardian is available or appropriate.
A patient who is registered with the program must first designate you as a caregiver during the patient registration process. Upon approval of the patient's registration, the caregiver(s) may register. The patient will have access to instructions for caregiver registration. To register with the Department as a designated caregiver, you must be a resident of New York State and have a valid NYS Driver's License or New York State Non-Driver ID card.
Once the application to register has been submitted successfully and approved, please allow approximately three business days to receive your Patient or Caregiver Registry ID Card. Once you have received your registry ID card you may visit a registered organization's dispensing facility to obtain medical marijuana products. Dispensing facility locations can be found on the Dept. of Health Website by clicking: "Registered Organizations"
1. Where can I find basic information about each Registered Organization?
|
#__BEGIN_LICENSE__
# Copyright (c) 2015, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All rights reserved.
#
# The xGDS platform is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#__END_LICENSE__
import traceback
import cgi
import re
from datetime import datetime, timedelta
import itertools
import json
import pytz
import csv
import ast
from dateutil.parser import parse as dateparser
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.views.decorators.cache import never_cache
from django.http import HttpResponse, JsonResponse
from django.core.urlresolvers import reverse
from django.shortcuts import redirect, render
from django.template.loader import render_to_string
from geocamUtil.datetimeJsonEncoder import DatetimeJsonEncoder
from geocamUtil.loader import LazyGetModelByName, getClassByName
from geocamUtil.modelJson import modelToDict
from geocamUtil import TimeUtil
from geocamTrack.utils import getClosestPosition
from treebeard.mp_tree import MP_Node
from xgds_notes2.forms import NoteForm, UserSessionForm, TagForm, ImportNotesForm
from xgds_core.views import getTimeZone, addRelay, getDelay
from xgds_core.flightUtils import getFlight
from xgds_map_server.views import getSearchPage, getSearchForms, buildFilterDict
from models import HierarchichalTag
from httplib2 import ServerNotFoundError
from apps.xgds_notes2.forms import SearchNoteForm
if False and settings.XGDS_SSE:
from sse_wrapper.events import send_event
UNSET_SESSION = 'Unset Session'
Note = LazyGetModelByName(getattr(settings, 'XGDS_NOTES_NOTE_MODEL'))
Tag = LazyGetModelByName(getattr(settings, 'XGDS_NOTES_TAG_MODEL'))
def serverTime(request):
return HttpResponse(
datetime.now(pytz.utc).strftime('%Y-%m-%d %H:%M:%S'),
content_type="text"
)
def editUserSession(request, ajax=False):
# display a form to edit the content of the UserSession object in request.session['notes_user_session']
existing_data = request.session.get('notes_user_session', None)
if request.method == 'POST':
form = UserSessionForm(request.POST)
if form.is_valid():
request.session['notes_user_session'] = form.data.dict()
#persist the session data in user preferences, if that feature is available (see plrpExplorer.models.UserPreferences)
if hasattr(request.user, 'preferences'):
for field in form.fields:
request.user.preferences['default_' + field] = form.data[field]
if not ajax:
return redirect('search_xgds_notes_map')
else:
resultDict = {'success': True}
for key, value in form.cleaned_data.iteritems():
resultDict[key] = str(value);
return HttpResponse(json.dumps(resultDict),
content_type='application/json')
else:
return HttpResponse(json.dumps(form.errors),
content_type='application/json',
status=406)
else:
defaults = {}
if hasattr(request.user, 'preferences'):
empty_form = UserSessionForm() # Used as a source of enum choices
for fieldname in empty_form.fields:
if 'default_' + fieldname in itertools.chain(request.user.preferences.keys(), getattr(settings, 'DEFAULT_USER_PREFERENCES', [])):
value = request.user.preferences.get('default_' + fieldname)
defaults[fieldname] = value
if existing_data:
defaults.update(existing_data) # merge anything in the session store with the user preferences
form = UserSessionForm(initial=defaults)
template = 'xgds_notes2/user_session.html'
return render(
request,
template,
{
'form': form,
'title': settings.XGDS_NOTES_MONIKER,
'help_content_path': 'xgds_notes2/help/recordSession.rst'
},
)
def update_note_request_post(request):
"""
Take the author and the notes user session from the request session and put it into the request post
:param request:
:return: the request with more stuff in the post
"""
request.POST._mutable = True
# Hijack the UserSessionForm's validation method to translate enumerations to objects in session data
if 'notes_user_session' in request.session.keys():
session_form = UserSessionForm()
session_data = {'%s' % (k): str(session_form.fields[k].clean(v).id)
for k, v in request.session['notes_user_session'].iteritems()
if k in session_form.fields}
request.POST.update(session_data)
if request.user:
request.POST['author'] = str(request.user.id)
request.POST._mutable = False
return request.POST
def populateNoteData(request, form):
""" Populate the basic data dictionary for a new note from a submitted form
Form must already be valid
"""
errors = []
data = form.cleaned_data
if data['app_label'] and data['model_type']:
data['content_type'] = ContentType.objects.get(app_label=data['app_label'], model=data['model_type'])
data.pop('app_label')
data.pop('model_type')
tags = data.pop('tags')
# handle extras
try:
extras = data.pop('extras')
if str(extras) != 'undefined':
extrasDict = ast.literal_eval(extras)
data.update(extrasDict)
except:
pass
# This is for relay purposes
if 'id' in request.POST:
data['id'] = request.POST['id']
return data, tags, errors
def linkTags(note, tags):
if tags:
note.tags.clear()
for t in tags:
try:
tag = HierarchichalTag.objects.get(pk=int(t))
note.tags.add(tag)
except:
tag = HierarchichalTag.objects.get(slug=t)
note.tags.add(tag)
note.save()
def createNoteFromData(data, delay=True, serverNow=False):
NOTE_MODEL = Note.get()
empty_keys = [k for k,v in data.iteritems() if v is None]
for k in empty_keys:
del data[k]
try:
del data['note_submit_url']
except:
pass
note = NOTE_MODEL(**data)
for (key, value) in data.items():
setattr(note, key, value)
note.creation_time = datetime.now(pytz.utc)
note.modification_time = note.creation_time
# if we are taking a note on an object, get the flight and position from the object
if note.content_object:
try:
if hasattr(note, 'flight'):
note.flight = note.content_object.flight
note.position = note.content_object.getPosition()
except:
pass
else:
if delay:
            # this is to handle delay state shifting of event time; by default it does not change event time
note.event_time = note.calculateDelayedEventTime(data['event_time'])
elif serverNow:
note.event_time = note.calculateDelayedEventTime(note.creation_time)
if not note.event_timezone:
note.event_timezone = getTimeZone(note.event_time)
if hasattr(note, 'flight') and not note.flight:
# hook up the flight, this should always be true
note.flight = getFlight(note.event_time)
# TODO handle using the vehicle that came in from session
# hook up the position if it can have one
if hasattr(note, 'position') and not note.position:
note.lookupPosition()
note.save()
return note
def record(request):
if request.method == 'POST':
update_note_request_post(request)
form = NoteForm(request.POST)
if form.is_valid():
data, tags, errors = getClassByName(settings.XGDS_NOTES_POPULATE_NOTE_DATA)(request, form)
data = {str(k): v
for k, v in data.items()}
if 'author_id' in request.POST:
data['author'] = User.objects.get(id=request.POST['author_id'])
delay = getDelay()
note = createNoteFromData(data, delay=delay>0)
linkTags(note, tags)
jsonNote = json.dumps([note.toMapDict()], cls=DatetimeJsonEncoder)
# Right now we are using relay for the show on map
if note.show_on_map:
if settings.XGDS_CORE_REDIS and settings.XGDS_SSE:
note.broadcast()
mutable = request.POST._mutable
request.POST._mutable = True
request.POST['id'] = note.pk
request.POST['author_id'] = note.author.id
request.POST._mutable = mutable
addRelay(note, None, json.dumps(request.POST, cls=DatetimeJsonEncoder), reverse('xgds_notes_record'))
return HttpResponse(jsonNote,
content_type='application/json')
# if not settings.XGDS_SSE:
# return HttpResponse(jsonNote,
# content_type='application/json')
# else:
# return HttpResponse(json.dumps({'success': 'true'}), content_type='application/json')
else:
return HttpResponse(str(form.errors), status=400) # Bad Request
else:
raise Exception("Request method %s not supported." % request.method)
def recordSimple(request):
if request.method != 'POST':
return HttpResponse(json.dumps({'error': {'code': -32099,
'message': 'You must post, cheater.'}
}),
content_type='application/json')
update_note_request_post(request)
form = NoteForm(request.POST)
if form.is_valid():
data, tags, errors = getClassByName(settings.XGDS_NOTES_POPULATE_NOTE_DATA)(request, form)
note = createNoteFromData(data, False, 'serverNow' in request.POST)
linkTags(note, tags)
json_data = json.dumps([note.toMapDict()], cls=DatetimeJsonEncoder)
# Right now we are using relay for the show on map
if note.show_on_map:
if settings.XGDS_CORE_REDIS and settings.XGDS_SSE:
note.broadcast()
mutable = request.POST._mutable
request.POST._mutable = True
request.POST['id'] = note.pk
request.POST['author_id'] = note.author.id
request.POST._mutable = mutable
addRelay(note, None, json.dumps(request.POST, cls=DatetimeJsonEncoder), reverse('xgds_notes_record'))
return HttpResponse(json_data,
content_type='application/json')
else:
return JsonResponse({'error': {'code': -32099,
'message': 'problem submitting note',
'data': form.errors}
},
safe=False,
status=406)
def editNote(request, note_pk=None):
try:
tags_list = []
note = Note.get().objects.get(pk=int(note_pk))
tags_changed = False
if len(request.POST) == 1:
note.tags.clear()
else:
for key, value in request.POST.iteritems():
strkey = str(key)
if strkey.startswith('data'):
p = re.compile(r'^data\[(?P<pk>\d+)\]\[(?P<attr>\w*)\]')
m = p.match(strkey)
if m:
attr = m.group('attr')
if attr == 'content':
setattr(note, attr, cgi.escape(str(value)))
elif attr == 'tag_names':
tags_changed = True
tag_regex = re.compile(r'^data\[(?P<pk>\d+)\]\[(?P<attr>\w*)\]\[(?P<index>\d+)\]\[(?P<tag_attr>\w*)\]')
tag_match = tag_regex.match(strkey)
if tag_match:
tag_attr = tag_match.group('tag_attr')
if tag_attr == 'id':
tags_list.append(int(value))
else:
setattr(note, attr, str(value))
note.modification_time = datetime.now(pytz.utc)
if tags_changed:
linkTags(note, tags_list)
else:
note.save()
return HttpResponse(json.dumps({'data': [note.toMapDict()]}, cls=DatetimeJsonEncoder),
content_type='application/json')
except:
traceback.print_exc()
return HttpResponse(json.dumps({'error': {'code': -32099,
'message': 'problem submitting note'
}
}),
content_type='application/json')
def getSortOrder():
if settings.XGDS_NOTES_SORT_FUNCTION:
noteSortFn = getClassByName(settings.XGDS_NOTES_SORT_FUNCTION)
return noteSortFn()
else:
return getattr(settings, 'XGDS_NOTES_REVIEW_DEFAULT_SORT', '-event_time')
def editTags(request):
return render(
request,
'xgds_notes2/tags_tree.html',
{'addTagForm': TagForm(),
'title': settings.XGDS_NOTES_MONIKER,
'help_content_path': 'xgds_notes2/help/editTags.rst'},
)
def tagsGetRootTreesJson(root):
if root is None:
return []
root_json = root.get_tree_json()
return root_json
def tagsJsonArray(request):
allTags = Tag.get().objects.all()
return HttpResponse(json.dumps([tag.toSimpleDict() for tag in allTags], separators=(', ',': ')).replace("},","},\n").replace("}]","}\n]"),
content_type="application/json"
)
def tagsSearchJsonArray(request):
search_term = request.GET.get('term', '')
# TODO: execute a prefix search with Sphinx, if available
tfilter = Tag.get().objects.filter
result = []
for tag in tfilter(name__istartswith=search_term):
result.append(tag.name)
for tag in tfilter(abbreviation__istartswith=search_term):
result.append(tag.abbreviation)
result.sort()
return HttpResponse(json.dumps(result),
content_type="application/json"
)
@never_cache
def tagsget_tree_json(request, root=None):
"""
json tree of children
note that this does json for jstree
"""
root = Tag.get().objects.get(pk=root)
children_json = []
if root.numchild:
for child in root.get_children():
children_json.append(child.get_tree_json())
json_data = json.dumps(children_json)
return HttpResponse(content=json_data,
content_type="application/json")
@never_cache
def tagsGetOneLevelTreeJson(request, root=None):
"""
json tree of tags one level deep
note that this does json for jstree
"""
roots = []
if not root:
roots = Tag.get().get_root_nodes()
else:
roots.append(Tag.get().objects.get(pk=root))
keys_json = []
for root in roots:
keys_json.append(tagsGetRootTreesJson(root))
json_data = json.dumps(keys_json)
return HttpResponse(content=json_data,
content_type="application/json")
@never_cache
def deleteTag(request, tag_id):
found_tag = Tag.get().objects.get(pk=tag_id)
if found_tag:
if found_tag.numchild > 0:
# TODO need to check all the descendant tags; right now this is disabled.
return HttpResponse(json.dumps({'failed': found_tag.name + " has children, cannot delete."}), content_type='application/json', status=406)
elif LazyGetModelByName(settings.XGDS_NOTES_TAGGED_NOTE_MODEL).get().objects.filter(tag=found_tag):
# cannot delete, this tag is in use
return HttpResponse(json.dumps({'failed': found_tag.name + ' is in use; cannot delete.'}), content_type='application/json', status=406)
else:
found_tag.delete()
return HttpResponse(json.dumps({'success': 'true'}), content_type='application/json')
def addRootTag(request):
if request.method == 'POST':
form = TagForm(request.POST)
if form.is_valid():
new_root = Tag.get().add_root(**form.cleaned_data)
return HttpResponse(json.dumps(new_root.get_tree_json()), content_type='application/json')
else:
return HttpResponse(json.dumps({'failed': 'Problem adding root: ' + str(form.errors)}), content_type='application/json', status=406)
def makeRootTag(request, tag_id):
if request.method == 'POST':
tag = Tag.get().objects.get(pk=tag_id)
if not tag.is_root():
tag.move(Tag.get().get_root_nodes()[0], 'sorted-sibling')
return HttpResponse(json.dumps({'success': 'true'}), content_type='application/json')
else:
return HttpResponse(json.dumps({'failed': 'Problem making root'}), content_type='application/json', status=406)
def addTag(request):
if request.method == 'POST':
parent_id = request.POST.get('parent_id')
parent = Tag.get().objects.get(pk=parent_id)
form = TagForm(request.POST)
if form.is_valid():
new_child = parent.add_child(**form.cleaned_data)
return HttpResponse(json.dumps(new_child.get_tree_json()), content_type='application/json')
else:
return HttpResponse(json.dumps({'failed': 'Problem adding tag: ' + str(form.errors)}), content_type='application/json', status=406)
def editTag(request, tag_id):
if request.method == 'POST':
tag = Tag.get().objects.get(pk=tag_id)
form = TagForm(request.POST, instance=tag)
if form.is_valid():
form.save()
return HttpResponse(json.dumps(tag.get_tree_json()), content_type='application/json')
else:
return HttpResponse(json.dumps({'failed': 'Problem editing tag: ' + str(form.errors)}), content_type='application/json', status=406)
def moveTag(request):
if request.method == 'POST':
parent_id = request.POST.get('parent_id')
tag_id = request.POST.get('tag_id')
found_tag = Tag.get().objects.get(pk=tag_id)
found_parent = Tag.get().objects.get(pk=parent_id)
if found_tag and found_parent:
try:
found_tag.move(found_parent, 'sorted-child')
return HttpResponse(json.dumps({'success': 'true'}), content_type='application/json')
except:
return HttpResponse(json.dumps({'failed': 'badness.'}), content_type='application/json', status=406)
def doImportNotes(request, sourceFile, tz, vehicle):
dictreader = csv.DictReader(sourceFile)
for row in dictreader:
row['author'] = request.user
if row['content'] or row['tags']:
if 'first_name' in row and 'last_name' in row:
if row['first_name'] and row['last_name']:
try:
row['author'] = User.objects.get(first_name=row['first_name'], last_name=row['last_name'])
del row['first_name']
del row['last_name']
except:
pass
if row['event_time']:
event_time = dateparser(row['event_time'])
if tz != pytz.utc:
localized_time = tz.localize(event_time)
event_time = TimeUtil.timeZoneToUtc(localized_time)
row['event_time'] = event_time
try:
# TODO implement tags when ready
del row['tags']
except:
pass
NOTE_MODEL = Note.get()
note = NOTE_MODEL(**row)
note.creation_time = datetime.now(pytz.utc)
note.modification_time = datetime.now(pytz.utc)
if vehicle:
note.position = getClosestPosition(timestamp=note.event_time, vehicle=vehicle)
note.save()
def importNotes(request):
errors = None
if request.method == 'POST':
form = ImportNotesForm(request.POST, request.FILES)
if form.is_valid():
doImportNotes(request, request.FILES['sourceFile'], form.getTimezone(), form.getVehicle())
return redirect('search_xgds_notes_map')
else:
errors = form.errors
return render(
request,
'xgds_notes2/import_notes.html',
{
'form': ImportNotesForm(),
'errorstring': errors,
'title': settings.XGDS_NOTES_MONIKER,
'help_content_path': 'xgds_notes2/help/import.rst'
},
)
def getObjectNotes(request, app_label, model_type, obj_pk):
"""
For a given object, get the notes on that object and return as a json dictionary from oldest to newest
"""
ctype = ContentType.objects.get(app_label=app_label, model=model_type)
result = Note.get().objects.filter(content_type__pk = ctype.id, object_id=obj_pk).order_by('event_time', 'creation_time')
resultList = []
for n in result:
resultList.append(n.toMapDict())
json_data = json.dumps(resultList, cls=DatetimeJsonEncoder)
return HttpResponse(content=json_data,
content_type="application/json")
def buildNotesForm(args):
theForm = SearchNoteForm(args)
return theForm
def notesSearchMap(request, filter=None):
noteType = Note.get().cls_type()
return getSearchPage(request, noteType, 'xgds_notes2/map_record_notes.html', True, getSearchForms(noteType, filter))
# @never_cache
# def getNotesJson(request, filter=None, range=0, isLive=1):
# """ Get the note json information to show in table or map views.
# """
# try:
# isLive = int(isLive)
# if filter:
# splits = str(filter).split(":")
# filterDict = {splits[0]: splits[1]}
#
# range = int(range)
# if isLive or range:
# if range==0:
# range = 6
# now = datetime.now(pytz.utc)
# yesterday = now - timedelta(seconds=3600 * range)
# if not filter:
# notes = Note.get().objects.filter(creation_time__lte=now).filter(creation_time__gte=yesterday)
# else:
# allNotes = Note.get().objects.filter(**filterDict)
# notes = allNotes.filter(creation_time__lte=now).filter(creation_time__gte=yesterday)
# elif filter:
# notes = Note.get().objects.filter(**filterDict)
# else:
# notes = Note.get().objects.all()
# except:
# return HttpResponse(json.dumps({'error': {'message': 'I think you passed in an invalid filter.',
# 'filter': filter}
# }),
# content_type='application/json')
#
# if notes:
# keepers = []
# for note in notes:
# resultDict = note.toMapDict()
# keepers.append(resultDict)
# json_data = json.dumps(keepers, indent=4, cls=DatetimeJsonEncoder)
# return HttpResponse(content=json_data,
# content_type="application/json")
# else:
# return HttpResponse(json.dumps({'error': {'message': 'No notes found.',
# 'filter': filter}
# }),
# content_type='application/json')
# @never_cache
# def note_json_extens(request, extens, today=False):
# """ Get the note json information to show in the fancy tree. this gets all notes in the mapped area
# """
# splits = str(extens).split(',')
# minLon = float(splits[0])
# minLat = float(splits[1])
# maxLon = float(splits[2])
# maxLat = float(splits[3])
#
# queryString = Note.get().getMapBoundedQuery(minLon, minLat, maxLon, maxLat)
# if queryString:
# found_notes = Note.get().objects.raw(queryString)
# if found_notes:
# keepers = []
# for note in found_notes:
# resultDict = note.toMapDict()
# keepers.append(resultDict)
# json_data = json.dumps(keepers, indent=4, cls=DatetimeJsonEncoder)
# return HttpResponse(content=json_data,
# content_type="application/json")
# return ""
if settings.XGDS_NOTES_ENABLE_GEOCAM_TRACK_MAPPING:
from geocamUtil.KmlUtil import wrapKmlDjango, djangoResponse
def getKmlNetworkLink(request):
''' This refreshes note_map_kml every 5 seconds'''
url = request.build_absolute_uri(settings.SCRIPT_NAME + 'notes/rest/notes.kml')
return djangoResponse('''
<NetworkLink>
<name>%(name)s</name>
<Link>
<href>%(url)s</href>
<refreshMode>onInterval</refreshMode>
<refreshInterval>5</refreshInterval>
</Link>
</NetworkLink>
''' % dict(name=settings.XGDS_NOTES_MONIKER,
url=url))
@never_cache
def note_map_kml(request, range=12):
now = datetime.now(pytz.utc)
yesterday = now - timedelta(seconds=3600 * range)
objects = Note.get().objects.filter(show_on_map=True).filter(creation_time__lte=now).filter(creation_time__gte=yesterday)
days = []
if objects:
days.append({'date': now,
'notes': objects
})
if days:
kml_document = render_to_string(
'xgds_notes2/notes_placemark_document.kml',
{'days': days},
request
)
return wrapKmlDjango(kml_document)
return wrapKmlDjango("")
def getSseNoteChannels(request):
# Look up the note channels we are using for SSE
return JsonResponse(settings.XGDS_SSE_NOTE_CHANNELS, safe=False)
def defaultCurrentMapNotes(request):
return HttpResponseRedirect(reverse('xgds_map_server_objectsJson', kwargs={'object_name': 'XGDS_NOTES_NOTE_MODEL',
'filter':{'show_on_map': True}}))
def getCurrentMapNotes(request):
getNotesFunction = getClassByName(settings.XGDS_NOTES_CURRENT_MAPPED_FUNCTION)
return getNotesFunction(request)
|
The NHS will be able to offer children a new cancer therapy labelled the “most exciting treatment advance for decades”.
CAR-T is for aggressive leukaemia and can be used when other drugs have failed. It typically costs hundreds of thousands of pounds per patient.
Hospitals could start giving it to a small number of children within weeks. The first three NHS hospitals to apply to use the CAR-T (which stands for “chimeric antigen receptor T-cell”) therapy are in London, Manchester and Newcastle.
The funding will come from the Cancer Drugs Fund, which aims to fast-track access to the most promising new cancer treatments.
|
# -*- coding: utf-8 -*-
# Copyright (C) 2010-2012, eskerda <[email protected]>
# Distributed under the AGPL license, see LICENSE.txt
import os
import sys
import time
import json
import argparse
from collections import namedtuple
import re
from lxml import etree
from googlegeocoder import GoogleGeocoder
from slugify import slugify
from pybikes.utils import PyBikesScraper
from pybikes.domoblue import Domoblue
MAIN = 'http://clientes.domoblue.es/onroll/'
TOKEN_URL = 'generaMapa.php?cliente={service}&ancho=500&alto=700'
XML_URL = 'generaXml.php?token={token}&cliente={service}'
TOKEN_RE = 'generaXml\.php\?token\=(.*?)\&cliente'
geocoder = GoogleGeocoder()
CityRecord = namedtuple('CityRecord', 'city, country, lat, lng')
description = 'Extract DomoBlue instances from the main site'
parser = argparse.ArgumentParser(description = description)
parser.add_argument('-o', metavar = "file", dest = 'outfile', default = None,
help="Save output to the specified file")
parser.add_argument('-g','--geocode', action="store_true",
help="Use Google GeoCoder for lat/lng and better names")
parser.add_argument('--proxy', metavar = "host:proxy", dest = 'proxy',
default = None, help="Use host:port as a proxy for site calls")
parser.add_argument('-v', action="store_true", dest = 'verbose',
default = False, help="Verbose output for debugging (no progress)")
args = parser.parse_args()
outfile = args.outfile
proxies = {}
user_agent = 'Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.168 Safari/535.19'
scraper = PyBikesScraper()
scraper.setUserAgent(user_agent)
sysdef = {
"system": "domoblue",
"class": "Domoblue",
"instances": []
}
if args.proxy is not None:
proxies['http'] = args.proxy
scraper.setProxies(proxies)
scraper.enableProxy()
def get_token(client_id):
if 'Referer' in scraper.headers:
del(scraper.headers['Referer'])
url = MAIN + TOKEN_URL.format(service = client_id)
data = scraper.request(url)
token = re.findall(TOKEN_RE, data)
scraper.headers['Referer'] = url
return token[0]
def get_xml(client_id):
token = get_token(client_id)
url = MAIN + XML_URL.format(token = token, service = client_id)
return scraper.request(url).encode('raw_unicode_escape').decode('utf-8')
def test_system_health(domo_sys):
online = False
for s in domo_sys.stations:
online = s.extra['status']['online']
if online:
break
return online
def google_reverse_geocode(lat, lng):
country_info = lambda lst: lst[len(lst) - 1].short_name
target = 'locality'
if args.verbose:
print "--- Javascript code for debugging output ---"
print " var geocoder = new google.maps.Geocoder()"
print " latlng = new google.maps.LatLng(%s,%s)" % (str(lat), str(lng))
print " geocoder.geocode({latLng:latlng}, function(res){console.log(res)})"
info = geocoder.get((lat, lng),language = 'es')
city_info = [i for i in info if target in i.types]
if len(city_info) == 0:
target = 'political'
city_info = [i for i in info if target in i.types]
if len(city_info) == 0:
raise Exception
else:
city_info = city_info[0]
city = city_info.address_components[0].long_name
country = country_info(city_info.address_components)
latitude = city_info.geometry.location.lat
longitude = city_info.geometry.location.lng
return CityRecord(city, country, latitude, longitude)
def extract_systems():
xml_data = get_xml('todos')
xml_dom = etree.fromstring(xml_data)
systems = []
for marker in xml_dom.xpath('//marker'):
if marker.get('tipo') == 'pendiente':
continue
sys = Domoblue('foo', {}, int(marker.get('codigoCliente')))
sys.update()
online = True #test_system_health(sys)
if args.verbose:
print "--- %s --- " % repr(marker.get('nombre'))
print " Total stations: %d" % len(sys.stations)
print " Health: %s" % (lambda b: 'Online' if b else 'Offline')(online)
if not online:
if args.verbose:
print " %s is Offline, ignoring!\n" % repr(marker.get('nombre'))
continue
name = 'Onroll %s' % marker.get('nombre')
slug = slugify(name)
city = marker.get('nombre')
latitude = marker.get('lat')
longitude = marker.get('lng')
country = 'ES'
if args.geocode:
time.sleep(1)
try:
city, country, latitude, longitude = google_reverse_geocode(latitude, longitude)
name = 'Onroll %s' % city
except Exception:
print " No geocoding results for %s!!" % repr(name)
system = {
'tag': slug,
'system_id': int(marker.get('codigoCliente')),
'meta': {
'name': name,
'latitude': latitude,
'longitude': longitude,
'city': city,
'country': 'ES'
}
}
systems.append(system)
if args.verbose:
print " Appended!\n"
return systems
instances = extract_systems()
sysdef['instances'] = sorted(instances, key = lambda inst: inst['tag'])
data = json.dumps(sysdef, sort_keys = False, indent = 4)
if outfile is not None:
f = open(outfile, 'w')
f.write(data)
f.close()
print "%s file written" % outfile
else:
print "---- OUTPUT ----"
print data
|
Behind The Scenes with a Deafened Rock Star.
This is an awesome account!!! I am glad the gig went well, but sad I couldn’t be there :-(. Keep posting future gigs!!
WOW – so interesting to read about all of your trials and tribulations – glad it all turned out so well in the end. Keep posting when you are playing next – may get down to see you yet!
Excellent write-up Peter! I couldn’t even tell while you guys were rockin’ out! Thanks for the invite, we had a great time!
|
#!/usr/bin/env python
# -*- coding: latin-1 -*-
##
# Copyright 2009-2012 Ghent University
#
# This file is part of vsc-ldap
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/vsc-ldap
#
# vsc-ldap is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# vsc-ldap is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with vsc-ldap. If not, see <http://www.gnu.org/licenses/>.
##
"""Timestamp tools for this LDAP library.
@author: Andy Georges
@author: Stijn De Weirdt
"""
import datetime
from vsc.utils.cache import FileCache
from vsc.utils.dateandtime import Local, utc
LDAP_DATETIME_TIMEFORMAT = "%Y%m%d%H%M%SZ"
def convert_timestamp(timestamp=None):
"""Convert a timestamp, yielding a string and a datetime.datetime instance.
@type timestamp: either a string or a datetime.datetime instance. Default value is None, in which case the
local time is returned.
@returns: tuple with the timestamp as a
- LDAP formatted timestamp on GMT in the yyyymmddhhmmssZ format
- A datetime.datetime instance representing the timestamp
"""
if timestamp is None:
timestamp = datetime.datetime.today()
if isinstance(timestamp, datetime.datetime):
if timestamp.tzinfo is None:
timestamp = timestamp.replace(tzinfo=Local)
return (timestamp, timestamp.astimezone(utc).strftime(LDAP_DATETIME_TIMEFORMAT))
elif isinstance(timestamp, basestring):
tmp = datetime.datetime.strptime(timestamp, LDAP_DATETIME_TIMEFORMAT)
return (tmp.replace(tzinfo=utc).astimezone(Local), timestamp)
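# Minimal usage sketch (values are illustrative, not from a real directory):
# passing a naive datetime yields a Local-zone datetime plus the GMT LDAP
# string, and passing that string back round-trips it.
#
#   >>> dt, ldap_str = convert_timestamp(datetime.datetime(2012, 1, 31, 12, 0, 0))
#   >>> ldap_str                       # e.g. '20120131110000Z' on a UTC+1 host
#   >>> local_dt, same_str = convert_timestamp(ldap_str)
#   >>> same_str == ldap_str
#   True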
def read_timestamp(filename):
"""Read the stored timestamp value from a pickled file.
@returns: string representing a timestamp in the proper LDAP time format
"""
cache = FileCache(filename)
(_, timestamp) = cache.load('timestamp')
return timestamp
def write_timestamp(filename, timestamp):
"""Write the given timestamp to a pickled file.
@type timestamp: datetime.datetime timestamp
"""
if isinstance(timestamp, datetime.datetime) and timestamp.tzinfo is None:
# add local timezoneinfo
timestamp_ = timestamp.replace(tzinfo=Local)
(_, timestamp_) = convert_timestamp(timestamp)
else:
timestamp_ = timestamp
cache = FileCache(filename)
cache.update('timestamp', timestamp_, 0)
cache.close()
|
Situated in Saint-Nazaire, this residence is in the same area as Hippodrome de Pornichet-La Baule, Daniel Jouvance Thalassotherapie Centre, and Maritime Museum. Area attractions also include The Sea Serpent and Eglise des Pins. Boat enthusiasts and WWII experts find a great deal to explore in this harbour town. The city's shipbuilding legacy is on display at the Escal'Atlantic exhibition, where visitors embark on a simulated voyage aboard a life-size reconstruction of a great ocean liner. For more traditional holiday fare, Saint-Nazaire also features a number of quiet beaches along the coast. The hotel complements this historic town by providing comfortable accommodation and modern services. The friendly staff offers tour and ticket assistance for those who wish to further explore the area.
|
import traceback
try:
USE_BABELEX = True
if USE_BABELEX:
# Use regular Babelex instead of Babel
from flask_babelex import Babel as Babel_ex, gettext as gettext_ex, lazy_gettext as lazy_gettext_ex, ngettext as ngettext_ex, get_domain as get_domain
gettext = gettext_ex
ngettext = ngettext_ex
lazy_gettext = lazy_gettext_ex
get_domain = get_domain
Babel = Babel_ex
else:
# Use regular Babel instead of Babelex
from flask_babel import Babel as Babel_reg, gettext as gettext_reg, lazy_gettext as lazy_gettext_reg, ngettext as ngettext_reg, get_domain as get_domain
gettext = gettext_reg
ngettext = ngettext_reg
lazy_gettext = lazy_gettext_reg
get_domain = get_domain
Babel = Babel_reg
except ImportError:
DEBUG = True
if DEBUG:
traceback.print_exc()
Babel = None
def gettext(string, **variables):
return string % variables
def ngettext(singular, plural, num, **variables):
return (singular if num == 1 else plural) % variables
def lazy_gettext(string, **variables):
return gettext(string, **variables)
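# Sketch of the fallback behaviour when the selected Babel package cannot be
# imported (values are illustrative): the stand-in functions perform plain
# %-interpolation and plural selection only, with no translation catalogue.
#
#   >>> gettext('Hello %(name)s', name='World')
#   'Hello World'
#   >>> ngettext('%(num)d item', '%(num)d items', 2, num=2)
#   '2 items'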
|
We have a Directory theme and we need someone who can work on our website as an admin: sharing information, shaping strategy, designing, doing SEO, adding plugins, and changing and renewing them as needed. For this work they will receive a salary.
We need a person who has worked on general classifieds websites before and knows how and where to advertise different items: real estate, deals, events, jobs and so on.
If you are the kind of person we need, let us know and we will talk more.
We will ask you to sign our NDA and, for verification, to send us a photo of any identity document.
Hi, thanks for your posting. I have 10+ years of experience in web development and can fully meet your requirements. Looking forward to working with you soon. Thanks.
Hi, I have read your project description carefully. As a professional web developer, I can become a good member of your team. Please contact me and let us discuss the details. Thanks.
|
import socket
import fcntl
import struct
import requests
from requests.auth import HTTPBasicAuth
import os
import time
from flask import current_app, g, jsonify
from models import db, Hub, Endpoint, User, EndpointTypes, SectionTypes, EndpointGroup, Schedule, Properties, EndpointSchema, HubSchema
from datetime import datetime
import uuid
import interface
from errors import invalid_operation, no_records
import inspect
import traceback
from debugger import debug_msg
# from api.v1.interfaces import server_update_hub
def is_admin(user):
is_admin = False
users = User.query.filter_by(username=user).first()
if users.group == 'ADMIN':
is_admin = True
else:
is_admin = False
return is_admin
def unique_endpoint(section_id, node_id, endpoint_id):
unique = False
endpoint = Endpoint.query.filter_by(internal_sec_id=section_id,internal_nod_id=node_id,internal_end_id=endpoint_id).first()
if endpoint == None:
unique = True
else:
unique = False
return unique
def unique_endpoint_type(node_type, endpoint_type):
unique = False
endpointtypes = EndpointTypes.query.filter_by(node_type=node_type,endpoint_type=endpoint_type).first()
if endpointtypes == None:
unique = True
else:
unique = False
return unique
def unique_section_type(section_type):
unique = False
sectiontypes = SectionTypes.query.filter_by(section_type=section_type).first()
if sectiontypes == None:
unique = True
else:
unique = False
return unique
def unique_user(username):
user = User.query.filter_by(username=username).first()
if user == None:
unique = True
else:
unique = False
return unique
def valid_user(username):
user = User.query.filter_by(username=username).first()
if user != None:
valid = True
else:
valid = False
return valid
def endpoint_validation(data):
valid = False
endpointtypes = EndpointTypes.query.filter_by(node_type=data['node_type'],endpoint_type=data['endpoint_type']).first()
if endpointtypes == None:
valid = False
else:
valid = True
return valid
def schedule_validation(data):
valid = False
endpoint = Endpoint.query.filter_by(endpoint_uuid = data['uuid_id']).first()
if endpoint == None:
group = EndpointGroup.query.filter_by(group_uuid = data['uuid_id']).first()
if group == None:
valid = False
else:
valid = True
else:
# As the endpoint is found, then check expected_status is according to endpoint types
endpointtypes = EndpointTypes.query.filter_by(node_type=endpoint.node_type,endpoint_type=endpoint.endpoint_type).first()
if (endpointtypes.status_min <= data['expected_status']) and (endpointtypes.status_max >= data['expected_status']):
valid = True
else:
valid = False
return valid
def operate_validation(endpoint_uuid, status):
valid = False
endpoint = Endpoint.query.filter_by(endpoint_uuid = endpoint_uuid).first()
if endpoint == None:
valid = False
return valid
else:
valid = True
endpoint_types = EndpointTypes.query.filter_by(node_type=endpoint.node_type,endpoint_type=endpoint.endpoint_type).first()
if endpoint_types != None:
if status == endpoint_types.status_min:
valid = True
elif status == endpoint_types.status_max:
valid = True
elif (status > endpoint_types.status_min) and (status < endpoint_types.status_max):
valid = True
else:
valid = False
else:
valid = False
debug_msg('endpoint_types', endpoint.node_type,endpoint.endpoint_type,endpoint_types.status_min,endpoint_types.status_max,status,valid)
return valid
def unique_property(key):
property = Properties.query.filter_by(key = key).first()
if property == None:
unique = True
else:
unique = False
return unique
def unique_group_desc(group_desc):
group = EndpointGroup.query.filter_by(group_desc = group_desc).first()
if group == None:
unique = True
else:
unique = False
return unique
# def debug_msg(message,keyword1=-99,keyword2=-99,keyword3=-99,keyword4=-99,keyword5=-99,keyword6=-99,keyword7=-99,keyword8=-99,keyword9=-99,keyword10=-99):
# msg = ''
# property = Properties.query.filter_by(key = 'DEBUG').first()
# if property.value != None and property.value == 'true':
# callerframerecord = inspect.stack()[1] # 0 represents this line
# # 1 represents line at caller
# frame = callerframerecord[0]
# info = inspect.getframeinfo(frame)
# try:
# msg += '\t' + 'USER:' + str(g.user.username)
# except:
# msg += '\t' + 'USER:' + str("BackendUser")
# msg += '\t' + 'FILE:' + str(info.filename)
# msg += '\t' + 'FUNC:' + str(info.function)
# msg += '\t' + 'LINE:' + str(info.lineno)
# msg += '\t' + 'CALL:' + str(traceback.format_stack(limit=5))
#
# msg += '\t'
# if(keyword1!=-99):
# msg += 'KEY1:' + str(keyword1)
# msg += '\t'
# if(keyword2!=-99):
# msg += 'KEY2:' + str(keyword2)
# msg += '\t'
# if(keyword3!=-99):
# msg += 'KEY3:' + str(keyword3)
# msg += '\t'
# if(keyword4!=-99):
# msg += 'KEY4:' + str(keyword4)
# msg += '\t'
# if(keyword5!=-99):
# msg += 'KEY5:' + str(keyword5)
# msg += '\t'
# if(keyword6!=-99):
# msg += 'KEY6:' + str(keyword6)
# msg += '\t'
# if(keyword7!=-99):
# msg += 'KEY7:' + str(keyword7)
# msg += '\t'
# if(keyword8!=-99):
# msg += 'KEY8:' + str(keyword8)
# msg += '\t'
# if(keyword9!=-99):
# msg += 'KEY9:' + str(keyword9)
# msg += '\t'
# if(keyword10!=-99):
# msg += 'KEY10:' + str(keyword10)
# msg += '\t' + 'MSG:' + str(message)
# # Open log file in append mode
# f = open(current_app.config['LOG_FILE'],'a')
# f.write(str(datetime.today()))
# f.write(msg)
# print msg
# f.write('\n')
# f.close()
def get_intranet_ip_address(ifname):
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
s.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', ifname[:15])
)[20:24])
except:
return "0.0.0.0"
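# Usage sketch (interface names are host-specific assumptions): the helper
# returns the IPv4 address bound to the named interface via the SIOCGIFADDR
# ioctl, or the sentinel "0.0.0.0" on any failure (missing interface,
# non-Linux host, etc.).
#
#   >>> get_intranet_ip_address('eth0')      # e.g. '192.168.1.23'
#   >>> get_intranet_ip_address('nosuch0')
#   '0.0.0.0'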
def get_server_credentials():
prop = Properties.query.filter_by(key='ServerUsr').first()
user = prop.value
prop = Properties.query.filter_by(key='ServerPwd').first()
password = prop.value
return (user, password)
def get_serial():
# Extract serial from cpuinfo file
cpuserial = "0000000000000000"
try:
f = open('/proc/cpuinfo','r')
for line in f:
if line[0:6]=='Serial':
cpuserial = line[10:26]
f.close()
cpuserial_int = int(cpuserial,16)
except:
cpuserial = "ERROR000000000"
cpuserial_int = 9999999999999999
return cpuserial_int
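# The helper reads a /proc/cpuinfo line of the form (Raspberry Pi style,
# illustrative value):
#
#   Serial          : 00000000a1b2c3d4
#
# and converts the 16 hex characters at fixed columns 10-25 into an int. If the
# file is missing or unparsable, the sentinel 9999999999999999 is returned.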
def get_external_url():
try:
r = requests.get('http://localhost:4040/api/tunnels')
datajson = r.json()
msg=None
for i in datajson['tunnels']:
# populate only https ngrok url
if 'https' in i['public_url']:
msg = i['public_url']
except requests.exceptions.ConnectionError:
r = None
msg = "Error"
except requests.exceptions.RequestException:
r = None
msg = "ERROR"
#
return msg
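# Sketch of the ngrok local API response this helper expects (field values are
# illustrative; only the keys read above are shown):
#
#   {"tunnels": [{"public_url": "http://abc123.ngrok.io"},
#                {"public_url": "https://abc123.ngrok.io"}]}
#
# The last https public_url seen wins; None is returned if no https tunnel
# exists, and "Error"/"ERROR" on connection or request failures.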
def server_hub_string(hubdetails):
# hub_schema_custom = HubSchema(exclude=('last_changed_on', 'last_changed_by'))
# hubstring = hub_schema_custom.dump(hubdetails).data
hubstring = '{"description":"'+ str(hubdetails.description) +'", "external_url":"'+str(hubdetails.external_url)+'","hub_id":"'+str(hubdetails.hub_id)+'","internal_url":"'+str(hubdetails.internal_url)+'"}'
# hubstring = ''
# hubstring = hubstring + '{"description":"'+ str(hubdetails.description)
# hubstring = hubstring +'", "external_url":"'+str(hubdetails.external_url)
# hubstring = hubstring +'","hub_id":"'+str(hubdetails.hub_id)
# hubstring = hubstring +'","internal_url":"'+str(hubdetails.internal_url)+'"}'
return hubstring
def server_update_hub(hubdetails):
resp = None
server = Properties.query.filter_by(key='ServerAPI').first()
serverurl = server.value
serverurl = serverurl + 'et_update_hub_info.php?arg={"hub":'
url = serverurl + str(server_hub_string(hubdetails)) + '}'
debug_msg('hub_defined__server_updated', url)
try:
user,password = get_server_credentials()
req = requests.get(url,auth=HTTPBasicAuth(user, password)).json()
debug_msg('response', req)
# resp = req['success']
except requests.exceptions.ConnectionError:
req = None
msg = "Error"
resp = None
except requests.exceptions.RequestException:
req = None
msg = "ERROR"
resp = None
except:
req = None
msg = "ERROR"
resp = None
return resp
def endpoint_update_status(endpoint_uuid,status):
resp = None
server = Properties.query.filter_by(key='ServerAPI').first()
serverurl = server.value
serverurl = serverurl + 'et_change_endpoint_status.php?arg={"etct_endpoint_id":"'
url = serverurl + str(endpoint_uuid)+'","status":"' + status +'"}'
debug_msg('endpoint_update_status', url)
try:
user,password = get_server_credentials()
req = requests.get(url,auth=HTTPBasicAuth(user, password)).json()
debug_msg('response', req)
# resp = req['success']
except requests.exceptions.ConnectionError:
req = None
msg = "Error"
resp = None
except requests.exceptions.RequestException:
req = None
msg = "ERROR"
resp = None
except:
req = None
msg = "ERROR"
resp = None
return resp
# http://shubansolutions.com/etct/ws/et_change_endpoint_status.php?arg={"etct_endpoint_id":"8a38f241-7c1d-4580-a9fa-6debd3f03061","status":"A"}
def server_endpoint_string(endpoints):
# endpoint_schemas_custom = EndpointSchema(exclude=('last_changed_on', 'last_changed_by'), many = True, extra={"qwe":'123',"qbc":1234})
# endpoint_schemas_custom = EndpointSchema(exclude=('last_changed_on', 'last_changed_by'), many = True)
# endpointstring = endpoint_schemas_custom.dump(endpoints).data
# # endpointstring = jsonify({'endpoints':endpointstring})
# debug_msg('endpoint_defined__server_updated', endpointstring)
endpointstring = ''
for endpoint_single in endpoints:
endpointstring += '{"internal_sec_id":"'+ str(endpoint_single.internal_sec_id) +'", "section_type":"' + str(endpoint_single.section_type)+'","internal_sec_desc":"'+str(endpoint_single.internal_sec_desc)+'","internal_nod_id":"'+str(endpoint_single.internal_nod_id)+'","node_type":"'+str(endpoint_single.node_type)+'","internal_nod_desc":"'+str(endpoint_single.internal_nod_desc)+'","internal_end_id":"'+str(endpoint_single.internal_end_id)+'","endpoint_type":"'+str(endpoint_single.endpoint_type)+'","endpoint_uuid":"'+str(endpoint_single.endpoint_uuid)+'","internal_end_desc":"'+str(endpoint_single.internal_end_desc)+'"}'
endpointstring += ','
endpointstring = endpointstring[:-1]
return endpointstring
def server_sync_endpoints():
server = Properties.query.filter_by(key='ServerAPI').first()
serverurl = server.value
serverurl = serverurl + 'et_update_hub_info.php?arg='
endpoints = Endpoint.query.all()
hubdetails = Hub.query.first()
url = serverurl + '{"endpoints":[['+ str(server_endpoint_string(endpoints)) + '],{}],"hub":' + str(server_hub_string(hubdetails)) + '}'
debug_msg('endpoint_defined__server_updated', url)
try:
user,password = get_server_credentials()
req = requests.get(url,auth=HTTPBasicAuth(user, password)).json()
debug_msg('response', req)
# resp = req['success']
except requests.exceptions.ConnectionError:
req = None
msg = "Error"
resp = None
except requests.exceptions.RequestException:
req = None
msg = "ERROR"
resp = None
except:
req = None
msg = "ERROR"
resp = None
# return resp
def get_scheduler_current_timestamp():
year = int(str(datetime.today())[:4])
month = int(str(datetime.today())[5:7])
weekday = datetime.weekday(datetime.today())
date = int(str(datetime.today())[8:10])
hour = int(str(datetime.today())[11:13])
min = int(str(datetime.today())[14:16])
# Return year, month, Weekday, date, hour and min based on today's datetime
return (year, month, weekday, date, hour, min)
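# Example of the returned tuple (illustrative): for 2018-07-04 09:05, a
# Wednesday, the helper yields (2018, 7, 2, 4, 9, 5), since
# datetime.weekday() counts Monday as 0, making Wednesday 2.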
def scheduled_endpoints_groups():
hourly_tasks = scheduled_hourly_tasks()
daily_tasks = scheduled_daily_tasks()
weekly_tasks = scheduled_weekly_tasks()
monthly_tasks = scheduled_monthly_tasks()
yearly_tasks = scheduled_yearly_tasks()
onlyonce_tasks = scheduled_onlyonce_tasks()
endpoints = []
endpoint_status = []
# endpoints = Endpoint.query.all()
endpointgroup = {}
for tasks in hourly_tasks:
endpoint = Endpoint.query.filter_by(endpoint_uuid=tasks.uuid_id).first()
if endpoint != None:
endpoints.append(endpoint)
endpoint_status.append(tasks.expected_status)
for tasks in daily_tasks:
endpoint = Endpoint.query.filter_by(endpoint_uuid=tasks.uuid_id).first()
if endpoint != None:
endpoints.append(endpoint)
endpoint_status.append(tasks.expected_status)
for tasks in weekly_tasks:
endpoint = Endpoint.query.filter_by(endpoint_uuid=tasks.uuid_id).first()
if endpoint != None:
endpoints.append(endpoint)
endpoint_status.append(tasks.expected_status)
for tasks in monthly_tasks:
endpoint = Endpoint.query.filter_by(endpoint_uuid=tasks.uuid_id).first()
if endpoint != None:
endpoints.append(endpoint)
endpoint_status.append(tasks.expected_status)
for tasks in yearly_tasks:
endpoint = Endpoint.query.filter_by(endpoint_uuid=tasks.uuid_id).first()
if endpoint != None:
endpoints.append(endpoint)
endpoint_status.append(tasks.expected_status)
for tasks in onlyonce_tasks:
endpoint = Endpoint.query.filter_by(endpoint_uuid=tasks.uuid_id).first()
if endpoint != None:
endpoints.append(endpoint)
endpoint_status.append(tasks.expected_status)
for endpoint1 in endpoints:
print endpoint1.endpoint_uuid
return (endpoints, endpointgroup, endpoint_status)
# def delete_all_except(endpoint):
def scheduled_hourly_tasks():
# Get current date time and weekday in variables
year, month, weekday, date, hour, min = get_scheduler_current_timestamp()
# Query the tasks which are marked true for hourly, and has current min
tasks = Schedule.query.filter_by(status = True, hourly = True, min = min)
return tasks
def scheduled_daily_tasks():
# Get current date time and weekday in variables
year, month, weekday, date, hour, min = get_scheduler_current_timestamp()
# Query the tasks which are marked true for daily, and has current hour and min
tasks = Schedule.query.filter_by(status = True, daily = True, hour = hour, min = min)
return tasks
def scheduled_weekly_tasks():
# Get current date time and weekday in variables
year, month, weekday, date, hour, min = get_scheduler_current_timestamp()
# Query the tasks which are marked true for weekly, and has current weekday, hour and min
tasks = Schedule.query.filter_by(status = True, weekly = True, weekday = weekday, hour = hour, min = min)
return tasks
def scheduled_monthly_tasks():
# Get current date time and weekday in variables
year, month, weekday, date, hour, min = get_scheduler_current_timestamp()
# Query the tasks which are marked true for monthly, and has current weekday, hour and min
tasks = Schedule.query.filter_by(status = True, monthly = True, date = date, hour = hour, min = min)
return tasks
def scheduled_yearly_tasks():
# Get current date time and weekday in variables
year, month, weekday, date, hour, min = get_scheduler_current_timestamp()
# Query the tasks which are marked true for yearly, and has current weekday, hour and min
tasks = Schedule.query.filter_by(status = True, yearly = True, month = month, date = date, hour = hour, min = min)
return tasks
def scheduled_onlyonce_tasks():
# Get current date time and weekday in variables
year, month, weekday, date, hour, min = get_scheduler_current_timestamp()
# Query the tasks which are marked true for yearly, and has current weekday, hour and min
tasks = Schedule.query.filter_by(status = True, onlyonce = True, year = year, month = month, date = date, hour = hour, min = min)
return tasks
def system_start():
str_ip = get_intranet_ip_address('eth0')
if str_ip == "0.0.0.0":
str_ip = get_intranet_ip_address('wlan0')
str_ip = 'http://' + str_ip
int_serial = get_serial()
str_ext_url = get_external_url()
hubdetails = Hub.query.first()
# Commit to db and call Server API only if there are any changes to PI Serial, internal_url, external_url
if (hubdetails.hub_id != int_serial and int_serial != 9999999999999999) or (hubdetails.internal_url != str_ip and str_ip != "0.0.0.0") or (hubdetails.external_url != str_ext_url and str_ext_url != "Error" and str_ext_url != "ERROR"):
hubdetails.hub_id = int_serial
hubdetails.internal_url = str_ip
hubdetails.external_url = str_ext_url
try:
hubdetails.last_changed_by = g.user.username
except:
hubdetails.last_changed_by = str("BackendUser")
hubdetails.last_changed_on = datetime.today()
db.session.add(hubdetails)
db.session.commit()
# Call Server API
resp = server_update_hub(hubdetails)
# External URL is fetched, Server is updated
if (str_ext_url != 'Error' and str_ext_url != 'ERROR') and resp != None:
debug_msg('hub_started__external_url_fetched__server_updated', int_serial, str_ip, str_ext_url, resp, hubdetails.status)
# External URL is fetched, Server is not updated
elif (str_ext_url != 'Error' and str_ext_url != 'ERROR') and resp == None:
debug_msg('hub_started__external_url_fetched__server_not_updated', int_serial, str_ip, str_ext_url, resp, hubdetails.status)
# External URL is not fetched, Server is updated
elif (str_ext_url == 'Error' or str_ext_url == 'ERROR') and resp != None:
debug_msg('hub_started__external_url_not_fetched__server_updated', int_serial, str_ip, str_ext_url, resp, hubdetails.status)
# External URL is not fetched, Server is not updated
else:
debug_msg('hub_started__external_url_not_fetched__server_not_updated', int_serial, str_ip, str_ext_url, resp, hubdetails.status)
def operate_endpoint_group(uuid, expected_status):
status = -1
errors = ""
# Find if it is Endpoint or Group
endpoint = Endpoint.query.filter_by(endpoint_uuid = uuid).first()
if endpoint == None:
group = EndpointGroup.query.filter_by(group_uuid = uuid).first()
if group == None:
# UUID is not valid Endpoint or Group, exit the function with errors
errors = no_records('operate.operate.endpoint_group',uuid)
status = -1
debug_msg('improper_uuid', errors, status)
return (status,errors)
# Action required for Endpoint as UUID passed is endpoint
if endpoint != None:
# Validate if this status is possible
if not (operate_validation(uuid,expected_status)):
errors = invalid_operation()
status = -1
debug_msg('endpointvalidation', errors, status)
return (status,errors)
# Get the parameters stored in Endpoint
endpointtype = EndpointTypes.query.filter_by(node_type=endpoint.node_type, endpoint_type=endpoint.endpoint_type).first()
if endpointtype == None:
errors = no_records('operate.operate.endpointtype',endpoint.node_type,endpoint.endpoint_type)
status = -1
debug_msg('endpointtypes_validation', errors, status,endpoint.node_type,endpoint.endpoint_type)
return (status,errors)
# All the details are received, now call the corresponding method in interface and get the status
debug_msg('interface_communication', endpoint.endpoint_uuid,endpoint.node_type,endpoint.endpoint_type,endpoint.internal_nod_id,endpoint.internal_end_id,endpointtype.method,expected_status)
interfaces_method_name = getattr(interface,endpointtype.method)
status, errors = interfaces_method_name(endpoint,expected_status)
# Action required for Group as UUID passed is endpoint group
elif group != None:
pass
return (status,errors)
|
How to View New Internship Positions.
How to Search for Internship Posts.
How to upload a Cover Letter.
How to apply for the Internship Positions.
© Sukuul - For all your Questions and Answers.
|
"""
Migrates user preferences from one language code to another in batches. Dark lang preferences are not affected.
"""
from __future__ import print_function
import logging
from time import sleep
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from django.db.models import Q, Max
from openedx.core.djangoapps.dark_lang.models import DarkLangConfig
from openedx.core.djangoapps.user_api.models import UserPreference
DEFAULT_CHUNK_SIZE = 10000
DEFAULT_SLEEP_TIME_SECS = 10
LOGGER = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Implementation of the migrate command
"""
help = 'Migrate all user language preferences (excluding dark languages) from one language code to another.'
def add_arguments(self, parser):
parser.add_argument('old_lang_code',
help='Original language code, ex. "zh-cn"')
parser.add_argument('new_lang_code',
help='New language code, ex. "zh-hans"')
parser.add_argument('--start_id',
type=int,
default=1,
help='ID to begin from, in case a run needs to be restarted from the middle.')
parser.add_argument('--chunk_size',
type=int,
default=DEFAULT_CHUNK_SIZE,
help='Number of users whose preferences will be updated per batch.')
parser.add_argument('--sleep_time_secs',
type=int,
default=DEFAULT_SLEEP_TIME_SECS,
help='Number of seconds to sleep between batches.')
def handle(self, *args, **options):
"""
Execute the command.
"""
old_lang_code = options['old_lang_code']
new_lang_code = options['new_lang_code']
chunk_size = options['chunk_size']
sleep_time_secs = options['sleep_time_secs']
start = options['start_id']
end = start + chunk_size
# Make sure we're changing to a code that actually exists. Presumably it's safe to move away from a code that
# doesn't.
langs = [lang_code[0] for lang_code in settings.LANGUAGES]
langs += DarkLangConfig.current().released_languages_list
if new_lang_code not in langs:
raise CommandError('{} is not a configured language code in settings.LANGUAGES '
'or the current DarkLangConfig.'.format(new_lang_code))
max_id = UserPreference.objects.all().aggregate(Max('id'))['id__max']
print('Updating user language preferences from {} to {}. '
'Start id is {}, current max id is {}. '
'Chunk size is of {}'.format(old_lang_code, new_lang_code, start, max_id, chunk_size))
updated_count = 0
while True:
# On the last time through catch any new rows added since this run began
if end >= max_id:
print('Last round, includes all new rows added since this run started.')
id_query = Q(id__gte=start)
else:
id_query = Q(id__gte=start) & Q(id__lt=end)
curr = UserPreference.objects.filter(
id_query,
key='pref-lang',
value=old_lang_code
).update(value=new_lang_code)
updated_count += curr
print('Updated rows {} to {}, {} rows affected'.format(start, end - 1, curr))
if end >= max_id:
break
start = end
end += chunk_size
sleep(sleep_time_secs)
print('Finished! Updated {} total preferences from {} to {}'.format(updated_count, old_lang_code, new_lang_code))
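# Example invocation (a sketch; the actual command name comes from this file's
# name, which is not shown here, so <command_name> is a placeholder):
#
#   python manage.py <command_name> zh-cn zh-hans --start_id 1 \
#       --chunk_size 5000 --sleep_time_secs 5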
|
Jennifer Flay is the New Zealand-born gallerist who transformed a dying Paris art fair into one of the best in the world. When she became director of FIAC (pronounced "fee-yack"), the Foire Internationale d'Art Contemporain, broadly translated as the international contemporary art fair, in 2003, it was its thirtieth birthday and there were headlines asking whether its commemorations were a "birthday or funeral". When Jennifer Flay took over, she was still recovering from a car accident that almost killed her: it had left her in a coma with a broken neck, head injuries and many months of recovery ahead. She had lost her home and the contemporary art gallery she had set up, and in her journey to repair her life she picked up the FIAC, determined that it too would thrive. And it has. It is now one of the world's most important international art fairs, with 182 participating galleries at 2015's festival. For her efforts in putting Paris back at centre stage on the contemporary art scene, Jennifer was awarded France's highest decoration, the Légion d'honneur, in October 2015.
|
import commands
MEDIAINFO_BIN = 'mediainfo'
class MediaInfoParsers:
@staticmethod
def parseValue(value):
value = value.split(' / ')[0] # support 'Sampling rate : 44.1 KHz / 22.05 KHz'
splittedValue = value.split(' ')
value = ''.join(splittedValue[:-1])
try:
if value.endswith('.0'):
value = int(value[:-2])
elif '.' in value:
value = float(value)
else:
value = int(value)
except ValueError:
return None
return (value, splittedValue[-1])
@staticmethod
def parseBitrate(value):
value, units = MediaInfoParsers.parseValue(value)
if units == 'bps':
return value
elif units == 'Kbps':
return value * 1024
elif units == 'Mbps':
return value * 1024 * 1024
elif units == 'Gbps':
return value * 1024 * 1024 * 1024
return None
@staticmethod
def parseSamplingRate(value):
value, units = MediaInfoParsers.parseValue(value)
if units == 'KHz':
return value * 1000
return None
@staticmethod
def getSimpleParser(allowedUnits):
def result(value):
value, units = MediaInfoParsers.parseValue(value)
if units in allowedUnits:
return value
return None
return result
@staticmethod
def parseVideoProfile(value):
splittedValue = value.split('@L')
if len(splittedValue) != 2:
return None
return splittedValue
@staticmethod
def parseAudioProfile(value):
return value.split(' / ')[0].split('@')[0] # support 'HE-AAC / LC'
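# Illustrative parser behaviour on typical mediainfo text values (the inputs
# are examples, not output captured from a real file):
#
#   >>> MediaInfoParsers.parseValue('44.1 KHz / 22.05 KHz')
#   (44.1, 'KHz')
#   >>> MediaInfoParsers.parseBitrate('1 600 Kbps')
#   1638400
#   >>> MediaInfoParsers.parseVideoProfile('High@L3.1')
#   ['High', '3.1']
#   >>> MediaInfoParsers.parseAudioProfile('HE-AAC / LC')
#   'HE-AAC'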
class MediaInfo:
PARSING_CONFIG = {
'general': [
('overall bit rate', 'containerBitrate', MediaInfoParsers.parseBitrate),
],
'video': [
('bit rate', 'videoBitrate', MediaInfoParsers.parseBitrate),
('width', 'videoWidth', MediaInfoParsers.getSimpleParser(['pixels'])),
('height', 'videoHeight', MediaInfoParsers.getSimpleParser(['pixels'])),
('frame rate', 'videoFrameRate', MediaInfoParsers.getSimpleParser(['fps'])),
('format settings, reframes', 'videoReframes', MediaInfoParsers.getSimpleParser(['frame', 'frames'])),
('format profile', 'videoProfile', MediaInfoParsers.parseVideoProfile),
],
'audio': [
('bit rate', 'audioBitrate', MediaInfoParsers.parseBitrate),
('sampling rate', 'audioSamplingRate', MediaInfoParsers.parseSamplingRate),
('channel(s)', 'audioChannels', MediaInfoParsers.getSimpleParser(['channel', 'channels'])),
('format profile', 'audioProfile', MediaInfoParsers.parseAudioProfile),
],
}
def parse(self, inputFileName):
cmdLine = '%s %s' % (MEDIAINFO_BIN, inputFileName)
output = commands.getoutput(cmdLine)
sectionName = None
values = {}
for curLine in output.split('\n'):
curLine = curLine.strip()
if len(curLine) == 0:
sectionName = None
continue
splittedLine = map(lambda x: x.strip(), curLine.split(':', 1))
if len(splittedLine) == 1:
sectionName = splittedLine[0].lower()
elif sectionName != None:
values.setdefault(sectionName, {})
values[sectionName][splittedLine[0].lower()] = splittedLine[1]
for sectionName, fields in self.PARSING_CONFIG.items():
for keyName, memberName, parser in fields:
value = None
if values.has_key(sectionName) and values[sectionName].has_key(keyName):
value = parser(values[sectionName][keyName])
setattr(self, memberName, value)
self.hasVideo = values.has_key('video')
self.hasAudio = values.has_key('audio')
def normalizeBitrate(bitrate, standardBitrates):
normBitrate = standardBitrates[0]
for curBitrate in standardBitrates:
if abs(curBitrate - bitrate) < abs(normBitrate - bitrate):
normBitrate = curBitrate
return normBitrate
def normalizeVideoBitrate(bitrate):
return normalizeBitrate(bitrate, [300,400,500,700,900,1200,1600,2000,2500,3000,4000])
def normalizeAudioBitrate(bitrate):
return normalizeBitrate(bitrate, [64,128])
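# Illustrative behaviour of the bitrate snapping helpers (inputs are examples):
#
#   >>> normalizeVideoBitrate(1100)   # nearest rung of the video ladder
#   1200
#   >>> normalizeAudioBitrate(100)    # nearer of 64 / 128 kbps
#   128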
def getMpegTSEncodingParams(referenceFileName, blackDuration = 10):
# get the mediainfo of the source file
mediaInfo = MediaInfo()
mediaInfo.parse(referenceFileName)
if not mediaInfo.hasVideo and not mediaInfo.hasAudio:
return (None, None) # no audio and no video -> file is invalid
# video codec
if mediaInfo.hasVideo:
blackInput = '-t %s' % blackDuration
vcodec = "-vcodec libx264 -subq 7 -qcomp 0.6 -qmin 10 -qmax 50 -qdiff 4 -bf 0 -coder 1 -x264opts b-pyramid:weightb:mixed-refs:8x8dct:no-fast-pskip=0:force-cfr:sps-id=26 -pix_fmt yuv420p -threads 4 -force_key_frames \"expr:gte(t,n_forced*2)\""
videoProfile = ' -vprofile main -level 3.1'
if mediaInfo.videoProfile != None:
profile, level = mediaInfo.videoProfile
if profile.lower() in ['baseline', 'main', 'high', 'high10', 'high422', 'high444']:
videoProfile = ' -vprofile %s -level %s' % (profile.lower(), level)
vcodec += videoProfile
if mediaInfo.videoBitrate != None:
vcodec += ' -b:v %sk' % normalizeVideoBitrate(mediaInfo.videoBitrate / 1024)
elif mediaInfo.containerBitrate != None:
vcodec += ' -b:v %sk' % normalizeVideoBitrate(mediaInfo.containerBitrate / 1024)
if mediaInfo.videoWidth != None and mediaInfo.videoHeight != None:
vcodec += ' -vf scale="iw*min(%s/iw\,%s/ih):ih*min(%s/iw\,%s/ih),pad=%s:%s:(%s-iw)/2:(%s-ih)/2"' % ((mediaInfo.videoWidth, mediaInfo.videoHeight) * 4)
blackInput += ' -s %sx%s' % (mediaInfo.videoWidth, mediaInfo.videoHeight)
if mediaInfo.videoFrameRate != None:
vcodec += ' -r %s' % (mediaInfo.videoFrameRate)
blackInput += ' -r %s' % (mediaInfo.videoFrameRate)
if mediaInfo.videoReframes != None:
vcodec += ' -refs %s' % (mediaInfo.videoReframes)
else:
vcodec += ' -refs 6'
blackInput += ' -f rawvideo -pix_fmt rgb24 -i /dev/zero'
else:
blackInput = ''
vcodec = '-vn'
# audio codec
if mediaInfo.hasAudio:
silenceInput = '-t %s' % blackDuration
acodec = '-acodec libfdk_aac'
audioProfile = ' -profile:a aac_he'
AUDIO_PROFILE_MAPPING = {
'LC': 'aac_low',
'HE-AAC': 'aac_he',
'HE-AACv2': 'aac_he_v2',
'ER AAC LD': 'aac_ld',
'ER AAC ELD': 'aac_eld',
}
if AUDIO_PROFILE_MAPPING.has_key(mediaInfo.audioProfile):
audioProfile = ' -profile:a %s' % AUDIO_PROFILE_MAPPING[mediaInfo.audioProfile]
acodec += audioProfile
if mediaInfo.audioBitrate != None:
acodec += ' -b:a %sk' % normalizeAudioBitrate(mediaInfo.audioBitrate / 1024)
if mediaInfo.audioSamplingRate != None:
acodec += ' -ar %s' % (mediaInfo.audioSamplingRate)
silenceInput += ' -ar %s' % (mediaInfo.audioSamplingRate)
if mediaInfo.audioChannels != None:
acodec += ' -ac %s' % (mediaInfo.audioChannels)
silenceInput += ' -ac %s' % (mediaInfo.audioChannels)
silenceInput += ' -f s16le -acodec pcm_s16le -i /dev/zero'
else:
silenceInput = ''
acodec = '-an'
# filter / format - fixed
filter = "-bsf h264_mp4toannexb"
format = '-f mpegts'
encParams = ' '.join([vcodec, acodec, filter, format])
blackEncParams = ' '.join([blackInput, silenceInput, encParams])
return (encParams, blackEncParams)
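# Usage sketch (the path is a placeholder): the helper returns two ffmpeg
# parameter strings, one for transcoding the reference file to MPEG-TS and one
# that prepends black/silent filler inputs of blackDuration seconds; (None, None)
# is returned when the reference has neither audio nor video.
#
#   encParams, blackEncParams = getMpegTSEncodingParams('/path/to/reference.mp4')
#   if encParams is None:
#       pass  # invalid reference file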
|
All through high school, I wrote essays and papers. I wrote them in a lot of my classes – history, biology, health, art, and, of course, English. In most of those classes, I got very average grades on these pieces. In English, I sometimes got lower than average grades. My English teachers would use a lot of red ink to point out my egregious errors of sentence structure, awkward composition, agreement mistakes, etc., and I pretty much ignored them. Never was I asked to rewrite an essay or paper that was poorly written or included some type of plagiarism, which was also often pointed out. Editing and rewriting were just not in my experience. College quickly changed all of that.
The first thing that hit me between the eyes in college was that I would be writing a lot of essays and papers – in almost every class, actually. When I submitted the same quality that was my “trademark” in high school, I didn’t receive average to just below average grades. I received “F’s.” There were no errors pointed out, only terse phrases at the top – phrases like “Proofread your work!” or “Edit before you submit!” That pretty much did it. I decided that every time I wrote a paper, I would have to review it, find the errors, and rewrite my paper before turning it in. I had the best of intentions from that point forward.
I began to work very hard to edit and rewrite my essay and paper assignments. I also put them all through a plagiarism checker, because some of my high school teachers had found plagiarism in my work. A big issue was that I was not citing sources correctly. But my grades still didn’t improve that much even after I fixed that problem. What I came to realize was that I had structural errors, grammatical errors and usage errors that I was not catching. In short, I didn’t have a clue about how to really review, edit and rewrite a rough draft to make it suitable for submission. I had lots of work to do.
My first reading covered the entire piece of writing without any concern for grammar or punctuation, looking only for confirmation of a good thesis statement and a logical flow of the ideas I wanted to present.
The second reading was one I read out loud. This was a pretty good activity, because I was able to find sentence fragments and run-ons and even see where maybe some commas should be placed.
Another piece of advice I got from reading some of the articles about reviewing and editing my writing was this whole thing about transitions between paragraphs. This was something I never had much instruction on in high school, and it was really an important feature for college-level writing. I needed some more professional help, so I began to look at a rewriting tool another student recommended.
Everyone has academic strengths and weaknesses, and getting help is nothing to be ashamed of.
When students know that they have issues, and they get help, especially with editing college papers and essays, they can then study how that editing process works; they can see their errors and how they have been corrected by a professional. These become learning experiences.
With every essay or paper I send over for editing, I get back a corrected copy, with simple explanations about why the changes were made – each time I learn something that I will be able to use the next time I write.
The other nice thing about this professional editing service is that they check my writing for plagiarism. And they have provided explanations about how to avoid it. They also educated me about self-plagiarism, something that college students don’t even think about, but I will in the future.
Most students understand plagiarism and the disastrous consequences if they do it. With the sophisticated software out there today, any plagiarism can be discovered. Sometimes we plagiarize and don’t even realize it. I was famous for taking notes, using an author’s exact words and then not giving credit where I should have. Now I run a plagiarism scan on everything I write. And if I send it over to my writing service for editing, they do this too.
Self-plagiarism occurs when a student (or anyone for that matter) writes a piece and it is published somewhere online. And that student likes the piece so much, he decides to turn it in as an essay assignment to a professor. If a plagiarism scan is run, it will pop up. Why? Because it’s out there – in a blog, in the e-version of the student newspaper, in some forum, etc. This is especially important for grad students who may be contributing to online publications in order to enhance their resumes and CV’s. In these cases, it is far better to be up front and ask permission, before you have to ask for forgiveness.
Academic writing is formal. The rules of proper English must be followed, especially for essays and papers. But a lot of college coursework will require a more casual style.
Brevity of words is important. Say what you have to say as succinctly as possible.
Sentences are short and simple.
Use action/strong verbs as opposed to weak ones.
If you take a course that requires this kind of writing, you will discover that your academic writing will improve too.
College is not the place to turn in your first draft and hope for the best. Expectations are high, and you will be expected to write well, no matter what the course. If you are not practiced in editing, get the help you need, learn from that help, and over time, you will be able to do this yourself.
Having a passion for writing, Lauren started out editing her friends’ essays, a skill that evolved into a profession and a lifestyle. She is comfortable with any topic, but she specializes most in self-improvement, motivation, coaching and writing of all kinds.
|
# -*- coding: utf-8 -*-
"""
======
Rmagic
======
Magic command interface for interactive work with R via rpy2
.. note::
The ``rpy2`` package needs to be installed separately. It
can be obtained using ``easy_install`` or ``pip``.
You will also need a working copy of R.
Usage
=====
To enable the magics below, execute ``%load_ext rmagic``.
``%R``
{R_DOC}
``%Rpush``
{RPUSH_DOC}
``%Rpull``
{RPULL_DOC}
``%Rget``
{RGET_DOC}
"""
from __future__ import print_function
#-----------------------------------------------------------------------------
# Copyright (C) 2012 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
import sys
import tempfile
from glob import glob
from shutil import rmtree
# numpy and rpy2 imports
import numpy as np
import rpy2.rinterface as ri
import rpy2.robjects as ro
try:
from rpy2.robjects import pandas2ri
pandas2ri.activate()
except ImportError:
pandas2ri = None
from rpy2.robjects import numpy2ri
numpy2ri.activate()
# IPython imports
from IPython.core.displaypub import publish_display_data
from IPython.core.magic import (Magics, magics_class, line_magic,
line_cell_magic, needs_local_scope)
from IPython.testing.skipdoctest import skip_doctest
from IPython.core.magic_arguments import (
argument, magic_arguments, parse_argstring
)
from IPython.external.simplegeneric import generic
from IPython.utils.py3compat import (str_to_unicode, unicode_to_str, PY3,
unicode_type)
from IPython.utils.text import dedent
class RInterpreterError(ri.RRuntimeError):
"""An error when running R code in a %%R magic cell."""
def __init__(self, line, err, stdout):
self.line = line
self.err = err.rstrip()
self.stdout = stdout.rstrip()
def __unicode__(self):
s = 'Failed to parse and evaluate line %r.\nR error message: %r' % \
(self.line, self.err)
if self.stdout and (self.stdout != self.err):
s += '\nR stdout:\n' + self.stdout
return s
if PY3:
__str__ = __unicode__
else:
def __str__(self):
return unicode_to_str(unicode(self), 'utf-8')
def Rconverter(Robj, dataframe=False):
"""
Convert an object in R's namespace to one suitable
for ipython's namespace.
For a data.frame, it tries to return a structured array.
It first checks for colnames, then names.
If all are NULL, it returns np.asarray(Robj), else
it tries to construct a recarray
Parameters
----------
Robj: an R object returned from rpy2
"""
is_data_frame = ro.r('is.data.frame')
colnames = ro.r('colnames')
rownames = ro.r('rownames') # with pandas, these could be used for the index
names = ro.r('names')
if dataframe:
as_data_frame = ro.r('as.data.frame')
cols = colnames(Robj)
_names = names(Robj)
if cols != ri.NULL:
Robj = as_data_frame(Robj)
names = tuple(np.array(cols))
elif _names != ri.NULL:
names = tuple(np.array(_names))
else: # failed to find names
return np.asarray(Robj)
Robj = np.rec.fromarrays(Robj, names = names)
return np.asarray(Robj)
@generic
def pyconverter(pyobj):
"""Convert Python objects to R objects. Add types using the decorator:
@pyconverter.when_type
"""
return pyobj
# The default conversion for lists seems to make them a nested list. That has
# some advantages, but is rarely convenient, so for interactive use, we convert
# lists to a numpy array, which becomes an R vector.
@pyconverter.when_type(list)
def pyconverter_list(pyobj):
return np.asarray(pyobj)
if pandas2ri is None:
# pandas2ri was new in rpy2 2.3.3, so for now we'll fall back to pandas'
# conversion function.
try:
from pandas import DataFrame
from pandas.rpy.common import convert_to_r_dataframe
@pyconverter.when_type(DataFrame)
def pyconverter_dataframe(pyobj):
return convert_to_r_dataframe(pyobj, strings_as_factors=True)
except ImportError:
pass
@magics_class
class RMagics(Magics):
"""A set of magics useful for interactive work with R via rpy2.
"""
def __init__(self, shell, Rconverter=Rconverter,
pyconverter=pyconverter,
cache_display_data=False):
"""
Parameters
----------
shell : IPython shell
Rconverter : callable
To be called on values taken from R before putting them in the
IPython namespace.
pyconverter : callable
To be called on values in ipython namespace before
assigning to variables in rpy2.
cache_display_data : bool
If True, the published results of the final call to R are
cached in the variable 'display_cache'.
"""
super(RMagics, self).__init__(shell)
self.cache_display_data = cache_display_data
self.r = ro.R()
self.Rstdout_cache = []
self.pyconverter = pyconverter
self.Rconverter = Rconverter
def eval(self, line):
'''
Parse and evaluate a line of R code with rpy2.
Returns the output to R's stdout() connection,
the value generated by evaluating the code, and a
boolean indicating whether the return value would be
visible if the line of code were evaluated in an R REPL.
R Code evaluation and visibility determination are
done via an R call of the form withVisible({<code>})
'''
old_writeconsole = ri.get_writeconsole()
ri.set_writeconsole(self.write_console)
try:
res = ro.r("withVisible({%s\n})" % line)
value = res[0] #value (R object)
visible = ro.conversion.ri2py(res[1])[0] #visible (boolean)
except (ri.RRuntimeError, ValueError) as exception:
warning_or_other_msg = self.flush() # otherwise next return seems to have copy of error
raise RInterpreterError(line, str_to_unicode(str(exception)), warning_or_other_msg)
text_output = self.flush()
ri.set_writeconsole(old_writeconsole)
return text_output, value, visible
def write_console(self, output):
'''
A hook to capture R's stdout in a cache.
'''
self.Rstdout_cache.append(output)
def flush(self):
'''
Flush R's stdout cache to a string, returning the string.
'''
value = ''.join([str_to_unicode(s, 'utf-8') for s in self.Rstdout_cache])
self.Rstdout_cache = []
return value
@skip_doctest
@needs_local_scope
@line_magic
def Rpush(self, line, local_ns=None):
'''
A line-level magic for R that pushes
variables from python to rpy2. The line should be made up
of whitespace separated variable names in the IPython
namespace::
In [7]: import numpy as np
In [8]: X = np.array([4.5,6.3,7.9])
In [9]: X.mean()
Out[9]: 6.2333333333333343
In [10]: %Rpush X
In [11]: %R mean(X)
Out[11]: array([ 6.23333333])
'''
if local_ns is None:
local_ns = {}
inputs = line.split(' ')
for input in inputs:
try:
val = local_ns[input]
except KeyError:
try:
val = self.shell.user_ns[input]
except KeyError:
# reraise the KeyError as a NameError so that it looks like
# the standard python behavior when you use an unnamed
# variable
raise NameError("name '%s' is not defined" % input)
self.r.assign(input, self.pyconverter(val))
@skip_doctest
@magic_arguments()
@argument(
'-d', '--as_dataframe', action='store_true',
default=False,
help='Convert objects to data.frames before returning to ipython.'
)
@argument(
'outputs',
nargs='*',
)
@line_magic
def Rpull(self, line):
'''
A line-level magic for R that pulls
variables from rpy2 (R) into the IPython namespace::
In [18]: _ = %R x = c(3,4,6.7); y = c(4,6,7); z = c('a',3,4)
In [19]: %Rpull x y z
In [20]: x
Out[20]: array([ 3. , 4. , 6.7])
In [21]: y
Out[21]: array([ 4., 6., 7.])
In [22]: z
Out[22]:
array(['a', '3', '4'],
dtype='|S1')
If --as_dataframe, then each object is returned as a structured array
after first being passed through ``as.data.frame`` in R and then through self.Rconverter.
This is useful when a structured array is desired as output, or
when the object in R has mixed data types.
See the %%R docstring for more examples.
Notes
-----
Beware that R names can contain '.', so this is not foolproof.
To avoid this, don't name your R objects with '.'s...
'''
args = parse_argstring(self.Rpull, line)
outputs = args.outputs
for output in outputs:
self.shell.push({output:self.Rconverter(self.r(output),dataframe=args.as_dataframe)})
@skip_doctest
@magic_arguments()
@argument(
'-d', '--as_dataframe', action='store_true',
default=False,
help='Convert objects to data.frames before returning to ipython.'
)
@argument(
'output',
nargs=1,
type=str,
)
@line_magic
def Rget(self, line):
'''
Return an object from rpy2, as a structured array if possible.
Similar to Rpull except only one argument is accepted and the value is
returned rather than pushed to self.shell.user_ns::
In [3]: dtype=[('x', '<i4'), ('y', '<f8'), ('z', '|S1')]
In [4]: datapy = np.array([(1, 2.9, 'a'), (2, 3.5, 'b'), (3, 2.1, 'c'), (4, 5, 'e')], dtype=dtype)
In [5]: %R -i datapy
In [6]: %Rget datapy
Out[6]:
array([['1', '2', '3', '4'],
['2', '3', '2', '5'],
['a', 'b', 'c', 'e']],
dtype='|S1')
In [7]: %Rget -d datapy
Out[7]:
array([(1, 2.9, 'a'), (2, 3.5, 'b'), (3, 2.1, 'c'), (4, 5.0, 'e')],
dtype=[('x', '<i4'), ('y', '<f8'), ('z', '|S1')])
'''
args = parse_argstring(self.Rget, line)
output = args.output
return self.Rconverter(self.r(output[0]),dataframe=args.as_dataframe)
@skip_doctest
@magic_arguments()
@argument(
'-i', '--input', action='append',
help='Names of input variables from shell.user_ns to be assigned to R variables of the same names after calling self.pyconverter. Multiple names can be passed, separated only by commas with no whitespace.'
)
@argument(
'-o', '--output', action='append',
help='Names of variables to be pushed from rpy2 to shell.user_ns after executing cell body and applying self.Rconverter. Multiple names can be passed separated only by commas with no whitespace.'
)
@argument(
'-w', '--width', type=int,
help='Width of png plotting device sent as an argument to *png* in R.'
)
@argument(
'-h', '--height', type=int,
help='Height of png plotting device sent as an argument to *png* in R.'
)
@argument(
'-d', '--dataframe', action='append',
help='Convert these objects to data.frames and return as structured arrays.'
)
@argument(
'-u', '--units', type=unicode_type, choices=["px", "in", "cm", "mm"],
help='Units of png plotting device sent as an argument to *png* in R. One of ["px", "in", "cm", "mm"].'
)
@argument(
'-r', '--res', type=int,
help='Resolution of png plotting device sent as an argument to *png* in R. Defaults to 72 if *units* is one of ["in", "cm", "mm"].'
)
@argument(
'-p', '--pointsize', type=int,
help='Pointsize of png plotting device sent as an argument to *png* in R.'
)
@argument(
'-b', '--bg',
help='Background of png plotting device sent as an argument to *png* in R.'
)
@argument(
'-n', '--noreturn',
help='Force the magic to not return anything.',
action='store_true',
default=False
)
@argument(
'code',
nargs='*',
)
@needs_local_scope
@line_cell_magic
def R(self, line, cell=None, local_ns=None):
'''
Execute code in R, and pull some of the results back into the Python namespace.
In line mode, this will evaluate an expression and convert the returned value to a Python object.
The return value is determined by rpy2's behaviour of returning the result of evaluating the
final line.
Multiple R lines can be executed by joining them with semicolons::
In [9]: %R X=c(1,4,5,7); sd(X); mean(X)
Out[9]: array([ 4.25])
In cell mode, this will run a block of R code. The resulting value
is printed if it would be printed when evaluating the same code
within a standard R REPL.
Nothing is returned to python by default in cell mode::
In [10]: %%R
....: Y = c(2,4,3,9)
....: summary(lm(Y~X))
Call:
lm(formula = Y ~ X)
Residuals:
1 2 3 4
0.88 -0.24 -2.28 1.64
Coefficients:
Estimate Std. Error t value Pr(>|t|)
(Intercept) 0.0800 2.3000 0.035 0.975
X 1.0400 0.4822 2.157 0.164
Residual standard error: 2.088 on 2 degrees of freedom
Multiple R-squared: 0.6993,Adjusted R-squared: 0.549
F-statistic: 4.651 on 1 and 2 DF, p-value: 0.1638
In the notebook, plots are published as the output of the cell::
%R plot(X, Y)
will create a scatter plot of X vs Y.
If cell is not None and line has some R code, it is prepended to
the R code in cell.
Objects can be passed back and forth between rpy2 and python via the -i -o flags in line::
In [14]: Z = np.array([1,4,5,10])
In [15]: %R -i Z mean(Z)
Out[15]: array([ 5.])
In [16]: %R -o W W=Z*mean(Z)
Out[16]: array([ 5., 20., 25., 50.])
In [17]: W
Out[17]: array([ 5., 20., 25., 50.])
The return value is determined by these rules:
* If the cell is not None, the magic returns None.
* If the cell evaluates as False, the resulting value is returned
unless the final line prints something to the console, in
which case None is returned.
* If the final line results in a NULL value when evaluated
by rpy2, then None is returned.
* No attempt is made to convert the final value to a structured array.
Use the --dataframe flag or %Rget to push / return a structured array.
* If the -n flag is present, there is no return value.
* A trailing ';' will also result in no return value as the last
value in the line is an empty string.
The --dataframe argument will attempt to return structured arrays.
This is useful for dataframes with
mixed data types. Note also that for a data.frame,
if it is returned as an ndarray, it is transposed::
In [18]: dtype=[('x', '<i4'), ('y', '<f8'), ('z', '|S1')]
In [19]: datapy = np.array([(1, 2.9, 'a'), (2, 3.5, 'b'), (3, 2.1, 'c'), (4, 5, 'e')], dtype=dtype)
In [20]: %%R -o datar
datar = datapy
....:
In [21]: datar
Out[21]:
array([['1', '2', '3', '4'],
['2', '3', '2', '5'],
['a', 'b', 'c', 'e']],
dtype='|S1')
In [22]: %%R -d datar
datar = datapy
....:
In [23]: datar
Out[23]:
array([(1, 2.9, 'a'), (2, 3.5, 'b'), (3, 2.1, 'c'), (4, 5.0, 'e')],
dtype=[('x', '<i4'), ('y', '<f8'), ('z', '|S1')])
The --dataframe argument first tries colnames, then names.
If both are NULL, it returns an ndarray (i.e. unstructured)::
In [1]: %R mydata=c(4,6,8.3); NULL
In [2]: %R -d mydata
In [3]: mydata
Out[3]: array([ 4. , 6. , 8.3])
In [4]: %R names(mydata) = c('a','b','c'); NULL
In [5]: %R -d mydata
In [6]: mydata
Out[6]:
array((4.0, 6.0, 8.3),
dtype=[('a', '<f8'), ('b', '<f8'), ('c', '<f8')])
In [7]: %R -o mydata
In [8]: mydata
Out[8]: array([ 4. , 6. , 8.3])
'''
args = parse_argstring(self.R, line)
# arguments 'code' in line are prepended to
# the cell lines
if cell is None:
code = ''
return_output = True
line_mode = True
else:
code = cell
return_output = False
line_mode = False
code = ' '.join(args.code) + code
# if there is no local namespace then default to an empty dict
if local_ns is None:
local_ns = {}
if args.input:
for input in ','.join(args.input).split(','):
try:
val = local_ns[input]
except KeyError:
try:
val = self.shell.user_ns[input]
except KeyError:
raise NameError("name '%s' is not defined" % input)
self.r.assign(input, self.pyconverter(val))
if getattr(args, 'units') is not None:
if args.units != "px" and getattr(args, 'res') is None:
args.res = 72
args.units = '"%s"' % args.units
png_argdict = dict([(n, getattr(args, n)) for n in ['units', 'res', 'height', 'width', 'bg', 'pointsize']])
png_args = ','.join(['%s=%s' % (o,v) for o, v in png_argdict.items() if v is not None])
# execute the R code in a temporary directory
tmpd = tempfile.mkdtemp()
self.r('png("%s/Rplots%%03d.png",%s)' % (tmpd.replace('\\', '/'), png_args))
text_output = ''
try:
if line_mode:
for line in code.split(';'):
text_result, result, visible = self.eval(line)
text_output += text_result
if text_result:
# the last line printed something to the console so we won't return it
return_output = False
else:
text_result, result, visible = self.eval(code)
text_output += text_result
if visible:
old_writeconsole = ri.get_writeconsole()
ri.set_writeconsole(self.write_console)
ro.r.show(result)
text_output += self.flush()
ri.set_writeconsole(old_writeconsole)
except RInterpreterError as e:
print(e.stdout)
if not e.stdout.endswith(e.err):
print(e.err)
rmtree(tmpd)
return
finally:
self.r('dev.off()')
# read out all the saved .png files
images = [open(imgfile, 'rb').read() for imgfile in glob("%s/Rplots*png" % tmpd)]
# now publish the images
# mimicking IPython/zmq/pylab/backend_inline.py
fmt = 'png'
mimetypes = { 'png' : 'image/png', 'svg' : 'image/svg+xml' }
mime = mimetypes[fmt]
# publish the printed R objects, if any
display_data = []
if text_output:
display_data.append(('RMagic.R', {'text/plain':text_output}))
# flush text streams before sending figures, helps a little with output
for image in images:
# synchronization in the console (though it's a bandaid, not a real sln)
sys.stdout.flush(); sys.stderr.flush()
display_data.append(('RMagic.R', {mime: image}))
# kill the temporary directory
rmtree(tmpd)
# try to turn every output into a numpy array
# this means that outputs are assumed to be castable
# as numpy arrays
if args.output:
for output in ','.join(args.output).split(','):
self.shell.push({output:self.Rconverter(self.r(output), dataframe=False)})
if args.dataframe:
for output in ','.join(args.dataframe).split(','):
self.shell.push({output:self.Rconverter(self.r(output), dataframe=True)})
for tag, disp_d in display_data:
publish_display_data(tag, disp_d)
# this will keep a reference to the display_data
# which might be useful to other objects who happen to use
# this method
if self.cache_display_data:
self.display_cache = display_data
# if in line mode and return_output, return the result as an ndarray
if return_output and not args.noreturn:
if result != ri.NULL:
return self.Rconverter(result, dataframe=False)
__doc__ = __doc__.format(
R_DOC = dedent(RMagics.R.__doc__),
RPUSH_DOC = dedent(RMagics.Rpush.__doc__),
RPULL_DOC = dedent(RMagics.Rpull.__doc__),
RGET_DOC = dedent(RMagics.Rget.__doc__)
)
def load_ipython_extension(ip):
"""Load the extension in IPython."""
ip.register_magics(RMagics)
# Initialising rpy2 interferes with readline. Since, at this point, we've
# probably just loaded rpy2, we reset the delimiters. See issue gh-2759.
if ip.has_readline:
ip.readline.set_completer_delims(ip.readline_delims)
|
Ventura, California.........Both nights of the 77th running of the "Turkey Night Grand Prix" from Ventura Raceway will be LIVE on http://www.SpeedShiftTV.com/.
"Turkey Night" from the seaside oval begins Wednesday, Nov. 22 with a full program featuring the USAC West Coast and VRA Sprint Cars, plus an open practice for midgets. The following night, Thanksgiving Thursday, Nov. 23, will once again include the sprint cars as well as the prestigious, 98-lap midget race.
At press time, a hefty field of 52 drivers has entered the famed midget race, including heavy hitters such as past "Turkey Night" winners Dave Darland (2007 & 2013), Kyle Larson (2012 & 2016), Christopher Bell (2014) and Tanner Thorson (2015), plus 2017 USAC National Midget winners Justin Grant, Brady Bacon, Tyler Courtney, Michael Pickens, series point leader Spencer Bayston and Chad Boat, as well as diverse racing standouts Brad Sweet and Chase Briscoe, among many others.
The Sprint Car field is stacked as well, with two handfuls of drivers preparing for double duty by competing in both divisions. Those drivers are eligible for the Automotive Racing Products' $50,000 bonus, which goes to any driver who can win both the midget and sprint car features on Thursday night.
Cars are set to hit the track around 3:30pm Pacific at Ventura Raceway both nights. As always, you can watch every USAC race, including "Turkey Night," on demand the day following the event at http://www.Loudpedal.TV/.
|
#
# SettingsTab
#
tab = self.notebook.settingsTab
tab.settings['Eckart flag'] = False
tab.settings['Neutral Born charges'] = False
tab.settings['Sigma value'] = 5
tab.settings['Mass definition'] = 'average'
#
# 0th Scenario tabs
#
tab = self.notebook.scenarios[0]
tab.settings['Matrix'] = 'ptfe'
tab.settings['Mass or volume fraction'] = 'volume'
tab.settings['Volume fraction'] = 0.1
tab.settings['Ellipsoid a/b'] = 0.5
tab.settings['Unique direction - h'] = 0
tab.settings['Unique direction - k'] = 0
tab.settings['Unique direction - l'] = 0
tab.settings['Effective medium method'] = 'Averaged permittivity'
tab.settings['Particle shape'] = 'Sphere'
tab.settings['Particle size(mu)'] = 0.0001
tab.settings['Legend'] = 'Averaged permittivity'
# Add new scenarios
methods = ['Bruggeman']
shapes = ['Ellipsoid','Plate']
hkls = [[0,0,1], [1,0,0]]
vfs = [0.1]
sizes = [0.0, 1.0, 3.0]
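# The nested loops below add one scenario per (method, shape/hkl, volume fraction,
# particle size) combination; with the lists above that is 1 x 2 x 1 x 3 = 6
# additional scenarios, each tagged with a legend built from its settings.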
for method in methods:
for shape,hkl in zip(shapes,hkls):
for vf in vfs:
for size in sizes:
self.notebook.addScenario()
tab = self.notebook.scenarios[-1]
tab.settings['Volume fraction'] = vf
tab.settings['Particle shape'] = shape
tab.settings['Particle size(mu)'] = size
tab.settings['Effective medium method'] = method
tab.settings['Unique direction - h'] = hkl[0]
tab.settings['Unique direction - k'] = hkl[1]
tab.settings['Unique direction - l'] = hkl[2]
#tab.settings['Legend'] = method + ' ' + shape + ' vf='+str(vf)+' size='+str(size)
tab.settings['Legend'] = method + ' ' + shape + str(hkl) + ' size='+str(size)
#
# Plotting Tab
#
tab = self.notebook.plottingTab
tab.settings['Minimum frequency'] = 0.0
tab.settings['Maximum frequency'] = 300.0
tab.settings['Frequency increment'] = 0.2
tab.settings['Molar definition'] = 'Unit cells'
tab.settings['Plot title'] = 'Size Effects BaTiO3'
#
# Analysis Tab
#
tab = self.notebook.analysisTab
tab.settings['Minimum frequency'] = -1
tab.settings['Maximum frequency'] = 800
tab.settings['title'] = 'Analysis'
tab.settings['Covalent radius scaling'] = 1.1
tab.settings['Bonding tolerance'] = 0.1
tab.settings['Bar width'] = 0.5
#
|
All documents, software and descriptions that are made available for viewing or download are protected by copyright, and all rights are reserved.
By downloading, the user agrees to our general terms of business and licence agreements.
Links on this website to third-party sites are not under our control. We expressly stress that we have no influence on the design or content of the linked sites, and we can likewise offer no guarantee regarding data protection on those sites. These links are intended only as an additional service, a finding aid for our website visitors.
|