#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -------------------------------------- #
# Enables python3-like strings handling
from __future__ import unicode_literals
str = unicode
# -------------------------------------- #
import collections
from birda.bModel.widget import Widget
from birda.bModel import BIRDA
from birda.storage.utils import (
get_types,
get_property,
prettify,
get_co_list,
get_by_lang
)
# Consts ...
# ============================================================================ #
class FormWidget(Widget):
def __init__(self, conn, rdfw=None, uri=''):
super(FormWidget, self).__init__(
conn, rdfw=rdfw, uri=uri,
actionable=True, hierarchical=True)
self.attributes.update( self._get_specific_attributes() )
# --------------------------------- #
def _get_specific_attributes(self):
"""
Get the attributes specific for this type of widget
:return: Dictionary containing widget properties
"""
a = collections.OrderedDict()
a['maps_type'] = get_property(self.conn, self.uri, BIRDA.mapsType, rdfw=self.rdfw, lexical=True, single=True)
a['base_uri'] = get_property(self.conn, self.uri, BIRDA.hasBaseIRI, rdfw=self.rdfw, lexical=True, single=True)
a['label_property'] = get_property(self.conn, self.uri, BIRDA.usesPropertyForLabel, rdfw=self.rdfw, lexical=True, single=True)
a['descr_property'] = get_property(self.conn, self.uri, BIRDA.usesPropertyForDescription, rdfw=self.rdfw, lexical=True, single=True)
def fields2list(fields):
if fields:
return [ str(f).strip() for f in str(fields).split(',') ]
else:
return []
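# Illustrative behaviour of the helper above (the property values are hypothetical, not from the ontology):
# fields2list("foaf:givenName, foaf:familyName") -> ['foaf:givenName', 'foaf:familyName']
# fields2list(None) -> []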
a['local_name'] = {}
a['local_name']['fields'] = fields2list( get_property(self.conn, self.uri, BIRDA.hasLocalNameFields, rdfw=self.rdfw, lexical=True, single=True) )
a['local_name']['separator'] = get_property(self.conn, self.uri, BIRDA.hasLocalNameSeparator, rdfw=self.rdfw, lexical=True, single=True)
a['local_name']['tokenSeparator'] = get_property(self.conn, self.uri, BIRDA.hasLocalNameTokenSeparator, rdfw=self.rdfw, lexical=True, single=True)
a['local_name']['renderer'] = get_property(self.conn, self.uri, BIRDA.hasLocalNameRenderer, rdfw=self.rdfw, lexical=True, single=True)
return a
# --------------------------------- #
def to_rdf(self, value, lang=None):
"""
See Widget.to_rdf declaration
"""
raise NotImplementedError('This method should not be invoked on a Form widget')
# --------------------------------- #
def getJSON(self, lang):
"""
Inherited from Widget
(pop and re-add fields for a better readability of output json)
"""
j = super(FormWidget, self).getJSON(lang)
fields = j.pop('fields')
j.pop('w_type')
uri = j.pop('widget_uri')
j['form_uri'] = uri
j['maps_type'] = self.attributes['maps_type']
j['base_uri'] = self.attributes['base_uri']
j['label_property'] = self.attributes['label_property']
j['descr_property'] = self.attributes['descr_property']
if self.attributes['local_name']['fields']:
j['local_name'] = self.attributes['local_name']
else:
# Will be created by the schema
#j['local_name'] = {}
pass
j['fields'] = fields
return j
# ---------------------------------------------------------------------------- #
# ============================================================================ #
if __name__ == '__main__':
pass |
South Korea’s National Pension Service (NPS), the world’s third-largest pension fund by assets, said on Thursday it has filed a suit against Volkswagen in a German court over the automaker’s emissions scandal.
The fund is joining a raft of investors suing the German automaker, which admitted last year to selling 11 million diesel vehicles worldwide with rigged emissions systems, sending its shares plunging.
NPS, which held 26.7 billion won ($24.30 million) worth of preferred shares in Volkswagen as of the end of 2015, has made a damages claim, accusing the automaker of providing “incorrect information” to investors, an NPS spokeswoman said.
A spokeswoman at Volkswagen’s South Korean unit had no immediate comment when asked about the lawsuit.
Volkswagen faces a combined 8.2 billion euros ($9.1 billion) in damages claims from investors over its emissions scandal in the legal district where the car maker is based, a German court said last week.
About 1,400 lawsuits have been lodged at the regional court in Braunschweig near Volkswagen’s Wolfsburg headquarters, the court said. |
import collections
import datetime
import re
from copy import copy
from typing import Callable, Dict, List, Optional, Sequence, Set, Tuple, Union
from discord import ChannelType, Client, DMChannel, File, GroupChannel, TextChannel
from discord.abc import Messageable
from discord.ext import commands
from discord.member import Member
from discord.message import Message
from discordbot import emoji
from magic import card, card_price, fetcher, image_fetcher, oracle
from magic.models import Card
from magic.whoosh_search import SearchResult, WhooshSearcher
from shared import configuration, dtutil
from shared.lazy import lazy_property
DEFAULT_CARDS_SHOWN = 4
MAX_CARDS_SHOWN = 10
DISAMBIGUATION_EMOJIS = [':one:', ':two:', ':three:', ':four:', ':five:']
DISAMBIGUATION_EMOJIS_BY_NUMBER = {1: '1⃣', 2: '2⃣', 3: '3⃣', 4: '4⃣', 5: '5⃣'}
DISAMBIGUATION_NUMBERS_BY_EMOJI = {'1⃣': 1, '2⃣': 2, '3⃣': 3, '4⃣': 4, '5⃣': 5}
HELP_GROUPS: Set[str] = set()
@lazy_property
def searcher() -> WhooshSearcher:
return WhooshSearcher()
async def respond_to_card_names(message: Message, client: Client) -> None:
# Don't parse messages with Gatherer URLs because they use square brackets in the querystring.
if 'gatherer.wizards.com' in message.content.lower():
return
compat = message.channel.type == ChannelType.text and client.get_user(268547439714238465) in message.channel.members
queries = parse_queries(message.content, compat)
if len(queries) > 0:
await message.channel.trigger_typing()
results = results_from_queries(queries)
cards = []
for i in results:
(r, mode, preferred_printing) = i
if r.has_match() and not r.is_ambiguous():
cards.extend(cards_from_names_with_mode([r.get_best_match()], mode, preferred_printing))
elif r.is_ambiguous():
cards.extend(cards_from_names_with_mode(r.get_ambiguous_matches(), mode, preferred_printing))
await post_cards(client, cards, message.channel, message.author)
def parse_queries(content: str, scryfall_compatability_mode: bool) -> List[str]:
to_scan = re.sub('`{1,3}[^`]*?`{1,3}', '', content, flags=re.DOTALL) # Ignore square brackets inside backticks. It's annoying in #code.
if scryfall_compatability_mode:
queries = re.findall(r'(?<!\[)\[([^\]]*)\](?!\])', to_scan) # match [card] but not [[card]]
else:
queries = re.findall(r'\[?\[([^\]]*)\]\]?', to_scan)
return [card.canonicalize(query) for query in queries if len(query) > 2]
def cards_from_names_with_mode(cards: Sequence[Optional[str]], mode: str, preferred_printing: Optional[str] = None) -> List[Card]:
return [copy_with_mode(oracle.load_card(c), mode, preferred_printing) for c in cards if c is not None]
def copy_with_mode(oracle_card: Card, mode: str, preferred_printing: Optional[str] = None) -> Card:
c = copy(oracle_card)
c['mode'] = mode
c['preferred_printing'] = preferred_printing
return c
def parse_mode(query: str) -> Tuple[str, str, Optional[str]]:
mode = ''
preferred_printing = None
if query.startswith('$'):
mode = '$'
query = query[1:]
if '|' in query:
query, preferred_printing = query.split('|')
preferred_printing = preferred_printing.lower().strip()
return (mode, query, preferred_printing)
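# Illustrative examples (card and set names here are hypothetical, not taken from this codebase):
# parse_mode('$Lightning Bolt|M10') -> ('$', 'Lightning Bolt', 'm10')
# parse_mode('Lightning Bolt')      -> ('', 'Lightning Bolt', None)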
def results_from_queries(queries: List[str]) -> List[Tuple[SearchResult, str, Optional[str]]]:
all_results = []
for query in queries:
mode, query, preferred_printing = parse_mode(query)
result = searcher().search(query)
all_results.append((result, mode, preferred_printing))
return all_results
def complex_search(query: str) -> List[Card]:
if query == '':
return []
_, cardnames = fetcher.search_scryfall(query)
cbn = oracle.cards_by_name()
return [cbn[name] for name in cardnames if cbn.get(name) is not None]
def roughly_matches(s1: str, s2: str) -> bool:
return simplify_string(s1).find(simplify_string(s2)) >= 0
def simplify_string(s: str) -> str:
s = ''.join(s.split())
return re.sub(r'[\W_]+', '', s).lower()
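# Illustrative examples (hypothetical inputs): simplify_string strips whitespace and punctuation and lowercases,
# so simplify_string('Fire // Ice!') -> 'fireice', and roughly_matches('Fire // Ice', 'ice') -> True
# because the simplified needle is found as a substring of the simplified haystack.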
def disambiguation(cards: List[str]) -> str:
if len(cards) > 5:
return ','.join(cards)
return ' '.join([' '.join(x) for x in zip(DISAMBIGUATION_EMOJIS, cards)])
async def disambiguation_reactions(message: Message, cards: List[str]) -> None:
for i in range(1, len(cards) + 1):
await message.add_reaction(DISAMBIGUATION_EMOJIS_BY_NUMBER[i])
async def single_card_or_send_error(channel: TextChannel, args: str, author: Member, command: str) -> Optional[Card]:
if not args:
await send(channel, '{author}: Please specify a card name.'.format(author=author.mention))
return None
result, mode, preferred_printing = results_from_queries([args])[0]
if result.has_match() and not result.is_ambiguous():
return cards_from_names_with_mode([result.get_best_match()], mode, preferred_printing)[0]
if result.is_ambiguous():
message = await send(channel, '{author}: Ambiguous name for {c}. Suggestions: {s} (click number below)'.format(author=author.mention, c=command, s=disambiguation(result.get_ambiguous_matches()[0:5])))
await disambiguation_reactions(message, result.get_ambiguous_matches()[0:5])
else:
await send(channel, '{author}: No matches.'.format(author=author.mention))
return None
# pylint: disable=too-many-arguments
async def single_card_text(client: Client, channel: TextChannel, args: str, author: Member, f: Callable[[Card], str], command: str, show_legality: bool = True) -> None:
c = await single_card_or_send_error(channel, args, author, command)
if c is not None:
name = c.name
info_emoji = emoji.info_emoji(c, show_legality=show_legality)
text = emoji.replace_emoji(f(c), client)
message = f'**{name}** {info_emoji} {text}'
await send(channel, message)
async def post_cards(
client: Client,
cards: List[Card],
channel: Messageable,
replying_to: Optional[Member] = None,
additional_text: str = '',
) -> None:
if len(cards) == 0:
await post_no_cards(channel, replying_to)
return
not_pd = configuration.get_list('not_pd')
disable_emoji = str(channel.id) in not_pd or (getattr(channel, 'guild', None) is not None and str(channel.guild.id) in not_pd)
cards = uniqify_cards(cards)
if len(cards) > MAX_CARDS_SHOWN:
cards = cards[:DEFAULT_CARDS_SHOWN]
if len(cards) == 1:
text = single_card_text_internal(client, cards[0], disable_emoji)
else:
text = ', '.join('{name} {legal} {price}'.format(name=card.name, legal=((emoji.info_emoji(card)) if not disable_emoji else ''), price=((card_price.card_price_string(card, True)) if card.get('mode', None) == '$' else '')) for card in cards)
if len(cards) > MAX_CARDS_SHOWN:
image_file = None
else:
with channel.typing():
image_file = await image_fetcher.download_image_async(cards)
if image_file is None:
text += '\n\n'
if len(cards) == 1:
text += emoji.replace_emoji(cards[0].oracle_text, client)
else:
text += 'No image available.'
text += additional_text
if image_file is None:
await send(channel, text)
else:
await send_image_with_retry(channel, image_file, text)
async def post_no_cards(channel: Messageable, replying_to: Optional[Member] = None) -> None:
if replying_to is not None:
text = '{author}: No matches.'.format(author=replying_to.mention)
else:
text = 'No matches.'
message = await send(channel, text)
await message.add_reaction('❎')
async def send(channel: Messageable, content: str, file: Optional[File] = None) -> Message:
new_s = escape_underscores(content)
return await channel.send(file=file, content=new_s)
async def send_image_with_retry(channel: Messageable, image_file: str, text: str = '') -> None:
message = await send(channel, file=File(image_file), content=text)
if message and message.attachments and message.attachments[0].size == 0:
print('Message size is zero so resending')
await message.delete()
await send(channel, file=File(image_file), content=text)
def single_card_text_internal(client: Client, requested_card: Card, disable_emoji: bool) -> str:
mana = emoji.replace_emoji('|'.join(requested_card.mana_cost or []), client)
mana = mana.replace('|', ' // ')
legal = ' — ' + emoji.info_emoji(requested_card, verbose=True)
if disable_emoji:
legal = ''
if requested_card.get('mode', None) == '$':
text = '{name} {legal} — {price}'.format(name=requested_card.name, price=card_price.card_price_string(requested_card), legal=legal)
else:
text = '{name} {mana} — {type}{legal}'.format(name=requested_card.name, mana=mana, type=requested_card.type_line, legal=legal)
if requested_card.bugs:
for bug in requested_card.bugs:
text += '\n:lady_beetle:{rank} bug: {bug}'.format(bug=bug['description'], rank=bug['classification'])
if bug['last_confirmed'] < (dtutil.now() - datetime.timedelta(days=60)):
time_since_confirmed = (dtutil.now() - bug['last_confirmed']).total_seconds()
text += ' (Last confirmed {time} ago.)'.format(time=dtutil.display_time(time_since_confirmed, 1))
return text
# See #5532 and #5566.
def escape_underscores(s: str) -> str:
new_s = ''
in_url, in_emoji = False, False
for char in s:
if char == ':':
in_emoji = True
elif char not in 'abcdefghijklmnopqrstuvwxyz_':
in_emoji = False
if char == '<':
in_url = True
elif char == '>':
in_url = False
if char == '_' and not in_url and not in_emoji:
new_s += '\\_'
else:
new_s += char
return new_s
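# Illustrative behaviour (hypothetical inputs): a bare underscore is escaped so Discord does not read it as markdown,
# e.g. escape_underscores('snapcaster_mage') returns 'snapcaster\_mage', while underscores inside <...> URLs
# (e.g. '<http://example.com/a_b>') and inside :emoji_codes: are left untouched.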
# Given a list of cards, return one (arbitrarily) for each unique name in the list.
def uniqify_cards(cards: List[Card]) -> List[Card]:
# Remove multiple printings of the same card from the result set.
results: Dict[str, Card] = collections.OrderedDict()
for c in cards:
results[card.canonicalize(c.name)] = c
return list(results.values())
def guild_or_channel_id(channel: Union[TextChannel, DMChannel, GroupChannel]) -> int:
return getattr(channel, 'guild', channel).id
class MtgContext(commands.Context):
async def send_image_with_retry(self, image_file: str, text: str = '') -> None:
message = await self.send(file=File(image_file), content=text)
if message and message.attachments and message.attachments[0].size == 0:
print('Message size is zero so resending')
await message.delete()
await self.send(file=File(image_file), content=text)
async def single_card_text(self, c: Card, f: Callable, show_legality: bool = True) -> None:
not_pd = configuration.get_list('not_pd')
if str(self.channel.id) in not_pd or (getattr(self.channel, 'guild', None) is not None and str(self.channel.guild.id) in not_pd):
show_legality = False
name = c.name
info_emoji = emoji.info_emoji(c, show_legality=show_legality)
text = emoji.replace_emoji(f(c), self.bot)
message = f'**{name}** {info_emoji} {text}'
await self.send(message)
async def post_cards(self, cards: List[Card], replying_to: Optional[Member] = None, additional_text: str = '') -> None:
# this feels awkward, but shrug
await post_cards(self.bot, cards, self.channel, replying_to, additional_text)
|
Broadleaf provides the most sought-after eCommerce functionality for supporting B2C, B2B, Marketplace, Multi-Site, Multi-Tenant and API Commerce at the best value in the market. Every solution can be customized to ensure your eCommerce site is tailored to your specific requirements. Robust functionality within a lightweight framework is among the characteristics that make Broadleaf stand out from the rest. Never feel restricted by a features list again.
Manage multi-store, multi-site, multi-currency, and multi-channel capabilities through one system. Ideal for managing multiple brands, franchises, marketplace vendor models, and merchant storefronts (similar to Etsy and Amazon), Broadleaf is the platform designed for your multi-everything needs.
Manage multiple disparate websites or manage a website with multiple, independent, vendor-managed web stores housed within it. Set permissions for admin privileges based on role or user type. Manage it all within one commerce solution.
Enable customers to purchase through a multitude of channels and devices including smart phone, tablet, APIs over Smart TV, gaming consoles/in-game apps – wherever your customers are. Display targeted content based on device, or take advantage of mobile-based B2B capabilities – it’s time to boost sales through heightened accessibility.
Global commerce is knocking. Borderless eCommerce capabilities allow you to capitalize on every sales opportunity. Broadleaf offers full internationalization (i18n) enablement, as well as geographical pricing and currency adjustments – providing customers everywhere tailored shopping experiences. |
import json
__all__ = ["Response"]
class Response:
"""The :class:`Response <Response>` object, which contains a
server's response to an HTTP request.
:param status_code: response status code
:param text: response content
:param headers: response headers
:param url: request url
"""
def __init__(self, status_code: int, text: str, headers: dict, url: str) -> None:
self.status_code: int = status_code
self.text: str = text
self.headers: dict = headers
self.url: str = url
def json(self) -> dict:
"""convert the content to dict
:return: the content from server
"""
return json.loads(self.text)
def __eq__(self, other: object) -> bool:
if not isinstance(other, self.__class__):
return NotImplemented # pragma: no cover
return (
self.status_code == other.status_code
and self.text == other.text
and self.headers == other.headers
and self.url == other.url
)
def __str__(self):
return (
f"<Response [status_code={self.status_code}, "
f"text={self.text}, "
f"headers={self.headers}, "
f"url={self.url}]>"
)
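# Minimal usage sketch (hypothetical values, not part of the original module):
# resp = Response(200, '{"ok": true}', {"Content-Type": "application/json"}, "https://example.com/api")
# resp.json()  -> {'ok': True}
# str(resp)    -> '<Response [status_code=200, text={"ok": true}, headers=..., url=https://example.com/api]>'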
|
Most of the time we take water for granted. It’s there when we turn on the tap, take a shower, or hop in the pool. It sits quietly in the background of our lives, yet without it, we wouldn’t be here.
Water is Everywhere: Yeah, we all knew that. But of the 332,500,000 cubic miles of water on earth, only 1% is fresh and accessible—including not-so-environmentally-friendly bottled water. So what can we do to help the planet and get our hands on clean, eco-friendly water? Vero Water provides the perfect solution in its water bottling system. Vero Water filtration and bottling systems waste less energy and provide clean, fresh water that is simply the best tasting in the business.
“Fresh” is a Relative Term: Before 2009, federal regulations did not have high standards for bottled water. In fact, they didn’t even require water bottlers to check for and remove E. coli! And in 1999, the NRDC found that one brand of bottled “spring water” came from a well in an industrial parking lot near a hazardous waste facility. Fortunately, Vero Water’s proprietary purification system, Vero+, removes impurities, chemicals and imperfections on-site while leaving the crisp signature taste of Vero Water every time.
You Could Try to Make Water Yourself: By simply mixing hydrogen molecules and oxygen molecules and adding a spark, you’d come up with pure, clean water. Unfortunately, this is the process that brought down the Hindenburg… Better to leave the purification to the pros at Vero Water. It’s all of the homemade satisfaction with none of the explosion. |
import re
import subprocess
from nexentaedge.settings import Settings
from nexentaedge.nedgeBlockerException import NedgeBlockerException
from baseConfigurationStep import BaseConfigurationStep
blocker_patterns = [r'^.*(Less\s+then\s+\d+.*disks)$',
r'^.*(Interface.*missing).*$',
r'^.*(Network interface too slow)$',
r'^.*(Not enough RAM memory.*GB).*$'
]
class NedeployPrecheck(BaseConfigurationStep):
def __init__(self):
pass
def process(self, environment):
neadmCmd = self.create_precheck_cmd(environment)
print("NEDEPLOY cmd is: {0}".format(' '.join(neadmCmd)))
try:
subprocess.check_output(neadmCmd,
stderr=subprocess.STDOUT,
universal_newlines=True)
except subprocess.CalledProcessError as ex:
print(" OUTPUT:\n{}".format(ex.output))
blockers = self.get_blockers(ex.output)
raise NedgeBlockerException(blockers)
def get_blockers(self, error_output):
results = []
for pattern in blocker_patterns:
m = re.search(pattern, error_output, re.MULTILINE)
if m:
results.append(m.group(1))
print('MATCHED {}'.format(m.group(1)))
return results
def create_precheck_cmd(self, environment):
node_private_ip = environment['node_private_ip']
node_type = environment['node_type']
replicast_eth = environment['replicast_eth']
nodocker = environment['nodocker']
profile = environment['profile']
exclude = environment['exclude']
reserved = environment['reserved']
print("\tnode_private_ip : {}".format(node_private_ip))
print("\tnode_type : {}".format(node_type))
print("\treplicast_eth : {}".format(replicast_eth))
print("\tnodocker : {}".format(nodocker))
print("\tprofile : {}".format(profile))
print("\texclude : {}".format(exclude))
print("\treserved : {}".format(reserved))
neadmCmd = [Settings.NEDEPLOY_CMD, 'precheck',
node_private_ip, 'root:nexenta', '-i',
replicast_eth]
if node_type == 'mgmt':
neadmCmd.append('-m')
activation_key = environment['nedge_activation_key']
if not activation_key:
raise NedgeBlockerException(['No activation key is provided'])
elif node_type == 'gateway':
# ADD GATEWAY parameter to deploy solo cmd
print("Gateway type selected!! ")
# profile section
neadmCmd.append('-t')
if profile.lower() == 'balanced':
neadmCmd.append('balanced')
elif profile.lower() == 'performance':
neadmCmd.append('performance')
else:
neadmCmd.append('capacity')
if nodocker is True:
neadmCmd.append('--nodocker')
if exclude:
neadmCmd.append('-x')
neadmCmd.append(exclude)
if reserved:
neadmCmd.append('-X')
neadmCmd.append(reserved)
return neadmCmd
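# Illustrative command assembly (hypothetical environment values, not from a real deployment):
# with node_private_ip='10.0.0.5', node_type='data', replicast_eth='eth1',
# nodocker=False, profile='balanced', exclude='' and reserved='',
# create_precheck_cmd() returns:
# [Settings.NEDEPLOY_CMD, 'precheck', '10.0.0.5', 'root:nexenta', '-i', 'eth1', '-t', 'balanced']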
|
Thursday, December 20 will be another lively evening at the Globe in Military Road, led by Reg Marchant and friends. Reg plays all kinds of instruments and is famous for getting everyone present involved and having fun. He has a great following, so the restaurant is likely to be busy. The music will be varied but predominantly carols with audience participation.
We got to spend a nice evening with Reg and Steve at the Waterworks just after New Year's. It was Frank's birthday and Reg got us all singing and talking. Fantastic time with great people and wonderful ale. |
#!/usr/bin/env python
# Copyright (c) 2014 Intel Corporation. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import fnmatch
import os
import stat
import subprocess
def GetJARFilename():
# Version of YUI Compressor.
version = "2.4.8"
# yuicompressor-*.jar was gotten from http://yui.github.io/yuicompressor/.
file_name = "yuicompressor-%s.jar" % version
cur_dir = os.path.realpath(os.path.dirname(__file__))
return os.path.join(cur_dir, "libs", file_name)
def GetFileList(path, ext, sub_dir = True):
if os.path.exists(path):
file_list = []
for name in os.listdir(path):
full_name = os.path.join(path, name)
st = os.lstat(full_name)
if stat.S_ISDIR(st.st_mode) and sub_dir:
file_list += GetFileList(full_name, ext)
elif os.path.isfile(full_name):
if fnmatch.fnmatch(full_name, ext):
file_list.append(full_name)
return file_list
else:
return []
def ExecuteCmd(path, ext):
file_list = GetFileList(path, "*." + ext)
for file_full_path in file_list:
if os.path.exists(file_full_path):
cmd_args = ["java", "-jar", GetJARFilename(), "--type=" + ext,
file_full_path, "-o", file_full_path]
subprocess.call(cmd_args)
class CompressJsAndCss(object):
def __init__(self, input_path):
self.input_path = input_path
def CompressJavaScript(self):
ExecuteCmd(self.input_path, "js")
def CompressCss(self):
ExecuteCmd(self.input_path, "css")
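# Minimal usage sketch (the path is hypothetical; requires Java and the bundled YUI Compressor jar):
# compressor = CompressJsAndCss("/path/to/web_app")
# compressor.CompressJavaScript()  # minifies every *.js file under the path, in place
# compressor.CompressCss()         # minifies every *.css file under the path, in place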
|
Emperor Raja Laut is a distinctive hardwood schooner sailing the stunning seas of Indonesia. Built by one of the area’s premier boat builders in 2005, she is perfectly adapted to the island-hopping nature of dive liveaboards in Indonesia. Itineraries include the areas of Komodo, Raja Ampat and Banda. She really lives up to her name, “King of the Sea”. Emperor Raja Laut comfortably accommodates 12 guests in 6 cabins, all below decks. There are 3 double bed cabins and 3 twin bed cabins, each with ensuite, mini safe, individually controlled air-conditioning and fan.
Includes shared cabin, 3 meals a day, afternoon snacks, 1 cocktail party on a local island (weather permitting), tea, coffee and water, 3 dive guides, 12 ltr air fills, weights and local airport transfers. Flights are not included.
Enjoy this stunning 5-star dive resort on Bunaken Island. The resort itself is set amid tranquil gardens; meander between the spa and the freeform infinity pool. At the end of each relaxing day, the multi-level cocktail bar is the place to be before you make your way to dinner. The restaurant is right next door. Breakfast, lunch and dinner are served here, with mouth-watering gourmet Asian and western cuisine and fantastic sea views. There’s no need to compromise here. It’s time to kick back and enjoy your stay at Bunaken Oasis Dive Resort. Fabulous diving with 50 superb dive sites around Bunaken and the neighbouring islands, plus the stunning house reef.
Includes flights, transfers, 7 nights traditional cottage (2 people sharing) full board, 17 boat dives and free nitrox, free house reef diving.
The Best of the Maldives is an ideal itinerary for any first-time visitor to the Maldives. Divers should be comfortable with drift diving, but no minimum number of dives is required. All diving is made from dhonis to give precise entry and exit points. Night diving is limited to sheltered moorings but is well worth jumping in for. With so many excellent and remarkable sites in and around the North and South Male Atolls and the Ari Atolls, the Best of the Maldives is a route that can be dived time and time again. Surely no one can tire of the jaw-dropping diversity and life on display. From the tiny creatures that thrill macro lovers to the elegant and inspiring big fish, there’s more than enough to keep you coming back for more.
Includes return flights, transfers, full board, all guided diving, tanks and weights.
Not all dive holidays are restful! That’s why we created the Red Sea Relaxed™ itinerary, a very rewarding, but more relaxing alternative to the more usual scuba diving itinerary. Do as much or as little as you like! Enjoy a mix of on board activities: diving, snorkelling, yoga and freediving. Achieve mindfulness in the sunshine and scenery in the stunning Red Sea. Take advantage of the open dive deck policy, visiting fewer locations over the week, for a Red Sea liveaboard with less rushing about. This is your holiday done your way and escorted by the lovely freediver Emma Farrell.
Includes return flights, full board, 12 ltr tanks, fully guided dives, plus morning and evening yoga and a try-freediving course with Emma Farrell.
|
"""
TODO:
-) understand no boundary condition
-) validate understanding with analytical solution
"""
import nanopores, dolfin, os
from nanopores.physics.simplepnps import SimpleNernstPlanckProblem
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import force_profiles
from collections import defaultdict
nanopores.add_params(
savefig = False
)
class DiffusionProblem1D(SimpleNernstPlanckProblem):
method = SimpleNernstPlanckProblem.method
method["iterative"] = False
@staticmethod
def initial_u(V, c0):
u = dolfin.Function(V)
u.interpolate(dolfin.Constant(c0))
return u
@staticmethod
def forms(V, geo, phys, F):
dx = geo.dx()
grad = phys.grad
kT = dolfin.Constant(phys.kT)
D = dolfin.Constant(Dtarget(phys.rTarget))
lscale = dolfin.Constant(phys.lscale)
n = dolfin.FacetNormal(geo.mesh)
c = dolfin.TrialFunction(V)
d = dolfin.TestFunction(V)
FF = dolfin.as_vector([F])
J = -D*grad(c) + D/kT*FF*c
a = dolfin.inner(J, grad(d))*dx
L = dolfin.Constant(0.)*d*dx
aNoBC = -lscale*dolfin.inner(J, n*d)*geo.ds("bottom")
a += aNoBC
return a, L
@staticmethod
def bcs(V, geo, c0):
bc = dict(
top = c0,
#bottom = c0,
)
return geo.pwBC(V, "c0", value=bc)
def current(geo, phys, c, F):
dx = geo.dx()
grad = phys.grad
lscale = phys.lscale
mol = phys.mol
kT = dolfin.Constant(phys.kT)
D = dolfin.Constant(Dtarget(phys.rTarget))
FF = dolfin.as_vector([F])
print "v = %s" % (Dtarget(phys.rTarget)*F(0.)/phys.kT,)
j = -D*grad(c) + D/kT*FF*c
#dolfin.plot(j)
#dolfin.interactive()
L = 20.
r0 = 1./lscale
Across = r0**2 * dolfin.pi
# current in N/s
J = mol * Across * dolfin.assemble(j[0]/dolfin.Constant(L) * dx)
# current in N/ms
J = J * 1e-3
return J
def Dtarget(r):
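# Stokes-Einstein diffusion coefficient for a sphere of radius r: D = kT / (6*pi*eta*r)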
return nanopores.kT/(6*dolfin.pi*nanopores.eta*r)
def J_FEM(F, c0):
geo = force_profiles.geo
phys = nanopores.Physics(geo=geo, rTarget=rMol*1e-9, lscale=1e9)
pde = nanopores.solve_pde(DiffusionProblem1D, geo=geo, phys=phys,
F=F, c0=c0, verbose=False)
c = pde.solution
return c, current(geo, phys, c, F)
def gather_currents(name, c0):
currents = defaultdict(list)
qmols = []
for results in force_profiles.Forces(name):
qmols.append(results["Q"])
for key in "F", "Fi", "Fi2":
f = results[key]
f = force_profiles.function_from_lambda(lambda z: 1e-12*f(z))
u, J = J_FEM(f, c0)
currents[key].append(J)
#force_profiles.plot_function(f, label="Q="+str(Q))
#if key=="F":
# force_profiles.plot_function(u, label="Q="+str(results["Q"]))
print "Q %s, J %s, Ji %s, Jib %s" % (
qmols[-1], currents["F"][-1], currents["Fi"][-1], currents["Fi2"][-1])
return qmols, currents
c0 = 1.6605 # [mol/m**3] = 1 molecule per (10nm)**3
#names = {0.25: "r025", 0.5: "r05", 0.2: "r02", 0.4: "r04", 0.75: "r075"}
items = (0.25, "r025"), (0.5, "r05"), (0.75, "r075")
figures = os.path.expanduser("~") + "/papers/pnps-numerics/figures/"
for rMol, name in items:
#plt.figure()
qmols, currents = gather_currents(name, c0)
#plt.legend()
fig, ax = plt.subplots()
ax.plot(qmols, currents["F"], "s-g", label="finite")
ax.plot(qmols, currents["Fi"], "s-b", label="point")
ax.plot(qmols, currents["Fi2"], "s-r", label="point, corrected")
#ax.set_aspect('equal')
tick_spacing = 1.
ax.xaxis.set_major_locator(ticker.MultipleLocator(tick_spacing))
#ax.set_ylim([-0.4, 0.]) #xaxis.set_ticks(np.arange(start, end, 0.712123))
#ax.grid(True, which='both')
#ax.axhline(y=0, color='#cccccc')
plt.title("r = %s" %rMol)
plt.xlabel("Molecule charge [q]")
plt.ylabel("Molecule current [1/ms]")
plt.legend()
if savefig:
fig = plt.gcf()
fig.set_size_inches(5, 4)
plt.savefig(figures + "molcurrent_r%.2f.eps" % rMol, bbox_inches='tight')
#plt.show()
|
What do you do when you are stuck spending four days on the couch with a sinus infection from H-E-Double Hockey Sticks? Besides whine to your dog and husband? You update your blog tabs. That’s what you do.
I updated the Home Projects, Photography/Blog Tips, Crafty Things, and Recipes tabs. I also added the Travel tab. Click around. See if you have missed anything important.
Maybe your dog needs a tie for your holiday photos?
Maybe you missed the hard hitting posts where I organized my spice racks or used burlap as wallpaper.
Important Tip #2: inlinkz made this update so much easier. I broke down and bought the subscription ($20 for the year). I am a weirdo about spending money on this blog. The only money I have previously spent is $5 for photo storage (which you are basically forced to do.) This subscription is totally worth the money.
I was trying to mess with code before. I know very little about coding. It’s just not my jam. (Unlike Reese’s Pieces. They are my jam.) It’s the site a lot of bloggers use for hosting link parties. But you can also use it to organize your projects page. Who knew?? It’s very user friendly. You enter the address of your blog post. It pulls the pictures from that post. You crop the photo, add your title and you are done. You can edit the page information at any time. Me likey. I might even host a link party. Who knows.
Speaking of parties. Check back tomorrow for a big announcement. Ooooooo, the suspense!
Did you find any fun posts that you missed? Are you an organized blogger? Just starting out? Reese’s Pieces fan? Pumped for the big announcement? |
"""
File: beam.py
Purpose: Defines the Beam note construct.
"""
from structure.abstract_note_collective import AbstractNoteCollective
from structure.note import Note
from structure.tuplet import Tuplet
from fractions import Fraction
from timemodel.duration import Duration
class Beam(AbstractNoteCollective):
"""
Beam is a grouping operation, having a set scaling ratio of 1/2, but unbounded aggregate duration.
The basic idea of a beam is that for a standalone beam, you can only add Notes of duration less than 1/4.
That duration is retained under the beam.
However, when a beam is added to a beam, it takes an additional reduction factor of 1/2.
Note that these factors aggregate multiplicatively through self.contextual_reduction_factor.
"""
FACTOR = Fraction(1, 2)
NOTE_QUALIFIER_DURATION = Duration(1, 4)
def __init__(self, abstract_note_list=list()):
"""
Constructor
Args:
abstract_note_list: list of notes, beams, and tuplets to add consecutively under the beam.
"""
AbstractNoteCollective.__init__(self)
self.append(abstract_note_list)
@property
def duration(self):
"""
This is an override of AbstractNoteCollective.duration.
Tuplet and Beam override this to do a simple summation of linearly laid out notes and sub-notes.
The reason is that the layout algorithm of these subclasses cannot use the relative_position
attribute as the algorithm determines that.
"""
d = Duration(0)
for note in self.sub_notes:
d += note.duration
return d
def append(self, notes):
"""
Append a set of abstract notes to the beam
Args:
notes: either a list of notes or a single note to add to the beam.
"""
if isinstance(notes, list):
for n in notes:
self.append(n)
return
elif isinstance(notes, Note) or isinstance(notes, AbstractNoteCollective):
self.add(notes, len(self.sub_notes))
def add(self, note, index):
"""
Beams can only add less than 1/4 notes, and arbitrary beams and tuplets.
Only added beams incur a reduction factor of 1/2
For collective notes, always apply the factor.
"""
if note.parent is not None:
raise Exception('Cannot add note already assigned a parent')
if index < 0 or index > len(self.sub_notes):
raise Exception('add note, index {0} not in range[0, {1}]'.format(index, len(self.sub_notes)))
if isinstance(note, Note):
if note.base_duration >= Duration(1, 4):
raise Exception(
"Attempt to add note with duration {0} greater than or equal to {1}".
format(note.duration, Beam.NOTE_QUALIFIER_DURATION))
new_factor = self.contextual_reduction_factor
elif isinstance(note, Beam):
new_factor = self.contextual_reduction_factor * Beam.FACTOR
elif isinstance(note, Tuplet):
new_factor = self.contextual_reduction_factor
else:
raise Exception('illegal type {0}'.format(type(note)))
self.sub_notes.insert(index, note)
note.parent = self
note.apply_factor(new_factor)
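# Illustrative factor arithmetic (assuming the enclosing beam has contextual_reduction_factor 1):
# a Note or Tuplet added here keeps factor 1, a nested Beam gets 1 * 1/2 = 1/2,
# and a Beam nested one level deeper aggregates to 1/4, multiplicatively, as described in the class docstring.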
# The following call will adjust layout from this point right upward
self.upward_forward_reloc_layout(note)
# see if prior note is tied, and if so, break the tie.
first_note = note
if not isinstance(note, Note):
first_note = note.get_first_note()
# If empty beam or tuplet is added, there is nothing to look for in terms of ties.
if first_note is None:
return
prior = first_note.prior_note()
if prior is not None and prior.is_tied_to:
prior.untie()
# notify up the tree of what has changed
self.notes_added([note])
def __str__(self):
base = 'Beam(Dur({0})Off({1})f={2})'.format(self.duration, self.relative_position,
self.contextual_reduction_factor)
s = base + '[' + (']' if len(self.sub_notes) == 0 else '\n')
for n in self.sub_notes:
s += ' ' + str(n) + '\n'
s += ']' if len(self.sub_notes) != 0 else ''
return s
|
The Global Healthcare IT Consulting Market report is a perfect source for acquiring a thorough synopsis of the market study, analysis, estimation and the factors influencing the industry. This market report makes it easy to achieve a supreme level of market insight and to identify the best market opportunities in specific markets. The report also presents statistics on the existing state of the industry, and thus proves to be an important source of guidance and direction for companies and investors that are interested in this market. Moreover, this Healthcare IT Consulting market report also provides a widespread evaluation of the market’s growth prospects and restrictions.
Global healthcare IT consulting market was valued at USD 15.46 billion in 2016 and is projected to reach USD 77.99 billion by 2025, growing at a CAGR of 19.7% from 2017 to 2025.
To serve the clients with the best market report in the industry, an expert team of skilled analysts, dynamic forecasters and knowledgeable researchers work carefully while preparing this Healthcare IT Consulting report. Healthcare IT Consulting Market research report delivers a close watch on leading competitors with strategic analysis, micro and macro market trend and scenarios, pricing analysis and a holistic overview of the market situations in the forecast period. The scope of the report extends from market scenarios to comparative pricing between major players, cost and profit of the specified market regions.
The market is largely fragmented, and the vast majority of the players operating in the global Healthcare IT Consulting market are taking steps to raise their market footprint by concentrating on product diversification and development, thereby helping them seize a larger share of the market.
Worldwide Healthcare IT Consulting Market Analysis to 2025 is a specialized and in-depth study of the Healthcare IT Consulting industry with a focus on the global market trend. The report aims to provide an overview of global Healthcare IT Consulting market with detailed market segmentation by product/application and geography. The global Healthcare IT Consulting market is expected to witness high growth during the forecast period. The report provides key statistics on the market status of the Healthcare IT Consulting players and offers key trends and opportunities in the market.
Also, key Healthcare IT Consulting Market players influencing the market are profiled in the study along with their SWOT analysis and market strategies. The report also focuses on leading industry players, with information such as company profiles, products and services offered, financial information for the last three years, and key developments in the past five years. |
from invoke import task
from shlex import quote
from colorama import Fore
import json
import os
import re
import requests
import subprocess
@task
def build(c):
"""
Build the infrastructure
"""
command = 'build'
command += ' --build-arg PROJECT_NAME=%s' % c.project_name
command += ' --build-arg USER_ID=%s' % c.user_id
with Builder(c):
for service in c.services_to_build_first:
docker_compose(c, '%s %s' % (command, service))
docker_compose(c, command)
@task
def up(c):
"""
Build and start the infrastructure
"""
build(c)
docker_compose(c, 'up --remove-orphans --detach')
@task
def start(c):
"""
Build and start the infrastructure, then install the application (composer, yarn, ...)
"""
if c.dinghy:
machine_running = c.run('dinghy status', hide=True).stdout
if machine_running.splitlines()[0].strip() != 'VM: running':
c.run('dinghy up --no-proxy')
c.run('docker-machine ssh dinghy "echo \'nameserver 8.8.8.8\' | sudo tee -a /etc/resolv.conf && sudo /etc/init.d/docker restart"')
stop_workers(c)
up(c)
cache_clear(c)
install(c)
migrate(c)
start_workers(c)
print(Fore.GREEN + 'The stack is now up and running.')
help(c)
@task
def install(c):
"""
Install the application (composer, yarn, ...)
"""
with Builder(c):
if os.path.isfile(c.root_dir + '/' + c.project_directory + '/composer.json'):
docker_compose_run(c, 'composer install -n --prefer-dist --optimize-autoloader', no_deps=True)
if os.path.isfile(c.root_dir + '/' + c.project_directory + '/yarn.lock'):
run_in_docker_or_locally_for_dinghy(c, 'yarn', no_deps=True)
elif os.path.isfile(c.root_dir + '/' + c.project_directory + '/package.json'):
run_in_docker_or_locally_for_dinghy(c, 'npm install', no_deps=True)
@task
def cache_clear(c):
"""
Clear the application cache
"""
# with Builder(c):
# docker_compose_run(c, 'rm -rf var/cache/ && php bin/console cache:warmup', no_deps=True)
@task
def migrate(c):
"""
Migrate database schema
"""
# with Builder(c):
# docker_compose_run(c, 'php bin/console doctrine:database:create --if-not-exists')
# docker_compose_run(c, 'php bin/console doctrine:migration:migrate -n --allow-no-migration')
@task
def builder(c, user="app"):
"""
Open a shell (bash) into a builder container
"""
with Builder(c):
docker_compose_run(c, 'bash', user=user, bare_run=True)
@task
def logs(c):
"""
Display infrastructure logs
"""
docker_compose(c, 'logs -f --tail=150')
@task
def ps(c):
"""
List containers status
"""
docker_compose(c, 'ps --all')
@task
def stop(c):
"""
Stop the infrastructure
"""
docker_compose(c, 'stop')
@task
def tests(c):
"""
Launch tests
"""
with Builder(c):
docker_compose_run(c, 'bin/phpunit')
@task
def qa(c):
"""
Run static analysis tools
"""
with Builder(c):
# Make tests analyses working with Symfony's PHPUnit bridge
docker_compose_run(c, 'vendor/bin/simple-phpunit install', no_deps=True)
docker_compose_run(c, 'vendor/bin/phpstan analyse', no_deps=True)
@task
def cs(c, dry_run=False):
"""
Fix coding standards in code
"""
with Builder(c):
if dry_run:
docker_compose_run(c, 'vendor/bin/php-cs-fixer fix --config=.php_cs --dry-run --diff', no_deps=True)
else:
docker_compose_run(c, 'vendor/bin/php-cs-fixer fix --config=.php_cs', no_deps=True)
docker_compose_run(c, 'pycodestyle --ignore=E501,W605,E722 invoke.py tasks.py', no_deps=True)
@task
def start_workers(c):
"""
Start the workers
"""
workers = get_workers(c)
if (len(workers) == 0):
return
c.start_workers = True
c.run('docker update --restart=unless-stopped %s' % (' '.join(workers)), hide='both')
docker_compose(c, 'up --remove-orphans --detach')
@task
def stop_workers(c):
"""
Stop the workers
"""
workers = get_workers(c)
if (len(workers) == 0):
return
c.start_workers = False
c.run('docker update --restart=no %s' % (' '.join(workers)), hide='both')
c.run('docker stop %s' % (' '.join(workers)), hide='both')
@task
def destroy(c, force=False):
"""
Clean the infrastructure (remove container, volume, networks)
"""
if not force:
ok = confirm_choice('Are you sure? This will permanently remove all containers, volumes, networks... created for this project.')
if not ok:
return
with Builder(c):
docker_compose(c, 'down --remove-orphans --volumes --rmi=local')
@task(default=True)
def help(c):
"""
Display some help and available urls for the current project
"""
print('Run ' + Fore.GREEN + 'inv help' + Fore.RESET + ' to display this help.')
print('')
print('Run ' + Fore.GREEN + 'inv --help' + Fore.RESET + ' to display invoke help.')
print('')
print('Run ' + Fore.GREEN + 'inv -l' + Fore.RESET + ' to list all the available tasks.')
c.run('inv --list')
print(Fore.GREEN + 'Available URLs for this project:' + Fore.RESET)
for domain in [c.root_domain] + c.extra_domains:
print("* " + Fore.YELLOW + "https://" + domain + Fore.RESET)
try:
response = json.loads(requests.get('http://%s:8080/api/http/routers' % (c.root_domain)).text)
gen = (router for router in response if re.match("^%s-(.*)@docker$" % (c.project_name), router['name']))
for router in gen:
if router['service'] != 'frontend-%s' % (c.project_name):
host = re.search(r'Host\(`(?P<host>.*)`\)', router['rule']).group('host')
if host:
scheme = 'https' if 'https' in router['using'] else router['using'][0]
print("* " + Fore.YELLOW + scheme + "://" + host + Fore.RESET)
print('')
except:
pass
def run_in_docker_or_locally_for_dinghy(c, command, no_deps=False):
"""
Mac users have a lot of problems running Yarn / Webpack on the Docker stack, so this function allows them to run these tools on their host
"""
if c.dinghy:
with c.cd(c.project_directory):
c.run(command)
else:
docker_compose_run(c, command, no_deps=no_deps)
def docker_compose_run(c, command_name, service="builder", user="app", no_deps=False, workdir=None, port_mapping=False, bare_run=False):
args = [
'run',
'--rm',
'-u %s' % quote(user),
]
if no_deps:
args.append('--no-deps')
if port_mapping:
args.append('--service-ports')
if workdir is not None:
args.append('-w %s' % quote(workdir))
docker_compose(c, '%s %s /bin/sh -c "exec %s"' % (
' '.join(args),
quote(service),
command_name
), bare_run=bare_run)
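# Illustrative expansion (project name and compose files are hypothetical):
# docker_compose_run(c, 'composer install', no_deps=True) ends up running roughly
#   docker-compose -p <project_name> -f ".../docker-compose.builder.yml" -f "..." \
#       run --rm -u app --no-deps builder /bin/sh -c "exec composer install"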
def docker_compose(c, command_name, bare_run=False):
domains = '`' + '`, `'.join([c.root_domain] + c.extra_domains) + '`'
# This list should be in sync with the one in invoke.py
env = {
'PROJECT_NAME': c.project_name,
'PROJECT_DIRECTORY': c.project_directory,
'PROJECT_ROOT_DOMAIN': c.root_domain,
'PROJECT_DOMAINS': domains,
'PROJECT_START_WORKERS': str(c.start_workers),
'COMPOSER_CACHE_DIR': c.composer_cache_dir,
}
cmd = 'docker-compose -p %s %s %s' % (
c.project_name,
' '.join('-f "' + c.root_dir + '/infrastructure/docker/' + file + '"' for file in c.docker_compose_files),
command_name
)
# bare_run bypass invoke run() function
# see https://github.com/pyinvoke/invoke/issues/744
# Use it ONLY for tasks where you need to interact with the container, like builder
if (bare_run):
env.update(os.environ)
subprocess.run(cmd, shell=True, env=env)
else:
c.run(cmd, pty=not c.power_shell, env=env)
def get_workers(c):
"""
Find worker containers for the current project
"""
cmd = c.run('docker ps -a --filter "label=docker-starter.worker.%s" --quiet' % c.project_name, hide='both')
return list(filter(None, cmd.stdout.rsplit("\n")))
def confirm_choice(message):
confirm = input('%s [y]es or [N]o: ' % message)
return re.compile('^y').search(confirm)
class Builder:
def __init__(self, c):
self.c = c
def __enter__(self):
self.docker_compose_files = self.c.docker_compose_files
self.c.docker_compose_files = ['docker-compose.builder.yml'] + self.docker_compose_files
def __exit__(self, type, value, traceback):
self.c.docker_compose_files = self.docker_compose_files
|
The objective of this project is to uncover and explain the escalation and non-escalation of repression and intra-state armed conflict by analysing how characteristics of the government and its formal and informal security apparatus shape the dynamics of such violence, paying attention to the role of monitoring and accountability. RATE analyzes when, under what conditions, and which types of human rights violations lead to the escalation or deterrence of further repression and armed conflict. It pays particular attention to the delegation of violence to and within formal and informal armed groups and the role of domestic and international mechanisms of monitoring and accountability. |
from django.conf import settings
from django.contrib.auth.forms import AuthenticationForm
from django.core.urlresolvers import reverse
from django.db.models import Count
from django.http import Http404, HttpResponse, HttpResponseRedirect
from django.shortcuts import get_object_or_404, redirect, render
from django.views.decorators.csrf import ensure_csrf_cookie
from core.common import *
from posts.forms import PostForm
from posts.models import Post
from users.models import User
@ensure_csrf_cookie
def post_form(request, post_id=None):
user = User.objects.get(user=request.user)
post = Post.objects.get(pk=post_id)
if user == post.author:
form = PostForm(instance=post, request=request)
else:
return reject_user()
return render(request, 'posts/edit.html', {
'settings': settings,
'user': user,
'title': 'Edit ' + post.title,
'post': post,
'form': form,
})
@ensure_csrf_cookie
def gallery(request, query=None):
return render(request, 'posts/booru.html', {
'settings': settings,
'user': get_user(request.user),
'posts': Post.objects.exclude(media=False, hidden=False),
})
|
Join 4H leader Erin Mayer for some crafting fun this February! Discover Arts and Crafts SPIN club is open to Forest County Youth in the 4th and 5th grades. The SPIN club will meet on Sundays, February 10th, 17th and 24th at the Crandon Public Library from 3:00 – 5:00 p.m. The cost is a one-time $5.00 fee payable at the 1st meeting.
Space is LIMITED to 10 youth. Please complete the initial registration here. Parents/guardians will be contacted by Michelle Gobert to complete the full application process.
For more information about the program or if there are questions, please call Michelle Gobert at 715-478-7797. |
from django.conf import settings
from django.template.defaultfilters import force_escape
from djangotoolbox.http import JSONResponse
def live_search_results(request, model, search_index='search_index', limit=30,
result_item_formatting=None, query_converter=None,
converter=None, redirect=False):
"""
Performs a search in the given model's search index and returns the results
as JSON, so it can be used by auto-complete scripts.
limit indicates the number of results to be returned.
A JSON file is sent to the browser. It contains a list of
objects that are created by the function indicated by
the parameter result_item_formatting. It is executed for every result
item.
Example:
result_item_formatting=lambda course: {
'value': course.name + '<br />Prof: ' + course.prof.name,
'result': course.name + ' ' + course.prof.name,
'data': redirect=='redirect' and
{'link': course.get_absolute_url()} or {},
}
"""
query = request.GET.get('query', '')
try:
limit_override = int(request.GET.get('limit', limit))
if limit_override < limit:
limit = limit_override
except:
pass
search_index = getattr(model, search_index)
language = getattr(request, 'LANGUAGE_CODE', settings.LANGUAGE_CODE)
results = search_index.search(query, language=language)
if query_converter:
results = query_converter(request, results)
results = results[:limit]
if converter:
results = converter(results)
data = []
for item in results:
if result_item_formatting:
entry = result_item_formatting(item)
else:
value = getattr(item, search_index.fields_to_index[0])
entry = {'value': force_escape(value), 'result': value}
if 'data' not in entry:
entry['data'] = {}
if redirect:
if 'link' not in entry['data']:
entry['data']['link'] = item.get_absolute_url()
data.append(entry)
return JSONResponse(data) |
This quarter our social mixer will be showing the best of the Space-Time Animation Competition. The ACM SIGGRAPH Education Committee sponsors the Space-Time Animation Competition, which is open to students, primary grades through college, worldwide.
behind the building, on the corner of 9th and Madison.
so that we can plan for attendance. |
"""
Simple SFTP client using Paramiko
"""
from __future__ import print_function
import logging
import paramiko
LOG = logging.getLogger('sftplib')
class SFTPClient(object):
"""
Simple SFTP client using Paramiko
"""
def __init__(self, hostname, port, username, key_file):
"""
Create the SFTPLib object for connection
"""
self.sftp = None
self.transport = None
self.hostname = hostname
self.port = port
self.username = username
self.key_file = key_file
def login(self):
"""
Log into SFTP server and establish the connection
"""
try:
rsa_key = paramiko.RSAKey.from_private_key_file(self.key_file)
self.transport = paramiko.Transport((self.hostname, self.port))
self.transport.connect(username=self.username, pkey=rsa_key)
self.sftp = paramiko.SFTPClient.from_transport(self.transport)
except Exception as exception:
print('Caught exception: {}'.format(exception))
LOG.error('Caught exception: %s', exception)
self.transport.close()
def list_files(self):
"""
Get list of files on SFTP server
"""
file_list = self.sftp.listdir('.')
return file_list
def get_file(self, remotename, dst_dir):
"""
Download file from SFTP server
"""
try:
self.sftp.get(remotename, dst_dir)
return True
except Exception as exception:
LOG.error("Exception raised: %s", exception)
return False
def remove_file(self, remotename):
"""
Delete a file on the remote server
"""
try:
self.sftp.remove(remotename)
return True
except Exception as exception:
LOG.error("Exception raised: %s", exception)
return False
def close(self):
"""
Close the SFTP connection
"""
self.sftp.close()
self.transport.close()
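# Minimal usage sketch (host, credentials and paths are hypothetical):
# client = SFTPClient('sftp.example.com', 22, 'deploy', '/home/deploy/.ssh/id_rsa')
# client.login()
# for name in client.list_files():
#     client.get_file(name, '/tmp/' + name)
# client.close()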
|
Keeping my carpets & rugs clean isn't easy since we have 2 dogs. Thanks to Stainmaster Carpet Pet Stain Remover, stains are removed easily & painlessly! Carpet Cleaners: STAINMASTER® carpet cleaners lift and remove tough stains and grime without leaving a sticky residue. Our efficient, powerful solutions provide an invisible shield that repels dirt and protects against resoiling to keep your carpet looking newer longer.
How to Clean Carpet Stains (general carpet stains): Fresh carpet stains from food, beverages, pets, grease, and other substances will usually come up warm … However, when you buy Stainmaster PetProtect carpet and cushion, you get flooring with a breathable moisture barrier that helps prevent wet messes from setting in to the padding and subfloor, allowing you to more thoroughly clean and cut back on odor.
STAINMASTER® carpets are designed for easy care. Just a bit of regular maintenance will keep your carpet looking its best. Discover carpet cleaning solutions for tough stains and real messes. Accidents do happen. Future-proof your carpet and erase the evidence. Are you a pet-loving owner? Do you consider your pets part of the family? We do, and there’s no denying who owns the house! We adore these three, but we also love a clean home and stain-free carpet!
STAINMASTER flooring is durable, easy to clean and great for kids and pets. We’ll help you find the perfect carpet or luxury vinyl flooring for your home.
Stainmaster Carpet Stain Remover or Pet Stain Remover Coupon Codes Explained: S = SmartSource circular; RP = RedPlum; PG = Procter & Gamble; CV = CVS store coupon; WG = Walgreens store coupon book; TG = Target store coupon.
If your pet happens to have an accident on the carpet, the trick is STAINMASTER Carpet Pet Stain Remover. I purchased it at my local Target in the cleaning aisle and it has worked wonders in my home. |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2011 Associazione OpenERP Italia
# (<http://www.openerp-italia.org>).
# Copyright (C) 2014 Didotech SRL
# All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.report import report_sxw
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT
from datetime import datetime
class Parser(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(Parser, self).__init__(cr, uid, name, context)
self.localcontext.update({
'time': time,
'raggruppa': self._raggruppa,
'raggruppaddt': self._raggruppaddt,
'righe': self._righe,
'righeddt': self._righeddt,
'indirizzo': self._indirizzo,
'div': self._div,
'italian_number': self._get_italian_number,
'pallet_sum': self._get_pallet_sum,
'get_description': self._get_description,
})
def _get_description(self, order_name):
order_obj = self.pool['sale.order']
description = []
if order_name and not self.pool['res.users'].browse(
self.cr, self.uid, self.uid).company_id.disable_sale_ref_invoice_report:
order_ids = order_obj.search(self.cr, self.uid, [('name', '=', order_name)])
if len(order_ids) == 1:
order = order_obj.browse(self.cr, self.uid, order_ids[0])
order_date = datetime.strptime(order.date_order, DEFAULT_SERVER_DATE_FORMAT)
if order.client_order_ref:
description.append(u'Rif. Ns. Ordine {order} del {order_date}, Vs. Ordine {client_order}'.format(order=order.name, order_date=order_date.strftime("%d/%m/%Y"), client_order=order.client_order_ref))
else:
description.append(u'Rif. Ns. Ordine {order} del {order_date}'.format(order=order.name, order_date=order_date.strftime("%d/%m/%Y")))
return ' / '.join(description)
def _div(self, up, down):
res = 0
if down:
res = up / down
return res
def _get_italian_number(self, number, precision=2, no_zero=False):
if not number and no_zero:
return ''
elif not number:
return '0,00'
if number < 0:
sign = '-'
else:
sign = ''
## Requires Python >= 2.7:
#before, after = "{:.{digits}f}".format(number, digits=precision).split('.')
## Works with Python 2.6:
if precision:
before, after = "{0:10.{digits}f}".format(number, digits=precision).strip('- ').split('.')
else:
before = "{0:10.{digits}f}".format(number, digits=precision).strip('- ').split('.')[0]
after = ''
belist = []
end = len(before)
for i in range(3, len(before) + 3, 3):
start = len(before) - i
if start < 0:
start = 0
belist.append(before[start: end])
end = len(before) - i
before = '.'.join(reversed(belist))
if no_zero and int(number) == float(number) or precision == 0:
return sign + before
else:
return sign + before + ',' + after
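# Illustrative examples (not part of the original report, assuming the formatting
# logic above): in a report expression, italian_number(1234567.891) yields
# '1.234.567,89', italian_number(0) yields '0,00', and
# italian_number(0, no_zero=True) yields ''.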
def _raggruppa(self, righe_fattura):
indice_movimenti = {}
movimenti_filtrati = []
for riga in righe_fattura:
if riga.ddt_origin in indice_movimenti and riga.sale_origin in indice_movimenti[riga.ddt_origin]:  # check against the same keys used to build the index below
print riga
print riga.origin
else:
if riga.origin:
print 'Riga Buona'
if riga.ddt_origin in indice_movimenti:
indice_movimenti[riga.ddt_origin][riga.sale_origin] = riga.sale_origin
else:
indice_movimenti[riga.ddt_origin] = {riga.sale_origin: riga.sale_origin}
movimenti_filtrati.append(riga)
else:
continue
print indice_movimenti
print movimenti_filtrati
return movimenti_filtrati
def _righe(self, righe_fattura, filtro):
righe_filtrate = []
print filtro
print righe_fattura
for riga in righe_fattura:
if ((riga.origin == filtro.origin)):
righe_filtrate.append(riga)
return righe_filtrate
def _raggruppaddt(self, righe_ddt):
indice_movimenti = {}
movimenti_filtrati = []
print righe_ddt
for riga in righe_ddt:
if riga.origin in indice_movimenti:
print riga.origin
else:
indice_movimenti[riga.origin] = riga.origin
movimenti_filtrati.append(riga)
print indice_movimenti
return movimenti_filtrati
def _righeddt(self, righe_ddt, filtro):
righe_filtrate = []
print filtro
print righe_ddt
for riga in righe_ddt:
if riga.origin == filtro.origin:
righe_filtrate.append(riga)
return righe_filtrate
def _indirizzo(self, partner):
address = self.pool['res.partner'].address_get(self.cr, self.uid, [partner.id], ['default', 'invoice'])
return self.pool['res.partner.address'].browse(self.cr, self.uid, address['invoice'] or address['default'])
def _get_pallet_sum(self, product_ul_id, partner_id):
pallet_sum = self.pool['product.ul'].get_pallet_sum(
self.cr, self.uid, [product_ul_id], 'pallet_sum', None, context={'partner_id': partner_id}
)
return pallet_sum[product_ul_id]
|
Located in Hawaii, the Panoramic Survey Telescope and Rapid Response System (Pan-STARRS), like many telescopes, studies the depths of the universe. It can observe 75 percent of the sky with its 1.4 billion-pixel camera, and the data it collects is now accessible to everyone. This telescope differs from others in that it does not focus on only one part of the sky; rather, its wide sky coverage and continuous movement allow us to see the ever-changing universe in action.
Click the link below to start exploring and discovering with Pan-STARRS! |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2014, Trond Hindenes <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_chocolatey
version_added: "1.9"
short_description: Manage packages using chocolatey
description:
- Manage packages using Chocolatey (U(http://chocolatey.org/)).
- If Chocolatey is missing from the system, the module will install it.
- List of packages can be found at U(http://chocolatey.org/packages).
requirements:
- chocolatey >= 0.10.5 (will be upgraded if older)
options:
name:
description:
- Name of the package to be installed.
- This must be a single package name.
required: yes
state:
description:
- State of the package on the system.
choices:
- absent
- downgrade
- latest
- present
- reinstalled
default: present
force:
description:
- Forces install of the package (even if it already exists).
- Using C(force) will cause ansible to always report that a change was made.
type: bool
default: 'no'
upgrade:
description:
- If the package is already installed, try to upgrade it to the latest version or to the specified version.
- As of Ansible v2.3 this is deprecated, set parameter C(state) to C(latest) for the same result.
type: bool
default: 'no'
version:
description:
- Specific version of the package to be installed.
- Ignored when C(state) is set to C(absent).
source:
description:
- Specify source rather than using default chocolatey repository.
install_args:
description:
- Arguments to pass to the native installer.
version_added: '2.1'
params:
description:
- Parameters to pass to the package
version_added: '2.1'
allow_empty_checksums:
description:
- Allow empty checksums to be used.
type: bool
default: 'no'
version_added: '2.2'
ignore_checksums:
description:
- Ignore checksums altogether.
type: bool
default: 'no'
version_added: '2.2'
ignore_dependencies:
description:
- Ignore dependencies, only install/upgrade the package itself.
type: bool
default: 'no'
version_added: '2.1'
timeout:
description:
- The time to allow chocolatey to finish before timing out.
default: 2700
version_added: '2.3'
aliases: [ execution_timeout ]
skip_scripts:
description:
- Do not run I(chocolateyInstall.ps1) or I(chocolateyUninstall.ps1) scripts.
type: bool
default: 'no'
version_added: '2.4'
proxy_url:
description:
- Proxy url used to install chocolatey and the package.
version_added: '2.4'
proxy_username:
description:
- Proxy username used to install chocolatey and the package.
- When dealing with a username with double quote characters C("), they
need to be escaped with C(\) beforehand. See examples for more details.
version_added: '2.4'
proxy_password:
description:
- Proxy password used to install chocolatey and the package.
- See notes in C(proxy_username) when dealing with double quotes in a
password.
version_added: '2.4'
allow_prerelease:
description:
- Allow install of prerelease packages.
- If C(state) is C(latest), the highest prerelease package will be installed.
type: bool
default: 'no'
version_added: '2.6'
notes:
- Provide the C(version) parameter value as a string (e.g. C('6.1')), otherwise it
is considered to be a floating-point number and depending on the locale could
become C(6,1), which will cause a failure.
- When using verbosity 2 or less (C(-vv)) the C(stdout) output will be restricted.
- When using verbosity 4 (C(-vvvv)) the C(stdout) output will be more verbose.
- When using verbosity 5 (C(-vvvvv)) the C(stdout) output will include debug output.
- This module will install or upgrade Chocolatey when needed.
- Some packages need an interactive user logon in order to install. You can use C(become) to achieve this.
- Even if you are connecting as the local Administrator, using C(become) to become Administrator will give you an interactive user logon; see the examples below.
- Use M(win_hotfix) to install hotfixes instead of M(win_chocolatey), as M(win_hotfix) avoids using wusa.exe, which cannot be run remotely.
author:
- Trond Hindenes (@trondhindenes)
- Peter Mounce (@petemounce)
- Pepe Barbe (@elventear)
- Adam Keech (@smadam813)
- Pierre Templier (@ptemplier)
'''
# TODO:
# * Better parsing when a package has dependencies - currently fails
# * Time each item that is run
# * Support 'changed' with gems - would require shelling out to `gem list` first and parsing, kinda defeating the point of using chocolatey.
# * Version provided not as string might be translated to 6,6 depending on Locale (results in errors)
EXAMPLES = r'''
- name: Install git
win_chocolatey:
name: git
state: present
- name: Upgrade installed packages
win_chocolatey:
name: all
state: latest
- name: Install notepadplusplus version 6.6
win_chocolatey:
name: notepadplusplus.install
version: '6.6'
- name: Install git from specified repository
win_chocolatey:
name: git
source: https://someserver/api/v2/
- name: Uninstall git
win_chocolatey:
name: git
state: absent
- name: Install multiple packages
win_chocolatey:
name: '{{ item }}'
state: present
with_items:
- procexp
- putty
- windirstat
- name: uninstall multiple packages
win_chocolatey:
name: '{{ item }}'
state: absent
with_items:
- procexp
- putty
- windirstat
- name: Install curl using proxy
win_chocolatey:
name: curl
proxy_url: http://proxy-server:8080/
proxy_username: joe
proxy_password: p@ssw0rd
- name: Install curl with proxy credentials that contain quotes
win_chocolatey:
name: curl
proxy_url: http://proxy-server:8080/
proxy_username: user with \"escaped\" double quotes
proxy_password: pass with \"escaped\" double quotes
- name: Install a package that requires 'become'
win_chocolatey:
name: officepro2013
become: yes
become_user: Administrator
become_method: runas
'''
RETURN = r'''
choco_bootstrap_output:
description: DEPRECATED, will be removed in 2.6, use stdout instead.
returned: changed, choco task returned a failure
type: str
sample: Chocolatey upgraded 1/1 packages.
choco_error_cmd:
description: DEPRECATED, will be removed in 2.6, use command instead.
returned: changed, choco task returned a failure
type: str
sample: choco.exe install -r --no-progress -y sysinternals --timeout 2700 --failonunfound
choco_error_log:
description: DEPRECATED, will be removed in 2.6, use stdout instead.
returned: changed, choco task returned a failure
type: str
sample: sysinternals not installed. The package was not found with the source(s) listed
command:
description: The full command used in the chocolatey task.
returned: changed
type: str
sample: choco.exe install -r --no-progress -y sysinternals --timeout 2700 --failonunfound
rc:
description: The return code from the chocolatey task.
returned: changed
type: int
sample: 0
stdout:
description: The stdout from the chocolatey task. The verbosity level of the
messages are affected by Ansible verbosity setting, see notes for more
details.
returned: changed
type: str
sample: Chocolatey upgraded 1/1 packages.
'''
|
Made on special request for HFT and UK shooters. Huma Air, in close cooperation with Airborne Arms NZ, developed this tune kit for the Edgun Leshiy to convert your rifle into a 12 ft/lbs (16 Joule) HFT version with an enormous shot count.
Our kit contains our micro regulator, a special plenum with a matching valve return spring, a power sleeve and a new, softer hammerspring.
If you want to do some more tuning, the regulator has a clear pressure scale so you can easily adjust it to your wishes.
These regulators are also vented to the atmosphere without any modification to your pressure tube needed, so there is no creeping up in power.
The instruction video will give a good indication of the setup; to tune and adjust your rifle for the best performance you do need some skill in adjusting your hammerspring tension and regulator pressure combination.
Besides the Gen1-Gen2 differences, there are also some different strengths/types of hammersprings used in some Leshiy rifles. These rifles might need a different setup and can give different performance.
Please check the "Reviews" tab of this regulator, where we share user feedback and the setups used. As of 1-2018 we include a new, softer hammerspring with the set.
Excellent quality product. It took some experiments and lots of hand pumping to figure out the most optimal configuration. With a .177 caliber 250mm barrel, I get about 60 shots from 250 bars down to 80 bars, 11 FPE each shot. Reg. is set to 85 bars and the delrin insert is left out. At least with a .177 barrel, increasing reg. pressure to 120 bars and putting the delrin insert in did not improve shot count from 200 bars to 150 bars compared to 85 bars and no insert. A .22 gun is probably a bit more efficient due to the increased cross section area and utilize the higher pressure better.
More shots, tighter strings, excellent support with installation, which was very easy. The Delrin insert offers greater tuning flexibility depending on personal preferences. A shortened valve stem would be a good addition to the kit though, as this would allow you to keep the original intact if ever required, even if it increases the cost slightly. Really pleased.
Very well made regulator, easy to install. Getting 90 shots from a 250 bar fill, regulator set at 100 bar in my .22 Leshiy. Fantastic consistency. I do think that the reduced plenum should be one piece though, negating the need for the delrin insert.
I received my sub 12 reg sooner than I expected .. it took me a few days to gain the courage to install it as I had to cut down the valve stem.. it would have been great and stress free if a shortened machine cut valve stem would come with the kit.. that would be my only gripe..
I still have some more testing to do but I’m very happy I made the choice to install it already.. way more number of shots that I ever expected from this little goblin and consistency on par with the original.. ave spread 10fps..
All round Def a happy customer..
I bought this tuning kit prior to the delivery of my Edgun Leshiy which arrived last Wednesday. I had ordered a 12ftlb version as I live in France whose power limit is 20 joules and to stay legal I could only buy a sub 20 joules air rifle.
Prior to purchase I watched the installation video several times and assessed that the modification to the valve was within my capabilities and in my comfort zone.
The kit arrived very quickly and it waited until the Leshiy was delivered.
I did the first chrono runs as it was delivered, and it had been very conservatively, but nevertheless legally, set to sub 11 ft/lbs.
Hi, I have a Gen2 Leshiy, serial number 057X, .20 cal, 250mm short barrel.
My needs require no more than about 11 ft/lbs / 600 fps, so that was the aim for my tune.
After lots of testing I have got it just right.
Using the new Huma sub-12 HFT regulator kit, I have the regulator set at 82 bar and my HST is 12 clicks from lowest.
I am extremely happy with these results!
85 shots for such a small rifle with a small cylinder. Amazing!
I have a Gen2 Leshiy, serial number 062X.
I have tested the regulator with three barrels between 90 bar and 120 bar and have found 105 bar reg setting to be best on mine.
The short .22 barrel gave me +100 consistent shots using 18.1gr JSB 5.52.
The short .177 barrel gave me +80 consistent shots using 8.4gr JSB 4.51.
The long .177 barrel gave me +90 consistent shots using 8.4gr JSB 4.51.
I kept the power around 11.5 ft/lb via the hammer spring adjuster, which was normally 2 or three clicks either way depending on which barrel I used.
With the regulator set at 120 bar the power did creep just over 12 ft/lb after it fell off the reg with all of the barrels but it is fine set at 105 bar.
Double the shot count and very little deviation compared to the original regulator. |
#!/usr/bin/env python
#coding: utf-8
import sys
from util import core
from util import adjust
from util.git import *
from argparse import ArgumentParser, SUPPRESS
# usage
# git ref b81fe395c0bf28c4be8 -> print the [hash value] of the hash [b81fe395c0bf28c4be8]
# git ref tag -> print the [hash value] of the tag [tag]
# git ref -t b81fe395c0bf28c4be8 -> print the [type] of the hash [b81fe395c0bf28c4be8]
# git ref b81fe395c0bf28c4be8 --file git-todo.py -> print the [hash value] of [git-todo.py] in the hash [b81fe395c0bf28c4be8]
# git ref b81fe395c0bf28c4be8 --cat-file git-todo.py -> print the [contents] of [git-todo.py] in the hash [b81fe395c0bf28c4be8]
# git ref --ls HEAD -> show [ls-tree -r] for the commit [HEAD]
# git ref --detail HEAD -> show [git show] for the commit [HEAD]
parser = ArgumentParser(prog="git ref",
description="This script can show reference hash or files easyly.")
parser.add_argument("reference", action="store",
help="Please set hash of reference.\
If you not set other options, \
script show full hash value.")
parser.add_argument("-l", "--ls", action="store_true",
help="Show all files with hash in commit.")
parser.add_argument("-t", "--type", action="store_true",
help="Show type of hash.")
parser.add_argument("-f", "--file", action="store",
help="Show file object hash in commit.")
parser.add_argument("-p", "--pretty-print", action="store", dest="pretty_print",
metavar="FILE", help="Show file contents.")
class ArgumentNamespace:
def __setattr__(self, key, value):
self.__dict__[key] = value
def __repr__(self):
return "ArgumentNamespace(%s)" % ", ".join(
[k+"='%s'"%v for k,v in vars(self).iteritems()])
def is_only_ref(self):
"""Return True if there is only reference argument"""
if args.file == None and args.type == False and args.ls == False and args.pretty_print == None:
return True
def is_only_ls(self):
"""Return True if there is only --ls option"""
if args.ls == False:
return False
if args.ls == True:
if args.file == None and args.type == False and args.pretty_print == None:
return True
else:
parser.error("this option can't use concomitantly.")
def is_only_type(self):
"""Return True if there is only --type option"""
if args.type == False:
return False
if args.type == True:
if args.file == None and args.ls == False and args.pretty_print == None:
return True
else:
parser.error("this option can't use concomitantly.")
def is_only_file(self):
"""Return True is there is only --file option"""
if args.file == None:
return False
if args.file:
if args.type == False and args.ls == False and args.pretty_print == None:
return True
else:
parser.error("this option can't use concomitantly.")
def is_only_pretty_print(self):
"""Return True is there is only --pretty-print option"""
if args.pretty_print == None:
return False
if args.pretty_print:
if args.type == False and args.ls == False and args.file == None:
return True
else:
parser.error("this option can't use concomitantly.")
def check_ref(reference):
"""This function check reference whether it's valid ref, Return True"""
try:
# Run git rev-parse --verify [ref]
verified = git("rev-parse", "--verify", reference)
except:
# If it is invalid, Call error
parser.error("invalid reference.")
sys.exit(1)
return True
def main(args):
ref = args.reference
check_ref(ref)
# User set reference only
if args.is_only_ref():
print git("rev-parse", ref)
return 0
# User set --ls
elif args.is_only_ls():
print git("ls-tree", "-r", ref)
return 0
# User set --type
elif args.is_only_type():
print git("cat-file", "-t", ref)
return 0
# User set --file
elif args.is_only_file():
body = git("ls-tree", ref, args.file)
if len(body) == 0:
parser.error("%s file does not found." % args.file)
print body.split(" ")[-1].split("\t")[0]
return 0
# User set --pretty-print
elif args.is_only_pretty_print():
body = git("ls-tree", ref, args.pretty_print)
if len(body) == 0:
parser.error("%s file does not found." % args.file)
hash_var = body.split(" ")[-1].split("\t")[0]
print git("cat-file", "-p", hash_var)
return 0
if __name__ == "__main__":
if len(sys.argv) == 1:
parser.parse_args(["-h"])
#args = parser.parse_args()
args = parser.parse_args(namespace=ArgumentNamespace())
#print args
sys.exit(main(args))
|
Izumi feels guilty about hurting Ryouma's feelings, while Ryouma feels stupid for not taking the only chance he had to do things with Izumi.
The next day Izumi decides to leave Ryouma's place. Just when he is about to leave, Ryouma comes. It seems that he went to the author of lalalulu and asked him to look at Izumi's manga, and the author wrote a special note for him. Izumi says that he will try harder and goes back to his house with Ryouma.
At Izumi's house, Rei starts crying out of happiness because Izumi is finally back, and falls asleep afterwards (he didn't sleep for days because he was really worried).
Before Ryouma leaves, he and Izumi have a conversation. Izumi tells him he is really thankful and that Ryouma is like a big brother to him. Ryouma kisses Izumi, saying he is nothing like a big brother, and leaves. But Izumi's heart starts beating fast and he starts blushing; maybe this is love? |
from django.db import models
# Create your models here.
class Article(models.Model):
titre = models.CharField(max_length=100)
auteur = models.CharField(max_length=42)
contenu = models.TextField(null=True)
date = models.DateTimeField(auto_now_add=True, auto_now=False,
verbose_name="Date de parution")
image_couverture = models.ImageField(upload_to="images_articles/")
image1 = models.ImageField(upload_to="images_articles/", blank=True)
image2 = models.ImageField(upload_to="images_articles/", blank=True)
image3 = models.ImageField(upload_to="images_articles/", blank=True)
image4 = models.ImageField(upload_to="images_articles/", blank=True)
image5 = models.ImageField(upload_to="images_articles/", blank=True)
image6 = models.ImageField(upload_to="images_articles/", blank=True)
categorie = models.ForeignKey('Categorie')
def __str__(self):
return self.titre
class Categorie(models.Model):
nom = models.CharField(max_length=30)
def __str__(self):
return self.nom
class Prices(models.Model):
titre = models.CharField(max_length=50)
english_title = models.CharField(max_length=50, default="NULL")
price = models.IntegerField(default=0)
category = models.BooleanField(default=False)
def __str__(self):
return self.titre
|
As to why should I care?
Creating a website is much simpler than it used to be, thanks to website-building resources such as Reddit, Weebly, Blogger, etc. Not only do those sites allow you to build the blog, they host it for you as well. Nice job! Additionally, such sites also tend to be favored by search-engine algorithms, so they tend to rank well in search engines (search engine results pages). A high search ranking will benefit your website, since more followers = more sales. Simple.
Domain parking is really a URL "monetization" business. Domain parking is the straightforward concept of taking a URL that you own, placing it with a host, and preparing a website full of advertising campaigns. If a person stumbles upon the website, it will have related content (matching the URL of your website) that gives them what they are looking for. URL parking is an advertising practice used mostly by domain registrars and internet advertising publishers to earn revenue from type-in traffic to an undeveloped domain. The domain name will typically resolve to a page containing straight-up AdSense ads or links to web products.
Look, I merely said… The two most important ploys illustrate an attribute prevalent among crypto-prescription ploys. Think of them as single-spaced ideas. Like single-spaced formatting, the single-spaced strategy denies any room to read and write between the lines. If challenged ("My, Jeremy, you're awfully bossy!") I can slide away by claiming that the meaning was in the words themselves, that my orchestrated tone and body language can be completely ignored. "Hey, don't try to read between the lines, I merely said smoking cigarettes shortens life expectancy (or whatever)." That should shut them up. |
import logging
from behave import given, when, then
from penfold import PenfoldPluginManager
from penfold import InputOutputFactory
log = logging.getLogger(__name__)
@then(u'execute the plugin')
def execute_the_plugin(context):
input_name1, input_value1 = context._first_input
context._plugin.set_input_value(input_name1, input_value1)
if context._second_input is not None:
input_name2, input_value2 = context._second_input
context._plugin.set_input_value(input_name2, input_value2)
context._plugin.execute()
context._output_values = context._plugin.output_values
@given(u'the name "{name}" of a valid plugin')
def the_name_name__of_a_valid_plugin(context, name):
context._first_input = None
context._second_input = None
context._plugin_name = name
app = PenfoldPluginManager()
plugin = app.get_plugin_by_name(context._plugin_name)
context._plugin = plugin
@then(u'test that the output named "{output_name}" has the value "{output_value}"')
def test_that_the_output_named_outputname_has_the_value_outputvalue(context, output_name, output_value):
log.debug("output={}".format(context._output_values[output_name]))
assert str(context._output_values[output_name]) == output_value
@given(u'the value "{value}" of the first input "{name}"')
def the_value_value_of_the_first_input_input(context, name, value):
context._first_input = (name, value)
@given(u'the value "{value}" of the second input "{name}"')
def the_value_of_the_second_input(context, value, name):
context._second_input = (name, value)
@when(u'a set if input values is set')
def a_set_if_input_values_is_set(context):
iof = InputOutputFactory()
all_input_rows = []
for row in context.table:
input_set = []
for i in range(len(row)):
input_set.append(iof.create(row.headings[i], row[i]))
all_input_rows.append(input_set)
context._input_rows = all_input_rows
|
Gail Herrington has lived in Santa Clarita for most of her life, experiencing its growth first hand. She has seen the valley evolve from a region of expansive farm and ranch land to where it stands today. This unique perspective enables Gail to have a clear view of the economic influences of the region, and its dynamic market.
A forward-thinking Real Estate Broker and experienced financial analyst, Gail can distill complex financial data into a clear and actionable strategic plan. This enables her to identify investment properties and prepare pro forma financial modeling for those desiring such services, or give confidence to the home buyer that their most cherished investment is a smart one.
Gail holds a Bachelor’s degree in Business, with a specialty in Real Estate, from California State University, Northridge and an Associate’s degree in Accounting. She also has first-hand experience in diverse environments, such as investment management, lending, and business-to-consumer transactions, all of which make her a unique and qualified Broker.
After living in several areas in the valley over the years and raising her children within its boundaries, Gail has an appreciation for those looking for a cherished family home. She has an innate understanding for the needs of her clients and the commitment to help them reach their goals. When working with first-time homebuyers and investors, Gail will always be professional, dedicated, and authentic. Most importantly, she is dedicated to helping her clients meet their needs! |
#!/usr/bin/python
#
"""
LightLayers module for Holiday by Moorescloud
Copyright (c) 2013, Wade Bowmer
License: ..
"""
__author__ = 'Wade Bowmer'
__version__ = '0.01-dev'
__license__ = 'MIT'
import time
import colours
class LightLayer:
remote = False
addr = ''
NUM_GLOBES = 50
stream = { }
current_time = 0
furthest_edge = 0
time_step = 50 # milli-seconds
def __init__(self, remote=False, addr=''):
"""Remote mode only, at the moment."""
if remote:
self.remote = True
self.addr = addr
self.stream[self.current_time] = [ [0, 0, 0, 0] for g in range(self.NUM_GLOBES) ] # red green blue transparency
def setcolour(self, col):
"""Generic colour checks. Also does a lookup if you've provided a name."""
if type(col) is list and len(col) == 3:
return [ self.limit(col[i], 0, 0xff) for i in [0, 1, 2] ]
if type(col) is str:
col = col.lower().replace(' ','')
if col in colours.colourMap:
return list(colours.colourMap[col])
return False
def limit(self, value, bottom, top):
"""Helper function to range limit values."""
return max(bottom, min(value, top))
def setglobe(self, globe, col, trans=100):
"""Set a single globe"""
self.setblock(globe, globe, col, trans)
def setblock(self, globe_start, globe_end, col, trans=100):
"""Set a range of lights to the same colour. If you want to _not_ set the transparancy, use gradient()."""
col = self.setcolour(col)
if col:
col.append(self.limit(trans, 0, 100))
globe_start = self.limit(globe_start, 0, self.NUM_GLOBES-1)
globe_end = self.limit(globe_end, 0, self.NUM_GLOBES-1)
for g in range(globe_start, globe_end+1):
self.stream[self.current_time][g] = list(col)
def ramp(self, globe_start, globe_end, first_time, overlap=100, mode="up"): # aka raise
"""Set an increasing brightness/transparency ramp
This call does NOT set colour.
first_time is how long the first light will take in milliseconds
overlap is the percentage overlap subsequent lights will take. Overlap of 100 will bring them all up at once.
"""
globe_start = self.limit(globe_start, 0, self.NUM_GLOBES-1)
globe_end = self.limit(globe_end, 0, self.NUM_GLOBES-1)
overlap = self.limit(overlap, 0, 100)
if first_time == 0:
for g in range(globe_start, globe_end+1):
self.stream[self.current_time][g][3] = 0xff
else:
time_advance = first_time
time_overlap = int(time_advance * (100 - overlap)/100 / self.time_step) * self.time_step
gtime = self.current_time
first_time = float(first_time)
for g in range(globe_start, globe_end+1):
# print "Setting %d from %d" % (g, gtime)
self.fill_to(gtime + time_advance)
t = gtime
while t <= gtime + time_advance:
# print "Setting %f:%d to %f" % (t, g, (t - gtime) / first_time)
if mode == "down":
self.stream[t][g][3] = int(0xff - (t - gtime) / first_time * 0xff)
else:
self.stream[t][g][3] = int((t - gtime) / first_time * 0xff)
t += self.time_step
gtime = gtime + time_overlap
return
def gradient(self, globe_start, globe_end, colour_from, colour_to):
"""Set a gradient across a section of lights.
"""
globe_start = self.limit(globe_start, 0, self.NUM_GLOBES-1)
globe_end = self.limit(globe_end, 0, self.NUM_GLOBES-1)
span = globe_end - globe_start
colour_from = self.setcolour(colour_from)
colour_to = self.setcolour(colour_to)
here = self.current_time + 0
g = globe_start
while g <= globe_end:
factor = (g - globe_start)*100 / span
unfactor = float(100 - factor) / 100
factor = float(factor) / 100
# print "Wash of %f:%f" % (factor, unfactor)
self.stream[here][g] = [
int(colour_from[0] * unfactor + colour_to[0] * factor),
int(colour_from[1] * unfactor + colour_to[1] * factor),
int(colour_from[2] * unfactor + colour_to[2] * factor),
self.stream[self.current_time][g][3] ]
t = here
while t <= self.furthest_edge:
self.stream[t][g] = [ self.stream[here][g][0], self.stream[here][g][1], self.stream[here][g][2], self.stream[t][g][3] ]
t += self.time_step
g += 1
def wash(self, globe_start, globe_end, steps, delay, start_from, colour_list):
"""Set a moving gradient."""
globe_start = self.limit(globe_start, 0, self.NUM_GLOBES-1)
globe_end = self.limit(globe_end, 0, self.NUM_GLOBES-1)
if delay < 0:
delay = 0
# Setup the raw colours
colours = [ ]
c = self.setcolour(colour_list.pop())
while c and len(colour_list) > 1:
d = max(0, colour_list.pop())
from_c = c
c = self.setcolour(colour_list.pop())
if c:
x = 0
while x < d:
factor = x * 100 / d
unfactor = float(100 - factor)/100
factor = float(factor)/100
colours.append( [
int(from_c[0] * unfactor + c[0] * factor),
int(from_c[1] * unfactor + c[1] * factor),
int(from_c[2] * unfactor + c[2] * factor) ])
x += 1
if c:
colours.append(c)
# Now paint them
span = globe_end - globe_start
here = self.current_time + 0
self.fill_to(here + steps * delay)
inner_step = delay
while steps > 0:
c = start_from
for g in range(globe_start, globe_end+1):
self.stream[here][g] = [ colours[c][0], colours[c][1], colours[c][2], self.stream[here][g][3] ]
c += 1
inner_step -= self.time_step
if inner_step <= 0:
inner_step = delay
steps -= 1
start_from += 1
here += self.time_step
def rotate(self, globe_start, globe_end, steps, distance, delay):
"""Rotate the colours of a subset of globes."""
globe_start = self.limit(globe_start, 0, self.NUM_GLOBES-1)
globe_end = self.limit(globe_end, 0, self.NUM_GLOBES-1)
span = globe_end - globe_start
if delay < 0:
delay = 0
if distance == 0:
return # whoops nothing to do!
colours = []
for g in range(globe_start, globe_end+1):
colours.append([ self.stream[self.current_time][g][0], self.stream[self.current_time][g][1], self.stream[self.current_time][g][2] ])
self.fill_to(self.current_time + steps * delay)
# while steps > 0:
# def shift(self,
def wait(self, delay=False):
"""Move the "current" time forward by this amount in milliseconds.
Called without an argument will move 'now' to the latest that's been recorded.
"""
if delay == False:
self.current_time = self.furthest_edge
else:
distance = self.current_time + delay
self.fill_to(distance)
self.current_time = distance
def fill_to(self, target):
"""Extends the light storage forward in time, copying the most-recent values.
Calling with a target before the furthest extent will do nothing.
"""
here = self.furthest_edge
# print "Filling %d to %d" % (here, target)
current_globes = self.stream[here]
self.furthest_edge += self.time_step
while self.furthest_edge <= target:
self.stream[self.furthest_edge] = [ current_globes[g][:] for g in range(self.NUM_GLOBES) ]
self.furthest_edge += self.time_step
self.furthest_edge -= self.time_step
return
def go(self):
"""This is intended for debugging."""
t = 0
times = self.stream.keys()
times.sort()
for t in times:
print "%d: " % t,
for g in self.stream[t]:
if g[3] > 0:
print '%02x%02x%02x_%d' % (g[0], g[1], g[2], g[3]),
else:
print '-',
print
def render(self):
self.render_rest()
def render_udp(self):
"""Renders the output to a Holiday device using UDP
Local rendering is currently not supported.
"""
t = 0
delay = float(self.time_step) / 1000 / 2
"""The render routine sends out a UDP packet using the SecretAPI"""
if (self.remote == True):
import socket, array
port = 9988
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # UDP
while t < self.current_time:
packet = array.array('B', [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]) # initialize basic packet, ignore first 10 bytes
for c in self.stream[t]:
if c[3] == 0xff:
packet.append(c[0])
packet.append(c[1])
packet.append(c[2])
else:
packet.append((c[0] * c[3])>> 8)
packet.append((c[1] * c[3])>> 8)
packet.append((c[2] * c[3])>> 8)
sock.sendto(packet, (self.addr, port))
time.sleep(delay)
t += self.time_step
else:
self.go()
def render_rest(self):
"""Renders the output to a Holiday device using HTTP/REST
Local rendering is currently not supported.
"""
t = 0
delay = float(self.time_step) / 1000 / 2
if (self.remote == True):
import requests, json
while t < self.current_time:
globes = []
for c in self.stream[t]:
if c[3] == 100:
globes.append("#%02x%02x%02x" % (c[0], c[1], c[2]))
else:
globes.append("#%02x%02x%02x" % ((c[0] * c[3])>> 8, (c[1] * c[3])>> 8, (c[2] * c[3])>> 8))
message = json.dumps({ "lights": globes })
r = requests.put('http://%s/iotas/0.1/device/moorescloud.holiday/localhost/setlights' % self.addr, data=message)
time.sleep(delay)
t += self.time_step
else:
self.go()
if __name__ == '__main__':
layer = LightLayer(remote=False)
print layer
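# --- Usage sketch (illustrative only, not part of the original module) ---
# The calls below use only methods defined above; 'red' and 'blue' are assumed
# to be names in colours.colourMap, and the address is a placeholder.
#
# demo = LightLayer(remote=True, addr='10.0.0.10')
# demo.setblock(0, 24, 'red')            # first 25 globes solid red
# demo.gradient(25, 49, 'red', 'blue')   # fade from red to blue on the rest
# demo.wait(500)                         # advance "now" by 500 milliseconds
# demo.render()                          # send the recorded frames to the device via REST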
|
ICHS is calling upon state lawmakers to protect Basic Health, which is a state safety net that provides medical insurance to low-income people not eligible for Medicaid. ICHS also backs state support for multilingual interpretation for patients and other health services.
As of Sept. 30, 2011, ICHS had 2,284 patients who are Basic Health enrollees. If Basic Health is eliminated in January 2012, ICHS would see a projected shortfall of about $800,000. This takes into account projected revenue for 2012.
If Basic Health dollars from Olympia are cut, these 2,284 people would become uninsured “self-pay” patients. As a federally qualified health care center, ICHS does not decline services to people.
In the year-to-date period for 2011, 30 percent of medical and dental patients at ICHS are uninsured. In 2010, about 20 percent of ICHS patients were medically uninsured and 16 percent were Basic Health enrollees. The relative increase in the medically uninsured rate, from 20 percent to 36 percent, is 80 percent.
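To see where that 80 percent figure comes from, take the two rates quoted: (36 - 20) / 20 = 0.80, i.e. the 36 percent rate is an 80 percent relative increase over the 2010 rate of 20 percent.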
For more information, visit www.ichs.com. |
# System
import json,re
from time import strptime, mktime
from datetime import datetime
# SBaaS
from .stage01_quantification_peakInformation_query import stage01_quantification_peakInformation_query
from .stage01_quantification_MQResultsTable_query import stage01_quantification_MQResultsTable_query
# Resources
from io_utilities.base_importData import base_importData
from io_utilities.base_exportData import base_exportData
from matplotlib_utilities.matplot import matplot
from SBaaS_base.sbaas_template_io import sbaas_template_io
from ddt_python.ddt_container import ddt_container
class stage01_quantification_peakInformation_io(stage01_quantification_peakInformation_query,
stage01_quantification_MQResultsTable_query,
sbaas_template_io):
def export_scatterLinePlot_peakInformation_matplot(self,experiment_id_I,sample_names_I=[],
sample_types_I=['Standard'],
component_names_I=[],
peakInfo_I = ['retention_time'],
acquisition_date_and_time_I=[None,None],
x_title_I='Time [hrs]',y_title_I='Retention Time [min]',y_data_type_I='acquisition_date_and_time',
plot_type_I='single',
filename_O = 'tmp',
figure_format_O = '.png'):
'''Analyze retention-time, height, s/n, and assymetry'''
#INPUT:
# experiment_id_I
# sample_names_I
# sample_types_I
# component_names_I
# peakInfo_I
# acquisition_date_and_time_I = ['%m/%d/%Y %H:%M','%m/%d/%Y %H:%M']
# y_data_type_I = 'acquisition_date_and_time' or 'count'
# plot_type_I = 'single', 'multiple', or 'sub'
print('export_peakInformation...')
#TODO: remove after refactor
mplot = matplot();
#convert string date time to datetime
# e.g. time.strptime('4/15/2014 15:51','%m/%d/%Y %H:%M')
acquisition_date_and_time = [];
if acquisition_date_and_time_I and acquisition_date_and_time_I[0] and acquisition_date_and_time_I[1]:
for dateandtime in acquisition_date_and_time_I:
time_struct = strptime(dateandtime,'%m/%d/%Y %H:%M')
dt = datetime.fromtimestamp(mktime(time_struct))
acquisition_date_and_time.append(dt);
else: acquisition_date_and_time=[None,None]
data_O = [];
component_names_all = [];
# get sample names
if sample_names_I and sample_types_I and len(sample_types_I)==1:
sample_names = sample_names_I;
sample_types = [sample_types_I[0] for sn in sample_names];
else:
sample_names = [];
sample_types = [];
for st in sample_types_I:
sample_names_tmp = [];
sample_names_tmp = self.get_sampleNames_experimentIDAndSampleType(experiment_id_I,st);
sample_names.extend(sample_names_tmp);
sample_types_tmp = [];
sample_types_tmp = [st for sn in sample_names_tmp];
sample_types.extend(sample_types_tmp);
for sn in sample_names:
print('analyzing peakInformation for sample_name ' + sn);
# get sample description
desc = {};
desc = self.get_description_experimentIDAndSampleID_sampleDescription(experiment_id_I,sn);
# get component names
if component_names_I:
component_names = component_names_I;
else:
component_names = [];
component_names = self.get_componentsNames_experimentIDAndSampleName(experiment_id_I,sn);
component_names_all.extend(component_names);
for cn in component_names:
# get rt, height, s/n
sst_data = {};
sst_data = self.get_peakInfo_sampleNameAndComponentName(sn,cn,acquisition_date_and_time);
if sst_data:
tmp = {};
tmp.update(sst_data);
tmp.update(desc);
tmp.update({'sample_name':sn});
data_O.append(tmp);
# Plot data over time
if component_names_I:
# use input order
component_names_unique = component_names_I;
else:
# use alphabetical order
component_names_unique = list(set(component_names_all));
component_names_unique.sort();
if plot_type_I == 'single':
for cn in component_names_unique:
data_parameters = {};
data_parameters_stats = {};
for parameter in peakInfo_I:
data_parameters[parameter] = [];
acquisition_date_and_times = [];
acquisition_date_and_times_hrs = [];
sample_names_parameter = [];
sample_types_parameter = [];
component_group_name = None;
for sn_cnt,sn in enumerate(sample_names):
for d in data_O:
if d['sample_name'] == sn and d['component_name'] == cn and d[parameter]:
data_parameters[parameter].append(d[parameter]);
acquisition_date_and_times.append(d['acquisition_date_and_time'])
acquisition_date_and_times_hrs.append(d['acquisition_date_and_time'].year*8765.81277 + d['acquisition_date_and_time'].month*730.484 + d['acquisition_date_and_time'].day*365.242 + d['acquisition_date_and_time'].hour + d['acquisition_date_and_time'].minute / 60. + d['acquisition_date_and_time'].second / 3600.); #convert using datetime object
sample_names_parameter.append(sn);
sample_types_parameter.append(sample_types[sn_cnt])
component_group_name = d['component_group_name'];
# normalize time
acquisition_date_and_times_hrs.sort();
t_start = min(acquisition_date_and_times_hrs);
for t_cnt,t in enumerate(acquisition_date_and_times_hrs):
if y_data_type_I == 'acquisition_date_and_time':acquisition_date_and_times_hrs[t_cnt] = t - t_start;
elif y_data_type_I == 'count':acquisition_date_and_times_hrs[t_cnt] = t_cnt;
title = cn + '\n' + parameter;
filename = filename_O + '_' + experiment_id_I + '_' + cn + '_' + parameter + figure_format_O;
mplot.scatterLinePlot(title,x_title_I,y_title_I,acquisition_date_and_times_hrs,data_parameters[parameter],fit_func_I='lowess',show_eqn_I=False,show_r2_I=False,filename_I=filename,show_plot_I=False);
if plot_type_I == 'multiple':
for parameter in peakInfo_I:
data_parameters = [];
acquisition_date_and_times = [];
acquisition_date_and_times_hrs = [];
sample_names_parameter = [];
sample_types_parameter = [];
component_group_names = [];
component_names = [];
for cn_cnt,cn in enumerate(component_names_unique):
data = [];
acquisition_date_and_time = [];
acquisition_date_and_time_hrs = [];
sample_name_parameter = [];
sample_type_parameter = [];
for sn_cnt,sn in enumerate(sample_names):
for d in data_O:
if d['sample_name'] == sn and d['component_name'] == cn and d[parameter]:
data.append(d[parameter])
acquisition_date_and_time.append(d['acquisition_date_and_time'])
acquisition_date_and_time_hrs.append(d['acquisition_date_and_time'].year*8765.81277 + d['acquisition_date_and_time'].month*730.484 + d['acquisition_date_and_time'].day*365.242 + d['acquisition_date_and_time'].hour + d['acquisition_date_and_time'].minute / 60. + d['acquisition_date_and_time'].second / 3600.); #convert using datetime object
sample_name_parameter.append(sn);
sample_type_parameter.append(sample_types[sn_cnt])
if sn_cnt == 0:
component_group_names.append(d['component_group_name']);
component_names.append(d['component_name']);
# normalize time
acquisition_date_and_time_hrs.sort();
t_start = min(acquisition_date_and_time_hrs);
for t_cnt,t in enumerate(acquisition_date_and_time_hrs):
if y_data_type_I == 'acquisition_date_and_time':acquisition_date_and_time_hrs[t_cnt] = t - t_start;
elif y_data_type_I == 'count':acquisition_date_and_time_hrs[t_cnt] = t_cnt;
data_parameters.append(data);
acquisition_date_and_times.append(acquisition_date_and_time)
acquisition_date_and_times_hrs.append(acquisition_date_and_time_hrs);
sample_names_parameter.append(sample_name_parameter);
sample_types_parameter.append(sample_type_parameter)
title = parameter;
filename = filename_O + '_' + experiment_id_I + '_' + parameter + figure_format_O;
mplot.multiScatterLinePlot(title,x_title_I,y_title_I,acquisition_date_and_times_hrs,data_parameters,data_labels_I=component_group_names,fit_func_I=None,show_eqn_I=False,show_r2_I=False,filename_I=filename,show_plot_I=False);
def export_scatterLinePlot_peakResolution_matplot(self,experiment_id_I,sample_names_I=[],sample_types_I=['Standard'],component_name_pairs_I=[],
peakInfo_I = ['rt_dif','resolution'],
acquisition_date_and_time_I=[None,None],
x_title_I='Time [hrs]',y_title_I='Retention Time [min]',y_data_type_I='acquisition_date_and_time',
plot_type_I='single'):
'''Analyze resolution for critical pairs'''
#Input:
# experiment_id_I
# sample_names_I
# sample_types_I
# component_name_pairs_I = [[component_name_1,component_name_2],...]
# acquisition_date_and_time_I = ['%m/%d/%Y %H:%M','%m/%d/%Y %H:%M']
#TODO: remove after refactor
mplot = matplot();
print('export_peakInformation_resolution...')
#convert string date time to datetime
# e.g. time.strptime('4/15/2014 15:51','%m/%d/%Y %H:%M')
acquisition_date_and_time = [];
if acquisition_date_and_time_I and acquisition_date_and_time_I[0] and acquisition_date_and_time_I[1]:
for dateandtime in acquisition_date_and_time_I:
time_struct = strptime(dateandtime,'%m/%d/%Y %H:%M')
dt = datetime.fromtimestamp(mktime(time_struct))
acquisition_date_and_time.append(dt);
else: acquisition_date_and_time=[None,None]
data_O = [];
component_names_pairs_all = [];
# get sample names
if sample_names_I and sample_types_I and len(sample_types_I)==1:
sample_names = sample_names_I;
sample_types = [sample_types_I[0] for sn in sample_names];
else:
sample_names = [];
sample_types = [];
for st in sample_types_I:
sample_names_tmp = [];
sample_names_tmp = self.get_sampleNames_experimentIDAndSampleType(experiment_id_I,st);
sample_names.extend(sample_names_tmp);
sample_types_tmp = [];
sample_types_tmp = [st for sn in sample_names_tmp];
sample_types.extend(sample_types_tmp);
for sn in sample_names:
print('analyzing peakInformation for sample_name ' + sn);
for component_name_pair in component_name_pairs_I:
# get critical pair data
cpd1 = {};
cpd2 = {};
cpd1 = self.get_peakInfo_sampleNameAndComponentName(sn,component_name_pair[0],acquisition_date_and_time);
cpd2 = self.get_peakInfo_sampleNameAndComponentName(sn,component_name_pair[1],acquisition_date_and_time);
# calculate the RT difference and resolution
rt_dif = 0.0;
rt_dif = abs(cpd1['retention_time']-cpd2['retention_time'])
resolution = 0.0;
resolution = rt_dif/(0.5*(cpd1['width_at_50']+cpd2['width_at_50']));
# record data
data_O.append({'component_name_pair':component_name_pair,
'rt_dif':rt_dif,
'resolution':resolution,
'component_group_name_pair':[cpd1['component_group_name'],cpd2['component_group_name']],
'sample_name':sn,
'acquisition_date_and_time':cpd1['acquisition_date_and_time']});
if plot_type_I == 'single':
for cnp in component_name_pairs_I:
data_parameters = {};
data_parameters_stats = {};
for parameter in peakInfo_I:
data_parameters[parameter] = [];
acquisition_date_and_times = [];
acquisition_date_and_times_hrs = [];
sample_names_parameter = [];
sample_types_parameter = [];
component_group_name_pair = None;
for sn_cnt,sn in enumerate(sample_names):
for d in data_O:
if d['sample_name'] == sn and d['component_name_pair'] == cnp and d[parameter]:
data_parameters[parameter].append(d[parameter]);
acquisition_date_and_times.append(d['acquisition_date_and_time'])
acquisition_date_and_times_hrs.append(d['acquisition_date_and_time'].year*8765.81277 + d['acquisition_date_and_time'].month*730.484 + d['acquisition_date_and_time'].day*365.242 + d['acquisition_date_and_time'].hour + d['acquisition_date_and_time'].minute / 60. + d['acquisition_date_and_time'].second / 3600.); #convert using datetime object
sample_names_parameter.append(sn);
sample_types_parameter.append(sample_types[sn_cnt])
component_group_name_pair = d['component_group_name_pair'];
# normalize time
acquisition_date_and_times_hrs.sort();
t_start = min(acquisition_date_and_times_hrs);
for t_cnt,t in enumerate(acquisition_date_and_times_hrs):
if y_data_type_I == 'acquisition_date_and_time':acquisition_date_and_times_hrs[t_cnt] = t - t_start;
elif y_data_type_I == 'count':acquisition_date_and_times_hrs[t_cnt] = t_cnt;
title = '-'.join(cnp) + '\n' + parameter;
filename = 'data/_output/' + experiment_id_I + '_' + '-'.join(cnp) + '_' + parameter + '.png'
mplot.scatterLinePlot(title,x_title_I,y_title_I,acquisition_date_and_times_hrs,data_parameters[parameter],fit_func_I='lowess',show_eqn_I=False,show_r2_I=False,filename_I=filename,show_plot_I=False);
if plot_type_I == 'multiple':
for parameter in peakInfo_I:
data_parameters = [];
acquisition_date_and_times = [];
acquisition_date_and_times_hrs = [];
sample_names_parameter = [];
sample_types_parameter = [];
component_group_names_pair = [];
component_names_pair = [];
for cnp_cnt,cnp in enumerate(component_name_pairs_I):
data = [];
acquisition_date_and_time = [];
acquisition_date_and_time_hrs = [];
sample_name_parameter = [];
sample_type_parameter = [];
for sn_cnt,sn in enumerate(sample_names):
for d in data_O:
if d['sample_name'] == sn and d['component_name_pair'] == cnp and d[parameter]:
data.append(d[parameter])
acquisition_date_and_time.append(d['acquisition_date_and_time'])
acquisition_date_and_time_hrs.append(d['acquisition_date_and_time'].year*8765.81277 + d['acquisition_date_and_time'].month*730.484 + d['acquisition_date_and_time'].day*365.242 + d['acquisition_date_and_time'].hour + d['acquisition_date_and_time'].minute / 60. + d['acquisition_date_and_time'].second / 3600.); #convert using datetime object
sample_name_parameter.append(sn);
sample_type_parameter.append(sample_types[sn_cnt])
if sn_cnt == 0:
component_group_names_pair.append(d['component_group_name_pair']);
component_names_pair.append(d['component_name_pair']);
# normalize time
acquisition_date_and_time_hrs.sort();
t_start = min(acquisition_date_and_time_hrs);
for t_cnt,t in enumerate(acquisition_date_and_time_hrs):
if y_data_type_I == 'acquisition_date_and_time':acquisition_date_and_time_hrs[t_cnt] = t - t_start;
elif y_data_type_I == 'count':acquisition_date_and_time_hrs[t_cnt] = t_cnt;
data_parameters.append(data);
acquisition_date_and_times.append(acquisition_date_and_time)
acquisition_date_and_times_hrs.append(acquisition_date_and_time_hrs);
sample_names_parameter.append(sample_name_parameter);
sample_types_parameter.append(sample_type_parameter)
# create data labels
data_labels = [];
for component_group_names in component_group_names_pair:
data_labels.append(component_group_names[0] + '/' + component_group_names[1]);
title = parameter;
filename = 'data/_output/' + experiment_id_I + '_' + parameter + '.eps'
mplot.multiScatterLinePlot(title,x_title_I,y_title_I,acquisition_date_and_times_hrs,data_parameters,data_labels_I=data_labels,fit_func_I=None,show_eqn_I=False,show_r2_I=False,filename_I=filename,show_plot_I=False);
def export_boxAndWhiskersPlot_peakInformation_matplot(self,experiment_id_I,
peakInfo_parameter_I = ['height','retention_time','width_at_50','signal_2_noise'],
component_names_I=[],
filename_O = 'tmp',
figure_format_O = '.png'):
'''generate a boxAndWhiskers plot from peakInformation table'''
#TODO: remove after refactor
mplot = matplot();
print('export_boxAndWhiskersPlot...')
if peakInfo_parameter_I:
peakInfo_parameter = peakInfo_parameter_I;
else:
peakInfo_parameter = [];
peakInfo_parameter = self.get_peakInfoParameter_experimentID_dataStage01PeakInformation(experiment_id_I);
for parameter in peakInfo_parameter:
data_plot_mean = [];
data_plot_cv = [];
data_plot_ci = [];
data_plot_parameters = [];
data_plot_component_names = [];
data_plot_data = [];
data_plot_units = [];
if component_names_I:
component_names = component_names_I;
else:
component_names = [];
component_names = self.get_componentNames_experimentIDAndPeakInfoParameter_dataStage01PeakInformation(experiment_id_I,parameter);
for cn in component_names:
print('generating boxAndWhiskersPlot for component_name ' + cn);
# get the data
data = {};
data = self.get_row_experimentIDAndPeakInfoParameterComponentName_dataStage01PeakInformation(experiment_id_I,parameter,cn)
if data and data['peakInfo_ave']:
# record data for plotting
data_plot_mean.append(data['peakInfo_ave']);
data_plot_cv.append(data['peakInfo_cv']);
data_plot_ci.append([data['peakInfo_lb'],data['peakInfo_ub']]);
data_plot_data.append(data['peakInfo_data']);
data_plot_parameters.append(parameter);
data_plot_component_names.append(data['component_group_name']);
data_plot_units.append('Retention_time [min]');
# visualize the stats:
data_plot_se = [(x[1]-x[0])/2 for x in data_plot_ci]
filename = filename_O + '_' + experiment_id_I + '_' + parameter + figure_format_O;
mplot.boxAndWhiskersPlot(data_plot_parameters[0],data_plot_component_names,data_plot_units[0],'samples',data_plot_data,data_plot_mean,data_plot_ci,filename_I=filename,show_plot_I=False);
def export_boxAndWhiskersPlot_peakResolution_matplot(self,experiment_id_I,component_name_pairs_I=[],
peakInfo_parameter_I = ['rt_dif','resolution'],
filename_O = 'tmp',
figure_format_O = '.png'):
'''generate a boxAndWhiskers plot from peakResolution table'''
#TODO: remove after refactor
mplot = matplot();
print('export_boxAndWhiskersPlot...')
if peakInfo_parameter_I:
peakInfo_parameter = peakInfo_parameter_I;
else:
peakInfo_parameter = [];
peakInfo_parameter = self.get_peakInfoParameter_experimentID_dataStage01PeakResolution(experiment_id_I);
for parameter in peakInfo_parameter:
data_plot_mean = [];
data_plot_cv = [];
data_plot_ci = [];
data_plot_parameters = [];
data_plot_component_names = [];
data_plot_data = [];
data_plot_units = [];
if component_name_pairs_I:
component_name_pairs = component_name_pairs_I;
else:
component_name_pairs = [];
component_name_pairs = self.get_componentNamePairs_experimentIDAndPeakInfoParameter_dataStage01PeakResolution(experiment_id_I,parameter);
for cn in component_name_pairs:
# get the data
data = {};
data = self.get_row_experimentIDAndPeakInfoParameterComponentName_dataStage01PeakResolution(experiment_id_I,parameter,cn)
if data and data['peakInfo_ave']:
# record data for plotting
data_plot_mean.append(data['peakInfo_ave']);
data_plot_cv.append(data['peakInfo_cv']);
data_plot_ci.append([data['peakInfo_lb'],data['peakInfo_ub']]);
data_plot_data.append(data['peakInfo_data']);
data_plot_parameters.append(parameter);
data_plot_component_names.append(data['component_group_name_pair'][0]+'/'+data['component_group_name_pair'][0]);
data_plot_units.append('Retention_time [min]');
# visualize the stats:
data_plot_se = [(x[1]-x[0])/2 for x in data_plot_ci]
filename = filename_O + '_' + experiment_id_I + '_' + parameter + figure_format_O;
mplot.boxAndWhiskersPlot(data_plot_parameters[0],data_plot_component_names,data_plot_units[0],'samples',data_plot_data,data_plot_mean,data_plot_ci,filename_I=filename,show_plot_I=False);
def export_boxAndWhiskersPlot_peakInformation_js(
self,
experiment_id_I=[],
analysis_id_I=[],
sample_name_abbreviations_I=[],
component_names_I=[],
component_group_names_I=[],
peakInfo_I = ['height','retention_time','width_at_50','signal_2_noise'],
data_dir_I='tmp'):
'''Export data for a box and whiskers plot from peakInformation
INPUT:
#TODO add in template for box and whiskers plot from stats
'''
print('export_boxAndWhiskersPlot...')
data_O = [];
#if peakInfo_parameter_I:
# peakInfo_parameter = peakInfo_parameter_I;
#else:
# peakInfo_parameter = [];
# peakInfo_parameter = self.get_peakInfoParameter_experimentID_dataStage01PeakInformation(experiment_id_I);
#for parameter in peakInfo_parameter:
# if component_names_I:
# component_names = component_names_I;
# else:
# component_names = [];
# component_names = self.get_componentNames_experimentIDAndPeakInfoParameter_dataStage01PeakInformation(experiment_id_I,parameter);
# for cn in component_names:
# print('generating boxAndWhiskersPlot for component_name ' + cn);
# # get the data
# row = [];
# row = self.get_row_experimentIDAndPeakInfoParameterComponentName_dataStage01PeakInformation(experiment_id_I,parameter,cn);
# if row:
# #TODO: fix type in database 'acqusition_date_and_times'
# tmp_list = [];
# for d in row['acqusition_date_and_times']:
# tmp = None;
# tmp = self.convert_datetime2string(d);
# tmp_list.append(tmp);
# row['acqusition_date_and_times'] = tmp_list;
# row['component_name'] = re.escape(row['component_name']);
# data_O.append(row);
data_O = self.get_row_analysisID_dataStage01PeakInformation(
analysis_id_I=analysis_id_I,
experiment_id_I=experiment_id_I,
peakInfo_parameter_I=peakInfo_I,
component_name_I=component_names_I,
component_group_name_I=component_group_names_I,
sample_name_abbreviation_I=sample_name_abbreviations_I
)
# dump chart parameters to a js files
data1_keys = ['experiment_id',
'component_group_name',
'component_name',
'peakInfo_parameter',
#'peakInfo_ave',
#'peakInfo_cv',
#'peakInfo_lb',
#'peakInfo_ub',
#'peakInfo_units',
'sample_name_abbreviation',
#'sample_names',
#'sample_types',
#'acqusition_date_and_times'
];
data1_nestkeys = ['component_name'];
data1_keymap = {'xdata':'component_name',
'ydatamean':'peakInfo_ave',
'ydatalb':'peakInfo_lb',
'ydataub':'peakInfo_ub',
#'ydatamin':None,
#'ydatamax':None,
#'ydataiq1':None,
#'ydataiq3':None,
#'ydatamedian':None,
'serieslabel':'peakInfo_parameter',
'featureslabel':'component_name'};
# make the data object
dataobject_O = [{"data":data_O,"datakeys":data1_keys,"datanestkeys":data1_nestkeys}];
# make the tile parameter objects
formtileparameters_O = {'tileheader':'Filter menu','tiletype':'html','tileid':"filtermenu1",'rowid':"row1",'colid':"col1",
'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-4"};
formparameters_O = {'htmlid':'filtermenuform1',"htmltype":'form_01',"formsubmitbuttonidtext":{'id':'submit1','text':'submit'},"formresetbuttonidtext":{'id':'reset1','text':'reset'},"formupdatebuttonidtext":{'id':'update1','text':'update'}};
formtileparameters_O.update(formparameters_O);
svgparameters_O = {"svgtype":'boxandwhiskersplot2d_02',"svgkeymap":[data1_keymap],
'svgid':'svg1',
"svgmargin":{ 'top': 50, 'right': 150, 'bottom': 50, 'left': 50 },
"svgwidth":500,"svgheight":350,
"svgx1axislabel":"component_name",
"svgy1axislabel":"parameter_value",
'svgformtileid':'filtermenu1','svgresetbuttonid':'reset1','svgsubmitbuttonid':'submit1'};
svgtileparameters_O = {'tileheader':'Custom box and whiskers plot',
'tiletype':'svg',
'tileid':"tile2",
'rowid':"row1",
'colid':"col2",
'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-8"};
svgtileparameters_O.update(svgparameters_O);
tableparameters_O = {"tabletype":'responsivetable_01',
'tableid':'table1',
"tablefilters":None,
"tableclass":"table table-condensed table-hover",
'tableformtileid':'filtermenu1','tableresetbuttonid':'reset1','tablesubmitbuttonid':'submit1'};
tabletileparameters_O = {'tileheader':'peakInformation','tiletype':'table','tileid':"tile3",'rowid':"row2",'colid':"col1",
'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-12"};
tabletileparameters_O.update(tableparameters_O);
parametersobject_O = [formtileparameters_O,svgtileparameters_O,tabletileparameters_O];
tile2datamap_O = {"filtermenu1":[0],"tile2":[0],"tile3":[0]};
# dump the data to a json file
ddtutilities = ddt_container(parameters_I = parametersobject_O,data_I = dataobject_O,tile2datamap_I = tile2datamap_O,filtermenu_I = None);
if data_dir_I=='tmp':
filename_str = self.settings['visualization_data'] + '/tmp/ddt_data.js'
elif data_dir_I=='data_json':
data_json_O = ddtutilities.get_allObjects_js();
return data_json_O;
with open(filename_str,'w') as file:
file.write(ddtutilities.get_allObjects());
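    # Usage sketch (the exporter instance and the experiment/analysis identifiers
    # below are hypothetical): export the peak-information box-and-whiskers data
    # for one analysis to the default tmp/ddt_data.js file, or pass
    # data_dir_I='data_json' to get the JSON objects back directly:
    #
    #     exporter.export_boxAndWhiskersPlot_peakInformation_js(
    #         experiment_id_I='exp01',
    #         analysis_id_I='exp01_QCs',
    #         peakInfo_I=['height', 'retention_time'])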
def export_boxAndWhiskersPlot_peakResolution_js(self,experiment_id_I,
component_name_pairs_I=[],
peakInfo_parameter_I = ['rt_dif','resolution'],
data_dir_I='tmp'):
'''Export data for a box and whiskers plot'''
print('export_boxAndWhiskersPlot...')
data_O=[];
if peakInfo_parameter_I:
peakInfo_parameter = peakInfo_parameter_I;
else:
peakInfo_parameter = [];
peakInfo_parameter = self.get_peakInfoParameter_experimentID_dataStage01PeakResolution(experiment_id_I);
for parameter in peakInfo_parameter:
if component_name_pairs_I:
component_name_pairs = component_name_pairs_I;
else:
component_name_pairs = [];
component_name_pairs = self.get_componentNamePairs_experimentIDAndPeakInfoParameter_dataStage01PeakResolution(experiment_id_I,parameter);
for cn in component_name_pairs:
# get the data
row = {};
row = self.get_row_experimentIDAndPeakInfoParameterComponentName_dataStage01PeakResolution(experiment_id_I,parameter,cn)
if row and row['peakInfo_ave']:
#TODO: fix type in database 'acqusition_date_and_times'
tmp_list = [];
for d in row['acqusition_date_and_times']:
tmp = None;
tmp = self.convert_datetime2string(d);
tmp_list.append(tmp);
row['acqusition_date_and_times'] = tmp_list;
data_O.append(row);
# dump chart parameters to a js files
data1_keys = ['experiment_id',
'component_group_name_pair',
'component_name_pair',
'peakInfo_parameter',
#'peakInfo_ave',
#'peakInfo_cv',
#'peakInfo_lb',
#'peakInfo_ub',
#'peakInfo_units',
'sample_names',
'sample_types',
#'acqusition_date_and_times'
];
data1_nestkeys = ['component_name_pair'];
data1_keymap = {'xdata':'component_name_pair',
'ydatamean':'peakInfo_ave',
'ydatalb':'peakInfo_lb',
'ydataub':'peakInfo_ub',
#'ydatamin':None,
#'ydatamax':None,
#'ydataiq1':None,
#'ydataiq3':None,
#'ydatamedian':None,
'serieslabel':'peakInfo_parameter',
'featureslabel':'component_name_pair'};
# make the data object
dataobject_O = [{"data":data_O,"datakeys":data1_keys,"datanestkeys":data1_nestkeys}];
# make the tile parameter objects
formtileparameters_O = {'tileheader':'Filter menu','tiletype':'html','tileid':"filtermenu1",'rowid':"row1",'colid':"col1",
'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-4"};
formparameters_O = {'htmlid':'filtermenuform1',"htmltype":'form_01',"formsubmitbuttonidtext":{'id':'submit1','text':'submit'},"formresetbuttonidtext":{'id':'reset1','text':'reset'},"formupdatebuttonidtext":{'id':'update1','text':'update'}};
formtileparameters_O.update(formparameters_O);
svgparameters_O = {"svgtype":'boxandwhiskersplot2d_01',"svgkeymap":[data1_keymap],
'svgid':'svg1',
"svgmargin":{ 'top': 50, 'right': 150, 'bottom': 50, 'left': 50 },
"svgwidth":500,"svgheight":350,
"svgx1axislabel":"component_name_pair","svgy1axislabel":"parameter_value",
'svgformtileid':'filtermenu1','svgresetbuttonid':'reset1','svgsubmitbuttonid':'submit1'};
svgtileparameters_O = {'tileheader':'Custom box and whiskers plot','tiletype':'svg','tileid':"tile2",'rowid':"row1",'colid':"col2",
'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-8"};
svgtileparameters_O.update(svgparameters_O);
tableparameters_O = {"tabletype":'responsivetable_01',
'tableid':'table1',
"tablefilters":None,
"tableclass":"table table-condensed table-hover",
'tableformtileid':'filtermenu1','tableresetbuttonid':'reset1','tablesubmitbuttonid':'submit1'};
tabletileparameters_O = {'tileheader':'peakResolution','tiletype':'table','tileid':"tile3",'rowid':"row2",'colid':"col1",
'tileclass':"panel panel-default",'rowclass':"row",'colclass':"col-sm-12"};
tabletileparameters_O.update(tableparameters_O);
parametersobject_O = [formtileparameters_O,svgtileparameters_O,tabletileparameters_O];
tile2datamap_O = {"filtermenu1":[0],"tile2":[0],"tile3":[0]};
# dump the data to a json file
ddtutilities = ddt_container(parameters_I = parametersobject_O,data_I = dataobject_O,tile2datamap_I = tile2datamap_O,filtermenu_I = None);
if data_dir_I=='tmp':
filename_str = self.settings['visualization_data'] + '/tmp/ddt_data.js'
elif data_dir_I=='data_json':
data_json_O = ddtutilities.get_allObjects_js();
return data_json_O;
with open(filename_str,'w') as file:
file.write(ddtutilities.get_allObjects());
|
CNN/Stylemagazine.com Newswire | 3/28/2018, 6:23 a.m.
(CNN) -- Searching for the cheapest airfare may be the most popular way of deciding on a flight, but secret extras baked right into the ticket may bring extra value to your booking, no matter how much you paid.
Follow these three lessons to ensure that you're maximizing the value of a travel booking for more than just the flight alone.
A single letter of the alphabet lets the airline know where in the hierarchy of airfares your ticket ranks.
This is known as the fare basis code. Though passengers typically overlook it, this minuscule mark is the key to unlocking important information about your booking, including how easy it is to change the ticket, whether you are eligible for an upgrade, and how many frequent flier miles may be earned.
Airlines do not make it easy to research fare codes, as they are often buried in the fine print, but taking an extra minute to locate your code and learn what it means for your ticket can make all the difference.
Matthew Ma, co-founder of airfare sale website The Flight Deal, is an advocate for understanding fare codes, and makes a point of including the code information with each deal post.
As Ma tells CNN Travel: "Knowing the fare code means knowing whether or not the ticket you just purchased will earn frequent flier miles, which help you to see more of the world for less."
Ma cites a flight he booked to Jordan as an example.
"Most of Royal Jordanian's cheap fares are 'R' or 'N' fare codes, which earn no frequent flier miles in [its alliance partner] American Airlines' program," he says.
"When the airline offered a sale with higher 'L' fares on offer, I booked that, and I'll earn American Airlines miles. This means miles for either future travel and upgrades, and a chance at elite status for further airline perks."
While deciphering a fare code and what it means for your ticket may take a bit of squinting at your computer screen, there's always the option to call the airline and have an agent explain the fare code and fare rules in plain terms.
They'll also be able to advise on higher codes for the same flight, for a little more money, but potentially yielding more frequent flier miles and fewer change restrictions.
In the realm of commercial air travel, there are layovers and there are stopovers.
A layover involves the typical rigmarole of getting off one flight, waiting a few hours at an interim airport, and boarding another flight to your destination. You may have time for a meal or to catch up on a good book, but layovers are hardly a vacation.
A stopover, on the other hand, is a full break between flights: the passenger gets off a flight at an airport and has meaningful time, often multiple days, to explore that city before rejoining the itinerary and continuing on to the true destination.
# -*- coding: UTF-8 -*-
"""Morse Codec - morse content encoding.
This codec:
- en/decodes strings from str to str
- en/decodes strings from bytes to bytes
- decodes file content to str (read)
- encodes file content from str to bytes (write)
"""
from ._utils import *
ENCMAP = {
# letters
'a': ".-", 'b': "-...", 'c': "-.-.", 'd': "-..", 'e': ".", 'f': "..-.",
'g': "--.", 'h': "....", 'i': "..", 'j': ".---", 'k': "-.-", 'l': ".-..",
'm': "--", 'n': "-.", 'o': "---", 'p': ".--.", 'q': "--.-", 'r': ".-.",
's': "...", 't': "-", 'u': "..-", 'v': "...-", 'w': ".--", 'x': "-..-",
'y': "-.--", 'z': "--..",
# digits
'1': ".----", '2': "..---", '3': "...--", '4': "....-", '5': ".....",
'6': "-....", '7': "--...", '8': "---..", '9': "----.", '0': "-----",
# punctuation
',': "--..--", '.': ".-.-.-", ':' : "---...", '?': "..--..", '/': "-..-.",
'-': "-....-", '=' : "-...-", '(': "-.--.", ')': "-.--.-", '@' : ".--.-.",
'\'': ".----.", '_': "..--.-", '!': "-.-.--", '&': ".-...", '"': ".-..-.",
';': "-.-.-.", '$': "...-..-",
# word separator
' ' : "/",
}
DECMAP = {v: k for k, v in ENCMAP.items()}
REPLACE_CHAR = "#"
class MorseError(ValueError):
pass
class MorseDecodeError(MorseError):
pass
class MorseEncodeError(MorseError):
pass
def morse_encode(text, errors="strict"):
r = ""
for i, c in enumerate(ensure_str(text)):
try:
r += ENCMAP[c] + " "
except KeyError:
if errors == "strict":
raise MorseEncodeError("'morse' codec can't encode character "
"'{}' in position {}".format(c, i))
elif errors == "replace":
r += REPLACE_CHAR + " "
elif errors == "ignore":
continue
else:
raise ValueError("Unsupported error handling {}".format(errors))
return r[:-1], len(text)
def morse_decode(text, errors="strict"):
r = ""
for i, c in enumerate(ensure_str(text).split()):
try:
r += DECMAP[c]
except KeyError:
if errors == "strict":
raise MorseDecodeError("'morse' codec can't decode character "
"'{}' in position {}".format(c, i))
elif errors == "replace":
r += REPLACE_CHAR
elif errors == "ignore":
continue
else:
raise ValueError("Unsupported error handling {}".format(errors))
return r, len(text)
codecs.add_codec("morse", morse_encode, morse_decode)
|
The "Improving performances through attention and self-control training" study is seeking your participation in a research project to test training effects using brain stimulation in college students. This is a federally funded study led by Professor Yi-Yuan Tang of the Department of Psychological Sciences. The study will enable us to examine the potential mechanisms of brain stimulation training and its effects on performance and behavior.
What would I do if I participate?
The laboratory visits will take place in the English Building on the TTU campus. We will schedule your brain stimulation appointments via email or phone call.
You will sit comfortably in a chair while receiving brain stimulation. Transcranial magnetic stimulation (TMS) is a magnetic method used to stimulate small regions of the brain. This procedure is well-defined in the extant literature. During a TMS procedure, a magnetic field generator, or "coil", is placed near your head. The coil produces small, safe stimulation in the region of the brain just under the coil via electromagnetic induction. TMS has been approved by the FDA and is widely used in many research institutions, hospitals, and community service centers. You will complete 4 sessions lasting approximately 45 minutes each. The study is expected to take no more than 4 hours.
We would like to show our appreciation for your participation by giving you $15 per hour (up to $60). If you do not complete the study, the payment will be prorated based on the time of task completed. You can also learn valuable information about how training may affect performance and behavior.
Can I quit if I become uncomfortable?
Your participation is completely voluntary, and you can choose to end the session at any time without penalty. While you will not receive the full incentive if you do not complete the study, you will receive a prorated incentive based on the time of task completed.
What are the risks and/or discomforts to me if I join this study?
There is very little risk to you for participation in this study. You may feel a slight vibration, skin irritation, dizziness, and itching under the electrode. Many individuals report experiencing no sensation from the stimulation. It is not advised to administer this stimulation to people susceptible to seizures, such as people with epilepsy. However, seizures do not seem to be a risk for healthy individuals and individuals who engage in substance use (our study population).
How are you protecting privacy?
Your file will be given a code number that will be used for identification, rather than a name. Only staff members working on this project have access to data, which will be used strictly for research purposes.
I have some questions about this study. Who can I ask?
If you have any questions about the research, please email Gavin Ueland at [email protected].
If you have any questions about your rights as a participant, or in the event of a research-related problem or concern, please contact TTU Human Research Protection at 806-742-2064. |
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# streamondemand - XBMC Plugin
# Connector for gamovideo
# http://www.mimediacenter.info/foro/viewforum.php?f=36
#------------------------------------------------------------
import re
from core import jsunpack
from core import logger
from core import scrapertools
headers = [["User-Agent","Mozilla/5.0 (Windows NT 10.0; WOW64; rv:45.0) Gecko/20100101 Firefox/45.0"]]
def test_video_exists( page_url ):
logger.info("streamondemand.servers.gamovideo test_video_exists(page_url='%s')" % page_url)
data = scrapertools.cache_page(page_url, headers=headers)
if ("File was deleted" or "Not Found") in data:
return False, "[Gamovideo] El archivo no existe o ha sido borrado"
return True, ""
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
logger.info("streamondemand.servers.gamovideo get_video_url(page_url='%s')" % page_url)
data = scrapertools.cache_page(page_url,headers=headers)
packer = scrapertools.find_single_match(data,"<script type='text/javascript'>(eval.function.p,a,c,k,e,d..*?)</script>")
unpacker = jsunpack.unpack(data) if packer != "" else ""
if unpacker != "": data = unpacker
data = re.sub(r'\n|\t|\s+', '', data)
host = scrapertools.get_match(data, '\[\{image:"(http://[^/]+/)')
mediaurl = host+scrapertools.get_match(data, ',\{file:"([^"]+)"').split("=")[1]+"/v.flv"
rtmp_url = scrapertools.get_match(data, 'file:"(rtmp[^"]+)"')
playpath = scrapertools.get_match(rtmp_url, 'vod\?h=[\w]+/(.*$)')
rtmp_url = rtmp_url.split(playpath)[0]+" playpath="+playpath+" swfUrl=http://gamovideo.com/player61/jwplayer.flash.swf"
video_urls = []
video_urls.append([scrapertools.get_filename_from_url(mediaurl)[-4:]+" [gamovideo]",mediaurl])
video_urls.append(["RTMP [gamovideo]",rtmp_url])
for video_url in video_urls:
logger.info("streamondemand.servers.gamovideo %s - %s" % (video_url[0],video_url[1]))
return video_urls
# Finds this server's videos in the given text
def find_videos(data):
encontrados = set()
devuelve = []
# http://gamovideo.com/auoxxtvyoy
# http://gamovideo.com/h1gvpjarjv88
# http://gamovideo.com/embed-sbb9ptsfqca2-588x360.html
patronvideos = 'gamovideo.com/(?:embed-|)([a-z0-9]+)'
logger.info("streamondemand.servers.gamovideo find_videos #"+patronvideos+"#")
matches = re.compile(patronvideos,re.DOTALL).findall(data)
for match in matches:
titulo = "[gamovideo]"
url = "http://gamovideo.com//embed-%s.html" % match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append( [ titulo , url , 'gamovideo' ] )
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
return devuelve
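# Usage sketch (only meaningful inside the plugin environment where the `core`
# modules are importable); find_videos() just needs a chunk of page HTML:
#
#     data = '... <iframe src="http://gamovideo.com/embed-auoxxtvyoy-588x360.html"> ...'
#     for titulo, url, server in find_videos(data):
#         logger.info("%s %s %s" % (titulo, url, server))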
|
All the benefits of bonafide skincare and professional slip, with a bit of flirtatious flavor. 100% natural blend of skin oils (including Hemp, Almond, Grapeseed, Apricot and Vitamin E) provides a slick, professional glide. Better yet, the Massage Oil absorbs to moisturize and condition your skin, without any greasy residue. |
# Copyright (c) 2013 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import falcon
import jsonschema
from oslo_log import log as logging
import six
from zaqar.i18n import _
from zaqar.transport import utils
from zaqar.transport.wsgi import errors
JSONObject = dict
"""Represents a JSON object in Python."""
JSONArray = list
"""Represents a JSON array in Python."""
LOG = logging.getLogger(__name__)
#
# TODO(kgriffs): Create Falcon "before" hooks adapters for these functions
#
def deserialize(stream, len):
"""Deserializes JSON from a file-like stream.
This function deserializes JSON from a stream, including
translating read and parsing errors to HTTP error types.
:param stream: file-like object from which to read an object or
array of objects.
:param len: number of bytes to read from stream
:raises HTTPBadRequest: if the request is invalid
:raises HTTPServiceUnavailable: if the http service is unavailable
"""
if len is None:
description = _(u'Request body can not be empty')
raise errors.HTTPBadRequestBody(description)
try:
# TODO(kgriffs): read_json should stream the resulting list
# of messages, returning a generator rather than buffering
# everything in memory (bp/streaming-serialization).
return utils.read_json(stream, len)
except utils.MalformedJSON as ex:
LOG.debug(ex)
description = _(u'Request body could not be parsed.')
raise errors.HTTPBadRequestBody(description)
except utils.OverflowedJSONInteger as ex:
LOG.debug(ex)
description = _(u'JSON contains integer that is too large.')
raise errors.HTTPBadRequestBody(description)
except Exception:
# Error while reading from the network/server
description = _(u'Request body could not be read.')
LOG.exception(description)
raise errors.HTTPServiceUnavailable(description)
def sanitize(document, spec=None, doctype=JSONObject):
"""Validates a document and drops undesired fields.
:param document: A dict to verify according to `spec`.
:param spec: (Default None) Iterable describing expected fields,
yielding tuples with the form of:
(field_name, value_type, default_value)
Note that value_type may either be a Python type, or the
special string '*' to accept any type. default_value is the
default to give the field if it is missing, or None to require
that the field be present.
If spec is None, the incoming documents will not be validated.
:param doctype: type of document to expect; must be either
JSONObject or JSONArray.
:raises HTTPBadRequestBody: if the request is invalid
:returns: A sanitized, filtered version of the document. If the
document is a list of objects, each object will be filtered
and returned in a new list. If, on the other hand, the document
is expected to contain a single object, that object's fields will
be filtered and the resulting object will be returned.
"""
if doctype is JSONObject:
if not isinstance(document, JSONObject):
raise errors.HTTPDocumentTypeNotSupported()
return document if spec is None else filter(document, spec)
if doctype is JSONArray:
if not isinstance(document, JSONArray):
raise errors.HTTPDocumentTypeNotSupported()
if spec is None:
return document
return [filter(obj, spec) for obj in document]
raise TypeError('doctype must be either a JSONObject or JSONArray')
def filter(document, spec):
"""Validates and retrieves typed fields from a single document.
Sanitizes a dict-like document by checking it against a
list of field spec, and returning only those fields
specified.
:param document: dict-like object
:param spec: iterable describing expected fields, yielding
tuples with the form of: (field_name, value_type, default_value). Note that
value_type may either be a Python type, or the special
string '*' to accept any type.
:raises HTTPBadRequest: if any field is missing or not an
instance of the specified type
:returns: A filtered dict containing only the fields
listed in the spec
"""
filtered = {}
for name, value_type, default_value in spec:
filtered[name] = get_checked_field(document, name,
value_type, default_value)
return filtered
def get_checked_field(document, name, value_type, default_value):
"""Validates and retrieves a typed field from a document.
This function attempts to look up doc[name], and raises
appropriate HTTP errors if the field is missing or not an
instance of the given type.
:param document: dict-like object
:param name: field name
:param value_type: expected value type, or '*' to accept any type
:param default_value: Default value to use if the value is missing,
or None to make the value required.
:raises HTTPBadRequest: if the field is missing or not an
instance of value_type
:returns: value obtained from doc[name]
"""
try:
value = document[name]
except KeyError:
if default_value is not None:
value = default_value
else:
description = _(u'Missing "{name}" field.').format(name=name)
raise errors.HTTPBadRequestBody(description)
# PERF(kgriffs): We do our own little spec thing because it is way
# faster than jsonschema.
if value_type == '*' or isinstance(value, value_type):
return value
description = _(u'The value of the "{name}" field must be a {vtype}.')
description = description.format(name=name, vtype=value_type.__name__)
raise errors.HTTPBadRequestBody(description)
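# Example spec, as a sketch with hypothetical field names; each entry is
# (field_name, value_type, default_value), where a None default makes the
# field required and '*' accepts any type:
#
#     spec = (('ttl', int, None), ('body', '*', None), ('tags', list, []))
#     clean = sanitize(document, spec)                       # single JSONObject
#     batch = sanitize(documents, spec, doctype=JSONArray)   # list of objects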
def load(req):
"""Reads request body, raising an exception if it is not JSON.
:param req: The request object to read from
:type req: falcon.Request
:return: a dictionary decoded from the JSON stream
:rtype: dict
:raises HTTPBadRequestBody: if JSON could not be parsed
"""
try:
return utils.read_json(req.stream, req.content_length)
except (utils.MalformedJSON, utils.OverflowedJSONInteger):
message = 'JSON could not be parsed.'
LOG.exception(message)
raise errors.HTTPBadRequestBody(message)
# TODO(cpp-cabrera): generalize this
def validate(validator, document):
"""Verifies a document against a schema.
:param validator: a validator to use to check validity
:type validator: jsonschema.Draft4Validator
:param document: document to check
:type document: dict
:raises HTTPBadRequestBody: if the request is invalid
"""
try:
validator.validate(document)
except jsonschema.ValidationError as ex:
raise errors.HTTPBadRequestBody(
'{0}: {1}'.format(ex.args, six.text_type(ex))
)
def message_url(message, base_path, claim_id=None):
path = "/".join([base_path, 'messages', message['id']])
if claim_id:
path += falcon.to_query_str({'claim_id': claim_id})
return path
def format_message_v1(message, base_path, claim_id=None):
return {
'href': message_url(message, base_path, claim_id),
'ttl': message['ttl'],
'age': message['age'],
'body': message['body'],
}
def format_message_v1_1(message, base_path, claim_id=None):
url = message_url(message, base_path, claim_id)
res = {
'id': message['id'],
'href': url,
'ttl': message['ttl'],
'age': message['age'],
'body': message['body']
}
if message.get('checksum'):
res['checksum'] = message.get('checksum')
return res
|
Our resort, located in the quaint fishing and artists' village of Matlacha, Florida, right on Matlacha Pass, is the perfect place to enjoy fishing or boating from our large dock, launch your kayak, walk to shops and restaurants, or just RELAX under a swaying palm tree and watch the bottlenose dolphins play.
Our goal is to make all of our guests happy. At Sugar Sand Beach RV Resort we really care about the people we meet and we'd love to share our unique island atmosphere with you. We look forward to hearing from you soon! |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# Copyright (c) 2012 Michael Hull.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------
from morphforgecontrib.traces.tracetools import SpikeFinder
from morphforge.traces.eventset import EventSet
import itertools
class PopAnalSpiking(object):
@classmethod
def evset_nth_spike(cls, res, tag_selector, n, comment=None, comment_incl_nspikes=False, evset_tags=None, evset_name=None):
comment = comment or ''
if evset_tags is None:
evset_tags = []
evset_tags.extend( ['Spike', 'Event'] )
traces = [trace for trace in res.get_traces()
if tag_selector(trace)]
spike_list = [SpikeFinder.find_spikes(tr, crossingthresh=0,
firingthres=None) for tr in traces]
spike_list = [spl[n] for spl in spike_list if len(spl) > n]
comment = '%s (%dth Spike)' % (comment, n)
if comment_incl_nspikes:
comment += ' (NSpikes: %d)' % len(spike_list)
spikes = EventSet(spike_list, tags=evset_tags, name=evset_name, comment=comment)
return spikes
@classmethod
def evset_first_spike(cls, **kwargs):
return cls.evset_nth_spike(n=0, **kwargs)
@classmethod
def evset_all_spikes(cls, res, tag_selector, comment=None, comment_incl_nspikes=False, evset_tags=None, evset_name=None):
if evset_tags is None:
evset_tags = []
evset_tags.extend( ['Spike', 'Event'] )
comment = comment or ''
traces = [trace for trace in res.get_traces()
if tag_selector(trace)]
spike_list = [SpikeFinder.find_spikes(tr, crossingthresh=0, firingthres=None) for tr in traces]
spike_list = list(itertools.chain(*spike_list) )
#print ' -- SL', spike_list
comment = '%s (All Spikes)' % comment
if comment_incl_nspikes:
    comment += ' (NSpikes: %d)' % len(spike_list)
spikes = EventSet(spike_list, tags=evset_tags, comment=comment, name=evset_name)
return spikes
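# Usage sketch (the tag selector and names are hypothetical; `res` is a
# simulation results object exposing get_traces(), as used above):
#
#     first_spikes = PopAnalSpiking.evset_first_spike(
#         res=res,
#         tag_selector=lambda tr: 'Voltage' in tr.tags,
#         evset_name='first_spikes')
#     all_spikes = PopAnalSpiking.evset_all_spikes(
#         res=res, tag_selector=lambda tr: 'Voltage' in tr.tags)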
|
Good morning everyone! Hope you all had a great weekend.
I’ve upgraded PmWiki to version 2.2.57. A list of changes included in this upgrade can be found in the ChangeLog. Let me know if you notice any problems with your wiki.
This entry was posted in admin stuff on November 4, 2013 by admin. |
import re
import unidecode
from functools import wraps
from flask import request, session, redirect, url_for
from datetime import datetime
from werkzeug.contrib.cache import SimpleCache
from wtforms.validators import regexp
from pytz import UTC
is_name = regexp(
# not using \w since it allows for unlimited underscores
r'^[a-zA-Z0-9]+([ \-\_][a-zA-Z0-9]+)*$',
message='Field characters can only be letters and digits with one space, \
underscore or hyphen as separator.'
)
def slugify(timenow, text):
    """Return a slug generated from the date and the given (unicode) string."""
    date = datetime.date(timenow)
    unistr = unidecode.unidecode(text).lower()
    title = re.sub(r'\W+', '-', unistr).strip('-')
return '%i/%i/%i/%s' % (date.year, date.month, date.day, title)
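# Example (a sketch):
#
#     slugify(datetime(2017, 5, 3), 'Hello, Wörld!')  ->  '2017/5/3/hello-world'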
def utcnow():
return datetime.utcnow().replace(tzinfo=UTC)
cache = SimpleCache()
def cached(timeout=5 * 60, key='cached/%s'):
# ~200 req/s => ~600-800 req/s
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
cache_key = key % request.path
rv = cache.get(cache_key)
if rv is not None:
return rv
rv = f(*args, **kwargs)
cache.set(cache_key, rv, timeout=timeout)
return rv
return decorated_function
return decorator
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
logged = session.get('logged_in', None)
if not logged:
return redirect(url_for('login'))
return f(*args, **kwargs)
return decorated_function |
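# Usage sketch (assumes a Flask app with a 'login' endpoint defined elsewhere):
#
#     @app.route('/post/<slug>')
#     @cached(timeout=60)
#     def show_post(slug):
#         ...
#
#     @app.route('/admin')
#     @login_required
#     def admin_panel():
#         ...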
Pro-Install has been in the Marine Window Tinting business since 1995, specializing in the best installations in the Lighthouse Point FL Marine Window Tinting industry. All of our Marine Window Tinting installs are backed by our workmanship and the film manufacturer's warranty against product failure!
Pro-Install is a certified Marine Window Tinting installer with all of our film manufacturers, so you know you have nothing to worry about with the investment you are making! We have several different Marine Window Tinting brands for you to choose from to fit within your budget. Call for details in Lighthouse Point FL.
Pro-Install offers a complete line of Lighthouse Point FL commercial window films to help solve your energy efficiency needs. Our top Marine Window Tinting industry professionals will provide you with a complete analysis of your facility. We will make recommendations from all technologies available in the industry to best suit your needs.
# -*- coding: utf-8 -*-
"""
maybe a better solution
http://code.djangoproject.com/wiki/CookBookThreadlocalsAndUser
"""
try:
import thread
except ImportError:
import dummy_thread as thread
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps # Python 2.3, 2.4 fallback.
from django.conf import settings
from pyerp.fnd.utils.version import get_svn_revision, get_version
__svnid__ = '$Id$'
__svn__ = get_svn_revision(__name__)
class GlobalManagementError(Exception):
"""
This exception is thrown when something bad happens with global
management.
"""
pass
class ThreadFndGlobal(object):
def __init__(self):
self.thread_context = {}
def get_user_id(self):
"""
Get the user ID for the current thread.
"""
return self.get_attr('user_id', -1)
user_id = property(get_user_id)
def get_user(self):
"""
Get the user for the current thread.
"""
usr = self.get_attr('user', None)
if usr is None:
from pyerp.fnd.models import AnonymousUser, User
if self.user_id==-1:
usr = AnonymousUser()
else:
try:
usr = User.objects.get(pk=self.user_id)
except User.DoesNotExist:
usr = AnonymousUser()
self.set_attr('user', usr)
return usr
user = property(get_user)
def get_session(self):
"""
Get the HTTP session for the current thread.
"""
return self.get_attr('session', None)
session = property(get_session)
def get_resp_id(self):
return self.get_attr('resp_id', -1)
resp_id = property(get_resp_id)
def get_menu_id(self):
return self.get_attr('menu_id', -1)
menu_id = property(get_menu_id)
def get_function_id(self):
return self.get_attr('function_id', -1)
function_id = property(get_function_id)
def get_function(self):
"""
Get the function currently being accessed by this thread.
"""
return self.get_attr('function', None)
function = property(get_function)
def set_resp_id(self, value):
self.set_attr('resp_id', value)
def set_menu_id(self, value):
self.set_attr('menu_id', value)
def set_function(self, value):
self.set_attr('function_id', value and value.id or -1)
self.set_attr('function', value)
def get_org_id(self):
    org_id = self.get_attr('org_id', None)
    if org_id is not None:
        return org_id
    # TODO: look up the organization ID from the user's profile
    org_id = 120
    self.set_attr('org_id', org_id)
    return org_id
org_id = property(get_org_id)
def get_language(self):
"""
Return the language being used by the current user.
"""
return self.session['django_language']
language = property(get_language)
def get_appl_id(self):
return -1
appl_id = property(get_appl_id)
def get_site_id(self):
"""
Here 'site' refers to the ID of an individual site when running multiple sites, not the site controller.
"""
return settings.SITE_ID
site_id = property(get_site_id)
def get_server_id(self):
return -1
server_id = property(get_server_id)
def get_context_prefix(self):
"""
Get the current context_prefix (set when running under mod_python).
"""
return self.get_attr('context_prefix', '/')
context_prefix = property(get_context_prefix)
def get_site_prefix(self):
"""
Get the prefix of the current site controller.
"""
return self.get_attr('site_prefix', '')
site_prefix = property(get_site_prefix)
def get_thread_id(self):
"""
Get the ID of the current thread.
"""
return thread.get_ident()
thread_id = property(get_thread_id)
def get_attrs(self):
"""
Get the thread-local attribute dictionary for the current thread.
"""
return self.thread_context[thread.get_ident()]
attrs = property(get_attrs)
def get_attr(self, key, default=None):
"""
Get a value from the current thread's attributes by the given key.
"""
thread_ident = thread.get_ident()
if thread_ident in self.thread_context:
if key in self.thread_context[thread_ident]: # and self.thread_context[thread_ident][key]:
return self.thread_context[thread_ident][key]
else:
return default
else:
raise GlobalManagementError("This code isn't under global management. Please execute <gbl.enter_global_management> first.")
def set_attr(self, key, value):
"""
Set a value in the current thread's attribute dictionary.
"""
thread_ident = thread.get_ident()
if thread_ident in self.thread_context:
self.thread_context[thread_ident][key] = value
else:
raise GlobalManagementError("This code isn't under global management")
def enter_global_management(self, user_id=-1, user=None, session=None):
"""
Enters global management for a running thread. It must be balanced with
the appropriate leave_global_management call, since the actual state is
managed as a stack.
The state and dirty flag are carried over from the surrounding block or
from the settings, if there is no surrounding block (dirty is always false
when no current block is running).
"""
thread_ident = thread.get_ident()
if thread_ident not in self.thread_context:
self.thread_context[thread_ident] = {}
if user is not None:
self.set_attr('user_id', user.id)
self.set_attr('user', user)
else:
# self.thread_context[thread_ident]["user_id"] = user_id
self.set_attr('user_id', user_id)
if session is not None:
self.set_attr('session', session)
def leave_global_management(self):
"""
Leaves transaction management for a running thread. A dirty flag is carried
over to the surrounding block, as a commit will commit all changes, even
those from outside. (Commits are on connection level.)
"""
thread_ident = thread.get_ident()
if thread_ident in self.thread_context:
del self.thread_context[thread_ident]
else:
raise GlobalManagementError("This code isn't under global management")
fnd_global = ThreadFndGlobal()
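# Usage sketch (how surrounding middleware might drive this object; the request
# attributes here are assumptions, not part of this module):
#
#     fnd_global.enter_global_management(user=request.user, session=request.session)
#     try:
#         ...  # code on this thread can now read fnd_global.user, fnd_global.org_id, etc.
#     finally:
#         fnd_global.leave_global_management()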
|
Despite my post on Hints and Tips: Wet Weather Riding, sometimes enough is enough and the turbo (a.k.a. weapon of boredom) has to come out in the winter months.
Warm up for 20 minutes.
Select the big ring at the front, and a gear close to the top of the cassette at the back.
Pedal for one minute at 80 RPM followed by easy, relaxed pedalling for another minute.
Next pedal at 85 RPM followed by easy, relaxed pedalling for another minute. And so on, adding 5 RPM per minute.
Each session, extend the "on" interval so that it lasts 1:20, then 1:40, and so on.
You then progress this session week-on-week by increasing the duration of the intervals or using progressively harder gears and trying to match the cadence, thus producing more power. You could also vary the length of the intervals within the session in order to be more event specific. The duration of this kind of effort in a race is highly variable as other riders rarely telegraph how long they are going to go on the offensive.
Please note: I cannot claim these sessions as my own invention; they have come from someone else's little book of pain. Unfortunately, I've had them so long I don't know who! I know... worst reference ever...I would probably get thrown out of university for that...but I apologise sincerely to the original author. |
'''
This module contains utility functions using in Smartmove
'''
def mask_from_noncontiguous_indices(n, start_ind, stop_ind):
'''Create boolean mask from start stop indices of noncontiguous regions
Args
----
n: int
length of boolean array to fill
start_ind: numpy.ndarray
start index positions of non-contiguous regions
stop_ind: numpy.ndarray
stop index positions of non-contiguous regions
Returns
-------
mask: numpy.ndarray, shape (n,), dtype boolean
boolean mask array
'''
import numpy
mask = numpy.zeros(n, dtype=bool)
for i in range(len(start_ind)):
mask[start_ind[i]:stop_ind[i]] = True
return mask
def get_n_lines(file_path):
'''Get number of lines by calling bash command wc
Args
----
file_path: str
File whose lines to count
Returns
-------
n_lines: int
Number of lines in file
'''
import os
import subprocess
cmd = 'wc -l {0}'.format(file_path)
output = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout
n_lines = int((output).readlines()[0].split()[0])
return n_lines
def get_versions(module_name):
'''Return versions for repository and packages in requirements file
Args
----
module_name: str
Name of module calling this routine, stored with local git hash
Returns
-------
versions: OrderedDict
Dictionary of module name and dependencies with versions
'''
from collections import OrderedDict
import importlib.util
import os
versions = OrderedDict()
module = importlib.util.find_spec(module_name)
# Get path to pylleo requirements file
module_path = os.path.split(module.origin)[0]
requirements = os.path.join(module_path, 'requirements.txt')
# Add git hash for module to dict
cwd = os.getcwd()
os.chdir(module_path)
try:
versions[module_name] = get_githash('long')
except:
versions[module_name] = module.__version__
os.chdir(cwd)
return versions
def get_githash(hash_type):
'''Add git commit for reference to code that produced data
Args
----
hash_type: str
keyword determining length of has. 'long' gives full hash, 'short'
gives 6 character hash
Returns
-------
git_hash: str
Git hash as a 6 or 40 char string depending on keyword `hash_type`
'''
import subprocess
cmd = dict()
cmd['long'] = ['git', 'rev-parse', 'HEAD']
cmd['short'] = ['git', 'rev-parse', '--short', 'HEAD']
return subprocess.check_output(cmd[hash_type]).decode('ascii').strip()
def symlink(src, dest):
'''Failsafe creation of symlink if symlink already exists
Args
----
src: str
Path or file to create symlink to
dest: str
Path of new symlink
'''
import os
# Attempt to delete existing symlink
try:
os.remove(dest)
except:
pass
os.symlink(src, dest)
return None
def cat_path(d, ignore):
'''Concatenate dictionary key, value pairs to a single string
Args
----
d: dict
Dictionary for which key, value pairs should be concatenated to str
ignore: iterable
List of keys to exclude from concatenated string
Returns
-------
s: str
String with concatenated key, value pairs
'''
items = list(d.items())
s = ''
for i in range(len(items)):
key, value = items[i]
if key not in set(ignore):
s += '{}_{}__'.format(key, value)
return s[:-2]
def _parse_subdir(path):
'''Parse parameters in names of child directories to pandas dataframe
Child directories in `path` are parsed so that the parameter values in
their directory names can be easily searched using a pandas.DataFrame.
Parameters are separated by double `__` and values by single `_`. Names
that include an `_` are joined back together after they are split
Args
----
path: str
Parent path with directories names with parameters to parse
Returns
-------
paths_df: pandas.DataFrame
Dataframe with one row for each respective child directory and one
column for each parameter.
'''
import os
import numpy
import pandas
dir_list = numpy.asarray(os.listdir(path), dtype=object)
# Search root directory for directories to parse
for i in range(len(dir_list)):
if os.path.isdir(os.path.join(path,dir_list[i])):
name = dir_list[i]
# Split parameters in name
dir_list[i] = dir_list[i].split('__')
for j in range(len(dir_list[i])):
param = dir_list[i][j].split('_')
# Join names with `_` back together, make key/value tuple
key = '_'.join(param[:-1])
value = param[-1]
if value == 'None':
value = numpy.nan
param = (key, float(value))
dir_list[i][j] = param
# Convert list of tuples to dictionary
dir_list[i] = dict(dir_list[i])
# Add directory name to dict for later retrieval
dir_list[i]['name'] = name
else:
dir_list[i] = ''
# Remove entries that are files
dir_list = dir_list[~(dir_list == '')]
# Convert list of dictionaries to dictionary of lists
keys = dir_list[0].keys()
params = dict()
for i in range(len(dir_list)):
for key in dir_list[i]:
if key not in params:
params[key] = numpy.zeros(len(dir_list), object)
params[key][i] = dir_list[i][key]
return pandas.DataFrame(params)
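# Example of the directory-name convention _parse_subdir() expects (the
# parameter names are hypothetical; key and value are separated by '_',
# parameters by '__'):
#
#     'cutoff_frq_0.3__J_0.05__t_max_30'
#     -> {'cutoff_frq': 0.3, 'J': 0.05, 't_max': 30.0, 'name': 'cutoff_frq_0.3__J_0.05__t_max_30'}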
def get_subdir(path, cfg):
'''Get path to glide output data for a given `cfg_glide`
Args
----
path: str
Tag data parent path
cfg: OrderedDict
Composite dictions of cfg dicts
Returns
-------
path_data: str
Absolute path to glide data output path
'''
import os
import pyotelem
def match_subdir(path, cfg):
import numpy
n_subdirs = 0
for d in os.listdir(path):
if os.path.isdir(os.path.join(path, d)):
n_subdirs += 1
if n_subdirs == 0:
raise SystemError('No data subdirectories in {}'.format(path))
params = _parse_subdir(path)
mask = numpy.zeros(n_subdirs, dtype=bool)
# Evalute directory params against configuration params
# Set directory mask to True where all parameters are matching
for i in range(len(params)):
match = list()
for key, val in cfg.items():
if params[key].iloc[i] == val:
match.append(True)
else:
match.append(False)
mask[i] = all(match)
idx = numpy.where(mask)[0]
if idx.size > 1:
raise SystemError('More than one matching directory found')
else:
idx = idx[0]
return params['name'].iloc[idx]
subdir_glide = match_subdir(path, cfg['glides'])
path = os.path.join(path, subdir_glide)
subdir_sgl = match_subdir(path, cfg['sgls'])
path = os.path.join(path, subdir_sgl)
subdir_filt = match_subdir(path, cfg['filter'])
return os.path.join(subdir_glide, subdir_sgl, subdir_filt)
def filter_sgls(n_samples, exp_ind, sgls, max_pitch, min_depth,
max_depth_delta, min_speed, max_speed, max_speed_delta):
'''Create mask filtering only glides matching criterea
Args
----
n_samples: int
Total number of samples in tag data
exp_ind: ndarray
Boolean array to slice tag data to only experimental period
sgls: pandas.DataFrame
A dataframe of subglide indices and summary information obtained
in `glideid`
max_pitch: float
Maximum allowable pitch during sub-glide
min_depth: float
Minimum allowable depth during sub-glide
max_depth_delta: float
Maximum allowable change in depth during sub-glide
min_speed: float
Minimum allowable speed during sub-glide
max_speed: float
Maximum allowable speed during sub-glide
max_speed_delta: float
Maximum allowable change in speed during sub-glide
Returns
-------
mask_data_sgl: ndarray
Boolean mask to slice tag dataframe to filtered sub-glides
mask_sgls: ndarray
Boolean mask to slice sgls dataframe to filtered sub-glides
'''
import numpy
import pyotelem
# Defined experiment indices
mask_exp = (sgls['start_idx'] >= exp_ind[0]) & \
(sgls['stop_idx'] <= exp_ind[-1])
# Found within a dive
mask_divid = ~numpy.isnan(sgls['dive_id'].astype(float))
# Uniformity in phase (dive direction)
mask_phase = (sgls['dive_phase'] == 'descent') | \
(sgls['dive_phase'] == 'ascent')
# Depth change and minimum depth constraints
mask_depth = (sgls['total_depth_change'] < max_depth_delta) & \
(sgls['total_depth_change'] > min_depth)
# Pitch angle constraint
mask_deg = (sgls['mean_pitch'] < max_pitch) & \
(sgls['mean_pitch'] > -max_pitch)
# Speed constraints
mask_speed = (sgls['mean_speed'] > min_speed) & \
(sgls['mean_speed'] < max_speed) & \
(sgls['total_speed_change'] < max_speed_delta)
# Concatenate masks
mask_sgls = mask_divid & mask_phase & mask_exp & \
mask_deg & mask_depth & mask_speed
# Extract glide start/stop indices within above constraints
start_ind = sgls[mask_sgls]['start_idx'].values
stop_ind = sgls[mask_sgls]['stop_idx'].values
# Create mask for all data from valid start/stop indices
mask_data_sgl = mask_from_noncontiguous_indices(n_samples, start_ind,
stop_ind)
# Catch error with no matching subglides
num_valid_sgls = len(numpy.where(mask_sgls)[0])
if num_valid_sgls == 0:
raise SystemError('No subglides found meeting filter criteria')
return mask_data_sgl, mask_sgls
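# Usage sketch (the threshold values are hypothetical; `sgls` comes from the
# glide identification step and `exp_ind` marks the experimental period):
#
#     mask_data_sgl, mask_sgls = filter_sgls(
#         len(tag), exp_ind, sgls,
#         max_pitch=8, min_depth=0.4, max_depth_delta=8,
#         min_speed=0.3, max_speed=10, max_speed_delta=1)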
|
The error occurs when making changes to an existing SR and trying to close it, and also when opening and closing the Task Notes field.
The error also appears when closing completed tasks after re-opening a completed task.
While running 'Generate form files' for JTF, CS and CSC, the error occurred for the forms mentioned below.
from os import environ
from os.path import join, dirname, realpath
from subprocess import check_output
from setuptools import Extension
from setuptools.command.build_ext import build_ext
import numpy
import petsc4py
import slepc4py
extension_names = [
'bsubspace',
'bbuild',
'bpetsc'
]
header_only = {
'bsubspace',
}
cython_only = {
'bbuild',
}
def extensions():
paths = configure_paths()
exts = []
for name in extension_names:
depends = []
object_files = []
extra_args = paths
if name not in cython_only:
depends += ['dynamite/_backend/{name}_impl.h'.format(name=name)]
if name not in header_only:
depends += ['dynamite/_backend/{name}_impl.c'.format(name=name)]
object_files = ['dynamite/_backend/{name}_impl.o'.format(name=name)]
if name == 'bpetsc':
depends += ['dynamite/_backend/bsubspace.pxd',
            'dynamite/_backend/bcuda_impl.h',
            'dynamite/_backend/bcuda_impl.cu',
            'dynamite/_backend/shellcontext.h',
            'dynamite/_backend/bsubspace_impl.h']
if check_cuda():
object_files += ['dynamite/_backend/bcuda_impl.o']
exts += [
Extension('dynamite._backend.{name}'.format(name=name),
sources = ['dynamite/_backend/{name}.pyx'.format(name=name)],
depends = depends,
extra_objects = object_files,
**extra_args)
]
return exts
USE_CUDA = None
def check_cuda():
'''
Whether PETSc was built with CUDA support
'''
global USE_CUDA
if USE_CUDA is not None:
return USE_CUDA
with open(join(environ['PETSC_DIR'],
environ['PETSC_ARCH'],
'include/petscconf.h')) as f:
for line in f:
if 'PETSC_HAVE_CUDA' in line:
USE_CUDA = True
break
else:
USE_CUDA = False
return USE_CUDA
def write_build_headers():
'''
Write a Cython include file with some constants that become
hardcoded into the backend build.
'''
print('Writing header files...')
with open(join(dirname(__file__), 'dynamite', '_backend', 'config.pxi'), 'w') as f:
f.write('DEF USE_CUDA = %d\n' % int(check_cuda()))
dnm_version = check_output(['git', 'describe', '--always'],
cwd = dirname(realpath(__file__)),
universal_newlines = True).strip()
f.write('DEF DNM_VERSION = "%s"\n' % dnm_version)
dnm_branch = check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
                          cwd = dirname(realpath(__file__)),
                          universal_newlines = True).strip()
f.write('DEF DNM_BRANCH = "%s"\n' % dnm_branch)
def configure_paths():
if any(e not in environ for e in ['PETSC_DIR', 'PETSC_ARCH', 'SLEPC_DIR']):
raise ValueError('Must set environment variables PETSC_DIR, '
'PETSC_ARCH and SLEPC_DIR before installing! '
'If executing with sudo, you may want the -E '
'flag to pass environment variables through '
'sudo.')
PETSC_DIR = environ['PETSC_DIR']
PETSC_ARCH = environ['PETSC_ARCH']
SLEPC_DIR = environ['SLEPC_DIR']
includes = []
libs = []
includes += [join(PETSC_DIR, PETSC_ARCH, 'include'),
join(PETSC_DIR, 'include')]
libs += [join(PETSC_DIR, PETSC_ARCH, 'lib')]
includes += [join(SLEPC_DIR, PETSC_ARCH, 'include'),
join(SLEPC_DIR, 'include')]
libs += [join(SLEPC_DIR, PETSC_ARCH, 'lib')]
# python package includes
includes += [petsc4py.get_include(),
slepc4py.get_include(),
numpy.get_include()]
return dict(
include_dirs = includes,
library_dirs = libs,
runtime_library_dirs = libs,
libraries = ['petsc', 'slepc']
)
class MakeBuildExt(build_ext):
def run(self):
# build the object files
for name in extension_names:
if name in header_only | cython_only:
continue
make = check_output(['make', '{name}_impl.o'.format(name=name)],
cwd='dynamite/_backend')
print(make.decode())
if check_cuda():
make = check_output(['make', 'bcuda_impl.o'], cwd='dynamite/_backend')
print(make.decode())
# get the correct compiler from SLEPc
# there is probably a more elegant way to do this
makefile = 'include ${SLEPC_DIR}/lib/slepc/conf/slepc_common\n' + \
'print_compiler:\n\t$(CC)'
CC = check_output(['make', '-n', '-f', '-', 'print_compiler'],
input = makefile, encoding = 'utf-8')
# now set environment variables to that compiler
if 'CC' in environ:
_old_CC = environ['CC']
else:
_old_CC = None
environ['CC'] = CC
try:
build_ext.run(self)
finally:
# set CC back to its old value
if _old_CC is not None:
environ['CC'] = _old_CC
else:
environ.pop('CC')
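# Sketch of how these helpers would typically be wired into the build (the real
# setup() invocation may live elsewhere and differ in detail):
#
#     from setuptools import setup
#     from Cython.Build import cythonize
#
#     write_build_headers()
#     setup(name='dynamite',
#           packages=['dynamite', 'dynamite._backend'],
#           ext_modules=cythonize(extensions()),
#           cmdclass={'build_ext': MakeBuildExt})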
|
England Lions kickstart their tour of the United Arab Emirates with a one-off unofficial Test against Pakistan A at Abu Dhabi. It will be followed by five unofficial ODIs and two T20s.
Pakistan A played a two-match unofficial Test series against New Zealand A last month, which ended in a 0-0 draw. Keeping faith in the team, the Pakistan Cricket Board has made a couple of tweaks to the squad for this one-off fixture. A couple of new faces in the bowling department have been named.
England Lions, on the other hand, have lost a couple of members of the initial squad to injuries. However, they still look quite balanced, with familiar names like Sam Billings, Dominic Bess and Mark Wood in the ranks.
Jason Roy is unavailable due to his Mzansi Super League commitment.
Matt Parkinson and Craig Overton have been ruled out with injuries.
Here’s how the two sides line up for the England Lions vs Pakistan A one-off unofficial Test.
England Lions squad: Danny Briggs, Sam Billings (c & wk), Mark Wood, Jamie Overton, Nick Gubbins, Amar Virdi, Joe Clarke, Tom Kohler-Cadmore, Liam Livingstone, James Porter, Saqib Mahmood, Max Holden, Ollie Pope and Dominic Bess.
Pakistan A squad: Khurram Manzoor, Shan Masood, Usman Salahuddin, Mohammad Saad, Mohammad Rizwan, Saud Shakeel, Mohammad Irfan, Ehsan Adil, Taj Wali, Sameen Gul, Ali Shafiq and Mohammad Asghar.
Shan Masood, Abid Ali, Usman Salahuddin and Mohammad Rizwan scored heavily in the recently concluded series against New Zealand A.
Ehsan Adil had decent returns with the ball for Pakistan A.
Ollie Pope, Sam Billings, Dominic Bess and Mark Wood are the safest picks among England Lions’ unit. |
# -*- coding: utf-8 -*-
import os
import logging
from flask import Flask, abort, request, jsonify, g, url_for, render_template
from flask.ext.script import Manager
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.httpauth import HTTPBasicAuth
from passlib.apps import custom_app_context as pwd_context
from itsdangerous import (TimedJSONWebSignatureSerializer
as Serializer, BadSignature, SignatureExpired)
from datetime import datetime
# initialization
app = Flask(__name__)
app.config['SECRET_KEY'] = 'supermegasecret'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///db.sqlite'
app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
app.config['EXPIRATION'] = 30
# extensions
manager = Manager(app)
db = SQLAlchemy(app)
http_auth = HTTPBasicAuth()
@manager.command
def db_reset():
'''Reset the database'''
db.drop_all()
db.create_all()
@manager.command
def create_data():
'''Add seed data to the database'''
db_reset()
create_users()
create_tickets()
def create_users():
# rut, rutdv, name, is_admin, password, is_enabled, username=None
User.create('6', 'K', 'Administrador', True, 'admin.passwd', True)
User.create('1', '9', 'Usuario 1', False, 'usuario1.passwd', True)
User.create('2', '7', 'Usuario 2', False, 'usuario2.passwd', True)
def create_tickets():
Ticket.create('ticket 01', 1, u'descripción ticket 01')
Ticket.create('ticket 02', 2, u'descripción ticket 02')
Ticket.create('ticket 03', 3, u'descripción ticket 03')
Ticket.create('ticket 04', 1, u'descripción ticket 04')
Ticket.create('ticket 05', 2, u'descripción ticket 05')
Ticket.create('ticket 06', 3, u'descripción ticket 06')
Ticket.create('ticket 07', 1, u'descripción ticket 07')
Ticket.create('ticket 08', 2, u'descripción ticket 08')
Ticket.create('ticket 09', 3, u'descripción ticket 09')
Ticket.create('ticket 10', 1, u'descripción ticket 10')
Ticket.create('ticket 11', 2, u'descripción ticket 11')
Ticket.create('ticket 12', 3, u'descripción ticket 12')
class Ticket(db.Model):
__tablename__ = 'tickets'
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime(), default=datetime.utcnow)
name = db.Column(db.String(64), nullable=False)
description = db.Column(db.String(255))
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
@classmethod
def create(cls, name, user_id, description=u""):
try:
ticket = Ticket(
name=name,
user_id=user_id,
description=description
)
db.session.add(ticket)
db.session.commit()
except Exception as e:
logging.exception(e)
def serialize(self):
return {
'id': self.id,
'timestamp': self.timestamp,
'name': self.name,
'description': self.description,
'user_id': self.user_id
}
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
rut = db.Column(db.String(64), nullable=False)
rutdv = db.Column(db.String(1), nullable=False)
username = db.Column(db.String(255), nullable=False, unique=True, index=True)
name = db.Column(db.String(255), nullable=False)
is_admin = db.Column(db.Boolean)
is_enabled = db.Column(db.Boolean)
member_since = db.Column(db.DateTime(), default=datetime.utcnow)
password_hash = db.Column(db.String(128))
def hash_password(self, password):
self.password_hash = pwd_context.encrypt(password)
def verify_password(self, password):
return pwd_context.verify(password, self.password_hash)
def generate_auth_token(self, expiration=app.config['EXPIRATION']):
s = Serializer(app.config['SECRET_KEY'], expires_in=expiration)
return s.dumps({'id': self.id})
@classmethod
def create(cls, rut, rutdv, name, is_admin, password, is_enabled, username=None):
try:
if not username:
username = "{0}-{1}".format(rut, rutdv).upper()
user = User(
username=username,
is_admin=is_admin,
name=name,
rut=rut,
rutdv=rutdv,
is_enabled=is_enabled
)
user.hash_password(password)
db.session.add(user)
db.session.commit()
except Exception as e:
logging.exception(e)
@staticmethod
def verify_auth_token(token):
s = Serializer(app.config['SECRET_KEY'])
try:
data = s.loads(token)
except SignatureExpired:
return None # valid token, but expired
except BadSignature:
return None # invalid token
user = User.query.get(data['id'])
return user
def serialize(self):
return {
'id': self.id,
'rut': self.rut,
'rutdv': self.rutdv,
'username': self.username,
'name': self.name,
'is_admin': self.is_admin,
'is_enabled': self.is_enabled,
'member_since': self.member_since,
}
@http_auth.verify_password
def verify_password(username_or_token, password):
# first try to authenticate by token
user = User.verify_auth_token(username_or_token)
if not user:
# try to authenticate with username/password
user = User.query.filter_by(username=username_or_token).first()
if not user or not user.verify_password(password):
return False
g.user = user
return True
@app.route('/')
def get_main():
return render_template('index.html')
@app.route('/api/users/<int:id>')
def get_user(id):
user = User.query.get(id)
if not user:
abort(400)
return jsonify({'username': user.username})
@app.route('/api/token')
@http_auth.login_required
def get_auth_token():
token = g.user.generate_auth_token(app.config['EXPIRATION'])
return jsonify({'token': token.decode('ascii'), 'duration': app.config['EXPIRATION']})
@app.route('/api/current')
@http_auth.login_required
def get_resource():
return jsonify({'data': g.user.serialize()})
@app.route('/api/tickets')
@http_auth.login_required
def get_tickets():
tickets = Ticket.query.all()
return jsonify({'data': [t.serialize() for t in tickets]})
if __name__ == '__main__':
manager.run()
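# Client-side sketch (assumes the `requests` package, the seed data created by
# `create_data`, and the development server running on localhost:5000):
#
#     import requests
#     base = 'http://localhost:5000'
#     token = requests.get(base + '/api/token', auth=('6-K', 'admin.passwd')).json()['token']
#     tickets = requests.get(base + '/api/tickets', auth=(token, 'x')).json()['data']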
|
"Between You and Me" is a mixed media wall sculpture. It is constructed from solid mahogany, acrylic and acrylic washes. This piece is at once both rustic and modern feeling. Distressed black, white, yellow and pinks accent the mahogany's rich brown/reddish color. The result is a striking piece that can accent any room. The subject matter is abstract, so I leave it up to the viewer to decide the meaning of the piece and the title. What do you see in it? |
from flask import Flask, render_template, request, redirect, session, url_for
import login_utils, tasks_utils
application = Flask(__name__)
@application.route("/", methods=['GET','POST'])
@application.route("/home", methods=['GET','POST'])
def home():
if 'logged_in' not in session:
session['logged_in'] = False
if 'user' not in session:
session['user'] = 'Guest'
# return render_template('home.html')
if request.method=="GET":
return render_template('home.html')
else:
button = request.form['button']
if button == "Create Account":
user = request.form['new_username']
password = request.form['new_password']
confirm = request.form['new_confirm_password']
#password match check
if (password == confirm):
#username and password lengths check
if "@" not in user:
return render_template('home.html',errorC="Username must be a valid email")
if len(password)<8:
return render_template('home.html',errorC="Password must be longer than 8 characters")
#account created successfully
if login_utils.create_user(user,password):
return render_template('home.html',successC="Account successfully created! Login to access DailyDos.")
#username taken error
else:
return render_template('home.html',errorC="Username already in use. Please chose a different username")
else:
return render_template('home.html',errorC="Passwords do not match")
#Login
#if credentials valid, log them in with session
if button == "Login":
user = request.form['login_username']
password = request.form['login_password']
if login_utils.authenticate(user,password):
session['user'] = user
session['logged_in'] = True
return redirect(url_for('tasks'))
#else renders login w/ error message
else:
return render_template("home.html",errorL="Invalid Username or Password")
@application.route("/tasks", methods=["GET","POST"])
def tasks():
    #check the login state before touching session['user'] to avoid a KeyError
    if not session.get('logged_in'):
        return redirect('/home')
    tasks_list = tasks_utils.get_tasks(session['user'])
    if request.method == "GET":
        return render_template("tasks.html", tasks = tasks_list)
if request.method == "POST":
button = request.form['button']
if button == "Remove These":
#page_ids = request.form.get("do_delete")
checked = request.form.getlist("checks")
#checked = []
#for items in request.form.get("do_delete"):
# checked.append(items)
#selected = bool(checked)
#f = open('log_file', 'w')
#for keys in selected:
# f.write(selected)
#f.close()
tasks_utils.remove_tasks(checked)
tasks_list = tasks_utils.get_tasks(session['user'])
return render_template("tasks.html", tasks = tasks_list)
if button == "Clear All":
tasks_utils.clear_tasks(session['user'])
tasks_list = tasks_utils.get_tasks(session['user'])
return render_template("tasks.html", tasks = tasks_list)
else:
return render_template("tasks.html", tasks = tasks_list)
@application.route("/logout")
def logout():
session['user'] = "Guest"
session['logged_in'] = False
return redirect('/home')
application.secret_key = "pleasework"
if __name__=="__main__":
application.run(host='0.0.0.0')
"""
application.debug = True
application.secret_key = "onetwothreefour"
application.run(host='0.0.0.0', port = 5000)
"""
|
Specifications include, but are not limited to: Char Broiled Hamburger Patty, IQF, 3 oz., round in shape, precooked, with char marks, all beef from IMPS #136. No mechanically separated meat. Zero tolerance for foreign matter including bone, cartilage, or gristle. No added binders, extenders or water. Potassium chloride allowed. Maximum fat 20 grams and 250 mg of sodium per 3 oz. cooked wt. Net case weight approximately 15 pounds. |
"""Add missing indexes.
Revision ID: 32e8060589b8
Revises: a3fe8c8a344
Create Date: 2014-02-11 17:21:00.718449
"""
# revision identifiers, used by Alembic.
revision = '32e8060589b8'
down_revision = 'a3fe8c8a344'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_index('ix_group_created_at', 'group', ['created_at'], unique=False)
op.create_index('ix_grouprequest_created_at', 'grouprequest', ['created_at'], unique=False)
op.create_index('ix_grouprequest_from_user_id', 'grouprequest', ['from_user_id'], unique=False)
op.create_index('ix_grouprequest_project_id', 'grouprequest', ['project_id'], unique=False)
op.create_index('ix_grouprequest_to_user_id', 'grouprequest', ['to_user_id'], unique=False)
op.create_index('ix_testableresult_created_at', 'testableresult', ['created_at'], unique=False)
op.create_index('ix_user_to_group_group_id', 'user_to_group', ['group_id'], unique=False)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index('ix_user_to_group_group_id', table_name='user_to_group')
op.drop_index('ix_testableresult_created_at', table_name='testableresult')
op.drop_index('ix_grouprequest_to_user_id', table_name='grouprequest')
op.drop_index('ix_grouprequest_project_id', table_name='grouprequest')
op.drop_index('ix_grouprequest_from_user_id', table_name='grouprequest')
op.drop_index('ix_grouprequest_created_at', table_name='grouprequest')
op.drop_index('ix_group_created_at', table_name='group')
### end Alembic commands ###
|
Meanwhile, clean the vegetables in the water and cut the sweet potatoes lengthwise into two parts. Then, with a knife, draw some lines on the flesh of the sweet potatoes.
Then, put them ( use the baking paper) on the oven tray. Season them with a drizzle of olive oil, herbs and/or oregano, and salt & pepper.
In a saucepan with salty boiling water, cook the bulgur following the instructions for the cooking time marked on the package. Then, remove the water and set aside.
Next, cut the tomato into small cubes, crumble the broccoli and cut the garlic clove into thin slices.
Chop the parsley and crumble the feta. Put on the side.
In a frying pan, heat the olive oil over low heat, and then add the garlic for approx. 1 minute, until it gets golden brown.
Then add the broccoli and cook over low heat for about 7 minutes. Finally, add the tomato, miso and apple vinegar. Cook for 5 more minutes, mixing it well with a wooden spoon (if necessary, add some more olive oil).
The preparation of the yogurt sauce is very simple: In a small saucepan, heat all the ingredients (yogurt, tahini, lime juice, salt and pepper) over low heat and mix everything slowly.
When the sweet potatoes are ready, take them out from the oven and serve them on the plate.
To finish, add a few tablespoons of bulgur (in the center of the sweet potato), and 2 large spoons of yogurt sauce. Then, add the vegetables and finish with a touch of feta, parsley, salt & pepper. |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from . import wsmanModule
AMT_REDIRECTION_STATE_MAP = {
0: 'Unknown',
1: 'Other',
2: 'Enabled',
3: 'Disabled',
4: 'Shutting Down',
5: 'Not Applicable',
6: 'Enabled but Offline',
7: 'In Test',
8: 'Deferred',
9: 'Quiesce',
10: 'Starting',
11: 'DMTF Reserved',
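    # Vendor-specific (Intel AMT) values: the tuple lists which redirection
    # features (IDER = IDE redirection, SoL = Serial-over-LAN) are enabled.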
    32768: (),
    32769: ('IDER',),
    32770: ('SoL',),
    32771: ('IDER', 'SoL'),
}
class AMTRedirection(wsmanModule.wsmanModule):
'''Control over Serial-over-LAN and storage redirection.'''
_RESOURCES = {
'redirectionService': 'AMT_RedirectionService',
}
@property
def enabled_features(self):
state = self.RESOURCES['redirectionService'].get('EnabledState')
return AMT_REDIRECTION_STATE_MAP[state]
@enabled_features.setter
def enabled_features(self, features):
if not features:
value = 32768
elif 'SoL' in features and 'IDER' in features:
value = 32771
elif 'SoL' in features:
value = 32770
elif 'IDER' in features:
value = 32769
else:
            raise ValueError("Invalid data provided. Please provide a list containing only 'IDER' and/or 'SoL'.")
self.RESOURCES['redirectionService'].put(EnabledState = value)
|
Peterborough City Council has announced Atkins as its preferred bidder to deliver highways services in a deal worth £7.6 million a year.
Atkins will carry out road maintenance, street lighting and gulley cleansing under a new 10-year contract which could save in the region of £750,000 a year.
Council leader Marco Cereste has been asked to sign off the decision to appoint Atkins to provide highways services for the city council from 1 October 2013.
The announcement of Atkins as the preferred bidder follows a year-long tender process. The new contract worth £7.6 million annually will run for 10 years, with the ability to extend it for two further five-year periods.
Councillor Marco Cereste, leader of the council and cabinet member for growth, strategic planning, housing, economic development and business engagement, said: “This new contract has the potential to save the council and therefore its taxpayers in the region of £7.5 million in the next 10 years, which at a time when we are receiving less money from Government, can be re-invested into providing other services for residents. |
from pyramid_frontend.theme import Theme
from pyramid_frontend.images import FilterChain
from pyramid_frontend.assets.less import LessAsset
from pyramid_frontend.assets.requirejs import RequireJSAsset
class LightTheme(Theme):
key = 'light'
image_filters = (
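        # Each FilterChain defines a named image derivative (thumbnail, square crop,
        # etc.) together with its target dimensions, output extension and quality.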
FilterChain(
'thumb', width=330, height=220, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'square', width=300, height=300, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'about', width=400, height=300, extension='jpg',
quality=80, sharpness=1.5),
FilterChain(
'large', width=800, height=600, extension='jpg', resize=True,
quality=85, sharpness=1.5),
FilterChain(
'huge', width=2400, height=500, extension='jpg', quality=90,
sharpness=1.5),
)
assets = {
'main-less': LessAsset('/_light/css/main.less'),
'main-js': RequireJSAsset(
'/_light/js/main.js',
require_config_path='/_light/js/require_config.js',
require_base_url='/_light/js/vendor/',
),
}
|
These beautiful, unique and collectible sterling silver earrings feature an image from a collage by Tiffini Elektra of Anne Boleyn holding The Tudor Rose with pieces of an antique anemone botanical and ornamental papers and bookplates. One of the symbolic meanings of the anemone flower is "forsaken".
Dimensions: Earring Size is approx. 1 3/5" long x 4/5" wide (40mm long x 20mm wide) , Image size is approx. 4/5" (20mm) diameter. |
#!/usr/bin/env python
# encoding: utf-8
import io
import struct
class ByteBuffer(io.BytesIO):
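    """A BytesIO subclass with helpers for reading and writing fixed-width unsigned integers."""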
    def makefmt(self, width, littleEndian=False):
        # Map a bit width to a struct format code; '<' = little-endian, '>' = big-endian.
        fmt = '<' if littleEndian else '>'
        if width == 8: fmt += 'B'
        elif width == 16: fmt += 'H'
        elif width == 32: fmt += 'I'
        else: fmt += 'Q'  # any other width is treated as a 64-bit value
        return fmt
def maskValue(self, width, value):
if width == 8: return value & 0xFF
elif width == 16: return value & 0xFFFF
elif width == 32: return value & 0xFFFFFFFF
return value
    def getuint(self, width, pos):
        oldpos = self.tell()
        self.seek(pos)
        fmt = self.makefmt(width, False)
        value = struct.unpack(fmt, self.read(width // 8))  # integer division so this also works on Python 3
        self.seek(oldpos)
        return value[0]
def setuint(self, width, pos, value, littleEndian=False):
self.seek(pos)
fmt = self.makefmt(width, littleEndian)
maskedValue = self.maskValue(width, value)
self.write(struct.pack(fmt, maskedValue))
def writeuint(self, width, value, littleEndian=False):
fmt = self.makefmt(width, littleEndian)
self.write(struct.pack(fmt, value))
def readat(self, pos, length):
oldpos = self.tell()
self.seek(pos)
value = self.read(length)
self.seek(oldpos)
return value
|
Includes DEA-controlled substances, prescription drugs, alcohol & tobacco, firearms and explosives, radiation, lasers, etc.
The following rules apply to research using hazardous chemicals, devices and activities. These include substances and devices that are regulated by local, state, country, or international law, most often with restrictions of their use by minors such as DEA-controlled substances, prescription drugs, alcohol, tobacco, firearms and explosives. Hazardous activities are those that involve a level of risk above and beyond that encountered in the student’s everyday life.
These rules are intended to protect the student researcher by ensuring proper supervision and the consideration of all potential risks so that the appropriate safety precautions are taken. Students are required to meet all standards imposed by Intel ISEF, school, local, and/or regional fair(s).
1. All projects involving hazardous chemicals, activities or devices must describe in the research plan the risk assessment process, supervision, safety precautions and methods of disposal.
2. The use of hazardous chemicals, activities or devices and involvement in hazardous activities require direct supervision by a Designated Supervisor, except those involving DEA-controlled substances, which require supervision by a Qualified Scientist.
3. The student researcher must conduct a risk assessment in collaboration with a Designated Supervisor or Qualified Scientist prior to experimentation. This risk assessment is documented on the Risk Assessment Form 3.
4. Student researchers must acquire and use regulated substances in accordance with all local, state, U.S. federal and country laws. For further information or classification for these laws and regulations, contact the appropriate regulatory agencies.
5. For all chemicals, devices or activities requiring a Federal and/or State Permit, the student/supervisor must obtain the permit prior to the onset of experimentation. A copy of the permit must be available for review by adults supervising the project and the local, affiliated, and Intel ISEF SRCs in their review prior to competition.
6. The student researcher must minimize the impact of an experiment on the environment. Examples include using minimal quantities of chemicals that will require subsequent disposal; ensuring that all disposal is done in an environmentally safe manner and in accordance with good laboratory practices.
The U.S. Drug Enforcement Administration (DEA) regulates chemicals that can be diverted from their intended use to make illegal drugs. Other countries may have similar regulatory bodies; students outside of the U.S. must adhere to their own country’s drug regulatory agency requirements in addition to U.S. DEA regulations. DEA-controlled substances and their schedule number are at the DEA website under Sources of Information. It is the responsibility of the student to consult this list if there is a possibility that substances used in experimentation could be regulated.
1. All studies using DEA-controlled substances must be supervised by a Qualified Scientist who is licensed by the DEA (or other international regulatory body) for use of the controlled substance.
2. All studies using DEA Schedule 1 substances (including marijuana) must have the research protocol approved by DEA before research begins. Schedule 2, 3 and 4 substances do not require protocol approval by DEA.
1. Students are prohibited from administering prescription drugs to human participants.
2. A veterinarian must supervise student administration of any prescription drugs to vertebrate animals.
The U.S. Alcohol and Tobacco Tax and Trade Bureau (TTB) regulates the production of alcohol and distribution of alcohol and tobacco products. Many such products are restricted by age for purchase, possession and consumption.
1. Fermentation studies in which minute quantities of ethyl alcohol are produced are permitted.
2. The Designated Supervisor is responsible for the acquisition, usage and appropriate disposal of the alcohol or tobacco used in the study.
3. Production of wine or beer by adults is allowable in the home and must meet TTB home production regulations. Students are allowed to design and conduct a research project, under direct parental supervision, involving the legal production of the wine or beer.
4. Students are prohibited from conducting experiments where consumable ethyl alcohol is produced by distillation. However, students are allowed to distill alcohol for fuel or other non-consumable products. To do so, the work must be conducted at school or a Regulated Research Institution and follow all local and country laws. See the Alcohol and Tobacco Tax and Trade Bureau (TTB) website for details.
The U.S. Bureau of Alcohol, Tobacco, Firearms and Explosives (ATF), along with state agencies, regulates the purchase and use of firearms and explosives. A firearm is defined as a small arms weapon from which a projectile is fired by gunpowder. An explosive is any chemical compound, mixture or device, the primary purpose of which is to function by explosion. Explosives include, but are not limited to, dynamite, black powder, pellet powder, detonators, and igniters.
The purchase of a firearm by a minor is generally unlawful. The use of a firearm, without proper state certification, is illegal. Students should check the training and certification requirements of individual states and countries.
Projects involving firearms and explosives are allowable when conducted with the direct supervision of a Designated Supervisor and when in compliance with all federal, state and local laws.
A fully assembled rocket motor, reload kit or propellant modules containing more than 62.5 grams of propellant are subject to the permitting, storage and other requirements of federal explosive laws and regulations.
Potato guns and paintball guns are not firearms unless they are intended to be used as weapons. However, they must be treated as hazardous devices.
Projects involving unmanned aircraft systems (UAS)/drones must follow all state, Federal, and country laws. See the Federal Aviation Administration (FAA) for more details (www.faa.gov.uas/registration).
Projects involving radionuclides (radioisotopes) and X-rays must involve a careful examination of the risks associated with the study and appropriate safety precautions must be taken. Depending upon the level of exposure, radiation released from these sources can be a health hazard.
1. All studies may not exceed the dose limits set by the Nuclear Regulatory Commission of 0.5 mrem/hr or 100 mrem/year of exposure.
2. If the voltage needed in the study is <10 kvolts, a risk assessment must be conducted. The study may be done at home or school, and SRC preapproval is not required.
3. A study using 10-25 kvolts must have a risk assessment conducted and must be preapproved by the SRC to assess safety. Such a study must be conducted in a metal chamber using a camera only, not direct view through glass. A dosimeter or radiation survey meter is required to measure radiation exposure.
4. All studies using > 25 kvolts must be conducted at an institution with a Licensed Radiation Program and must be preapproved by the Institutions’ Radiation Safety Officer or the Committee which oversees the use of ionizing radiation to ensure compliance with state and federal regulations.
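As an illustration only (not part of the ISEF rules), the voltage thresholds in items 2-4 above could be encoded as a simple pre-screening check; the function name and the wording of the returned strings below are assumptions made for this sketch:
def xray_review_requirement(kilovolts):
    # Illustrative mapping of the <10 kV / 10-25 kV / >25 kV thresholds above.
    if kilovolts < 10:
        return "Risk assessment required; may be done at home or school, no SRC preapproval."
    elif kilovolts <= 25:
        return "Risk assessment plus SRC preapproval; metal chamber, camera viewing and dosimeter required."
    else:
        return "Must be conducted at an institution with a Licensed Radiation Program and preapproved by its Radiation Safety Officer."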
a. Toxicity – the tendency of a chemical to be hazardous to health when inhaled, swallowed, injected or in contact with the skin.
b. Reactivity — the tendency of a chemical to undergo chemical change.
c. Flammability — the tendency of a chemical to give off vapors which readily ignite when used under normal working conditions.
d. Corrosiveness — the tendency of a chemical, upon physical contact, to harm or destroy living tissues or physical equipment.
When assessing risk, the type and amount of exposure to a chemical must be considered. For example, an individual’s allergic and genetic disposition may have an influence on the overall effect of the chemical. The student researcher must refer to the Safety Data Sheets (SDS) provided by the vendor to ensure that proper safety precautions are taken. Some SDS sheets (e.g., Flinn) rank the degree of hazard associated with a chemical. This rating may assist students and adult sponsors in determining risk associated with the use of a chemical.
A risk assessment must include proper disposal methods for the chemicals used in an experiment. The Flinn Catalog (referenced in the Sources of Information section) provides information for the proper disposal of chemicals. If applicable, the student researcher must incorporate in the research plan disposal procedure required by federal and state guidelines.
The mission of environmentally responsible (green) chemistry is to avoid the use or production of hazardous substances during chemical process. The principles of green chemistry are described on the EPA website in the Sources of Information section. Whenever possible the following principles should be incorporated into the research plan.
The documentation of risk assessment (Form 3) is required when a student researcher works with potentially hazardous/dangerous equipment and/or other devices, in or outside a laboratory setting that require a moderate to high level of expertise to ensure their safe usage. Some commonly used devices (Bunsen burners, hot plates, saws, drills, etc.) may not require a documented risk assessment, assuming that the student researcher has experience working with the device. Use of other potentially dangerous devices such as high vacuum equipment, heated oil baths, NMR equipment, and high temperature ovens must have documentation of a risk assessment. It is recommended that all student designed inventions also have documentation of a risk assessment.
A risk assessment (documented on Form 3) must be conducted when a student’s project involves radiation beyond that normally encountered in everyday life. Non-ionizing radiation includes the spectrum of ultraviolet (UV), visible light, infrared (IR), microwave (MW), radiofrequency (RF) and extremely low frequency (ELF).
1) Safety in Academic Chemistry Laboratories, Volumes 1 and 2, 2003. Washington, DC: American Chemical Society.
Howard Hughes Medical Institute: a resource for working with cell cultures, radioactive materials and other laboratory materials.
Describes the various types of pesticides and the legal requirements for labelling. Provides links and phone numbers to get additional information.
A database of product labels. Enter the product name or company name to view the approved label information of pesticides which are registered with the agency. |
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import time
class BallisticDeposition:
"""store all heights as a 1 dim array"""
def __init__(self, L_x, Periodic_BCs=None):
"""need to enter parameters as floats; actual system width is L_x-2 as edge columns are not used;
Insert True if you want to impose periodic boundary conditions"""
self.__xsize = L_x
self.__Periodic_BCs=Periodic_BCs
m = 0
n = 0
self.propagation_number=m #keeps track of how many times the instance has been propagated
self.total_particles=n
roughness_array=np.array([])
self.roughness_array=roughness_array #empty array used for roughness values
time_array=np.array([])
self.time_array=time_array #empty array used for corresponding time values
        if isinstance(L_x, int):
system_array = np.zeros((1, self.__xsize)) #indices go from 0 to self.__xsize-1
self.system_array = system_array
else:
raise Exception("need to insert an integer")
def __repr__(self):
return "%s(number of columns = %g)" % ("system size", self.__xsize)
def __str__(self):
return "[%s]" % (self.__xsize)
def size_of_system(self):
return self.__xsize
def current_array(self):
return self.system_array
def random_columns(self, n): #where n is the number of iterations; function generates random column numbers
#ONLY METHOD THAT CHANGES WHEN IMPOSING PERIODIC BOUNDARY CONDITIONS
        if self.__Periodic_BCs:
            # np.random.randint's upper bound is exclusive, so this draws columns 0 .. xsize-1
            self.chosen_columns = np.random.randint(0, self.__xsize, n)
        else:
            # edge columns excluded: draws columns 1 .. xsize-2
            self.chosen_columns = np.random.randint(1, self.__xsize - 1, n)
        return self.chosen_columns
def array_search(self, j):
"""returns the height for a particular column"""
return self.system_array[0][j]
def update_array(self, h, j): #turns a site from 0 to 1 in a matrix
self.system_array.itemset((0,j),h)
return self.system_array
def deposit_particles(self, n):#here n is for the number of particles we are depositing on the lattice
self.random_columns(n) #every time is called get a DIFFERENT set of random numbers
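        #ballistic deposition rule: each particle sticks at the maximum of (height of
        #its own column + 1) and the heights of the two neighbouring columns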
        for j in self.chosen_columns: #if/elif/else applies the boundary conditions
            #also works when BCs are not imposed because edge columns are then never selected,
            #i.e. the system is effectively two columns smaller
            if j==0:
                p=self.array_search(0)+1
                q=self.array_search(1)
                r=self.array_search(self.__xsize-1)
            elif j==self.__xsize-1:
                p=self.array_search(self.__xsize-1)+1
                q=self.array_search(0)
                r=self.array_search(self.__xsize-2)
            else:
                p=self.array_search(j)+1
                q=self.array_search(j+1)
                r=self.array_search(j-1)
x=[p,q,r]
h=max(x)
self.update_array(h, j)
#print j, self.system_array
return h
def roughness(self): #works out the roughness for a particular square matrix
"""returns the rougheness for the array"""
x=np.array([])
for j in np.arange(0, self.__xsize,1):
x=np.append(x, self.array_search(j))
y=np.average(x)
a=(x-y)**2
b=np.sum(a)
        z=(1/(float(self.__xsize)))*b #note: the average runs over all columns, including the edge columns that stay empty when periodic BCs are not imposed
w=z**0.5
return w
def roughness_dynamics(self, n, iterations):#generates a series of roughness values for a series of matrices
"""iterates the BD forward in time, depositing n particles for each of the iterations; takes instance of the BallisticDeposition class"""
self.n=n #property of the data analysis object
self.iterations=iterations #property of the dataanalysis object
self.propagation_number= self.propagation_number + self.iterations #property of the BallisticDeposition object; total no. of iterations ,i.e. data values
self.total_particles = self.total_particles + self.iterations*self.n #property of the BallisticDeposition object; total no. of particles deposited
x=np.array([])
m=1
while m<=iterations:
self.deposit_particles(n)
x=np.append(x, self.roughness())
m = m+1
            print(m-1)
self.data=x
return self.data
def add_data(self):
"""filling data into separate roughness array and creating the matching time array; need to enter numpy array as the parameter"""
for i in np.arange(0, self.data.size):
self.roughness_array=np.append(self.roughness_array, self.data[i])
self.time_array=np.append(self.time_array, np.arange(self.total_particles-(self.iterations-1)*self.n, self.total_particles+self.n,self.n))
return self.roughness_array, self.time_array
def erase_data(self):
self.roughness_array=np.array([])
self.time_array=np.array([])
return self.roughness_array, self.time_array
def partial_erase_data(self, first_index, last_index):
"""erases all the elements in between and including those given by the indices"""
self.roughness_array=np.delete(self.roughness_array, np.arange(first_index, last_index+1))
self.time_array=np.delete(self.time_array, np.arange(first_index, last_index+1))
return self.roughness_array, self.time_array
def line_plot(self, line_of_best_fit=None):
log_t=np.log(self.time_array)
log_w=np.log(self.roughness_array)
m, b = np.polyfit(log_t, log_w, 1)
fig = plt.figure()
fig.suptitle('Log-log Plot of Roughness Against Time', fontsize=14, fontweight='bold')
ax = fig.add_subplot(111)
ax.set_title('axes title')
ax.set_xlabel('log(t)')
ax.set_ylabel('log(w)')
ax.plot(log_t, log_w, 'r+')
        if line_of_best_fit:
ax.plot(log_t, m*log_t + b, '-')
ax.text(0.1,0.9, r'$\beta=$%g' % (m) , style='italic', horizontalalignment='left',verticalalignment='top', transform=ax.transAxes )
ax.text(0.1,0.8, 'Particles Deposited=%g' % (self.total_particles) , style='italic', horizontalalignment='left',verticalalignment='top', transform=ax.transAxes ) #position of test and the test itself
#plt.axis([40, 160, 0, 0.03])
plt.grid(True)
plt.show()
return None
    def saturation_value(self):
        w_sat = np.average(self.roughness_array)
        return w_sat
|
MSNBC – For many Americans, the switch to daylight saving time is an annual rite of exhaustion. Gaining that extra hour of daylight at night means losing it in the morning.
The time shift disrupts the body’s natural circadian rhythm, according to sleep scientists. So the alarm clock blares just as your internal sleep-wake cycle orders you to stay snugly in bed. It’s always harder to adjust to the "spring ahead" time change (this year on March 11) than to the "fall back" change (on November 4), just as it’s harder to fly east than west. |
#! /usr/bin/python3
"""Construct a geographic "baseline" matrix from a collection of
shapefiles (assumed to be maps of the Earth). Shapefiles can be
either "positive" or "negative". The baseline matrix represents a
grid laid over the Earth; points inside the union of positive geometry
and not inside the union of negative geometry will have value 1,
points well within the complementary space will have value 0, and
points right on the edge (as determined by the "fuzz" argument) will
have intermediate values. The grid will have points exactly on all
four edges, except when the westmost and eastmost meridians coincide,
in which case the eastmost meridian will not be included.
"""
import argparse
import functools
import math
import os
import sys
import fiona
import fiona.crs
import numpy as np
import pyproj
import shapely
import shapely.geometry
import shapely.ops
import shapely.prepared
import tables
from fiona.errors import FionaValueError
from argparse import ArgumentTypeError
class GeographicMatrix:
def __init__(self, args):
# WGS84 reference ellipsoid: see page 3-1 (physical page 34) of
# http://earth-info.nga.mil/GandG/publications/tr8350.2/wgs84fin.pdf
# A and F are exact, A is in meters.
A = 6378137 # equatorial semi-axis
F = 1/298.257223563 # flattening
B = A * (1-F) # polar semi-axis
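        # Convert the grid resolution and fuzz radius from metres to degrees of arc:
        # degrees = metres * 180 / (pi * radius), using the equatorial radius for
        # longitude, the polar radius for latitude, and their mean for the fuzz.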
lon_spacing = (args.resolution * 180) / (A * math.pi)
lat_spacing = (args.resolution * 180) / (B * math.pi)
fuzz_degrees = (args.fuzz * 180) / ((A+B) * math.pi / 2)
# To avoid rounding errors, precalculate the number of grid rows
# and columns so we can use linspace() rather than arange().
n_lon = int(math.floor((args.east - args.west) / lon_spacing))
n_lat = int(math.floor((args.north - args.south) / lat_spacing))
south = args.south
north = south + n_lat * lat_spacing
west = args.west
east = west + n_lon * lon_spacing
        if abs((east - west) - 360.0) <= 1e-6:
sys.stderr.write("East-west wraparound, shrinking grid.\n")
n_lon -= 1
east -= lon_spacing
sys.stderr.write(
"Matrix dimensions {}x{}\n"
"Longitude spacing {:.9f}; eastmost grid error {:.9f}\n"
" Latitude spacing {:.9f}; northmost grid error {:.9f}\n"
.format(n_lon, n_lat,
lon_spacing, args.east - east,
lat_spacing, args.north - north))
lon = np.linspace(west, east, n_lon)
lat = np.linspace(south, north, n_lat)
mtx = np.zeros((n_lat, n_lon), dtype=np.float32)
# We save all the (adjusted) parameters from the command line
# so we can record them as metadata in the output file later.
self.resolution = args.resolution
self.fuzz = args.fuzz
self.north = north
self.south = south
self.west = west
self.east = east
self.lon_spacing = lon_spacing
self.lat_spacing = lat_spacing
# These are actually needed by process_geometry.
self.lon = lon
self.lat = lat
self.mtx = mtx
self.fuzz_deg = fuzz_degrees
self.geoms = []
def write_to(self, fname):
with tables.open_file(fname, 'w') as f:
M = f.create_carray(f.root, 'baseline',
tables.Atom.from_dtype(self.mtx.dtype),
self.mtx.shape,
filters=tables.Filters(complevel=6,
complib='zlib'))
M[:,:] = self.mtx[:,:]
M.attrs.resolution = self.resolution
M.attrs.fuzz = self.fuzz
M.attrs.north = self.north
M.attrs.south = self.south
M.attrs.east = self.east
M.attrs.west = self.west
M.attrs.lon_spacing = self.lon_spacing
M.attrs.lat_spacing = self.lat_spacing
M.attrs.longitudes = self.lon
M.attrs.latitudes = self.lat
# If you don't manually encode the strings, or if you use
# normal Python arrays, you get pickle barf in the file
# instead of a proper HDF vector-of-strings. I could
# combine these attributes into a record array, but this
# is simpler.
M.attrs.geom_names = np.array([ g[1].encode('utf-8')
for g in self.geoms ])
M.attrs.geom_senses = np.array([ g[0].encode('utf-8')
for g in self.geoms ])
# If you don't set a TITLE on M, the file is slightly out of
# spec and R's hdf5load() will segfault(!)
M.attrs.TITLE = "baseline"
def process_geometry(self, sense, geom):
assert sense == '+' or sense == '-'
name = os.path.splitext(os.path.basename(geom.name))[0]
self.geoms.append((sense, name))
sys.stderr.write("Processing {} (crs={})...\n"
.format(name, fiona.crs.to_string(geom.crs)))
# unary_union does not accept generators
inner_boundary = shapely.ops.unary_union([
shapely.geometry.shape(g['geometry'])
for g in geom])
# It is (marginally) more efficient to transform the inner
# boundary to the desired "raw WGS84 lat/long" coordinate
# system after combining it into one shape.
inner_boundary = shapely.ops.transform(
functools.partial(
pyproj.transform,
pyproj.Proj(geom.crs),
pyproj.Proj(proj="latlong", datum="WGS84", ellps="WGS84")),
inner_boundary)
outer_boundary = inner_boundary.buffer(self.fuzz_deg)
inner_boundary_p = shapely.prepared.prep(inner_boundary)
outer_boundary_p = shapely.prepared.prep(outer_boundary)
for i, x in enumerate(self.lon):
for j, y in enumerate(self.lat):
pt = shapely.geometry.Point(x, y)
if inner_boundary_p.contains(pt):
val = 1
elif not outer_boundary_p.contains(pt):
val = 0
else:
# in between
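                    # linear ramp: val falls from 1 right at the positive geometry
                    # to 0 once the point is a full fuzz radius away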
val = 1 - min(1, max(0,
pt.distance(inner_boundary)/self.fuzz_deg))
if sense == '+':
self.mtx[j,i] = min(1, self.mtx[j,i] + val)
else:
self.mtx[j,i] = max(0, self.mtx[j,i] - val)
def process(args):
matrix = GeographicMatrix(args)
for sense, geom in args.shapefile:
matrix.process_geometry(sense, geom)
matrix.write_to(args.output)
def main():
def shapefile(fname):
if not fname:
raise ArgumentTypeError("shapefile name cannot be empty")
if fname[0] == '+':
tag = '+'
fname = fname[1:]
else:
tag = '?'
try:
return (tag, fiona.open(fname, 'r'))
except FionaValueError as e:
raise ArgumentTypeError(
"%s: not a shapefile (%s)" % (fname, str(e)))
except OSError as e:
raise ArgumentTypeError(
"%s: cannot open (%s)" % (fname, e.strerror))
def fixup_shapefile_arg(n, arg):
if arg[0] != '?': return arg
if n == 0: return ('+', arg[1])
return ('-', arg[1])
ap = argparse.ArgumentParser(description=__doc__)
ap.add_argument('-s', '--south', type=float, default=-60,
help='Southmost latitude for the output matrix. '
'The default is -60, which is south of all major '
'landmasses except Antarctica.')
ap.add_argument('-n', '--north', type=float, default=84,
help='Northmost latitude for the output matrix. '
'The default is 84, which is north of all major '
'landmasses.')
ap.add_argument('-w', '--west', type=float, default=-180,
help='Westmost longitude for the output matrix. '
'The default is -180.')
ap.add_argument('-e', '--east', type=float, default=180,
help='Eastmost longitude for the output matrix. '
'The default is 180.')
ap.add_argument('-r', '--resolution', type=float, default=5000,
help='Grid resolution of the matrix, in meters at '
'the equator. The matrix is NOT projected, so its '
                    'east-west resolution closer to the poles will be finer. '
'The default is 5km.')
ap.add_argument('-f', '--fuzz', type=float, default=None,
help='Fuzz radius. Points outside the positive geometry '
'by less than this distance will have values between 0 and '
'1. The default is 1.5 times the resolution.')
ap.add_argument('-o', '--output', default=None,
help='Name of output file. The default is to use the '
'name of the first input shapefile, with a ".hdf" suffix.')
ap.add_argument('shapefile', type=shapefile, nargs='+',
help='Shapefiles to process. The first shapefile in the '
'list is always considered positive geometry; subsequent '
'shapefiles are negative geometry unless specified with a '
'leading "+" on the filename.')
args = ap.parse_args()
if not args.shapefile:
ap.error("at least one shapefile must be specified")
if not (-180 <= args.west < args.east <= 180):
ap.error("improper values for --west/--east")
if not (-90 <= args.south < args.north < 90):
ap.error("improper values for --south/--north")
args.shapefile = [fixup_shapefile_arg(n, arg)
for n, arg in enumerate(args.shapefile)]
if args.output is None:
args.output = os.path.splitext(args.shapefile[0][1].name)[0] + '.hdf'
if args.fuzz is None:
args.fuzz = args.resolution * 1.5
process(args)
if __name__ == '__main__':
    main()
|
For a week-by-week collection of audio recordings, presentations and handouts, see below. This section provides links to resources which apply to the class as a whole. Check back as it may be updated throughout the course.
The Story of His Glory by Steven C. Hawthorne: An article tracing the principal themes of a missional reading of Scripture.
The Mission of God by Christopher J.H. Wright: A book-length treatment of the "missional reading of Scripture" on which this class is based. This is a lengthy (500+ page) but accessible resource for those interested in going deeper.
Xplore Curriculum a 7-week Bible study curriculum for individuals and small groups produced by the Center for Mission Mobilization. Some of the themes for our class were drawn from this curriculum.
Whatcom County Perspectives on the World Christian Movement - a 15-week course exploring God's mission in the world and our place in it. Begins January 11th, 2018 in Ferndale.
Class Description: This class will present an introduction to the concept of a “missional reading” of Scripture, along with specific ways this missional understanding has been worked out throughout church history. Guest speakers will present on their specific areas of expertise, and participants will engage with the material through lecture, large- and small-group discussion. It is expected that participants will gain a broader and deeper understanding of mission as a primary theme of Scripture, that they will be exposed to some key concepts of missiology and that they will begin to see themselves as participants in God’s ongoing mission to the world.
Speakers: Rebecca and Tim--former international directors of a missions organization focused on unreached people groups.
Dr. Rhonda McEwen shared with us the theological and practical implications of Christian involvement in relief and development work, along with a helpful way of viewing poverty not primarily as a lack of material goods but rather as a series of fundamentally broken relationships. Listen to the audio file while paging through the slides below. A link to the World Bank video she references is available beneath the slideshow.
The video referenced in Rhonda's talk is available for viewing by clicking here.
WEEK 3: The Nations at Our Doorstep - Guest Speakers Kristin H. and Jeff J.
Kristin and Jeff, campus pastors at Whatcom Community College, shared with us one of the ways in which the Big Story of God's missional heart is being played out in front of us today: ministry to international students, many of whom come from countries which are closed to missionaries or not culturally receptive to the gospel. They were both passionate and thoughtful about their callings as intercultural workers on a U.S. college campus.
Week 1 Handout: Includes scriptures and discussion questions related to a missional reading of Scripture.
The following video, produced by The Bible Project, gives an overview of what it means that humans are made in the "image of God," which in turn gives us a more holistic sense of our mission as image-bearers. |
#
# Copyright (C) 2010, 2011, 2012, 2014, 2015, 2016, 2019
# Smithsonian Astrophysical Observatory
#
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""Find the version numbers and release dates of the Chandra CALDB.
"""
import os
import copy
import socket
import time
from html.parser import HTMLParser
from urllib.request import urlopen
from urllib.error import URLError
import pycrates
from ciao_contrib import logger_wrapper as lw
__all__ = (
"check_caldb_version",
"get_caldb_dir",
"get_caldb_releases",
"get_caldb_installed",
"get_caldb_installed_version",
"get_caldb_installed_date"
)
lgr = lw.initialize_module_logger('caldb')
v3 = lgr.verbose3
release_notes_url = "https://cxc.cfa.harvard.edu/caldb/downloads/releasenotes.html"
def todate(txt):
"""Convert text to time.
This is intended to convert a time string, as reported by the
CALDB web pages, to a time object.
Parameters
----------
txt : str or None
The time string.
Returns
-------
val : time object or str or None
If the input can be converted to a time object then that
object is returned, otherwise the input is returned unchanged.
"""
if txt is None:
return None
try:
return time.strptime(txt + "UTC", "%Y-%m-%dT%H:%M:%S%Z")
except ValueError:
return txt
# The current version has the main data in a td with class=mainbar
# but this is hopefully going to change RSN for a div (as this comment
# was written a long time ago it obviously hasn't happened).
#
class CALDBReleaseParser(HTMLParser):
"""Extract relevant fields from the CIAO CALDB web page.
Parse the CALDB release-notes HTML page for
release information on the CALDB.
Raises
------
IOError
This can be caused if the result is empty (at least for the
CALDB release column); the page can not be parsed (e.g. it
does not match the expected contents). The error messages
are not 'user friendly' since they may reveal the internal
state of the object to make debugging easier.
Examples
--------
The use is, where h is a string containing the HTML to parse:
>>> p = CALDBReleaseParser()
>>> p.feed(h)
>>> p.close()
and then the p.releases field is a dictionary where the keys are
the various CALDB release types (e.g. CALDB, SDP, CIAO, and L3)
and the values are tuples of (version number, date) where the
version number is a string and the date is a time object.
"""
state = "need-table"
open_mode = {"need-table":
{"tag": "table",
"attribute": ("id", "caldbver"),
"newstate": "check-header"
},
}
close_mode = {"check-header": {"tag": "tr",
"newstate": "store-data"},
"store-data": {"tag": "table",
"newstate": "finished"}
}
store = []
row_store = None
item_store = None
def close(self):
HTMLParser.close(self)
if self.state != "finished":
raise IOError("incorrectly-nested tables; state={}".format(self.state))
st = self.store
def make(i):
return [(s[0], s[i]) for s in st if s[i] is not None]
self.releases = {"CALDB": make(1),
"SDP": make(2),
"CIAO": make(3),
"L3": make(4)}
if len(self.releases["CALDB"]) == 0:
raise IOError("No CALDB release information found!")
def handle_starttag(self, tag, attrs):
if self.state == "store-data":
# Could do the storing via a pseudo state machine as we do with
# finding the table, but just hard code the logic for now
#
if self.row_store is None:
if tag == "tr":
self.row_store = []
else:
raise IOError("A new row has started with the tag: {}".format(tag))
if tag == "td":
if self.item_store is None:
self.item_store = ""
else:
raise IOError("A new item has started but the item_store=[{}]".format(self.item_store))
return
if self.state not in self.open_mode:
return
tbl = self.open_mode[self.state]
if tag != tbl["tag"] or \
("attribute" in tbl and tbl["attribute"] not in attrs):
return
if "newstate" in tbl:
self.state = tbl["newstate"]
def handle_endtag(self, tag):
if self.state == "store-data":
if tag == "td":
item = self.item_store.strip()
if item.lower() in ["n/a", "not released publicly"]:
self.row_store.append(None)
else:
self.row_store.append(item)
self.item_store = None
elif tag == "tr":
r = self.row_store
if len(r) != 5:
raise IOError("Unable to parse row: {0}".format(r))
self.store.append((r[0],
todate(r[1]),
todate(r[2]),
todate(r[3]),
todate(r[4])))
self.row_store = None
if self.state not in self.close_mode:
return
tbl = self.close_mode[self.state]
if tag != tbl["tag"]:
return
self.state = tbl["newstate"]
def handle_data(self, data):
if self.state == "store-data" and self.item_store is not None:
ds = data.strip()
            if ds:
if self.item_store == "":
self.item_store = ds
else:
self.item_store += " {0}".format(ds)
def get_caldb_releases(timeout=None):
"""Return information on the CIAO CALDB releases.
Extracts the CIAO CALDB release history from the web page.
Parameters
----------
timeout : optional
The timeout option for the urlopen call. If not set then the
global Python timeout setting will be used. If given then it
is the maximum time to wait in seconds.
Returns
-------
releases : dict
The keys are the names "CALDB", "SDP", "CIAO" or "L3",
and the values are arrays of (version-string, date) tuples.
There is no guarantee that the lists are in descending order
of time or version number.
Raises
------
IOError
Many network errors are converted to an IOError with a
simple error message.
Notes
-----
This routine will only work if the computer is on-line and able to
access the Chandra CALDB pages at
https://cxc.cfa.harvard.edu/caldb/
This call turns off certificate validation for the requests since
there are issues with getting this working on all supported platforms.
It *only* does it for the calls it makes (i.e. it does not turn
off validation of any other requests).
The version-string is "1.2" or "4.2.2" and the date is a time
object.
"""
import ssl
# Note that the SSL context is explicitly
# set to stop verification, because the CIAO 4.12 release has
# seen some issues with certificate validation (in particular
# on Ubuntu and macOS systems).
#
context = ssl._create_unverified_context()
v3("About to download {}".format(release_notes_url))
try:
if timeout is None:
h = urlopen(release_notes_url, context=context)
else:
h = urlopen(release_notes_url, context=context, timeout=timeout)
except URLError as ue:
v3(" - failed with {}".format(ue))
# Probably excessive attempt to make a "nice" error message
#
if hasattr(ue, "reason"):
if hasattr(ue.reason, "errno") and \
ue.reason.errno == socket.EAI_NONAME:
raise IOError("Unable to reach the CALDB site - is the network down?")
else:
raise IOError("Unable to reach the CALDB site - {}".format(ue.reason))
elif hasattr(ue, "getcode"):
cval = ue.getcode()
if cval == 404:
raise IOError("The CALDB site appears to be unreachable.")
else:
raise IOError("The CALDB site returned {}".format(ue))
else:
raise IOError("Unable to access the CALDB site - {}".format(ue))
h = h.read().decode('utf-8')
try:
p = CALDBReleaseParser()
p.feed(h)
p.close()
except IOError:
raise IOError("Unable to parse the CALDB release table.")
# use a deep copy so that the parser can be cleaned up
# in case there's a lot of state squirreled away
# (although have not checked that it actually matters)
return copy.deepcopy(p.releases)
def get_caldb_dir():
"""Return the location of the CIAO CALDB.
Returns
-------
path : str
The location of the CIAO CALDB, as given by the CALDB
environment variable.
Raises
------
IOError
If the CALDB environment variable is not defined or does
not point to a directory.
"""
caldb = os.getenv("CALDB")
if caldb is None:
raise IOError("CALDB environment variable is not defined!")
elif not os.path.isdir(caldb):
raise IOError("CALDB directory does not exist: {}".format(caldb))
else:
return caldb
def get_caldb_installed(caldb=None):
"""What CIAO CALDB is installed (version and release date)?
Parameters
----------
caldb : str, optional
If set, the directory to search in, otherwise the
CALDB environment variable is used.
Returns
-------
version, date
The CIAO CALDB version, as a string, and the release date
of the version (as a date object) of the installed
CIAO CALDB.
See Also
--------
get_caldb_installed_date, get_caldb_installed_version
"""
if caldb is None:
caldb = get_caldb_dir()
fname = os.path.join(caldb,
"docs/chandra/caldb_version/caldb_version.fits")
cr = pycrates.TABLECrate(fname, mode='r')
for cname in ["CALDB_VER", "CALDB_DATE"]:
if not cr.column_exists(cname):
raise IOError("Unable to find the {} column in the CALDB version file.".format(cname))
cversion = pycrates.copy_colvals(cr, "CALDB_VER")[-1]
cdate = pycrates.copy_colvals(cr, "CALDB_DATE")[-1]
cversion = cversion.strip()
cdate = cdate.strip()
return (cversion, todate(cdate))
def get_caldb_installed_version(caldb=None):
"""What CIAO CALDB is installed (version)?
Parameters
----------
caldb : str, optional
If set, the directory to search in, otherwise the
CALDB environment variable is used.
Returns
-------
version : str
The CIAO CALDB version, as a string.
See Also
--------
get_caldb_installed, get_caldb_installed_date
"""
return get_caldb_installed(caldb)[0]
def get_caldb_installed_date(caldb=None):
"""What CIAO CALDB is installed (release date)?
Parameters
----------
caldb : str, optional
If set, the directory to search in, otherwise the
CALDB environment variable is used.
Returns
-------
date
The release date of the version (as a date object) of the
installed CIAO CALDB.
See Also
--------
get_caldb_installed, get_caldb_installed_version
"""
return get_caldb_installed(caldb)[1]
def version_to_tuple(version):
"""Convert CALDB version string to a tuple.
Parameters
----------
version : str
A CALDB version string like '4.4.10',
Returns
-------
version : tuple of int
The tuple of integers representing the input version. The
number of elements in the tuple depends on the input.
"""
toks = version.split('.')
try:
out = [int(t) for t in toks]
except ValueError:
raise ValueError("Invalid version string '{}'".format(version))
return tuple(out)
def check_caldb_version(version=None):
"""Is the locally-installed Chandra CALDB installation up-to-date?
The routine requires that the computer is on-line and able to
access the Chandra CALDB web site: https://cxc.harvard.edu/caldb/
Parameters
----------
version : str, optional
The version to compare to the latest released Chandra CALDB
(as obtained from https://cxc.harvard.edu/caldb/). If not
set then the version from the locally-installed Chandra CALDB
is used. The format for the string is integer values separated
by ".", such as "4.7.2".
Returns
-------
retval : None or (str, str)
If the installation is up to date then the routine returns
None, otherwise it returns the tuple
(version checked against, latest version).
Raises
------
IOError
If the version parameter is given but does not match a CALDB
release.
"""
# Converting the dotted string form (4.2.1 or 3.2.0.1) to
# a tuple of integers means we can just use Python's comparison
# operator and it handles cases like
#
# >>> (4,2,1) > (3,2,0,1)
# True
# >>> (4,2,2) > (4,2,2,2)
# False
#
# We are relying on the CALDB release numbers to be dotted
# integers, with no alphanumerics (i.e. not 4.4.1a)
#
rels = get_caldb_releases()
if version is None:
installed_ver = get_caldb_installed_version()
else:
installed_ver = version
    # We check against the CALDB release numbers rather than
    # the CIAO ones, since they form the "complete" set.
    # Should perhaps be case insensitive but leave that for
    # a later revision.
    #
    if version is not None and \
       version not in [v[0] for v in rels["CALDB"]]:
        raise IOError("The input CALDB version '{}' is unknown!".format(version))
iver = version_to_tuple(installed_ver)
out = [v for (v, d) in rels["CIAO"] if version_to_tuple(v) > iver]
if out == []:
return
out.sort(reverse=True)
return (installed_ver, out[0])
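# A minimal usage sketch (not part of the original module): it assumes the CALDB
# environment variable is set and the release-notes page is reachable.
if __name__ == "__main__":
    status = check_caldb_version()
    if status is None:
        print("CALDB {} is up to date".format(get_caldb_installed_version()))
    else:
        print("CALDB {} is out of date; latest release is {}".format(status[0], status[1]))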
# End
|
There are many things that can go wrong when you are out riding your bicycle. One of the most common problems is a flat tire, which makes riding difficult if you do not have a pump with you. That is why you need to purchase a bicycle tire pump in order to have a good biking experience. Modern bike pumps have super-efficient features and make life more comfortable. However, selecting the right bike pump can be troublesome. In this article, we have done some research and come up with the top 10 best bike pumps in 2019.
Easy to Use Pressure Gauge: The best bike pump needs to have a readable pressure gauge so you know how much pressure you have put into the tires. Look for one that gives a precise and accurate reading.
Valve connection: A good pump is one that can connect to common types of valves such as Schrader and Presta. The connection must be easy and must not put any leverage on the tube or valve that could damage them. In addition, consider a pump with a flexible hose that prevents the valve from being pushed around.
Durability: Look for a durable pump that will serve you for a long time, so you will not have to go back to the shop for a new one. A sturdy pump will serve you well whenever your bike has a flat tire.
Whether you are an amateur who enjoys riding for fun or a professional rider, the Raise Your Game pump can cater to all your requirements. It is an ultra-lightweight and compact bicycle tire pump that fits in your backpack or pocket, and it comes with a bicycle frame mount that securely and conveniently attaches it to your bike. On top of that, it features a pressure gauge and high pressure of up to 160 PSI, which makes pumping safe and easy.
This bike pump has a heavy-duty aluminum frame that makes it durable, and it can also be used to inflate sports balls such as yoga balls, basketballs, volleyballs, soccer balls and more. Notably, it has an innovative design for a firm grip on Presta and Schrader valves.
Forget about flat tires if you have the NewMainone portable bike pump with you on the go. Its 120 psi maximum pressure lets you pump up the tires with less effort compared to other traditional bike pumps. Besides that, this pump is compatible with both Presta and Schrader valves. The pump comes with a full bicycle repair set: 1 metal rasp, 2 nylon tire levers and ultra-strong glueless tire patches.
This pump has a separate, flexible hose that puts less strain on the tire valve stems, avoiding broken and bent stems. Last but not least, the pump is lightweight, powerful and durable.
Noble Cycling Bike Pump is an effective, light, portable and robust premium-grade model made with an aluminum alloy barrel. It is going to keep you and your friends riding on the road for many years. It includes a reserve air tank with a maximum pressure of 260 psi that can release all the reserved pressure at once, which allows you to seat your tubeless tires without using an air compressor.
The included gauge enables you to read psi before releasing the air to seat the tire. On the other hand, the twin-valve head enables you to easily change from Schrader to Presta tires or Dunlop tires.
Topeak Joe is a powerful 160 psi high-quality floor pump with a sturdy base that is slip and break resistant. Its large 3-inch gauge mounted at the base makes it easy to read pressure levels. It also includes ball and bladder heads, allowing you to pump up a floatie, beach ball or sports ball. Having been rigorously tested, it is certain to be a quality, long-lasting bike pump, and it has an ergonomic rubber-padded handle for comfortable pumping.
This pump easily fits all types of valves including Schrader, Dunlop, and Presta valves. This Floor Bike Pump will keep the bike in tip-top condition all the time. If you have been working with bike pumps that offer inferior inflation, you need to try this one out.
Whenever you are looking for a high-quality, small bike pump, this Malker bike pump is right for you. It is almost weightless at only 200 g, so you can carry it in your bag or attach it to the bike frame. As a matter of fact, the pump barrel is made of mini aluminum alloy, making it ultra-durable and easy to carry.
Another great advantage of this bike pump is that it is easy to use and has up to 130 psi capacity, allowing you to inflate your bike tire from flat during emergency repairs. The package comes with a glueless puncture repair kit that will completely solve blown-tire trouble.
This portable bike pump has a barrel constructed of aluminum alloy, so you can safely carry it in your bag and have it ready at your convenience. Its wide-barrel design pushes more air and thus quickly inflates wide mountain-bike tires. On top of that, it is designed to inflate all kinds of bike tires while offering little pumping resistance.
This high-volume bicycle pump can likewise handle other inflation jobs such as pumping up rafts, toys and sports balls with the help of the included inflation cone and bonus ball needle.
The AerGun X-1000 bike pump has a pressure gauge that has been combined to make sure that you have the preferred pressure in your tires. This pressure gauge is easy to use and this tire pump is ideal to be used in all bikes and other uses. You will love the superior performance of this pump and will fit the tires up to a pressure of 160 PSI.
The pump is engineered for high-performance bicycles and gives accurate tire pressure reading. This precision bike pump is long lasting and will last for years thus it is worth your money.
Whether you have a hybrid, mountain bike or BMX, this bike pump will suit your needs. It's made from superior-quality aluminum alloy that gives it a stunning design and finish. On top of that, the pump is 8.75 inches long and weighs only 4.5 oz, which makes it compact and durable. You can easily switch between Presta and Schrader valves thanks to its innovative hose design.
This bike pump comes with a secure frame-mount bracket that holds it firmly so it will never get lost while you are riding. Its secure thread-on valve connection provides a tight seal with no air leaks.
This is a premium-grade, handy floor drive pump made of durable, heavy-duty aluminum, ergonomically designed with a comfortable handle. Its accurate, extra-large gauge makes the pressure easy to read, and it reaches a maximum pressure of 160 PSI. It's stable and durable, and its newly designed valve head lets you switch easily between Schrader and Presta with no air leaks.
The long barrel pushes more air through with each stroke, which makes pumping faster and easier. It does not end there: the pump also comes with a long hose that pivots 360 degrees for easy pumping.
This bike pump features a newly designed valve head that switches faultlessly between valves without leaking air, fitting both Presta and Schrader valves with a simple switch. On top of that, the package includes a glueless puncture kit that lets you repair bike tires in an emergency. With its large, accurate gauge, you can easily measure the pressure in PSI.
The pump inflates up to 160 PSI and is made with a reinforced handle and a strong steel barrel. Once you have this floor pump, you will never again be frustrated by being left stranded with a flat tire.
The bike pumps reviewed above are among the best options you can find on the market today. They offer excellent performance, are made of durable, lightweight materials for portability, and are designed for ease of use. Go through each of them and choose the one that fits your needs.
#!/usr/bin/env python
from functools import reduce  # built in on Python 2, needed explicitly on Python 3

from threaded_ssh import ThreadedClients
from ServerConfig import Tpcc
from ServerConfig import Kudu
from ServerConfig import TellStore
from ServerConfig import General


def hostToIp(host):
    # Resolve a host name to its Infiniband IP address.
    return General.infinibandIp[host]


def semicolonReduce(x, y):
    # Join two address strings with a semicolon to build the storage server list.
    return x + ';' + y


cmd = ""
if Tpcc.storage == Kudu:
    # Kudu backend: point the TPC-C client at the Kudu master.
    cmd = "{0}/watch/tpcc/tpcc_kudu -H `hostname` -W {1} --network-threads 8 -s {2} -P {3}".format(
        Tpcc.builddir, Tpcc.warehouses, Kudu.master, len(Kudu.tservers) * 4)
elif Tpcc.storage == TellStore:
    # TellStore backend: sync the build, then pass the commit manager and all storage nodes.
    Tpcc.rsyncBuild()
    cmd = '{0}/watch/tpcc/tpcc_server -W {1} --network-threads 4 -c "{2}" -s "{3}"'.format(
        Tpcc.builddir, Tpcc.warehouses,
        General.infinibandIp[TellStore.commitmanager] + ":7242",
        reduce(semicolonReduce, map(lambda x: hostToIp(x) + ":7241", TellStore.servers)))

# Run one client group per NUMA node; the second group listens on a different port.
client0 = ThreadedClients(Tpcc.servers0, "numactl -m 0 -N 0 {0}".format(cmd), rnd_start=True, root=False)
client1 = ThreadedClients(Tpcc.servers1, "numactl -m 1 -N 1 {0} -p 8712".format(cmd), rnd_start=True, root=False)

client0.start()
client1.start()
client0.join()
client1.join()
#!/usr/bin/python
import rospy
from sensor_msgs.msg import Joy
from axis_camera.msg import Axis


class Teleop:
    def __init__(self):
        rospy.init_node('axis_ptz_teleop')
        self.enable_button = rospy.get_param('~enable_button', 1)
        self.axis_pan = rospy.get_param('~axis_pan', 0)
        self.axis_tilt = rospy.get_param('~axis_tilt', 1)
        self.state = Axis(pan=220)
        self.joy = None
        self.pub = rospy.Publisher('cmd', Axis)
        rospy.Subscriber("joy", Joy, self.joy_callback)
        # rospy.Subscriber("state", Axis, self.state_callback)

    def spin(self):
        self.state.brightness = 5000
        self.pub.publish(self.state)
        r = rospy.Rate(5)
        while not rospy.is_shutdown():
            if self.joy is not None and self.joy.buttons[self.enable_button] == 1:
                # and (rospy.Time.now() - self.joy.header.stamp).to_sec() < 0.2:
                # Nudge pan/tilt by the scaled joystick axes while the enable
                # button is held, clamping tilt to the camera's valid range.
                self.state.pan += self.joy.axes[self.axis_pan] * 5
                self.state.tilt += self.joy.axes[self.axis_tilt] * 5
                if self.state.tilt > 85:
                    self.state.tilt = 85
                if self.state.tilt < 0:
                    self.state.tilt = 0
                self.pub.publish(self.state)
            r.sleep()

    def joy_callback(self, data):
        self.joy = data


if __name__ == "__main__":
    Teleop().spin()
Decommissioning refers to the process of ending oil and gas operations at an offshore platform while taking into account the risks to, and safety of, the offshore environment. The process is carried out when oil production from the well declines substantially. Offshore decommissioning must be done cost-efficiently, as it involves risks and uncertainties that create challenges for decommissioning projects.
The major drivers of the offshore decommissioning market are aging infrastructure and maturing oilfields, especially in the North Sea and the Gulf of Mexico. The decline in crude oil prices is also expected to spur growth of the oilfield decommissioning market. The risks involved and the high cost of decommissioning are among the factors restraining market growth.
Among decommissioning processes, well plugging and abandonment accounts for the largest share of the offshore decommissioning market. The shallow-water segment is expected to see the largest growth, owing to the lower expenditure required in shallow water. New projects established in deep and ultra-deep waters over the last few decades also underpin the growth of the offshore decommissioning market.
Europe is projected to be the fastest-growing market for offshore decommissioning, owing to maturing oilfields and high spending in the North Sea and the UK. The shutdown of major oilfields and a strong regulatory framework are keeping the European offshore decommissioning market moving at a high pace. The UK's Oil and Gas Authority is expected to invest around US$71 billion in safe decommissioning.
North America is the second-largest market for offshore decommissioning, owing to maturing oilfields in the Gulf of Mexico, where a large number of platforms are decommissioned every year.
Asia Pacific is seeing a significant growth rate in the offshore decommissioning market, owing to the rising number of aging oilfields. Decommissioning opportunities in Indonesia and Malaysia are anticipated to strengthen the regional market. The Middle East offshore decommissioning market is also projected to grow significantly, due to its long-running oil and gas operations and production activities.
In the U.S., the Outer Continental Shelf Lands Act (OCSLA) and its regulations establish decommissioning obligations that the operator accepts when signing an offshore lease, including the requirement to obtain approval and a permit to remove the platform.
The OCSLA regulations, administered by the Bureau of Safety and Environmental Enforcement (BSEE), require the operator to obtain approval through an application prior to removing the platform.
# Copyright (c) 2014 NetApp, Inc.
# Copyright (c) 2015 Mirantis, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for the instance module."""
import os
import time
import ddt
import mock
import netaddr
from oslo_config import cfg
from oslo_utils import importutils
import six
from manila import exception
from manila.share import configuration
from manila.share import driver # noqa
from manila.share.drivers import service_instance
from manila import test
from manila.tests import fake_compute
from manila.tests import fake_network
from manila.tests import utils as test_utils
CONF = cfg.CONF
def fake_get_config_option(key):
if key == 'driver_handles_share_servers':
return True
elif key == 'service_instance_password':
return None
elif key == 'service_instance_user':
return 'fake_user'
elif key == 'service_network_name':
return 'fake_service_network_name'
elif key == 'service_instance_flavor_id':
return 100
elif key == 'service_instance_name_template':
return 'fake_manila_service_instance_%s'
elif key == 'service_image_name':
return 'fake_service_image_name'
elif key == 'manila_service_keypair_name':
return 'fake_manila_service_keypair_name'
elif key == 'path_to_private_key':
return 'fake_path_to_private_key'
elif key == 'path_to_public_key':
return 'fake_path_to_public_key'
elif key == 'max_time_to_build_instance':
return 500
elif key == 'connect_share_server_to_tenant_network':
return False
elif key == 'service_network_cidr':
return '99.254.0.0/24'
elif key == 'service_network_division_mask':
return 27
elif key == 'service_network_name':
return 'fake_service_network_name'
elif key == 'interface_driver':
return 'i.am.fake.VifDriver'
elif key == 'admin_network_id':
return None
elif key == 'admin_subnet_id':
return None
else:
return mock.Mock()
class FakeServiceInstance(object):
def __init__(self, driver_config=None):
super(FakeServiceInstance, self).__init__()
self.compute_api = service_instance.compute.API()
self.admin_context = service_instance.context.get_admin_context()
self.driver_config = driver_config
def get_config_option(self, key):
return fake_get_config_option(key)
class FakeNetworkHelper(service_instance.BaseNetworkhelper):
@property
def NAME(self):
return service_instance.NEUTRON_NAME
@property
def neutron_api(self):
if not hasattr(self, '_neutron_api'):
self._neutron_api = mock.Mock()
return self._neutron_api
def __init__(self, service_instance_manager):
self.get_config_option = service_instance_manager.get_config_option
def get_network_name(self, network_info):
"""Return name of network."""
return 'fake_network_name'
def setup_connectivity_with_service_instances(self):
"""Nothing to do in fake network helper."""
def setup_network(self, network_info):
"""Combine fake network data."""
return dict()
def teardown_network(self, server_details):
"""Nothing to do in fake network helper."""
@ddt.ddt
class ServiceInstanceManagerTestCase(test.TestCase):
"""Test suite for service instance manager."""
def setUp(self):
super(ServiceInstanceManagerTestCase, self).setUp()
self.instance_id = 'fake_instance_id'
self.config = configuration.Configuration(None)
self.config.safe_get = mock.Mock(side_effect=fake_get_config_option)
self.mock_object(service_instance.compute, 'API', fake_compute.API)
self.mock_object(
service_instance.os.path, 'exists', mock.Mock(return_value=True))
self.mock_object(service_instance, 'NeutronNetworkHelper',
mock.Mock(side_effect=FakeNetworkHelper))
self._manager = service_instance.ServiceInstanceManager(self.config)
self._manager._execute = mock.Mock(return_value=('', ''))
self.mock_object(time, 'sleep')
def test_get_config_option_from_driver_config(self):
username1 = 'fake_username_1_%s' % self.id()
username2 = 'fake_username_2_%s' % self.id()
config_data = dict(
DEFAULT=dict(service_instance_user=username1),
CUSTOM=dict(service_instance_user=username2))
with test_utils.create_temp_config_with_opts(config_data):
self.config = configuration.Configuration(
service_instance.common_opts, config_group='CUSTOM')
self._manager = service_instance.ServiceInstanceManager(
self.config)
result = self._manager.get_config_option('service_instance_user')
self.assertEqual(username2, result)
def test_get_config_option_from_common_config(self):
username = 'fake_username_%s' % self.id()
config_data = dict(DEFAULT=dict(service_instance_user=username))
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
result = self._manager.get_config_option('service_instance_user')
self.assertEqual(username, result)
def test_get_neutron_network_helper(self):
# Mock it again, because it was called in setUp method.
self.mock_object(service_instance, 'NeutronNetworkHelper')
config_data = dict(DEFAULT=dict(service_instance_user='fake_username',
driver_handles_share_servers=True))
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
self._manager.network_helper
service_instance.NeutronNetworkHelper.assert_called_once_with(
self._manager)
def test_init_with_driver_config_and_handling_of_share_servers(self):
self.mock_object(service_instance, 'NeutronNetworkHelper')
config_data = dict(CUSTOM=dict(
driver_handles_share_servers=True,
service_instance_user='fake_user'))
opts = service_instance.common_opts + driver.share_opts
with test_utils.create_temp_config_with_opts(config_data):
self.config = configuration.Configuration(opts, 'CUSTOM')
self._manager = service_instance.ServiceInstanceManager(
self.config)
self.assertTrue(
self._manager.get_config_option("driver_handles_share_servers"))
self.assertIsNotNone(self._manager.driver_config)
self.assertTrue(hasattr(self._manager, 'network_helper'))
self.assertTrue(service_instance.NeutronNetworkHelper.called)
def test_init_with_driver_config_and_wo_handling_of_share_servers(self):
self.mock_object(service_instance, 'NeutronNetworkHelper')
config_data = dict(CUSTOM=dict(
driver_handles_share_servers=False,
service_instance_user='fake_user'))
opts = service_instance.common_opts + driver.share_opts
with test_utils.create_temp_config_with_opts(config_data):
self.config = configuration.Configuration(opts, 'CUSTOM')
self._manager = service_instance.ServiceInstanceManager(
self.config)
self.assertIsNotNone(self._manager.driver_config)
self.assertFalse(hasattr(self._manager, 'network_helper'))
self.assertFalse(service_instance.NeutronNetworkHelper.called)
def test_init_with_common_config_and_handling_of_share_servers(self):
self.mock_object(service_instance, 'NeutronNetworkHelper')
config_data = dict(DEFAULT=dict(
service_instance_user='fake_username',
driver_handles_share_servers=True))
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
self.assertTrue(
self._manager.get_config_option("driver_handles_share_servers"))
self.assertIsNone(self._manager.driver_config)
self.assertTrue(hasattr(self._manager, 'network_helper'))
self.assertTrue(service_instance.NeutronNetworkHelper.called)
def test_init_with_common_config_and_wo_handling_of_share_servers(self):
self.mock_object(service_instance, 'NeutronNetworkHelper')
config_data = dict(DEFAULT=dict(
service_instance_user='fake_username',
driver_handles_share_servers=False))
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
self.assertEqual(
False,
self._manager.get_config_option("driver_handles_share_servers"))
self.assertIsNone(self._manager.driver_config)
self.assertFalse(hasattr(self._manager, 'network_helper'))
self.assertFalse(service_instance.NeutronNetworkHelper.called)
def test_no_service_user_defined(self):
group_name = 'GROUP_%s' % self.id()
config_data = {group_name: dict()}
with test_utils.create_temp_config_with_opts(config_data):
config = configuration.Configuration(
service_instance.common_opts, config_group=group_name)
self.assertRaises(
exception.ServiceInstanceException,
service_instance.ServiceInstanceManager, config)
def test_get_service_instance_name_using_driver_config(self):
fake_server_id = 'fake_share_server_id_%s' % self.id()
self.mock_object(service_instance, 'NeutronNetworkHelper')
config_data = dict(CUSTOM=dict(
driver_handles_share_servers=True,
service_instance_user='fake_user'))
opts = service_instance.common_opts + driver.share_opts
with test_utils.create_temp_config_with_opts(config_data):
self.config = configuration.Configuration(opts, 'CUSTOM')
self._manager = service_instance.ServiceInstanceManager(
self.config)
result = self._manager._get_service_instance_name(fake_server_id)
self.assertIsNotNone(self._manager.driver_config)
self.assertEqual(
self._manager.get_config_option(
"service_instance_name_template") % "%s_%s" % (
self._manager.driver_config.config_group, fake_server_id),
result)
self.assertTrue(
self._manager.get_config_option("driver_handles_share_servers"))
self.assertTrue(hasattr(self._manager, 'network_helper'))
self.assertTrue(service_instance.NeutronNetworkHelper.called)
def test_get_service_instance_name_using_default_config(self):
fake_server_id = 'fake_share_server_id_%s' % self.id()
config_data = dict(CUSTOM=dict(
service_instance_user='fake_user'))
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
result = self._manager._get_service_instance_name(fake_server_id)
self.assertIsNone(self._manager.driver_config)
self.assertEqual(
self._manager.get_config_option(
"service_instance_name_template") % fake_server_id, result)
def test__check_server_availability_available_from_start(self):
fake_server = dict(id='fake_server', ip='127.0.0.1')
self.mock_object(service_instance.socket.socket, 'connect')
self.mock_object(service_instance.time, 'sleep')
self.mock_object(service_instance.time, 'time',
mock.Mock(return_value=0))
result = self._manager._check_server_availability(fake_server)
self.assertTrue(result)
service_instance.socket.socket.connect.assert_called_once_with(
(fake_server['ip'], 22))
service_instance.time.time.assert_has_calls([
mock.call(), mock.call()])
service_instance.time.time.assert_has_calls([])
@ddt.data(True, False)
def test__check_server_availability_with_recall(self, is_ok):
fake_server = dict(id='fake_server', ip='fake_ip_address')
self.fake_time = 0
def fake_connect(addr):
if not(is_ok and self.fake_time > 1):
raise service_instance.socket.error
def fake_time():
return self.fake_time
def fake_sleep(time):
self.fake_time += 5
self.mock_object(service_instance.time, 'sleep',
mock.Mock(side_effect=fake_sleep))
self.mock_object(service_instance.socket.socket, 'connect',
mock.Mock(side_effect=fake_connect))
self.mock_object(service_instance.time, 'time',
mock.Mock(side_effect=fake_time))
self._manager.max_time_to_build_instance = 6
result = self._manager._check_server_availability(fake_server)
if is_ok:
self.assertTrue(result)
else:
self.assertFalse(result)
service_instance.socket.socket.connect.assert_has_calls([
mock.call((fake_server['ip'], 22)),
mock.call((fake_server['ip'], 22))])
service_instance.time.time.assert_has_calls([
mock.call(), mock.call(), mock.call()])
service_instance.time.time.assert_has_calls([mock.call()])
def test_get_server_ip_found_in_networks_section(self):
ip = '10.0.0.1'
net_name = self._manager.get_config_option('service_network_name')
fake_server = dict(networks={net_name: [ip]})
result = self._manager._get_server_ip(fake_server, net_name)
self.assertEqual(ip, result)
def test_get_server_ip_found_in_addresses_section(self):
ip = '10.0.0.1'
net_name = self._manager.get_config_option('service_network_name')
fake_server = dict(addresses={net_name: [dict(addr=ip, version=4)]})
result = self._manager._get_server_ip(fake_server, net_name)
self.assertEqual(ip, result)
@ddt.data(
{},
{'networks': {fake_get_config_option('service_network_name'): []}},
{'addresses': {fake_get_config_option('service_network_name'): []}})
def test_get_server_ip_not_found(self, data):
self.assertRaises(
exception.ManilaException,
self._manager._get_server_ip, data,
fake_get_config_option('service_network_name'))
def test_security_group_name_not_specified(self):
self.mock_object(self._manager, 'get_config_option',
mock.Mock(return_value=None))
result = self._manager._get_or_create_security_group(
self._manager.admin_context)
self.assertIsNone(result)
self._manager.get_config_option.assert_called_once_with(
'service_instance_security_group')
def test_security_group_name_from_config_and_sg_exist(self):
name = "fake_sg_name_from_config"
desc = "fake_sg_description"
fake_secgroup = {'id': 'fake_sg_id', 'name': name, 'description': desc}
self.mock_object(self._manager, 'get_config_option',
mock.Mock(return_value=name))
neutron_api = self._manager.network_helper.neutron_api
neutron_api.security_group_list.return_value = {
'security_groups': [fake_secgroup]}
result = self._manager._get_or_create_security_group(
self._manager.admin_context)
self.assertEqual(fake_secgroup, result)
self._manager.get_config_option.assert_called_once_with(
'service_instance_security_group')
neutron_api.security_group_list.assert_called_once_with({"name": name})
@ddt.data(None, 'fake_name')
def test_security_group_creation_with_name_from_config(self, name):
config_name = "fake_sg_name_from_config"
desc = "fake_sg_description"
fake_secgroup = {'id': 'fake_sg_id', 'name': name, 'description': desc}
self.mock_object(self._manager, 'get_config_option',
mock.Mock(return_value=name or config_name))
neutron_api = self._manager.network_helper.neutron_api
neutron_api.security_group_list.return_value = {'security_groups': []}
neutron_api.security_group_create.return_value = {
'security_group': fake_secgroup,
}
result = self._manager._get_or_create_security_group(
context=self._manager.admin_context,
name=name,
description=desc,
)
self.assertEqual(fake_secgroup, result)
if not name:
self._manager.get_config_option.assert_called_once_with(
'service_instance_security_group')
neutron_api.security_group_list.assert_called_once_with(
{"name": name or config_name})
neutron_api.security_group_create.assert_called_once_with(
name or config_name, desc)
def test_security_group_two_sg_in_list(self):
name = "fake_name"
fake_secgroup1 = {'id': 'fake_sg_id1', 'name': name}
fake_secgroup2 = {'id': 'fake_sg_id2', 'name': name}
neutron_api = self._manager.network_helper.neutron_api
neutron_api.security_group_list.return_value = {
'security_groups': [fake_secgroup1, fake_secgroup2]}
self.assertRaises(exception.ServiceInstanceException,
self._manager._get_or_create_security_group,
self._manager.admin_context,
name)
neutron_api.security_group_list.assert_called_once_with(
{"name": name})
@ddt.data(
dict(),
dict(service_port_id='fake_service_port_id'),
dict(public_port_id='fake_public_port_id'),
dict(service_port_id='fake_service_port_id',
public_port_id='fake_public_port_id'),
)
def test_set_up_service_instance(self, update_data):
fake_network_info = {'foo': 'bar', 'server_id': 'fake_server_id'}
fake_server = {
'id': 'fake', 'ip': '1.2.3.4', 'public_address': '1.2.3.4',
'pk_path': None, 'subnet_id': 'fake-subnet-id',
'router_id': 'fake-router-id',
'username': self._manager.get_config_option(
'service_instance_user'),
'admin_ip': 'admin_ip'}
fake_server.update(update_data)
expected_details = fake_server.copy()
expected_details.pop('pk_path')
expected_details['instance_id'] = expected_details.pop('id')
self.mock_object(self._manager, '_create_service_instance',
mock.Mock(return_value=fake_server))
self.mock_object(self._manager, '_check_server_availability')
result = self._manager.set_up_service_instance(
self._manager.admin_context, fake_network_info)
self._manager._create_service_instance.assert_called_once_with(
self._manager.admin_context,
fake_network_info['server_id'], fake_network_info)
self._manager._check_server_availability.assert_called_once_with(
expected_details)
self.assertEqual(expected_details, result)
def test_set_up_service_instance_not_available(self):
fake_network_info = {'foo': 'bar', 'server_id': 'fake_server_id'}
fake_server = {
'id': 'fake', 'ip': '1.2.3.4', 'public_address': '1.2.3.4',
'pk_path': None, 'subnet_id': 'fake-subnet-id',
'router_id': 'fake-router-id',
'username': self._manager.get_config_option(
'service_instance_user'),
'admin_ip': 'admin_ip'}
expected_details = fake_server.copy()
expected_details.pop('pk_path')
expected_details['instance_id'] = expected_details.pop('id')
self.mock_object(self._manager, '_create_service_instance',
mock.Mock(return_value=fake_server))
self.mock_object(self._manager, '_check_server_availability',
mock.Mock(return_value=False))
result = self.assertRaises(
exception.ServiceInstanceException,
self._manager.set_up_service_instance,
self._manager.admin_context, fake_network_info)
self.assertTrue(hasattr(result, 'detail_data'))
self.assertEqual(
{'server_details': expected_details}, result.detail_data)
self._manager._create_service_instance.assert_called_once_with(
self._manager.admin_context,
fake_network_info['server_id'], fake_network_info)
self._manager._check_server_availability.assert_called_once_with(
expected_details)
def test_ensure_server(self):
server_details = {'instance_id': 'fake_inst_id', 'ip': '1.2.3.4'}
fake_server = fake_compute.FakeServer()
self.mock_object(self._manager, '_check_server_availability',
mock.Mock(return_value=True))
self.mock_object(self._manager.compute_api, 'server_get',
mock.Mock(return_value=fake_server))
result = self._manager.ensure_service_instance(
self._manager.admin_context, server_details)
self._manager.compute_api.server_get.assert_called_once_with(
self._manager.admin_context, server_details['instance_id'])
self._manager._check_server_availability.assert_called_once_with(
server_details)
self.assertTrue(result)
def test_ensure_server_not_exists(self):
server_details = {'instance_id': 'fake_inst_id', 'ip': '1.2.3.4'}
self.mock_object(self._manager, '_check_server_availability',
mock.Mock(return_value=True))
self.mock_object(self._manager.compute_api, 'server_get',
mock.Mock(side_effect=exception.InstanceNotFound(
instance_id=server_details['instance_id'])))
result = self._manager.ensure_service_instance(
self._manager.admin_context, server_details)
self._manager.compute_api.server_get.assert_called_once_with(
self._manager.admin_context, server_details['instance_id'])
self.assertFalse(self._manager._check_server_availability.called)
self.assertFalse(result)
def test_ensure_server_exception(self):
server_details = {'instance_id': 'fake_inst_id', 'ip': '1.2.3.4'}
self.mock_object(self._manager, '_check_server_availability',
mock.Mock(return_value=True))
self.mock_object(self._manager.compute_api, 'server_get',
mock.Mock(side_effect=exception.ManilaException))
self.assertRaises(exception.ManilaException,
self._manager.ensure_service_instance,
self._manager.admin_context,
server_details)
self._manager.compute_api.server_get.assert_called_once_with(
self._manager.admin_context, server_details['instance_id'])
self.assertFalse(self._manager._check_server_availability.called)
def test_ensure_server_non_active(self):
server_details = {'instance_id': 'fake_inst_id', 'ip': '1.2.3.4'}
fake_server = fake_compute.FakeServer(status='ERROR')
self.mock_object(self._manager.compute_api, 'server_get',
mock.Mock(return_value=fake_server))
self.mock_object(self._manager, '_check_server_availability',
mock.Mock(return_value=True))
result = self._manager.ensure_service_instance(
self._manager.admin_context, server_details)
self.assertFalse(self._manager._check_server_availability.called)
self.assertFalse(result)
def test_ensure_server_no_instance_id(self):
# Tests that we avoid a KeyError if the share details don't have an
# instance_id key set (so we can't find the share instance).
self.assertFalse(self._manager.ensure_service_instance(
self._manager.admin_context, {'ip': '1.2.3.4'}))
def test_get_key_create_new(self):
keypair_name = self._manager.get_config_option(
'manila_service_keypair_name')
fake_keypair = fake_compute.FakeKeypair(name=keypair_name)
self.mock_object(self._manager.compute_api, 'keypair_list',
mock.Mock(return_value=[]))
self.mock_object(self._manager.compute_api, 'keypair_import',
mock.Mock(return_value=fake_keypair))
result = self._manager._get_key(self._manager.admin_context)
self.assertEqual(
(fake_keypair.name,
os.path.expanduser(self._manager.get_config_option(
'path_to_private_key'))),
result)
self._manager.compute_api.keypair_list.assert_called_once_with(
self._manager.admin_context)
self._manager.compute_api.keypair_import.assert_called_once_with(
self._manager.admin_context, keypair_name, '')
def test_get_key_exists(self):
fake_keypair = fake_compute.FakeKeypair(
name=self._manager.get_config_option(
'manila_service_keypair_name'),
public_key='fake_public_key')
self.mock_object(self._manager.compute_api, 'keypair_list',
mock.Mock(return_value=[fake_keypair]))
self.mock_object(self._manager.compute_api, 'keypair_import',
mock.Mock(return_value=fake_keypair))
self.mock_object(self._manager, '_execute',
mock.Mock(return_value=('fake_public_key', '')))
result = self._manager._get_key(self._manager.admin_context)
self._manager.compute_api.keypair_list.assert_called_once_with(
self._manager.admin_context)
self.assertFalse(self._manager.compute_api.keypair_import.called)
self.assertEqual(
(fake_keypair.name,
os.path.expanduser(self._manager.get_config_option(
'path_to_private_key'))),
result)
def test_get_key_exists_recreate(self):
fake_keypair = fake_compute.FakeKeypair(
name=self._manager.get_config_option(
'manila_service_keypair_name'),
public_key='fake_public_key1')
self.mock_object(self._manager.compute_api, 'keypair_list',
mock.Mock(return_value=[fake_keypair]))
self.mock_object(self._manager.compute_api, 'keypair_import',
mock.Mock(return_value=fake_keypair))
self.mock_object(self._manager.compute_api, 'keypair_delete')
self.mock_object(self._manager, '_execute',
mock.Mock(return_value=('fake_public_key2', '')))
result = self._manager._get_key(self._manager.admin_context)
self._manager.compute_api.keypair_list.assert_called_once_with(
self._manager.admin_context)
self._manager.compute_api.keypair_delete.assert_called_once_with(
self._manager.admin_context, fake_keypair.id)
self._manager.compute_api.keypair_import.assert_called_once_with(
self._manager.admin_context, fake_keypair.name, 'fake_public_key2')
self.assertEqual(
(fake_keypair.name,
os.path.expanduser(self._manager.get_config_option(
'path_to_private_key'))),
result)
def test_get_key_more_than_one_exist(self):
fake_keypair = fake_compute.FakeKeypair(
name=self._manager.get_config_option(
'manila_service_keypair_name'),
public_key='fake_public_key1')
self.mock_object(self._manager.compute_api, 'keypair_list',
mock.Mock(return_value=[fake_keypair, fake_keypair]))
self.assertRaises(
exception.ServiceInstanceException,
self._manager._get_key, self._manager.admin_context)
self._manager.compute_api.keypair_list.assert_called_once_with(
self._manager.admin_context)
def test_get_key_keypath_to_public_not_set(self):
self._manager.path_to_public_key = None
result = self._manager._get_key(self._manager.admin_context)
self.assertEqual((None, None), result)
def test_get_key_keypath_to_private_not_set(self):
self._manager.path_to_private_key = None
result = self._manager._get_key(self._manager.admin_context)
self.assertEqual((None, None), result)
def test_get_key_incorrect_keypath_to_public(self):
def exists_side_effect(path):
return False if path == 'fake_path' else True
self._manager.path_to_public_key = 'fake_path'
os_path_exists_mock = mock.Mock(side_effect=exists_side_effect)
with mock.patch.object(os.path, 'exists', os_path_exists_mock):
with mock.patch.object(os.path, 'expanduser',
mock.Mock(side_effect=lambda value: value)):
result = self._manager._get_key(self._manager.admin_context)
self.assertEqual((None, None), result)
def test_get_key_incorrect_keypath_to_private(self):
def exists_side_effect(path):
return False if path == 'fake_path' else True
self._manager.path_to_private_key = 'fake_path'
os_path_exists_mock = mock.Mock(side_effect=exists_side_effect)
with mock.patch.object(os.path, 'exists', os_path_exists_mock):
with mock.patch.object(os.path, 'expanduser',
mock.Mock(side_effect=lambda value: value)):
result = self._manager._get_key(self._manager.admin_context)
self.assertEqual((None, None), result)
def test_get_service_image(self):
fake_image1 = fake_compute.FakeImage(
name=self._manager.get_config_option('service_image_name'),
status='active')
fake_image2 = fake_compute.FakeImage(
name='service_image_name',
status='error')
fake_image3 = fake_compute.FakeImage(
name='another-image',
status='active')
self.mock_object(self._manager.compute_api, 'image_list',
mock.Mock(return_value=[fake_image1,
fake_image2,
fake_image3]))
result = self._manager._get_service_image(self._manager.admin_context)
self.assertEqual(fake_image1.id, result)
def test_get_service_image_not_found(self):
self.mock_object(self._manager.compute_api, 'image_list',
mock.Mock(return_value=[]))
self.assertRaises(
exception.ServiceInstanceException,
self._manager._get_service_image, self._manager.admin_context)
fake_error_image = fake_compute.FakeImage(
name='service_image_name',
status='error')
self.mock_object(self._manager.compute_api, 'image_list',
mock.Mock(return_value=[fake_error_image]))
self.assertRaises(
exception.ServiceInstanceException,
self._manager._get_service_image, self._manager.admin_context)
def test_get_service_image_ambiguous(self):
fake_image = fake_compute.FakeImage(
name=fake_get_config_option('service_image_name'),
status='active')
fake_images = [fake_image, fake_image]
self.mock_object(self._manager.compute_api, 'image_list',
mock.Mock(return_value=fake_images))
self.assertRaises(
exception.ServiceInstanceException,
self._manager._get_service_image, self._manager.admin_context)
def test__delete_server_not_found(self):
self.mock_object(self._manager.compute_api, 'server_delete')
self.mock_object(
self._manager.compute_api, 'server_get',
mock.Mock(side_effect=exception.InstanceNotFound(
instance_id=self.instance_id)))
self._manager._delete_server(
self._manager.admin_context, self.instance_id)
self.assertFalse(self._manager.compute_api.server_delete.called)
self._manager.compute_api.server_get.assert_called_once_with(
self._manager.admin_context, self.instance_id)
def test__delete_server(self):
def fake_server_get(*args, **kwargs):
ctx = args[0]
if not hasattr(ctx, 'called'):
ctx.called = True
return
else:
raise exception.InstanceNotFound(instance_id=self.instance_id)
self.mock_object(self._manager.compute_api, 'server_delete')
self.mock_object(self._manager.compute_api, 'server_get',
mock.Mock(side_effect=fake_server_get))
self._manager._delete_server(
self._manager.admin_context, self.instance_id)
self._manager.compute_api.server_delete.assert_called_once_with(
self._manager.admin_context, self.instance_id)
self._manager.compute_api.server_get.assert_has_calls([
mock.call(self._manager.admin_context, self.instance_id),
mock.call(self._manager.admin_context, self.instance_id)])
def test__delete_server_found_always(self):
self.fake_time = 0
def fake_time():
return self.fake_time
def fake_sleep(time):
self.fake_time += 1
self.mock_object(self._manager.compute_api, 'server_delete')
self.mock_object(self._manager.compute_api, 'server_get')
self.mock_object(service_instance, 'time')
self.mock_object(
service_instance.time, 'time', mock.Mock(side_effect=fake_time))
self.mock_object(
service_instance.time, 'sleep', mock.Mock(side_effect=fake_sleep))
self.mock_object(self._manager, 'max_time_to_build_instance', 2)
self.assertRaises(
exception.ServiceInstanceException, self._manager._delete_server,
self._manager.admin_context, self.instance_id)
self._manager.compute_api.server_delete.assert_called_once_with(
self._manager.admin_context, self.instance_id)
service_instance.time.sleep.assert_has_calls(
[mock.call(mock.ANY) for i in range(2)])
service_instance.time.time.assert_has_calls(
[mock.call() for i in range(4)])
self._manager.compute_api.server_get.assert_has_calls(
[mock.call(self._manager.admin_context,
self.instance_id) for i in range(3)])
def test_delete_service_instance(self):
fake_server_details = dict(
router_id='foo', subnet_id='bar', instance_id='quuz')
self.mock_object(self._manager, '_delete_server')
self.mock_object(self._manager.network_helper, 'teardown_network')
self._manager.delete_service_instance(
self._manager.admin_context, fake_server_details)
self._manager._delete_server.assert_called_once_with(
self._manager.admin_context, fake_server_details['instance_id'])
self._manager.network_helper.teardown_network.assert_called_once_with(
fake_server_details)
@ddt.data(
*[{'service_config': service_config,
'tenant_config': tenant_config,
'server': server}
for service_config, tenant_config in (
('fake_net_s', 'fake_net_t'),
('fake_net_s', '12.34.56.78'),
('98.76.54.123', 'fake_net_t'),
('98.76.54.123', '12.34.56.78'))
for server in (
{'networks': {
'fake_net_s': ['foo', '98.76.54.123', 'bar'],
'fake_net_t': ['baar', '12.34.56.78', 'quuz']}},
{'addresses': {
'fake_net_s': [
{'addr': 'fake1'},
{'addr': '98.76.54.123'},
{'addr': 'fake2'}],
'fake_net_t': [
{'addr': 'fake3'},
{'addr': '12.34.56.78'},
{'addr': 'fake4'}],
}})])
@ddt.unpack
def test_get_common_server_valid_cases(self, service_config,
tenant_config, server):
self._get_common_server(service_config, tenant_config, server,
'98.76.54.123', '12.34.56.78', True)
@ddt.data(
*[{'service_config': service_config,
'tenant_config': tenant_config,
'server': server}
for service_config, tenant_config in (
('fake_net_s', 'fake'),
('fake', 'fake_net_t'),
('fake', 'fake'),
('98.76.54.123', '12.12.12.1212'),
('12.12.12.1212', '12.34.56.78'),
('12.12.12.1212', '12.12.12.1212'),
('1001::1001', '1001::100G'),
('1001::10G1', '1001::1001'),
)
for server in (
{'networks': {
'fake_net_s': ['foo', '98.76.54.123', 'bar'],
'fake_net_t': ['baar', '12.34.56.78', 'quuz']}},
{'addresses': {
'fake_net_s': [
{'addr': 'fake1'},
{'addr': '98.76.54.123'},
{'addr': 'fake2'}],
'fake_net_t': [
{'addr': 'fake3'},
{'addr': '12.34.56.78'},
{'addr': 'fake4'}],
}})])
@ddt.unpack
def test_get_common_server_invalid_cases(self, service_config,
tenant_config, server):
self._get_common_server(service_config, tenant_config, server,
'98.76.54.123', '12.34.56.78', False)
@ddt.data(
*[{'service_config': service_config,
'tenant_config': tenant_config,
'server': server}
for service_config, tenant_config in (
('fake_net_s', '1001::1002'),
('1001::1001', 'fake_net_t'),
('1001::1001', '1001::1002'))
for server in (
{'networks': {
'fake_net_s': ['foo', '1001::1001'],
'fake_net_t': ['bar', '1001::1002']}},
{'addresses': {
'fake_net_s': [{'addr': 'foo'}, {'addr': '1001::1001'}],
'fake_net_t': [{'addr': 'bar'}, {'addr': '1001::1002'}]}})])
@ddt.unpack
def test_get_common_server_valid_ipv6_address(self, service_config,
tenant_config, server):
self._get_common_server(service_config, tenant_config, server,
'1001::1001', '1001::1002', True)
def _get_common_server(self, service_config, tenant_config,
server, service_address, network_address,
is_valid=True):
fake_instance_id = 'fake_instance_id'
fake_user = 'fake_user'
fake_pass = 'fake_pass'
fake_server = {'id': fake_instance_id}
fake_server.update(server)
expected = {
'backend_details': {
'username': fake_user,
'password': fake_pass,
'pk_path': self._manager.path_to_private_key,
'ip': service_address,
'public_address': network_address,
'instance_id': fake_instance_id,
}
}
def fake_get_config_option(attr):
if attr == 'service_net_name_or_ip':
return service_config
elif attr == 'tenant_net_name_or_ip':
return tenant_config
elif attr == 'service_instance_name_or_id':
return fake_instance_id
elif attr == 'service_instance_user':
return fake_user
elif attr == 'service_instance_password':
return fake_pass
else:
raise exception.ManilaException("Wrong test data provided.")
self.mock_object(
self._manager.compute_api, 'server_get_by_name_or_id',
mock.Mock(return_value=fake_server))
self.mock_object(
self._manager, 'get_config_option',
mock.Mock(side_effect=fake_get_config_option))
if is_valid:
actual = self._manager.get_common_server()
self.assertEqual(expected, actual)
else:
self.assertRaises(
exception.ManilaException,
self._manager.get_common_server)
self.assertTrue(
self._manager.compute_api.server_get_by_name_or_id.called)
def test___create_service_instance_with_sg_success(self):
self.mock_object(service_instance, 'NeutronNetworkHelper',
mock.Mock(side_effect=FakeNetworkHelper))
config_data = dict(DEFAULT=dict(
driver_handles_share_servers=True,
service_instance_user='fake_user'))
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
server_create = dict(id='fakeid', status='CREATING', networks=dict())
net_name = self._manager.get_config_option("service_network_name")
sg = {'id': 'fakeid', 'name': 'fakename'}
ip_address = 'fake_ip_address'
service_image_id = 'fake_service_image_id'
key_data = 'fake_key_name', 'fake_key_path'
instance_name = 'fake_instance_name'
network_info = dict()
network_data = {'nics': ['fake_nic1', 'fake_nic2']}
network_data['router'] = dict(id='fake_router_id')
server_get = dict(
id='fakeid', status='ACTIVE', networks={net_name: [ip_address]})
network_data.update(dict(
router_id='fake_router_id', subnet_id='fake_subnet_id',
public_port=dict(id='fake_public_port',
fixed_ips=[dict(ip_address=ip_address)]),
service_port=dict(id='fake_service_port',
fixed_ips=[{'ip_address': ip_address}]),
admin_port={'id': 'fake_admin_port',
'fixed_ips': [{'ip_address': ip_address}]}))
self.mock_object(service_instance.time, 'time',
mock.Mock(return_value=5))
self.mock_object(self._manager.network_helper, 'setup_network',
mock.Mock(return_value=network_data))
self.mock_object(self._manager.network_helper, 'get_network_name',
mock.Mock(return_value=net_name))
self.mock_object(self._manager, '_get_service_image',
mock.Mock(return_value=service_image_id))
self.mock_object(self._manager, '_get_key',
mock.Mock(return_value=key_data))
self.mock_object(self._manager, '_get_or_create_security_group',
mock.Mock(return_value=sg))
self.mock_object(self._manager.compute_api, 'server_create',
mock.Mock(return_value=server_create))
self.mock_object(self._manager.compute_api, 'server_get',
mock.Mock(return_value=server_get))
self.mock_object(self._manager.compute_api,
'add_security_group_to_server')
expected = {
'id': server_get['id'],
'status': server_get['status'],
'pk_path': key_data[1],
'public_address': ip_address,
'router_id': network_data.get('router_id'),
'subnet_id': network_data.get('subnet_id'),
'instance_id': server_get['id'],
'ip': ip_address,
'networks': server_get['networks'],
'router_id': network_data['router']['id'],
'public_port_id': 'fake_public_port',
'service_port_id': 'fake_service_port',
'admin_port_id': 'fake_admin_port',
'admin_ip': 'fake_ip_address',
}
result = self._manager._create_service_instance(
self._manager.admin_context, instance_name, network_info)
self.assertEqual(expected, result)
self.assertTrue(service_instance.time.time.called)
self._manager.network_helper.setup_network.assert_called_once_with(
network_info)
self._manager._get_service_image.assert_called_once_with(
self._manager.admin_context)
self._manager._get_key.assert_called_once_with(
self._manager.admin_context)
self._manager._get_or_create_security_group.assert_called_once_with(
self._manager.admin_context)
self._manager.compute_api.server_create.assert_called_once_with(
self._manager.admin_context, name=instance_name,
image=service_image_id, flavor=100,
key_name=key_data[0], nics=network_data['nics'],
availability_zone=service_instance.CONF.storage_availability_zone)
self._manager.compute_api.server_get.assert_called_once_with(
self._manager.admin_context, server_create['id'])
(self._manager.compute_api.add_security_group_to_server.
assert_called_once_with(
self._manager.admin_context, server_get['id'], sg['id']))
self._manager.network_helper.get_network_name.assert_has_calls([])
def test___create_service_instance_neutron_no_admin_ip(self):
self.mock_object(service_instance, 'NeutronNetworkHelper',
mock.Mock(side_effect=FakeNetworkHelper))
config_data = {'DEFAULT': {
'driver_handles_share_servers': True,
'service_instance_user': 'fake_user'}}
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
server_create = {'id': 'fakeid', 'status': 'CREATING', 'networks': {}}
net_name = self._manager.get_config_option("service_network_name")
sg = {'id': 'fakeid', 'name': 'fakename'}
ip_address = 'fake_ip_address'
service_image_id = 'fake_service_image_id'
key_data = 'fake_key_name', 'fake_key_path'
instance_name = 'fake_instance_name'
network_info = {}
network_data = {
'nics': ['fake_nic1', 'fake_nic2'],
'router_id': 'fake_router_id', 'subnet_id': 'fake_subnet_id',
'public_port': {'id': 'fake_public_port',
'fixed_ips': [{'ip_address': ip_address}]},
'service_port': {'id': 'fake_service_port',
'fixed_ips': [{'ip_address': ip_address}]},
'admin_port': {'id': 'fake_admin_port',
'fixed_ips': []},
'router': {'id': 'fake_router_id'}}
server_get = {
'id': 'fakeid', 'status': 'ACTIVE', 'networks':
{net_name: [ip_address]}}
self.mock_object(service_instance.time, 'time',
mock.Mock(return_value=5))
self.mock_object(self._manager.network_helper, 'setup_network',
mock.Mock(return_value=network_data))
self.mock_object(self._manager.network_helper, 'get_network_name',
mock.Mock(return_value=net_name))
self.mock_object(self._manager, '_get_service_image',
mock.Mock(return_value=service_image_id))
self.mock_object(self._manager, '_get_key',
mock.Mock(return_value=key_data))
self.mock_object(self._manager, '_get_or_create_security_group',
mock.Mock(return_value=sg))
self.mock_object(self._manager.compute_api, 'server_create',
mock.Mock(return_value=server_create))
self.mock_object(self._manager.compute_api, 'server_get',
mock.Mock(return_value=server_get))
self.mock_object(self._manager.compute_api,
'add_security_group_to_server')
self.assertRaises(
exception.AdminIPNotFound, self._manager._create_service_instance,
self._manager.admin_context, instance_name, network_info)
self.assertTrue(service_instance.time.time.called)
self._manager.network_helper.setup_network.assert_called_once_with(
network_info)
self._manager._get_service_image.assert_called_once_with(
self._manager.admin_context)
self._manager._get_key.assert_called_once_with(
self._manager.admin_context)
self._manager._get_or_create_security_group.assert_called_once_with(
self._manager.admin_context)
self._manager.compute_api.server_create.assert_called_once_with(
self._manager.admin_context, name=instance_name,
image=service_image_id, flavor=100,
key_name=key_data[0], nics=network_data['nics'],
availability_zone=service_instance.CONF.storage_availability_zone)
self._manager.compute_api.server_get.assert_called_once_with(
self._manager.admin_context, server_create['id'])
(self._manager.compute_api.add_security_group_to_server.
assert_called_once_with(
self._manager.admin_context, server_get['id'], sg['id']))
self._manager.network_helper.get_network_name.assert_has_calls([])
@ddt.data(
dict(
instance_id_included=False,
mockobj=mock.Mock(side_effect=exception.ServiceInstanceException)),
dict(
instance_id_included=True,
mockobj=mock.Mock(return_value=dict(id='fakeid', status='ERROR'))))
@ddt.unpack
def test___create_service_instance_failed_to_create(
self, instance_id_included, mockobj):
service_image_id = 'fake_service_image_id'
key_data = 'fake_key_name', 'fake_key_path'
instance_name = 'fake_instance_name'
network_info = dict()
network_data = dict(
nics=['fake_nic1', 'fake_nic2'],
router_id='fake_router_id', subnet_id='fake_subnet_id')
self.mock_object(self._manager.network_helper, 'setup_network',
mock.Mock(return_value=network_data))
self.mock_object(self._manager, '_get_service_image',
mock.Mock(return_value=service_image_id))
self.mock_object(self._manager, '_get_key',
mock.Mock(return_value=key_data))
self.mock_object(
self._manager.compute_api, 'server_create', mockobj)
self.mock_object(
self._manager, 'wait_for_instance_to_be_active',
mock.Mock(side_effect=exception.ServiceInstanceException))
try:
self._manager._create_service_instance(
self._manager.admin_context, instance_name, network_info)
except exception.ServiceInstanceException as e:
expected = dict(server_details=dict(
subnet_id=network_data['subnet_id'],
router_id=network_data['router_id']))
if instance_id_included:
expected['server_details']['instance_id'] = 'fakeid'
self.assertEqual(expected, e.detail_data)
else:
raise exception.ManilaException('Expected error was not raised.')
self._manager.network_helper.setup_network.assert_called_once_with(
network_info)
self._manager._get_service_image.assert_called_once_with(
self._manager.admin_context)
self._manager._get_key.assert_called_once_with(
self._manager.admin_context)
self._manager.compute_api.server_create.assert_called_once_with(
self._manager.admin_context, name=instance_name,
image=service_image_id, flavor=100,
key_name=key_data[0], nics=network_data['nics'],
availability_zone=service_instance.CONF.storage_availability_zone)
def test___create_service_instance_failed_to_build(self):
server_create = dict(id='fakeid', status='CREATING', networks=dict())
service_image_id = 'fake_service_image_id'
key_data = 'fake_key_name', 'fake_key_path'
instance_name = 'fake_instance_name'
network_info = dict()
network_data = dict(
nics=['fake_nic1', 'fake_nic2'],
router_id='fake_router_id', subnet_id='fake_subnet_id')
self.mock_object(self._manager.network_helper, 'setup_network',
mock.Mock(return_value=network_data))
self.mock_object(self._manager, '_get_service_image',
mock.Mock(return_value=service_image_id))
self.mock_object(self._manager, '_get_key',
mock.Mock(return_value=key_data))
self.mock_object(self._manager.compute_api, 'server_create',
mock.Mock(return_value=server_create))
self.mock_object(
self._manager, 'wait_for_instance_to_be_active',
mock.Mock(side_effect=exception.ServiceInstanceException))
try:
self._manager._create_service_instance(
self._manager.admin_context, instance_name, network_info)
except exception.ServiceInstanceException as e:
self.assertEqual(
dict(server_details=dict(subnet_id=network_data['subnet_id'],
router_id=network_data['router_id'],
instance_id=server_create['id'])),
e.detail_data)
else:
raise exception.ManilaException('Expected error was not raised.')
self._manager.network_helper.setup_network.assert_called_once_with(
network_info)
self._manager._get_service_image.assert_called_once_with(
self._manager.admin_context)
self._manager._get_key.assert_called_once_with(
self._manager.admin_context)
self._manager.compute_api.server_create.assert_called_once_with(
self._manager.admin_context, name=instance_name,
image=service_image_id, flavor=100,
key_name=key_data[0], nics=network_data['nics'],
availability_zone=service_instance.CONF.storage_availability_zone)
@ddt.data(
dict(name=None, path=None),
dict(name=None, path='/tmp'))
@ddt.unpack
def test__create_service_instance_no_key_and_no_path(self, name, path):
key_data = name, path
self.mock_object(self._manager, '_get_service_image')
self.mock_object(self._manager, '_get_key',
mock.Mock(return_value=key_data))
self.assertRaises(
exception.ServiceInstanceException,
self._manager._create_service_instance,
self._manager.admin_context, 'fake_instance_name', dict())
self._manager._get_service_image.assert_called_once_with(
self._manager.admin_context)
self._manager._get_key.assert_called_once_with(
self._manager.admin_context)
@mock.patch('time.sleep')
@mock.patch('time.time')
def _test_wait_for_instance(self, mock_time, mock_sleep,
server_get_side_eff=None,
expected_try_count=1,
expected_sleep_count=0,
expected_ret_val=None,
expected_exc=None):
mock_server_get = mock.Mock(side_effect=server_get_side_eff)
self.mock_object(self._manager.compute_api, 'server_get',
mock_server_get)
self.fake_time = 0
def fake_time():
return self.fake_time
def fake_sleep(sleep_time):
self.fake_time += sleep_time
# Note(lpetrut): LOG methods can call time.time
mock_time.side_effect = fake_time
mock_sleep.side_effect = fake_sleep
timeout = 3
if expected_exc:
self.assertRaises(
expected_exc,
self._manager.wait_for_instance_to_be_active,
instance_id=mock.sentinel.instance_id,
timeout=timeout)
else:
instance = self._manager.wait_for_instance_to_be_active(
instance_id=mock.sentinel.instance_id,
timeout=timeout)
self.assertEqual(expected_ret_val, instance)
mock_server_get.assert_has_calls(
[mock.call(self._manager.admin_context,
mock.sentinel.instance_id)] * expected_try_count)
mock_sleep.assert_has_calls([mock.call(1)] * expected_sleep_count)
def test_wait_for_instance_timeout(self):
server_get_side_eff = [
exception.InstanceNotFound(
instance_id=mock.sentinel.instance_id),
{'status': 'BUILDING'},
{'status': 'ACTIVE'}]
# Note that in this case, although the status is active, the
# 'networks' field is missing.
self._test_wait_for_instance(
server_get_side_eff=server_get_side_eff,
expected_exc=exception.ServiceInstanceException,
expected_try_count=3,
expected_sleep_count=3)
def test_wait_for_instance_error_state(self):
mock_instance = {'status': 'ERROR'}
self._test_wait_for_instance(
server_get_side_eff=[mock_instance],
expected_exc=exception.ServiceInstanceException,
expected_try_count=1)
def test_wait_for_instance_available(self):
mock_instance = {'status': 'ACTIVE',
'networks': mock.sentinel.networks}
self._test_wait_for_instance(
server_get_side_eff=[mock_instance],
expected_try_count=1,
expected_ret_val=mock_instance)
def test_reboot_server(self):
fake_server = {'instance_id': mock.sentinel.instance_id}
soft_reboot = True
mock_reboot = mock.Mock()
self.mock_object(self._manager.compute_api, 'server_reboot',
mock_reboot)
self._manager.reboot_server(fake_server, soft_reboot)
mock_reboot.assert_called_once_with(self._manager.admin_context,
fake_server['instance_id'],
soft_reboot)
class BaseNetworkHelperTestCase(test.TestCase):
"""Tests Base network helper for service instance."""
def test_instantiate_valid(self):
class FakeNetworkHelper(service_instance.BaseNetworkhelper):
@property
def NAME(self):
return 'fake_NAME'
def __init__(self, service_instance_manager):
self.fake_init = 'fake_init_value'
def get_network_name(self, network_info):
return 'fake_network_name'
def setup_connectivity_with_service_instances(self):
return 'fake_setup_connectivity_with_service_instances'
def setup_network(self, network_info):
return 'fake_setup_network'
def teardown_network(self, server_details):
return 'fake_teardown_network'
instance = FakeNetworkHelper('fake')
attrs = [
'fake_init', 'NAME', 'get_network_name', 'teardown_network',
'setup_connectivity_with_service_instances', 'setup_network',
]
for attr in attrs:
self.assertTrue(hasattr(instance, attr))
self.assertEqual('fake_init_value', instance.fake_init)
self.assertEqual('fake_NAME', instance.NAME)
self.assertEqual(
'fake_network_name', instance.get_network_name('fake'))
self.assertEqual(
'fake_setup_connectivity_with_service_instances',
instance.setup_connectivity_with_service_instances())
self.assertEqual('fake_setup_network', instance.setup_network('fake'))
self.assertEqual(
'fake_teardown_network', instance.teardown_network('fake'))
def test_instantiate_invalid(self):
self.assertRaises(
TypeError, service_instance.BaseNetworkhelper, 'fake')
@ddt.ddt
class NeutronNetworkHelperTestCase(test.TestCase):
"""Tests Neutron network helper for service instance."""
def setUp(self):
super(NeutronNetworkHelperTestCase, self).setUp()
self.mock_object(importutils, 'import_class')
self.fake_manager = FakeServiceInstance()
def _init_neutron_network_plugin(self):
self.mock_object(
service_instance.NeutronNetworkHelper, '_get_service_network_id',
mock.Mock(return_value='fake_service_network_id'))
return service_instance.NeutronNetworkHelper(self.fake_manager)
def test_init_neutron_network_plugin(self):
instance = self._init_neutron_network_plugin()
self.assertEqual(service_instance.NEUTRON_NAME, instance.NAME)
attrs = [
'neutron_api', 'vif_driver', 'service_network_id',
'connect_share_server_to_tenant_network', 'get_config_option']
for attr in attrs:
self.assertTrue(hasattr(instance, attr), "No attr '%s'" % attr)
(service_instance.NeutronNetworkHelper._get_service_network_id.
assert_called_once_with())
self.assertEqual('DEFAULT', instance.neutron_api.config_group_name)
def test_init_neutron_network_plugin_with_driver_config_group(self):
self.fake_manager.driver_config = mock.Mock()
self.fake_manager.driver_config.config_group = (
'fake_config_group')
self.fake_manager.driver_config.network_config_group = None
instance = self._init_neutron_network_plugin()
self.assertEqual('fake_config_group',
instance.neutron_api.config_group_name)
def test_init_neutron_network_plugin_with_network_config_group(self):
self.fake_manager.driver_config = mock.Mock()
self.fake_manager.driver_config.config_group = (
"fake_config_group")
self.fake_manager.driver_config.network_config_group = (
"fake_network_config_group")
instance = self._init_neutron_network_plugin()
self.assertEqual('fake_network_config_group',
instance.neutron_api.config_group_name)
def test_admin_project_id(self):
instance = self._init_neutron_network_plugin()
admin_project_id = 'fake_admin_project_id'
self.mock_class('manila.network.neutron.api.API', mock.Mock())
instance.neutron_api.admin_project_id = admin_project_id
self.assertEqual(admin_project_id, instance.admin_project_id)
def test_get_network_name(self):
network_info = dict(neutron_net_id='fake_neutron_net_id')
network = dict(name='fake_network_name')
instance = self._init_neutron_network_plugin()
self.mock_object(
instance.neutron_api, 'get_network',
mock.Mock(return_value=network))
result = instance.get_network_name(network_info)
self.assertEqual(network['name'], result)
instance.neutron_api.get_network.assert_called_once_with(
network_info['neutron_net_id'])
def test_get_service_network_id_none_exist(self):
service_network_name = fake_get_config_option('service_network_name')
network = dict(id='fake_network_id')
admin_project_id = 'fake_admin_project_id'
self.mock_object(
service_instance.neutron.API, 'get_all_admin_project_networks',
mock.Mock(return_value=[]))
self.mock_object(
service_instance.neutron.API, 'admin_project_id',
mock.Mock(return_value=admin_project_id))
self.mock_object(
service_instance.neutron.API, 'network_create',
mock.Mock(return_value=network))
instance = service_instance.NeutronNetworkHelper(self.fake_manager)
result = instance._get_service_network_id()
self.assertEqual(network['id'], result)
self.assertTrue(service_instance.neutron.API.
get_all_admin_project_networks.called)
service_instance.neutron.API.network_create.assert_has_calls([
mock.call(instance.admin_project_id, service_network_name)])
def test_get_service_network_id_one_exist(self):
service_network_name = fake_get_config_option('service_network_name')
network = dict(id='fake_network_id', name=service_network_name)
admin_project_id = 'fake_admin_project_id'
self.mock_object(
service_instance.neutron.API, 'get_all_admin_project_networks',
mock.Mock(return_value=[network]))
self.mock_object(
service_instance.neutron.API, 'admin_project_id',
mock.Mock(return_value=admin_project_id))
instance = service_instance.NeutronNetworkHelper(self.fake_manager)
result = instance._get_service_network_id()
self.assertEqual(network['id'], result)
self.assertTrue(service_instance.neutron.API.
get_all_admin_project_networks.called)
def test_get_service_network_id_two_exist(self):
service_network_name = fake_get_config_option('service_network_name')
network = dict(id='fake_network_id', name=service_network_name)
self.mock_object(
service_instance.neutron.API, 'get_all_admin_project_networks',
mock.Mock(return_value=[network, network]))
helper = service_instance.NeutronNetworkHelper(self.fake_manager)
self.assertRaises(exception.ManilaException,
lambda: helper.service_network_id)
(service_instance.neutron.API.get_all_admin_project_networks.
assert_has_calls([mock.call()]))
@ddt.data(dict(), dict(subnet_id='foo'), dict(router_id='bar'))
def test_teardown_network_no_service_data(self, server_details):
instance = self._init_neutron_network_plugin()
self.mock_object(
service_instance.neutron.API, 'router_remove_interface')
instance.teardown_network(server_details)
self.assertFalse(
service_instance.neutron.API.router_remove_interface.called)
@ddt.data(
*[dict(server_details=sd, fail=f) for f in (True, False)
for sd in (dict(service_port_id='fake_service_port_id'),
dict(public_port_id='fake_public_port_id'),
dict(service_port_id='fake_service_port_id',
public_port_id='fake_public_port_id'))]
)
@ddt.unpack
def test_teardown_network_with_ports(self, server_details, fail):
instance = self._init_neutron_network_plugin()
self.mock_object(
service_instance.neutron.API, 'router_remove_interface')
if fail:
delete_port_mock = mock.Mock(
side_effect=exception.NetworkException(code=404))
else:
delete_port_mock = mock.Mock()
self.mock_object(instance.neutron_api, 'delete_port', delete_port_mock)
self.mock_object(service_instance.LOG, 'debug')
instance.teardown_network(server_details)
self.assertFalse(instance.neutron_api.router_remove_interface.called)
self.assertEqual(
len(server_details),
len(instance.neutron_api.delete_port.mock_calls))
for k, v in server_details.items():
self.assertIn(
mock.call(v), instance.neutron_api.delete_port.mock_calls)
if fail:
service_instance.LOG.debug.assert_has_calls([
mock.call(mock.ANY, mock.ANY) for sd in server_details
])
else:
service_instance.LOG.debug.assert_has_calls([])
@ddt.data(
dict(service_port_id='fake_service_port_id'),
dict(public_port_id='fake_public_port_id'),
dict(service_port_id='fake_service_port_id',
public_port_id='fake_public_port_id'),
)
def test_teardown_network_with_ports_unhandled_exception(self,
server_details):
instance = self._init_neutron_network_plugin()
self.mock_object(
service_instance.neutron.API, 'router_remove_interface')
delete_port_mock = mock.Mock(
side_effect=exception.NetworkException(code=500))
self.mock_object(
service_instance.neutron.API, 'delete_port', delete_port_mock)
self.mock_object(service_instance.LOG, 'debug')
self.assertRaises(
exception.NetworkException,
instance.teardown_network,
server_details,
)
self.assertFalse(
service_instance.neutron.API.router_remove_interface.called)
service_instance.neutron.API.delete_port.assert_called_once_with(
mock.ANY)
service_instance.LOG.debug.assert_has_calls([])
def test_teardown_network_with_wrong_ports(self):
instance = self._init_neutron_network_plugin()
self.mock_object(
service_instance.neutron.API, 'router_remove_interface')
self.mock_object(
service_instance.neutron.API, 'delete_port')
self.mock_object(service_instance.LOG, 'debug')
instance.teardown_network(dict(foo_id='fake_service_port_id'))
service_instance.neutron.API.router_remove_interface.assert_has_calls(
[])
service_instance.neutron.API.delete_port.assert_has_calls([])
service_instance.LOG.debug.assert_has_calls([])
def test_teardown_network_subnet_is_used(self):
server_details = dict(subnet_id='foo', router_id='bar')
fake_ports = [
{'fixed_ips': [{'subnet_id': server_details['subnet_id']}],
'device_id': 'fake_device_id',
'device_owner': 'compute:foo'},
]
instance = self._init_neutron_network_plugin()
self.mock_object(
service_instance.neutron.API, 'router_remove_interface')
self.mock_object(
service_instance.neutron.API, 'update_subnet')
self.mock_object(
service_instance.neutron.API, 'list_ports',
mock.Mock(return_value=fake_ports))
instance.teardown_network(server_details)
self.assertFalse(
service_instance.neutron.API.router_remove_interface.called)
self.assertFalse(service_instance.neutron.API.update_subnet.called)
service_instance.neutron.API.list_ports.assert_called_once_with(
fields=['fixed_ips', 'device_id', 'device_owner'])
def test_teardown_network_subnet_not_used(self):
server_details = dict(subnet_id='foo', router_id='bar')
fake_ports = [
{'fixed_ips': [{'subnet_id': server_details['subnet_id']}],
'device_id': 'fake_device_id',
'device_owner': 'network:router_interface'},
{'fixed_ips': [{'subnet_id': 'bar' + server_details['subnet_id']}],
'device_id': 'fake_device_id',
'device_owner': 'compute'},
{'fixed_ips': [{'subnet_id': server_details['subnet_id']}],
'device_id': '',
'device_owner': 'compute'},
]
instance = self._init_neutron_network_plugin()
self.mock_object(
service_instance.neutron.API, 'router_remove_interface')
self.mock_object(
service_instance.neutron.API, 'update_subnet')
self.mock_object(
service_instance.neutron.API, 'list_ports',
mock.Mock(return_value=fake_ports))
instance.teardown_network(server_details)
(service_instance.neutron.API.router_remove_interface.
assert_called_once_with('bar', 'foo'))
(service_instance.neutron.API.update_subnet.
assert_called_once_with('foo', ''))
service_instance.neutron.API.list_ports.assert_called_once_with(
fields=['fixed_ips', 'device_id', 'device_owner'])
def test_teardown_network_subnet_not_used_and_get_error_404(self):
server_details = dict(subnet_id='foo', router_id='bar')
fake_ports = [
{'fixed_ips': [{'subnet_id': server_details['subnet_id']}],
'device_id': 'fake_device_id',
'device_owner': 'fake'},
]
instance = self._init_neutron_network_plugin()
self.mock_object(
service_instance.neutron.API, 'router_remove_interface',
mock.Mock(side_effect=exception.NetworkException(code=404)))
self.mock_object(
service_instance.neutron.API, 'update_subnet')
self.mock_object(
service_instance.neutron.API, 'list_ports',
mock.Mock(return_value=fake_ports))
instance.teardown_network(server_details)
(service_instance.neutron.API.router_remove_interface.
assert_called_once_with('bar', 'foo'))
(service_instance.neutron.API.update_subnet.
assert_called_once_with('foo', ''))
service_instance.neutron.API.list_ports.assert_called_once_with(
fields=['fixed_ips', 'device_id', 'device_owner'])
def test_teardown_network_subnet_not_used_get_unhandled_error(self):
server_details = dict(subnet_id='foo', router_id='bar')
fake_ports = [
{'fixed_ips': [{'subnet_id': server_details['subnet_id']}],
'device_id': 'fake_device_id',
'device_owner': 'fake'},
]
instance = self._init_neutron_network_plugin()
self.mock_object(
service_instance.neutron.API, 'router_remove_interface',
mock.Mock(side_effect=exception.NetworkException(code=500)))
self.mock_object(
service_instance.neutron.API, 'update_subnet')
self.mock_object(
service_instance.neutron.API, 'list_ports',
mock.Mock(return_value=fake_ports))
self.assertRaises(
exception.NetworkException,
instance.teardown_network, server_details)
(service_instance.neutron.API.router_remove_interface.
assert_called_once_with('bar', 'foo'))
self.assertFalse(service_instance.neutron.API.update_subnet.called)
service_instance.neutron.API.list_ports.assert_called_once_with(
fields=['fixed_ips', 'device_id', 'device_owner'])
def test_setup_network_and_connect_share_server_to_tenant_net(self):
def fake_create_port(*aargs, **kwargs):
if aargs[1] == 'fake_service_network_id':
return self.service_port
elif aargs[1] == 'fake_tenant_network_id':
return self.public_port
else:
raise exception.ManilaException('Got unexpected data')
admin_project_id = 'fake_admin_project_id'
network_info = dict(
neutron_net_id='fake_tenant_network_id',
neutron_subnet_id='fake_tenant_subnet_id')
cidr = '13.0.0.0/24'
self.service_port = dict(
id='fake_service_port_id',
fixed_ips=[dict(ip_address='fake_service_port_ip_address')])
self.public_port = dict(
id='fake_tenant_port_id',
fixed_ips=[dict(ip_address='fake_public_port_ip_address')])
service_subnet = dict(id='fake_service_subnet')
instance = self._init_neutron_network_plugin()
instance.connect_share_server_to_tenant_network = True
self.mock_object(instance, '_get_service_network_id',
mock.Mock(return_value='fake_service_network_id'))
self.mock_object(
service_instance.neutron.API, 'admin_project_id',
mock.Mock(return_value=admin_project_id))
self.mock_object(
service_instance.neutron.API, 'create_port',
mock.Mock(side_effect=fake_create_port))
self.mock_object(
service_instance.neutron.API, 'subnet_create',
mock.Mock(return_value=service_subnet))
self.mock_object(
instance, 'setup_connectivity_with_service_instances',
mock.Mock(return_value=service_subnet))
self.mock_object(
instance, '_get_cidr_for_subnet', mock.Mock(return_value=cidr))
self.mock_object(
instance, '_get_service_subnet', mock.Mock(return_value=None))
expected = {
'ip_address': self.public_port['fixed_ips'][0]['ip_address'],
'public_port': self.public_port,
'service_port': self.service_port,
'service_subnet': service_subnet,
'ports': [self.public_port, self.service_port],
'nics': [{'port-id': self.public_port['id']},
{'port-id': self.service_port['id']}]}
result = instance.setup_network(network_info)
self.assertEqual(expected, result)
(instance.setup_connectivity_with_service_instances.
assert_called_once_with())
instance._get_service_subnet.assert_called_once_with(mock.ANY)
instance._get_cidr_for_subnet.assert_called_once_with()
self.assertTrue(service_instance.neutron.API.subnet_create.called)
self.assertTrue(service_instance.neutron.API.create_port.called)
def test_setup_network_and_connect_share_server_to_tenant_net_admin(self):
def fake_create_port(*aargs, **kwargs):
if aargs[1] == 'fake_admin_network_id':
return self.admin_port
elif aargs[1] == 'fake_tenant_network_id':
return self.public_port
else:
raise exception.ManilaException('Got unexpected data')
admin_project_id = 'fake_admin_project_id'
network_info = {
'neutron_net_id': 'fake_tenant_network_id',
'neutron_subnet_id': 'fake_tenant_subnet_id'}
self.admin_port = {
'id': 'fake_admin_port_id',
'fixed_ips': [{'ip_address': 'fake_admin_port_ip_address'}]}
self.public_port = {
'id': 'fake_tenant_port_id',
'fixed_ips': [{'ip_address': 'fake_public_port_ip_address'}]}
instance = self._init_neutron_network_plugin()
instance.use_admin_port = True
instance.use_service_network = False
instance.admin_network_id = 'fake_admin_network_id'
instance.admin_subnet_id = 'fake_admin_subnet_id'
instance.connect_share_server_to_tenant_network = True
self.mock_object(
service_instance.neutron.API, 'admin_project_id',
mock.Mock(return_value=admin_project_id))
self.mock_object(
service_instance.neutron.API, 'create_port',
mock.Mock(side_effect=fake_create_port))
self.mock_object(
instance, 'setup_connectivity_with_service_instances')
expected = {
'ip_address': self.public_port['fixed_ips'][0]['ip_address'],
'public_port': self.public_port,
'admin_port': self.admin_port,
'ports': [self.public_port, self.admin_port],
'nics': [{'port-id': self.public_port['id']},
{'port-id': self.admin_port['id']}]}
result = instance.setup_network(network_info)
self.assertEqual(expected, result)
(instance.setup_connectivity_with_service_instances.
assert_called_once_with())
self.assertTrue(service_instance.neutron.API.create_port.called)
@ddt.data(None, exception.NetworkException(code=400))
def test_setup_network_using_router_success(self, return_obj):
admin_project_id = 'fake_admin_project_id'
network_info = dict(
neutron_net_id='fake_tenant_network_id',
neutron_subnet_id='fake_tenant_subnet_id')
cidr = '13.0.0.0/24'
self.admin_port = {
'id': 'fake_admin_port_id',
'fixed_ips': [{'ip_address': 'fake_admin_port_ip_address'}]}
self.service_port = dict(
id='fake_service_port_id',
fixed_ips=[dict(ip_address='fake_service_port_ip_address')])
service_subnet = dict(id='fake_service_subnet')
instance = self._init_neutron_network_plugin()
instance.use_admin_port = True
instance.admin_network_id = 'fake_admin_network_id'
instance.admin_subnet_id = 'fake_admin_subnet_id'
instance.connect_share_server_to_tenant_network = False
self.mock_object(instance, '_get_service_network_id',
mock.Mock(return_value='fake_service_network_id'))
router = dict(id='fake_router_id')
self.mock_object(
service_instance.neutron.API, 'admin_project_id',
mock.Mock(return_value=admin_project_id))
self.mock_object(
service_instance.neutron.API, 'create_port',
mock.Mock(side_effect=[self.service_port, self.admin_port]))
self.mock_object(
service_instance.neutron.API, 'subnet_create',
mock.Mock(return_value=service_subnet))
self.mock_object(
instance, '_get_private_router', mock.Mock(return_value=router))
self.mock_object(
service_instance.neutron.API, 'router_add_interface',
mock.Mock(side_effect=return_obj))
self.mock_object(instance, 'setup_connectivity_with_service_instances')
self.mock_object(
instance, '_get_cidr_for_subnet', mock.Mock(return_value=cidr))
self.mock_object(
instance, '_get_service_subnet', mock.Mock(return_value=None))
expected = {
'ip_address': self.service_port['fixed_ips'][0]['ip_address'],
'service_port': self.service_port,
'service_subnet': service_subnet,
'admin_port': self.admin_port, 'router': router,
'ports': [self.service_port, self.admin_port],
'nics': [{'port-id': self.service_port['id']},
{'port-id': self.admin_port['id']}]}
result = instance.setup_network(network_info)
self.assertEqual(expected, result)
(instance.setup_connectivity_with_service_instances.
assert_called_once_with())
instance._get_service_subnet.assert_called_once_with(mock.ANY)
instance._get_cidr_for_subnet.assert_called_once_with()
self.assertTrue(service_instance.neutron.API.subnet_create.called)
self.assertTrue(service_instance.neutron.API.create_port.called)
instance._get_private_router.assert_called_once_with(
network_info['neutron_net_id'], network_info['neutron_subnet_id'])
(service_instance.neutron.API.router_add_interface.
assert_called_once_with(router['id'], service_subnet['id']))
def test_setup_network_using_router_addon_of_interface_failed(self):
network_info = dict(
neutron_net_id='fake_tenant_network_id',
neutron_subnet_id='fake_tenant_subnet_id')
service_subnet = dict(id='fake_service_subnet')
instance = self._init_neutron_network_plugin()
instance.connect_share_server_to_tenant_network = False
self.mock_object(instance, '_get_service_network_id',
mock.Mock(return_value='fake_service_network_id'))
router = dict(id='fake_router_id')
self.mock_object(
instance, '_get_private_router', mock.Mock(return_value=router))
self.mock_object(
service_instance.neutron.API, 'router_add_interface',
mock.Mock(side_effect=exception.NetworkException(code=500)))
self.mock_object(
instance, '_get_service_subnet',
mock.Mock(return_value=service_subnet))
self.assertRaises(
exception.NetworkException,
instance.setup_network, network_info)
instance._get_service_subnet.assert_called_once_with(mock.ANY)
instance._get_private_router.assert_called_once_with(
network_info['neutron_net_id'], network_info['neutron_subnet_id'])
(service_instance.neutron.API.router_add_interface.
assert_called_once_with(router['id'], service_subnet['id']))
def test_setup_network_using_router_connectivity_verification_fail(self):
admin_project_id = 'fake_admin_project_id'
network_info = dict(
neutron_net_id='fake_tenant_network_id',
neutron_subnet_id='fake_tenant_subnet_id')
cidr = '13.0.0.0/24'
self.service_port = dict(
id='fake_service_port_id',
fixed_ips=[dict(ip_address='fake_service_port_ip_address')])
service_subnet = dict(id='fake_service_subnet')
instance = self._init_neutron_network_plugin()
instance.connect_share_server_to_tenant_network = False
self.mock_object(instance, '_get_service_network_id',
mock.Mock(return_value='fake_service_network_id'))
router = dict(id='fake_router_id')
self.mock_object(
service_instance.neutron.API, 'admin_project_id',
mock.Mock(return_value=admin_project_id))
self.mock_object(
service_instance.neutron.API, 'create_port',
mock.Mock(return_value=self.service_port))
self.mock_object(
service_instance.neutron.API, 'subnet_create',
mock.Mock(return_value=service_subnet))
self.mock_object(service_instance.neutron.API, 'delete_port')
self.mock_object(
instance, '_get_private_router', mock.Mock(return_value=router))
self.mock_object(
service_instance.neutron.API, 'router_add_interface')
self.mock_object(
instance, 'setup_connectivity_with_service_instances',
mock.Mock(side_effect=exception.ManilaException('Fake')))
self.mock_object(
instance, '_get_cidr_for_subnet', mock.Mock(return_value=cidr))
self.mock_object(
instance, '_get_service_subnet', mock.Mock(return_value=None))
self.assertRaises(
exception.ManilaException, instance.setup_network, network_info)
(instance.setup_connectivity_with_service_instances.
assert_called_once_with())
instance._get_service_subnet.assert_called_once_with(mock.ANY)
instance._get_cidr_for_subnet.assert_called_once_with()
self.assertTrue(service_instance.neutron.API.subnet_create.called)
self.assertTrue(service_instance.neutron.API.create_port.called)
instance._get_private_router.assert_called_once_with(
network_info['neutron_net_id'], network_info['neutron_subnet_id'])
(service_instance.neutron.API.router_add_interface.
assert_called_once_with(router['id'], service_subnet['id']))
service_instance.neutron.API.delete_port.assert_has_calls([
mock.call(self.service_port['id'])])
def test__get_cidr_for_subnet_success(self):
expected = (
fake_get_config_option('service_network_cidr').split('/')[0] +
'/' + six.text_type(
fake_get_config_option('service_network_division_mask')))
instance = self._init_neutron_network_plugin()
self.mock_object(
instance, '_get_all_service_subnets', mock.Mock(return_value=[]))
result = instance._get_cidr_for_subnet()
self.assertEqual(expected, result)
instance._get_all_service_subnets.assert_called_once_with()
def test__get_cidr_for_subnet_failure(self):
subnets = []
serv_cidr = netaddr.IPNetwork(
fake_get_config_option('service_network_cidr'))
division_mask = fake_get_config_option('service_network_division_mask')
for subnet in serv_cidr.subnet(division_mask):
subnets.append(dict(cidr=six.text_type(subnet.cidr)))
instance = self._init_neutron_network_plugin()
self.mock_object(
instance, '_get_all_service_subnets',
mock.Mock(return_value=subnets))
self.assertRaises(
exception.ServiceInstanceException,
instance._get_cidr_for_subnet)
instance._get_all_service_subnets.assert_called_once_with()
def test_setup_connectivity_with_service_instances(self):
instance = self._init_neutron_network_plugin()
instance.use_admin_port = True
instance.admin_network_id = 'fake_admin_network_id'
instance.admin_subnet_id = 'fake_admin_subnet_id'
interface_name_service = 'fake_interface_name_service'
interface_name_admin = 'fake_interface_name_admin'
fake_division_mask = fake_get_config_option(
'service_network_division_mask')
fake_subnet_service = fake_network.FakeSubnet(
cidr='10.254.0.0/%s' % fake_division_mask)
fake_subnet_admin = fake_network.FakeSubnet(id='fake_admin_subnet_id',
cidr='10.0.0.0/24')
fake_service_port = fake_network.FakePort(fixed_ips=[
{'subnet_id': fake_subnet_service['id'],
'ip_address': '10.254.0.2'}], mac_address='fake_mac_address')
fake_admin_port = fake_network.FakePort(fixed_ips=[
{'subnet_id': fake_subnet_admin['id'], 'ip_address': '10.0.0.4'}],
mac_address='fake_mac_address')
self.mock_object(instance, '_get_service_port',
mock.Mock(side_effect=[fake_service_port,
fake_admin_port]))
self.mock_object(instance, '_add_fixed_ips_to_service_port',
mock.Mock(return_value=fake_service_port))
self.mock_object(instance.vif_driver, 'get_device_name',
mock.Mock(side_effect=[interface_name_service,
interface_name_admin]))
self.mock_object(instance.neutron_api, 'get_subnet',
mock.Mock(side_effect=[fake_subnet_service,
fake_subnet_admin,
fake_subnet_admin]))
self.mock_object(instance, '_remove_outdated_interfaces')
self.mock_object(instance.vif_driver, 'plug')
device_mock = mock.Mock()
self.mock_object(service_instance.ip_lib, 'IPDevice',
mock.Mock(return_value=device_mock))
instance.setup_connectivity_with_service_instances()
instance._get_service_port.assert_has_calls([
mock.call(instance.service_network_id, None, 'manila-share'),
mock.call('fake_admin_network_id', 'fake_admin_subnet_id',
'manila-admin-share')])
instance.vif_driver.get_device_name.assert_has_calls([
mock.call(fake_service_port), mock.call(fake_admin_port)])
instance.vif_driver.plug.assert_has_calls([
mock.call(interface_name_service, fake_service_port['id'],
fake_service_port['mac_address']),
mock.call(interface_name_admin, fake_admin_port['id'],
fake_admin_port['mac_address'])])
instance.neutron_api.get_subnet.assert_has_calls([
mock.call(fake_subnet_service['id']),
mock.call(fake_subnet_admin['id']),
mock.call(fake_subnet_admin['id'])])
instance.vif_driver.init_l3.assert_has_calls([
mock.call(interface_name_service,
['10.254.0.2/%s' % fake_division_mask]),
mock.call(interface_name_admin, ['10.0.0.4/24'])])
service_instance.ip_lib.IPDevice.assert_has_calls([
mock.call(interface_name_service),
mock.call(interface_name_admin)])
device_mock.route.pullup_route.assert_has_calls([
mock.call(interface_name_service),
mock.call(interface_name_admin)])
instance._remove_outdated_interfaces.assert_called_with(device_mock)
def test__get_set_of_device_cidrs(self):
device = fake_network.FakeDevice('foo')
expected = set(('1.0.0.0/27', '2.0.0.0/27'))
instance = self._init_neutron_network_plugin()
result = instance._get_set_of_device_cidrs(device)
self.assertEqual(expected, result)
def test__get_set_of_device_cidrs_exception(self):
device = fake_network.FakeDevice('foo')
self.mock_object(device.addr, 'list', mock.Mock(
side_effect=Exception('foo does not exist')))
instance = self._init_neutron_network_plugin()
result = instance._get_set_of_device_cidrs(device)
self.assertEqual(set(), result)
def test__remove_outdated_interfaces(self):
device = fake_network.FakeDevice(
'foobarquuz', [dict(ip_version=4, cidr='1.0.0.0/27')])
devices = [fake_network.FakeDevice('foobar')]
instance = self._init_neutron_network_plugin()
self.mock_object(instance.vif_driver, 'unplug')
self.mock_object(
service_instance.ip_lib.IPWrapper, 'get_devices',
mock.Mock(return_value=devices))
instance._remove_outdated_interfaces(device)
instance.vif_driver.unplug.assert_called_once_with('foobar')
def test__get_service_port_none_exist(self):
instance = self._init_neutron_network_plugin()
admin_project_id = 'fake_admin_project_id'
fake_port_values = {'device_id': 'manila-share',
'binding:host_id': 'fake_host'}
self.mock_object(
service_instance.neutron.API, 'admin_project_id',
mock.Mock(return_value=admin_project_id))
fake_service_port = fake_network.FakePort(device_id='manila-share')
self.mock_object(instance.neutron_api, 'list_ports',
mock.Mock(return_value=[]))
self.mock_object(service_instance.socket, 'gethostname',
mock.Mock(return_value='fake_host'))
self.mock_object(instance.neutron_api, 'create_port',
mock.Mock(return_value=fake_service_port))
self.mock_object(instance.neutron_api, 'update_port_fixed_ips',
mock.Mock(return_value=fake_service_port))
result = instance._get_service_port(instance.service_network_id,
None, 'manila-share')
instance.neutron_api.list_ports.assert_called_once_with(
**fake_port_values)
instance.neutron_api.create_port.assert_called_once_with(
instance.admin_project_id, instance.service_network_id,
device_id='manila-share', device_owner='manila:share',
host_id='fake_host', subnet_id=None, port_security_enabled=False)
service_instance.socket.gethostname.assert_called_once_with()
self.assertFalse(instance.neutron_api.update_port_fixed_ips.called)
self.assertEqual(fake_service_port, result)
def test__get_service_port_one_exist_on_same_host(self):
instance = self._init_neutron_network_plugin()
fake_port_values = {'device_id': 'manila-share',
'binding:host_id': 'fake_host'}
fake_service_port = fake_network.FakePort(**fake_port_values)
self.mock_object(service_instance.socket, 'gethostname',
mock.Mock(return_value='fake_host'))
self.mock_object(instance.neutron_api, 'list_ports',
mock.Mock(return_value=[fake_service_port]))
self.mock_object(instance.neutron_api, 'create_port',
mock.Mock(return_value=fake_service_port))
self.mock_object(instance.neutron_api, 'update_port_fixed_ips',
mock.Mock(return_value=fake_service_port))
result = instance._get_service_port(instance.service_network_id,
None, 'manila-share')
instance.neutron_api.list_ports.assert_called_once_with(
**fake_port_values)
self.assertFalse(instance.neutron_api.create_port.called)
self.assertFalse(instance.neutron_api.update_port_fixed_ips.called)
self.assertEqual(fake_service_port, result)
def test__get_service_port_one_exist_on_different_host(self):
instance = self._init_neutron_network_plugin()
admin_project_id = 'fake_admin_project_id'
fake_port = {'device_id': 'manila-share',
'binding:host_id': 'fake_host'}
self.mock_object(
service_instance.neutron.API, 'admin_project_id',
mock.Mock(return_value=admin_project_id))
fake_service_port = fake_network.FakePort(**fake_port)
self.mock_object(instance.neutron_api, 'list_ports',
mock.Mock(return_value=[]))
self.mock_object(service_instance.socket, 'gethostname',
mock.Mock(return_value='fake_host'))
self.mock_object(instance.neutron_api, 'create_port',
mock.Mock(return_value=fake_service_port))
self.mock_object(instance.neutron_api, 'update_port_fixed_ips',
mock.Mock(return_value=fake_service_port))
result = instance._get_service_port(instance.service_network_id,
None, 'manila-share')
instance.neutron_api.list_ports.assert_called_once_with(
**fake_port)
instance.neutron_api.create_port.assert_called_once_with(
instance.admin_project_id, instance.service_network_id,
device_id='manila-share', device_owner='manila:share',
host_id='fake_host', subnet_id=None, port_security_enabled=False)
service_instance.socket.gethostname.assert_called_once_with()
self.assertFalse(instance.neutron_api.update_port_fixed_ips.called)
self.assertEqual(fake_service_port, result)
def test__get_service_port_two_exist_on_same_host(self):
instance = self._init_neutron_network_plugin()
fake_service_port = fake_network.FakePort(**{
'device_id': 'manila-share', 'binding:host_id': 'fake_host'})
self.mock_object(
instance.neutron_api, 'list_ports',
mock.Mock(return_value=[fake_service_port, fake_service_port]))
self.mock_object(service_instance.socket, 'gethostname',
mock.Mock(return_value='fake_host'))
self.mock_object(instance.neutron_api, 'create_port',
mock.Mock(return_value=fake_service_port))
self.assertRaises(
exception.ServiceInstanceException, instance._get_service_port,
instance.service_network_id, None, 'manila-share')
self.assertFalse(instance.neutron_api.create_port.called)
def test__add_fixed_ips_to_service_port(self):
ip_address1 = '13.0.0.13'
subnet_id1 = 'fake_subnet_id1'
subnet_id2 = 'fake_subnet_id2'
port = dict(id='fooport', fixed_ips=[dict(
subnet_id=subnet_id1, ip_address=ip_address1)])
expected = mock.Mock()
network = dict(subnets=[subnet_id1, subnet_id2])
instance = self._init_neutron_network_plugin()
self.mock_object(instance.neutron_api, 'get_network',
mock.Mock(return_value=network))
self.mock_object(instance.neutron_api, 'update_port_fixed_ips',
mock.Mock(return_value=expected))
result = instance._add_fixed_ips_to_service_port(port)
self.assertEqual(expected, result)
instance.neutron_api.get_network.assert_called_once_with(
instance.service_network_id)
instance.neutron_api.update_port_fixed_ips.assert_called_once_with(
port['id'], dict(fixed_ips=[
dict(subnet_id=subnet_id1, ip_address=ip_address1),
dict(subnet_id=subnet_id2)]))
def test__get_private_router_success(self):
instance = self._init_neutron_network_plugin()
network = fake_network.FakeNetwork()
subnet = fake_network.FakeSubnet(gateway_ip='fake_ip')
router = fake_network.FakeRouter(id='fake_router_id')
port = fake_network.FakePort(fixed_ips=[
dict(subnet_id=subnet['id'],
ip_address=subnet['gateway_ip'])],
device_id=router['id'])
self.mock_object(instance.neutron_api, 'get_subnet',
mock.Mock(return_value=subnet))
self.mock_object(instance.neutron_api, 'list_ports',
mock.Mock(return_value=[port]))
self.mock_object(instance.neutron_api, 'show_router',
mock.Mock(return_value=router))
result = instance._get_private_router(network['id'], subnet['id'])
self.assertEqual(router, result)
instance.neutron_api.get_subnet.assert_called_once_with(subnet['id'])
instance.neutron_api.list_ports.assert_called_once_with(
network_id=network['id'])
instance.neutron_api.show_router.assert_called_once_with(router['id'])
def test__get_private_router_no_gateway(self):
instance = self._init_neutron_network_plugin()
subnet = fake_network.FakeSubnet(gateway_ip='')
self.mock_object(instance.neutron_api, 'get_subnet',
mock.Mock(return_value=subnet))
self.assertRaises(
exception.ServiceInstanceException,
instance._get_private_router, 'fake_network_id', subnet['id'])
instance.neutron_api.get_subnet.assert_called_once_with(
subnet['id'])
def test__get_private_router_subnet_is_not_attached_to_the_router(self):
instance = self._init_neutron_network_plugin()
network_id = 'fake_network_id'
subnet = fake_network.FakeSubnet(gateway_ip='fake_ip')
self.mock_object(instance.neutron_api, 'get_subnet',
mock.Mock(return_value=subnet))
self.mock_object(instance.neutron_api, 'list_ports',
mock.Mock(return_value=[]))
self.assertRaises(
exception.ServiceInstanceException,
instance._get_private_router, network_id, subnet['id'])
instance.neutron_api.get_subnet.assert_called_once_with(
subnet['id'])
instance.neutron_api.list_ports.assert_called_once_with(
network_id=network_id)
def test__get_service_subnet_none_found(self):
subnet_name = 'fake_subnet_name'
instance = self._init_neutron_network_plugin()
self.mock_object(instance, '_get_all_service_subnets',
mock.Mock(return_value=[]))
result = instance._get_service_subnet(subnet_name)
self.assertIsNone(result)
instance._get_all_service_subnets.assert_called_once_with()
def test__get_service_subnet_unused_found(self):
subnet_name = 'fake_subnet_name'
subnets = [fake_network.FakeSubnet(id='foo', name=''),
fake_network.FakeSubnet(id='bar', name='quuz')]
instance = self._init_neutron_network_plugin()
self.mock_object(instance.neutron_api, 'update_subnet')
self.mock_object(instance, '_get_all_service_subnets',
mock.Mock(return_value=subnets))
result = instance._get_service_subnet(subnet_name)
self.assertEqual(subnets[0], result)
instance._get_all_service_subnets.assert_called_once_with()
instance.neutron_api.update_subnet.assert_called_once_with(
subnets[0]['id'], subnet_name)
def test__get_service_subnet_one_found(self):
subnet_name = 'fake_subnet_name'
subnets = [fake_network.FakeSubnet(id='foo', name='quuz'),
fake_network.FakeSubnet(id='bar', name=subnet_name)]
instance = self._init_neutron_network_plugin()
self.mock_object(instance, '_get_all_service_subnets',
mock.Mock(return_value=subnets))
result = instance._get_service_subnet(subnet_name)
self.assertEqual(subnets[1], result)
instance._get_all_service_subnets.assert_called_once_with()
def test__get_service_subnet_two_found(self):
subnet_name = 'fake_subnet_name'
subnets = [fake_network.FakeSubnet(id='foo', name=subnet_name),
fake_network.FakeSubnet(id='bar', name=subnet_name)]
instance = self._init_neutron_network_plugin()
self.mock_object(instance, '_get_all_service_subnets',
mock.Mock(return_value=subnets))
self.assertRaises(
exception.ServiceInstanceException,
instance._get_service_subnet, subnet_name)
instance._get_all_service_subnets.assert_called_once_with()
def test__get_all_service_subnets(self):
subnet_id1 = 'fake_subnet_id1'
subnet_id2 = 'fake_subnet_id2'
instance = self._init_neutron_network_plugin()
network = dict(subnets=[subnet_id1, subnet_id2])
self.mock_object(instance.neutron_api, 'get_subnet',
mock.Mock(side_effect=lambda s_id: dict(id=s_id)))
self.mock_object(instance.neutron_api, 'get_network',
mock.Mock(return_value=network))
result = instance._get_all_service_subnets()
self.assertEqual([dict(id=subnet_id1), dict(id=subnet_id2)], result)
instance.neutron_api.get_network.assert_called_once_with(
instance.service_network_id)
instance.neutron_api.get_subnet.assert_has_calls([
mock.call(subnet_id1), mock.call(subnet_id2)])
|
get_author_name is deprecated since version 2.8.0!
Parameters: (int) $auth_id - The ID of the author.
Return: (string) The author's display name.
Description: Retrieve the specified author's preferred display name.
""" Meta elements. """
from mauzr.gui import TextMixin, ColorStateMixin, RectBackgroundMixin
from mauzr.gui import BaseElement, ColorState
__author__ = "Alexander Sowitzki"
class Acceptor(TextMixin, RectBackgroundMixin, BaseElement):
""" Acknowledge all states via one click.
:param placement: Center and size of the element.
:type placement: tuple
:param panel: Panel to control.
:type panel: mauzr.gui.panel.Table
"""
def __init__(self, placement, panel):
BaseElement.__init__(self, *placement)
RectBackgroundMixin.__init__(self)
TextMixin.__init__(self, "Clear")
self._panel = panel
def _on_click(self):
# Assume click means acknowledge
for element in self._panel.elements:
element.state_acknowledged = True
@property
def _color(self):
""" Color of the element as tuple. """
return ColorState.INFORMATION.value[0]
class Muter(ColorStateMixin, TextMixin, RectBackgroundMixin, BaseElement):
""" Mute audio notifications.
:param placement: Center and size of the element.
:type placement: tuple
:param panel: Panel to control.
:type panel: mauzr.gui.panel.Table
"""
def __init__(self, placement, panel):
BaseElement.__init__(self, *placement)
RectBackgroundMixin.__init__(self)
TextMixin.__init__(self, "Mute")
conditions = {ColorState.WARNING: lambda v: v,
ColorState.INFORMATION: lambda v: not v}
ColorStateMixin.__init__(self, conditions)
self._muted = False
self._panel = panel
def _on_click(self):
        # Assume click means toggle mute
self._muted = not self._muted
self._update_state(self._muted)
self._panel.mute(self._muted)
@property
def _color(self):
""" Color of the element as tuple. """
if self._muted:
return ColorState.WARNING.value[0]
return ColorState.INFORMATION.value[0]
|
Gillotts Close is a very small residential road situated on the outskirts of Henley. As its name implies it is fairly close to Gillotts School.
The entrance to Gillotts Close taken from its junction with Makins Road.
This picture shows one side of Gillotts Close. The metal railings at the back of the picture cover a path to Gillotts School. |
class Solution(object):
def twoSum(self, numbers, target):
"""
:type numbers: List[int]
:type target: int
:rtype: List[int]
"""
if len(numbers)<2:
return None
else:
            for i in range(len(numbers)):  # wrote this myself; O(n^2) time complexity, too slow to be accepted
                for x in range(i + 1, len(numbers)):
                    if numbers[i] + numbers[x] == target:
                        return [i + 1, x + 1]
if __name__ == '__main__':
s=Solution()
print(s.twoSum([5,25,75],100))
class Solution(object):
def twoSum(self, numbers, target):
"""
:type numbers: List[int]
:type target: int
:rtype: List[int]
"""
        for i in range(len(numbers)):
            if i > 0 and numbers[i - 1] == numbers[i]:  # skip duplicate elements
                continue
            low = i + 1
            high = len(numbers) - 1
            while low <= high:
                mid = (low + high) // 2  # binary search to speed up the lookup
                if target - numbers[i] == numbers[mid]:
                    return [i + 1, mid + 1]
                elif target - numbers[i] < numbers[mid]:
                    high = mid - 1
                else:
                    low = mid + 1
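# Hedged addition (not from the original file): the classic two-pointer approach
# for Two Sum II on a sorted array. It runs in O(n) time with O(1) extra space,
# avoiding both the quadratic brute force and the extra log factor of the
# binary-search version above.
class Solution(object):
    def twoSum(self, numbers, target):
        """
        :type numbers: List[int]
        :type target: int
        :rtype: List[int]
        """
        left, right = 0, len(numbers) - 1
        while left < right:
            current = numbers[left] + numbers[right]
            if current == target:
                return [left + 1, right + 1]  # the problem expects 1-based indices
            elif current < target:
                left += 1   # sum too small: advance the left pointer
            else:
                right -= 1  # sum too large: retreat the right pointer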
It is so funny how our tastes change as we get older. As a child and even teen you would never catch me eating spinach. Yet somewhere in my adulthood I discovered how delicious it is. I love it in a salad but my favorite way to eat spinach is sauteed. My father has a couple of restaurants in Bronxville (New York) and whenever he comes up to visit he always brings us a few trays of yummy food. They make the most delicious sauteed spinach ever and I decided one day that I was going to try it myself. So I took my organic spinach from our CSA and gave it a go. While it wasn't a complete fail it did taste pretty bitter. So I emailed dad to find out what his secret is and he told me how they cook it to perfection at the restaurant. Following his instructions I made the yummiest sauteed spinach and I can't wait to make it again. I am sharing his secret below, enjoy!
Bring a large pot of water to a boil and add the spinach.
Stir it for a few minutes till all of the spinach leaves start to wrinkle a bit. Drain and set aside to cool.
I rinsed with cold water to speed up the cooling process. Then squeeze all of the excess water out of the spinach. Like you are wringing water out of clothes. Keep going so you can get as much water out as possible. The less water you have then the less bitter the taste. You will be left with a few clumps of spinach.
Return pot to stove and over medium-low heat add some olive oil and garlic. How much garlic depends on your tastes. Saute garlic for about one minute.
Add the spinach to the pot and saute for roughly four minutes, constantly stirring. The spinach will separate and no longer be in clumps. Add salt to taste.
Time to plate and serve! This sauteed spinach goes great with a saucy/cheesy main dish like chicken parmigiana or chicken rollatini.
Tip: A large bunch of spinach will reduce down to pretty much one or two servings. So if you are cooking for a group, use A LOT of spinach.
You know what? A lot of people knock spinach, but I really love it. Thanks for the recipe, I will have to try it!
I would absolutely love this. I am crazy about spinach. I hate having to clean it because it's soooo sandy, but I love the taste!
We love spinach! Thanks for sharing the recipe. |
import cv2
from cv_helpers import extract_color, get_subset, get_dimens, show, get_contours, \
get_center_for_contour, apply_offset_to_single_location
from modules.complicated_wires_common import WireColor
_WIRE_Y_BOUNDARIES = (
64.0,
73.6,
)
_LED_Y_BOUNDARIES = (
13.8,
20.9,
)
_STAR_Y_BOUNDARIES = (
75.5,
86.4,
)
_TOP_X_BOUNDARIES = (
12.7,
22.4,
31.6,
41.2,
51.6,
60.9,
69.7,
)
_BOTTOM_X_BOUNDARIES = (
12.1,
23.3,
35.1,
47.4,
59.7,
69.9,
82.1,
)
_STAR_RATIO_THRESHOLD = 0.03
def _get_wire_color_and_mat_or_none(wire, hue, saturation, value, color):
mat = extract_color(wire, hue, saturation, value)
if mat.any():
return color, mat
else:
return None
def _get_wire_colors_and_positions(im):
colors_and_positions = []
for i in range(len(_BOTTOM_X_BOUNDARIES) - 1):
wire = get_subset(im, _BOTTOM_X_BOUNDARIES[i:i + 2], _WIRE_Y_BOUNDARIES)
        # Materialize the filter so the emptiness check and the zip() unpacking
        # below behave the same on Python 3, where filter() is a lazy iterator.
        wire_colors_and_mats = list(filter(None, (
            _get_wire_color_and_mat_or_none(wire, 354 / 2, (220, 255), (150, 220), WireColor.RED),
            _get_wire_color_and_mat_or_none(wire, 37 / 2, (0, 50), (200, 255), WireColor.WHITE),
            _get_wire_color_and_mat_or_none(wire, 229 / 2, (150, 200), (75, 215), WireColor.BLUE),
        )))
if not wire_colors_and_mats:
colors_and_positions.append(None)
continue
wire_colors, mats = zip(*wire_colors_and_mats)
w, h = get_dimens(im)
left = int((w * _BOTTOM_X_BOUNDARIES[i]) / 100.0)
top = int((h * _WIRE_Y_BOUNDARIES[0]) / 100.0)
summed_wires = sum(mats)
structuring_element1 = cv2.getStructuringElement(cv2.MORPH_RECT, (15, 15))
summed_wires = cv2.morphologyEx(summed_wires, cv2.MORPH_CLOSE, structuring_element1)
contour = max(get_contours(summed_wires, close_and_open=False), key=cv2.contourArea)
center = get_center_for_contour(contour)
center = apply_offset_to_single_location(center, (left, top))
# show(summed_wires)
colors_and_positions.append((wire_colors, center))
return colors_and_positions
def _get_leds_are_lit(im):
leds_are_lit = []
for i in range(len(_TOP_X_BOUNDARIES) - 1):
led = get_subset(im, _TOP_X_BOUNDARIES[i:i + 2], _LED_Y_BOUNDARIES)
lit_led = extract_color(led, 51 / 2, (40, 90), (220, 255))
leds_are_lit.append(lit_led.any())
# show(lit_led)
return leds_are_lit
def _get_has_stars(im):
has_stars = []
for i in range(len(_BOTTOM_X_BOUNDARIES) - 1):
star = get_subset(im, _BOTTOM_X_BOUNDARIES[i:i + 2], _STAR_Y_BOUNDARIES)
has_star = extract_color(star, 33 / 2, (75, 125), (0, 70))
# show(has_star)
w, h = get_dimens(star)
star_ratio = float(cv2.countNonZero(has_star)) / (w * h)
# print star_ratio
has_stars.append(star_ratio > _STAR_RATIO_THRESHOLD)
return has_stars
def get_complicated_wire_info_for_module(im):
"""
Returns a list (has_led, wire_colors_and_position, has_star) for each position. The
wire_colors_and_position will be a tuple of ((wire_color, ...), (x_pos, y_pos)) if a wire
exists, or None if there is no wire.
"""
leds = _get_leds_are_lit(im)
wires = _get_wire_colors_and_positions(im)
stars = _get_has_stars(im)
return zip(leds, wires, stars)
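# Hedged usage sketch (not part of the original module): the screenshot path and
# the printout below are illustrative assumptions about how the helper is called.
if __name__ == "__main__":
    example_im = cv2.imread("module.png")  # hypothetical screenshot of a complicated-wires module
    for has_led, wire, has_star in get_complicated_wire_info_for_module(example_im):
        if wire is None:
            print("empty slot: led=%s star=%s" % (has_led, has_star))
        else:
            wire_colors, (x, y) = wire
            print("wire %s at (%s, %s): led=%s star=%s" % (wire_colors, x, y, has_led, has_star))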
|
If you require a copy of ser2pl.sys, the file should be stored in the actual installation path of the software. How to run Memtest86 to scan for memory corruption causing ser2pl.sys errors. As a Gold Certified Independent Software Vendor (ISV), Solvusoft is able to provide the highest level of customer satisfaction through delivering top-level software and service solutions, which have been subject to a rigorous and continually-audited approval process by Microsoft. Incorrectly editing your registry can stop your PC from functioning and create irreversible damage to your operating system. If you would like to learn more about manual registry editing, please see the links below.
If that is the case, then it is likely you will need to replace the associated hardware causing the ser2pl.sys error. Memtest86 will begin testing for memory corruption.
Proposed as answer by Drew Coppock, May 14. If you have just added new memory to the computer, we recommend temporarily removing it to make sure it is not the cause of your ser2pl.sys error. Please see "Causes of Ser2pl.sys Errors".
Should you experience an actual problem, try to recall the last thing you did, or the last thing you installed before the problem appeared for the first time.
Furthermore, there is a possibility that the ser2pl.sys error you are experiencing is related to malware. Malwarebytes Anti-Malware detects and removes sleeping spyware, adware, Trojans, keyloggers, malware and trackers from your hard drive. Enter any administrator passwords if prompted. Using Registry Editor incorrectly can cause serious problems that may require you to reinstall Windows.
He is a lifelong computer geek and loves everything related to computers, software, and new technology. Instructions for Windows 8: The file should be stored in the actual installation path of the software.
Right-click to bring up the Start Context Menu. Because of this risk, we highly recommend using a trusted registry cleaner such as WinThruster (developed by a Microsoft Gold Certified Partner) to scan and repair any ser2pl.sys-related registry problems. For GUCA, click here to download the installation files with the correct ser2pl.sys.
Follow the on-screen directions to complete the uninstallation of the software associated with ser2pl.sys.
Reinstalling Windows will erase everything from your hard drive, allowing you to start again with a fresh system. Follow the steps in the Wizard to choose a restore point.
Run a free scan to check for outdated drivers in need of updating. This step is your final option in trying to resolve your ser2pl.sys issue. Never tested it with Vista. Solvusoft's close relationship with Microsoft as a Gold Certified Partner enables us to provide software solutions that are optimized for performance on Windows operating systems.
Always remember to perform periodic backups, or at least to set restore points. Maintaining a driver backup provides you with the security of knowing that you can roll back any driver to a previous version if necessary. For serious problems, rather than reinstalling Windows, you are better off repairing your installation or, on Windows 8 and later versions, executing the DISM command.
Can anyone tell me if ser2pl.sys… A clean install of Windows will quickly clean out any and all "junk" that has accumulated over the normal usage of your computer. Therefore, you should check the ser2pl.sys process on your PC to see if it is a threat. If you are not a Florida Probe customer, please do not contact us with questions about it, as we will not be able to assist you.
The best part is that repairing registry errors can also dramatically improve system speed and performance. |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# chroot.py
#
# This file was forked from Cnchi (graphical installer from Antergos)
# Check it at https://github.com/antergos
#
# Copyright © 2013-2015 Antergos (http://antergos.com/)
# Copyright © 2013-2015 Manjaro (http://manjaro.org)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
""" Chroot related functions. Used in the installation process """
import logging
import os
import subprocess
# When testing, no _() is available
try:
_("")
except NameError as err:
def _(message):
return message
_special_dirs_mounted = False
def get_special_dirs():
""" Get special dirs to be mounted or unmounted """
special_dirs = ["/dev", "/dev/pts", "/proc", "/sys"]
efi = "/sys/firmware/efi/efivars"
if os.path.exists(efi):
special_dirs.append(efi)
return special_dirs
def mount_special_dirs(dest_dir):
""" Mount special directories for our chroot (bind them)"""
"""
There was an error creating the child process for this terminal
grantpt failed: Operation not permitted
"""
global _special_dirs_mounted
# Don't try to remount them
if _special_dirs_mounted:
msg = _("Special dirs are already mounted. Skipping.")
logging.debug(msg)
return
special_dirs = []
special_dirs = get_special_dirs()
for special_dir in special_dirs:
mountpoint = os.path.join(dest_dir, special_dir[1:])
os.makedirs(mountpoint, mode=0o755, exist_ok=True)
# os.chmod(mountpoint, 0o755)
cmd = ["mount", "--bind", special_dir, mountpoint]
logging.debug("Mounting special dir '{0}' to {1}".format(special_dir, mountpoint))
try:
subprocess.check_call(cmd)
except subprocess.CalledProcessError as process_error:
txt = "Unable to mount {0}, command {1} failed: {2}".format(mountpoint, process_error.cmd, process_error.output)
logging.warning(txt)
_special_dirs_mounted = True
def umount_special_dirs(dest_dir):
""" Umount special directories for our chroot """
global _special_dirs_mounted
# Do not umount if they're not mounted
if not _special_dirs_mounted:
msg = _("Special dirs are not mounted. Skipping.")
logging.debug(msg)
return
special_dirs = []
special_dirs = get_special_dirs()
for special_dir in reversed(special_dirs):
mountpoint = os.path.join(dest_dir, special_dir[1:])
logging.debug("Unmounting special dir '{0}'".format(mountpoint))
try:
subprocess.check_call(["umount", mountpoint])
except subprocess.CalledProcessError:
logging.debug("Can't unmount. Trying -l to force it.")
try:
subprocess.check_call(["umount", "-l", mountpoint])
except subprocess.CalledProcessError as process_error:
txt = "Unable to unmount {0}, command {1} failed: {2}".format(
mountpoint, process_error.cmd, process_error.output)
logging.warning(txt)
_special_dirs_mounted = False
def run(cmd, dest_dir, timeout=None, stdin=None):
""" Runs command inside the chroot """
full_cmd = ['chroot', dest_dir]
for element in cmd:
full_cmd.append(element)
proc = None
try:
proc = subprocess.Popen(full_cmd,
stdin=stdin,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
outs, errs = proc.communicate(timeout=timeout)
txt = outs.decode().strip()
if len(txt) > 0:
logging.debug(txt)
except subprocess.TimeoutExpired as timeout_error:
if proc:
proc.kill()
proc.communicate()
logging.error("Timeout running the command {0}".format(timeout_error.cmd))
except subprocess.CalledProcessError as process_error:
logging.error("Error running command {0}: {1}".format(process_error.cmd, process_error.output))
except OSError as os_error:
logging.error("Error running command {0}: {1}".format(" ".join(full_cmd), os_error))
|
If I don't use isAuthenticated in the config/policies.js file I do not get this error.
How to apply policies to errors?
Why is 'encryptedPassword' parameter being interpreted as undefined in sails.js project? |
import random
import operator
from . import storage
from .core import command
class Quotes(storage.SelectableStorage):
lib = 'pmx'
@classmethod
def initialize(cls):
cls.store = cls.from_URI()
cls._finalizers.append(cls.finalize)
@classmethod
def finalize(cls):
del cls.store
@staticmethod
def split_num(lookup):
prefix, sep, num = lookup.rpartition(' ')
if not prefix or not num.isdigit():
return lookup, 0
return prefix, int(num)
def lookup(self, rest=''):
rest = rest.strip()
return self.lookup_with_num(*self.split_num(rest))
class SQLiteQuotes(Quotes, storage.SQLiteStorage):
def init_tables(self):
CREATE_QUOTES_TABLE = '''
CREATE TABLE
IF NOT EXISTS quotes (
quoteid INTEGER NOT NULL,
library VARCHAR NOT NULL,
quote TEXT NOT NULL,
PRIMARY KEY (quoteid)
)
'''
CREATE_QUOTES_INDEX = '''
CREATE INDEX
IF NOT EXISTS ix_quotes_library
on quotes(library)
'''
CREATE_QUOTE_LOG_TABLE = '''
CREATE TABLE IF NOT EXISTS quote_log (quoteid varchar, logid INTEGER)
'''
self.db.execute(CREATE_QUOTES_TABLE)
self.db.execute(CREATE_QUOTES_INDEX)
self.db.execute(CREATE_QUOTE_LOG_TABLE)
self.db.commit()
def lookup_with_num(self, thing='', num=0):
lib = self.lib
BASE_SEARCH_SQL = """
SELECT quoteid, quote
FROM quotes
WHERE library = ? %s order by quoteid
"""
thing = thing.strip().lower()
num = int(num)
if thing:
wtf = ' AND %s' % (
' AND '.join(["quote like '%%%s%%'" % x for x in thing.split()])
)
SEARCH_SQL = BASE_SEARCH_SQL % wtf
else:
SEARCH_SQL = BASE_SEARCH_SQL % ''
results = [x[1] for x in self.db.execute(SEARCH_SQL, (lib,)).fetchall()]
n = len(results)
if n > 0:
if num:
i = num - 1
else:
i = random.randrange(n)
quote = results[i]
else:
i = 0
quote = ''
return (quote, i + 1, n)
def add(self, quote):
lib = self.lib
quote = quote.strip()
if not quote:
# Do not add empty quotes
return
ADD_QUOTE_SQL = 'INSERT INTO quotes (library, quote) VALUES (?, ?)'
res = self.db.execute(ADD_QUOTE_SQL, (lib, quote))
quoteid = res.lastrowid
query = 'SELECT id, message FROM LOGS order by datetime desc limit 1'
log_id, log_message = self.db.execute(query).fetchone()
if quote in log_message:
query = 'INSERT INTO quote_log (quoteid, logid) VALUES (?, ?)'
self.db.execute(query, (quoteid, log_id))
self.db.commit()
def __iter__(self):
# Note: also filter on quote not null, for backward compatibility
query = "SELECT quote FROM quotes WHERE library = ? and quote is not null"
for row in self.db.execute(query, [self.lib]):
yield {'text': row[0]}
def export_all(self):
query = """
SELECT quote, library, logid
from quotes
left outer join quote_log on quotes.quoteid = quote_log.quoteid
"""
fields = 'text', 'library', 'log_id'
return (dict(zip(fields, res)) for res in self.db.execute(query))
class MongoDBQuotes(Quotes, storage.MongoDBStorage):
collection_name = 'quotes'
def find_matches(self, thing):
thing = thing.strip().lower()
words = thing.split()
def matches(quote):
quote = quote.lower()
return all(word in quote for word in words)
return [
row
for row in self.db.find(dict(library=self.lib)).sort('_id')
if matches(row['text'])
]
def lookup_with_num(self, thing='', num=0):
by_text = operator.itemgetter('text')
results = list(map(by_text, self.find_matches(thing)))
n = len(results)
if n > 0:
if num:
i = num - 1
else:
i = random.randrange(n)
quote = results[i]
else:
i = 0
quote = ''
return (quote, i + 1, n)
def delete(self, lookup):
"""
If exactly one quote matches, delete it. Otherwise,
raise a ValueError.
"""
lookup, num = self.split_num(lookup)
if num:
result = self.find_matches(lookup)[num - 1]
else:
(result,) = self.find_matches(lookup)
self.db.delete_one(result)
def add(self, quote):
quote = quote.strip()
quote_id = self.db.insert_one(dict(library=self.lib, text=quote))
# see if the quote added is in the last IRC message logged
newest_first = [('_id', storage.pymongo.DESCENDING)]
last_message = self.db.database.logs.find_one(sort=newest_first)
if last_message and quote in last_message['message']:
self.db.update_one(
{'_id': quote_id}, {'$set': dict(log_id=last_message['_id'])}
)
def __iter__(self):
return self.db.find(dict(library=self.lib))
def _build_log_id_map(self):
from . import logging
if not hasattr(logging.Logger, 'log_id_map'):
log_db = self.db.database.logs
logging.Logger.log_id_map = dict(
(logging.MongoDBLogger.extract_legacy_id(rec['_id']), rec['_id'])
for rec in log_db.find(projection=[])
)
return logging.Logger.log_id_map
def import_(self, quote):
log_id_map = self._build_log_id_map()
log_id = quote.pop('log_id', None)
log_id = log_id_map.get(log_id, log_id)
if log_id is not None:
quote['log_id'] = log_id
self.db.insert_one(quote)
@command(aliases='q')
def quote(rest):
"""
If passed with nothing then get a random quote. If passed with some
string then search for that. If prepended with "add:" then add it to the
db, eg "!quote add: drivers: I only work here because of pmxbot!".
Delete an individual quote by prepending "del:" and passing a search
matching exactly one query.
"""
rest = rest.strip()
if rest.startswith('add: ') or rest.startswith('add '):
quote_to_add = rest.split(' ', 1)[1]
Quotes.store.add(quote_to_add)
return 'Quote added!'
if rest.startswith('del: ') or rest.startswith('del '):
cmd, sep, lookup = rest.partition(' ')
Quotes.store.delete(lookup)
return 'Deleted the sole quote that matched'
qt, i, n = Quotes.store.lookup(rest)
if not qt:
return
return '(%s/%s): %s' % (i, n, qt)
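# --- Hedged usage sketch (added for illustration; not part of the original module) ---
# Minimal example of the storage API the command above relies on, assuming
# Quotes.store has already been initialized (pmxbot normally does this at
# startup). The sample quote text is hypothetical.
def _example_quote_store_usage():
    Quotes.store.add('drivers: I only work here because of pmxbot!')
    text, index, total = Quotes.store.lookup_with_num('pmxbot')
    return '(%s/%s): %s' % (index, total, text)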
|
The Existence of God: Part 1 – The Nature of Evidence and Why Logic?
The above image (source found here) is a classic example of modern secular thinking. Christians are irrational, illogical and unreasonable. They are fools for believing in God and even more foolish for the belief that this Jesus person is the incarnation of God. Atheists, in recent years, have taken the gloves off. They are no longer engaging in polite discourse. Although religion as a whole is under attack, it is Christians who bear the brunt. To see the truth of this statement, one need only spend a few minutes on any popular website or public forum such as Reddit or Facebook. The vast majority of the atheist venom is directed specifically toward the bible, the God of the bible, Jesus and His followers. They have taken the disagreement to the level of mockery and complete intolerance for the Christian perspective. They say belief in God is a psychological crutch for simpletons unable to handle reality. And why is the Christian so mock-worthy? According to them, evidence contradicts the bible.
Atheists argue that the evidence against the biblical perspective - on origins specifically, but on other matters as well - is overwhelming. The universe and life both have “natural” origins; therefore existence itself is not a proof of God. They take this perspective even further. The claim is that science precludes the existence of God. You cannot use God as an explanation for anything in science. In other words, science can’t prove or disprove God, and if something can’t be accounted for scientifically, it does not exist. Although this line of thinking is full of baseless presuppositions, I’m not going to specifically address those issues at this time, but I will address them (Lord willing) in a future article that will examine the atheist world view in a more general sense. For now we will look at two of atheism’s most basic foundations.
Now I should point out that there is absolutely no reason to believe in the existence of the Oort cloud other than the fact that we still have comets orbiting the sun, just as Christians do not yet have an explanation for distant starlight. You see both sides can argue over evidence all day long, but we will only endlessly come up with reasons why the other's evidence does not affect our world views. Everyone has a world view, and they interpret everything on the basis of that world view. So our task - as the human race in the search for truth - is quite simple: examine the different world views and find the one that makes the most logical sense. But that leads to one very important question: why logic?
Proponent of atheism Dr. Gordon Stein has said, “The use of logic or reason is the only valid way to examine the truth or falsity of a statement which claims to be factual.” In other words, logic is the most fundamental rule by which we measure truth. You can’t prove anything without logic or reason. This is actually something Christians and atheists agree upon. The problem with this standard for truth is that it is in and of itself a claim of truth. So the atheist has to now prove, by logic and reason, that it is by logic and reason that you prove everything. This is circular reasoning and begging the question, two logical fallacies that invalidate an argument. The only option left for the atheist is to prove - by some method other than logic or reason - that it is by logic and reason that you prove everything, in which case they destroy the argument on its own foundation. This is an inescapable trap and the biggest problem with atheism. The atheist perspective on its most basic foundation is totally irrational and self-refuting. The reason for this is that the atheist perspective cannot account for the existence of logic, so they have no logical reason for the use of logic and reason. According to their own standards, science must be able to account for something for it to exist. But there is no “logic particle” or “planet logic” beaming logic (like radio waves) into the universe. But don’t Christians have the same problem? No, they don’t, because their world view can account for logic and give a reason why it must be used. Logic is a reflection of God’s thinking. God is logical, and therefore logic is the standard by which we must think. To do otherwise violates God’s law and is therefore irrational. The same can be demonstrated with all non-physical properties of the universe such as morality and the laws of nature.
In closing, there are four ways atheists try to get out of the logical trap their world view creates for them. The first is to try and say that matter is logical, and therefore logic is a property of matter. There are two problems with this argument. The first is that matter changes and logic does not; if logic were based on matter, it would change. The second is that matter obeys the laws of logic and nature; laws do not obey matter, so matter cannot be their source.
The second method of escape is to say the laws of logic are like the rules of grammar: they are not laws, they are simply conventions among people. If that were true, then logic would be different from culture to culture. For instance, in England it might make sense for me to contradict myself and say I am 100% a person and 100% not a person.
The third escape is to say logic is created by chemical reactions in our brain - it is just how the human mind works. The first problem with this is that people are not always logical; that is why there are logical fallacies. If logic were just how the human mind worked, we would not have to be taught how to be logical. The second problem is that what happens in your brain is not what happens in mine. Like the “matter source” and “conventions” arguments, this would make logic changeable.
Finally, they might try to say something along the lines of, “Just because you can prove something logically does not mean it actually exists.” This is referring, of course, to God. This is not really a method of escape, but rather an attack on the overall argument. What I say to this (and the three methods of escape in general) is that it is the atheist who is forced to deny and attack logic … not the Christian. It is not Christians who have to diminish logic or rational thought to try and uphold their world view. Christians can account for, or give a reason for, the use of logic. The atheist cannot. In fact, in the rational world, the atheist can’t prove anything because they have absolutely no basis for logic or any other kind of truth. They borrow from the Christian world view to try and fill in the gaps of their world view. The atheist world view is clearly inconsistent and irrational.
In part 2 (ETA: soonish) we will broaden our view and tackle the atheist claim of neutrality and the naturalist world view.
"You see both sides can argue over evidence all day long, but we will only endlessly come up with reasons why the other's evidence does not affect our world views."
You hit the nail on the head, Scott. I loved reading this article - especially when you discuss how Christians are not the ones attacking logic. It's the atheist that is required to diminish the use of logic or rational thought. |
"""
Copyright 2017, Fujitsu Network Communications, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import json
import getpass
import Tools
from Framework.Utils import xml_Utils, file_Utils
from Framework.Utils.testcase_Utils import pNote
from Framework.Utils.print_Utils import print_info
from Framework.Utils.xml_Utils import getElementWithTagAttribValueMatch
__author__ = 'Keenan Jabri'
class LineResult:
"""Class that generates html result line items"""
data = {}
html = ''
def __init__(self):
"""Constructor for class LineResult"""
self.keys = ['type', 'name', 'info', 'description', 'timestamp', 'duration', 'status', 'impact', 'onerror', 'msc', 'static',
'dynamic']
def get_info(self, line):
"""gets info for line"""
inf_obj = line.get("info") if line.get("info") else ' '
info = json.dumps(inf_obj)
info = info.replace('}"', ',').replace('"{', '').replace("'", "").replace('"', '')
return info
def set_dynamic_content(self, line):
"""sets content that is subjected to change"""
self.data['dynamic'] = [line.get("keywords"), line.get("passes"), line.get("failures"),
line.get("errors"), line.get("exceptions"), line.get("skipped")]
self.data['timestamp'] = line.get("timestamp")
def set_attributes(self, line, variant, stepcount):
"""sets attributes"""
if 'Keyword' not in variant and 'step' not in variant:
stepcount = ''
result_file = line.get("resultfile") if line.get("resultfile") else line.get("resultsdir") if line.get(
"resultsdir") else ''
status_name = line.get("status") if line.get("status") else ''
self.data = {'nameAttr': variant + 'Record',
'type': variant.replace('Test', '').replace('Keyword', 'step ') + str(stepcount),
'name': line.get("name"),
'info': self.get_info(line),
'description': line.get("description"),
'timestamp': line.get("timestamp"),
'duration': line.get("time"),
'status': '<span class=' + status_name + '>' + status_name + '</span>',
'impact': line.get("impact"),
'onerror': line.get("onerror"),
'msc': '<span style="padding-left:10px; padding-right: 10px;"><a href="' + result_file
+ '"><i class="fa fa-line-chart"> </i></a></span>' + (
'' if variant == 'Keyword' else '<span style="padding-left:10px; padding-right: 10px;"><a href="' + (
line.get("logsdir") if line.get(
"logsdir") else '') + '"><i class="fa fa-book"> </i></a></span>') + (
'<span style="padding-left:10px; padding-right: 10px;"><a href="' + line.get("defects")
+ '"><i class="fa fa-bug"> </i></a></span>' if line.get("defects") else ''),
'static': ['Count', 'Passed', 'Failed', 'Errors', 'Exceptions', 'Skipped']
}
def set_html(self, line, variant, stepcount):
"""sets the html code"""
if self.html == '':
self.set_attributes(line, variant, stepcount)
self.set_dynamic_content(line)
top_level = ''
top_level_next = ''
if not line.get("display") or line.get("display") == 'True':
if self.data['nameAttr'] != 'KeywordRecord':
for elem in self.keys:
if elem == 'dynamic':
for dynamicElem in self.data['dynamic']:
top_level_next += '<td>' + (dynamicElem if dynamicElem else '0') + '</td>'
elif elem == 'static':
for staticElem in self.data['static']:
top_level += '<td>' + (staticElem if staticElem else '') + '</td>'
else:
top_level += '<td rowspan="2"><div>' + (
self.data[elem] if self.data[elem] else '') + '</div></td>'
top_level_next = '<tr>' + top_level_next + '</tr>'
else:
for elem in self.keys:
if elem != 'static' and elem != 'dynamic':
top_level += '<td rowspan="2"><div>' + (
self.data[elem] if self.data[elem] else '') + '</div></td>'
self.html = '<tr name="' + self.data['nameAttr'] + '">' + top_level + '</tr>' + top_level_next
class WarriorHtmlResults:
"""Class that generates html results using the junit result file """
lineObjs = []
lineCount = 0
recount = 0
steps = 0
def __init__(self, junit_file=None):
""" init function"""
self.junit_file = junit_file
self.html_template = "{0}{1}reporting{1}html_results_template.html" \
.format(Tools.__path__[0], os.sep)
self.junit_root = xml_Utils.getRoot(self.junit_file)
def create_line_result(self, line, variant):
""" create new objs"""
temp = LineResult()
temp.set_html(line, variant, self.steps)
self.lineObjs.append(temp)
self.lineCount += 1
def set_line_objs(self):
""" call to create a new obj per item"""
self.lineCount = 0
project_node_list = [self.junit_root]
for project_node in project_node_list:
self.create_line_result(project_node, "Project")
for testsuite_node in project_node.findall("testsuite"):
self.create_line_result(testsuite_node, "Testsuite")
#to add setup result in html file
for setup_node in testsuite_node.findall("Setup"):
self.create_line_result(setup_node, "Setup")
self.steps = 0
for step_node in setup_node.findall("properties"):
for node in step_node.findall("property"):
if node.get('type') == 'keyword':
self.steps += 1
self.create_line_result(node, "Keyword")
for testcase_node in testsuite_node.findall("testcase"):
self.create_line_result(testcase_node, "Testcase")
self.steps = 0
for step_node in testcase_node.findall("properties"):
for node in step_node.findall("property"):
if node.get('type') == 'keyword':
self.steps += 1
self.create_line_result(node, "Keyword")
#to add debug result in html file
for debug_node in testsuite_node.findall("Debug"):
self.create_line_result(debug_node, "Debug")
self.steps = 0
for step_node in debug_node.findall("properties"):
for node in step_node.findall("property"):
if node.get('type') == 'keyword':
self.steps += 1
self.create_line_result(node, "Keyword")
#to add cleanup result in html file
for cleanup_node in testsuite_node.findall("Cleanup"):
self.create_line_result(cleanup_node, "Cleanup")
self.steps = 0
for step_node in cleanup_node.findall("properties"):
for node in step_node.findall("property"):
if node.get('type') == 'keyword':
self.steps += 1
self.create_line_result(node, "Keyword")
def get_path(self):
""" get the html results path """
filename = file_Utils.getNameOnly(os.path.basename(self.junit_file))
filename = filename.split("_junit")[0]
html_filename = filename + ".html"
if hasattr(self, 'givenPath'):
html_results_path = self.givenPath + os.sep + html_filename
else:
results_dir = os.path.dirname(self.junit_file)
html_results_path = results_dir + os.sep + html_filename
return html_results_path
def merge_html(self, dynamic_html):
""" merge html from template and dynamic """
temp = open(self.html_template)
template_html = temp.read().replace('\n', '')
temp.close()
index = template_html.rfind('</table>')
return template_html[:index] + dynamic_html + template_html[index:] + self.get_war_version() + self.get_user()
def get_war_version(self):
""" find the warrior version """
path = self.get_path().split('warriorframework')[0] + 'warriorframework/version.txt'
if os.path.isfile(path):
version = open(path, 'r').read().splitlines()[1].split(':')[1]
return '<div class="version">' + version + '</div>'
return ''
def get_user(self):
""" find the user who executed the testcase """
try:
user = getpass.getuser()
except Exception:
user = "Unknown_user"
return '<div class="user">' + user + '</div>'
def generate_html(self, junitObj, givenPath, print_summary=False):
""" build the html givenPath: added this feature in case of later down the line
calling from outside junit file ( no actual use as of now )
"""
if junitObj:
self.junit_file = junitObj
self.junit_root = xml_Utils.getRoot(self.junit_file)
if givenPath:
self.givenPath = givenPath
self.set_line_objs()
html = ''
for item in self.lineObjs:
html += item.html
html = self.merge_html(html)
elem_file = open(self.get_path(), 'w')
elem_file.write(html)
elem_file.close()
self.lineObjs = []
# Prints result summary at the end of execution
if print_summary is True:
print_info("++++ Results Summary ++++")
print_info("Open the Results summary file given below in a browser to "
"view results summary for this execution")
print_info("Results summary file: {0}".format(self.get_path()))
print_info("+++++++++++++++++++++++++")
|
Become a Stokes Nature Center Member Today!
Members play a critical role in ensuring that nature education in Cache Valley continues to flourish. Choose the membership that works best for you!
Free admission and discounts at more than 300 participating nature centers throughout the USA and Canada. |
'''
Code for mmDGM
Author: Chongxuan Li ([email protected])
Co-author: Tianlin Shi
Version = '1.0'
'''
import sys, os
import pdb
import numpy as np
import theano
import theano.tensor as T
import collections as C
import anglepy as ap
import anglepy.ndict as ndict
import color
from anglepy.misc import lazytheanofunc
import math, inspect
#import theano.sandbox.cuda.rng_curand as rng_curand
def shared32(x, name=None, borrow=False):
return theano.shared(np.asarray(x, dtype='float32'), name=name, borrow=borrow)
def cast32(x):
return T.cast(x, dtype='float32')
'''
Fully connected deep variational auto-encoder with max-margin supervision (GPUVAE_MM_Z_X)
'''
class GPUVAE_MM_Z_X(ap.GPUVAEModel):
def __init__(self, get_optimizer, n_x, n_y, n_hidden_q, n_z, n_hidden_p, nonlinear_q='tanh', nonlinear_p='tanh', type_px='bernoulli', type_qz='gaussianmarg', type_pz='gaussianmarg', prior_sd=1, init_sd=1e-2, var_smoothing=0, n_mixture=50, c=10, ell=1, average_activation = 0.1, sparsity_weight = 3):
self.constr = (__name__, inspect.stack()[0][3], locals())
self.n_x = n_x
self.n_y = n_y
self.n_hidden_q = n_hidden_q
self.n_z = n_z
self.n_hidden_p = n_hidden_p
self.dropout = False
self.nonlinear_q = nonlinear_q
self.nonlinear_p = nonlinear_p
self.type_px = type_px
self.type_qz = type_qz
self.type_pz = type_pz
self.prior_sd = prior_sd
self.var_smoothing = var_smoothing
self.n_mixture = n_mixture
self.c = c
self.ell = ell
self.average_activation = average_activation
self.sparsity_weight = sparsity_weight
if os.environ.has_key('c'):
self.c = float(os.environ['c'])
if os.environ.has_key('ell'):
self.ell = float(os.environ['ell'])
self.sv = 0
if os.environ.has_key('sv'):
self.sv = int(os.environ['sv'])
color.printBlue('apply supervision from layer ' + str(self.sv+1) + ' to end.')
self.super_to_mean = False
if os.environ.has_key('super_to_mean') and bool(int(os.environ['super_to_mean'])) == True:
self.super_to_mean = True
color.printBlue('apply supervision to z_mean.')
self.train_residual = False
if os.environ.has_key('train_residual') and bool(int(os.environ['train_residual'])) == True:
self.train_residual = True
color.printBlue('Train residual wrt prior instead of the whole model.')
self.Lambda = 0
if os.environ.has_key('Lambda'):
self.Lambda = float(os.environ['Lambda'])
self.sigma_square = 1
if os.environ.has_key('sigma_square'):
self.sigma_square = float(os.environ['sigma_square'])
if os.environ.has_key('dropout'):
self.dropout = bool(int(os.environ['dropout']))
color.printBlue('c = ' + str(self.c) + ' , ell = ' + str(self.ell) + ' , sigma_square = ' + str(self.sigma_square))
# Init weights
v, w = self.init_w(1e-2)
for i in v: v[i] = shared32(v[i])
for i in w: w[i] = shared32(w[i])
if not self.super_to_mean:
W = shared32(np.zeros((sum(n_hidden_q[self.sv:])+1, n_y)))
#print 'apply supervision from', self.sv+1, ' to end.'
else:
W = shared32(np.zeros((n_z+1, n_y)))
#print 'apply supervison to z_mean'
self.v = v
self.v['W'] = W
#print 'dimension of the prediction model: ', self.v['W'].get_value().shape
self.w = w
super(GPUVAE_MM_Z_X, self).__init__(get_optimizer)
def factors(self, x, z, A):
v = self.v # parameters of recognition model.
w = self.w # parameters of generative model.
'''
z is unused
x['x'] is the data
The names of dict z[...] may be confusing here: the latent variable z is not included in the dict z[...],
        but implicitly computed from epsilon and parameters in w.
z is computed with g(.) from eps and variational parameters
let logpx be the generative model density: log p(x|z) where z=g(.)
let logpz be the prior of Z plus the entropy of q(z|x): logp(z) + H_q(z|x)
So the lower bound L(x) = logpx + logpz
let logpv and logpw be the (prior) density of the parameters
'''
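        # Restating the docstring above in symbols: with z = g(eps; v) drawn via the
        # reparameterization below, this method returns the pieces of the variational
        # lower bound
        #     L(x) = E_q[ log p(x|z) ] + E_q[ log p(z) ] - E_q[ log q(z|x) ]
        # as (logpx, logpz, logqz), together with the max-margin `cost` and the
        # `sparsity_penalty`; the caller combines them into the final objective.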
# Compute q(z|x)
hidden_q = [x['x']]
hidden_q_s = [x['x']]
def f_softplus(x): return T.log(T.exp(x) + 1)# - np.log(2)
def f_rectlin(x): return x*(x>0)
def f_rectlin2(x): return x*(x>0) + 0.01 * x
nonlinear = {'tanh': T.tanh, 'sigmoid': T.nnet.sigmoid, 'softplus': f_softplus, 'rectlin': f_rectlin, 'rectlin2': f_rectlin2}
nonlinear_q = nonlinear[self.nonlinear_q]
nonlinear_p = nonlinear[self.nonlinear_p]
#rng = rng_curand.CURAND_RandomStreams(0)
import theano.tensor.shared_randomstreams
rng = theano.tensor.shared_randomstreams.RandomStreams(0)
# TOTAL HACK
#hidden_q.append(nonlinear_q(T.dot(v['scale0'], A) * T.dot(w['out_w'].T, hidden_q[-1]) + T.dot(v['b0'], A)))
#hidden_q.append(nonlinear_q(T.dot(v['scale1'], A) * T.dot(w['w1'].T, hidden_q[-1]) + T.dot(v['b1'], A)))
for i in range(len(self.n_hidden_q)):
hidden_q.append(nonlinear_q(T.dot(v['w'+str(i)], hidden_q[-1]) + T.dot(v['b'+str(i)], A)))
hidden_q_s.append(T.nnet.sigmoid(T.dot(v['w'+str(i)], hidden_q_s[-1]) + T.dot(v['b'+str(i)], A)))
if self.dropout:
hidden_q[-1] *= 2. * (rng.uniform(size=hidden_q[-1].shape, dtype='float32') > .5)
hidden_q_s[-1] *= 2. * (rng.uniform(size=hidden_q_s[-1].shape, dtype='float32') > .5)
'''
print 'mm_model'
for (d, xx) in x.items():
print d
'''
#print 'x', x['mean_prior'].type
#print 'T', (T.dot(v['mean_w'], hidden_q[-1]) + T.dot(v['mean_b'], A)).type
if not self.train_residual:
q_mean = T.dot(v['mean_w'], hidden_q[-1]) + T.dot(v['mean_b'], A)
else:
q_mean = x['mean_prior'] + T.dot(v['mean_w'], hidden_q[-1]) + T.dot(v['mean_b'], A)
#q_mean = T.dot(v['mean_w'], hidden_q[-1]) + T.dot(v['mean_b'], A)
if self.type_qz == 'gaussian' or self.type_qz == 'gaussianmarg':
q_logvar = T.dot(v['logvar_w'], hidden_q[-1]) + T.dot(v['logvar_b'], A)
else: raise Exception()
ell = cast32(self.ell)
self.param_c = shared32(0)
sv = self.sv
a_a = cast32(self.average_activation)
s_w = cast32(self.sparsity_weight)
def activate():
res = 0
if self.super_to_mean:
lenw = len(v['W'].get_value())
res += T.dot(v['W'][:-1,:].T, q_mean)
res += T.dot(v['W'][lenw-1:lenw,:].T, A)
else:
lenw = len(v['W'].get_value())
for (hi, hidden) in enumerate(hidden_q[1+sv:]):
res += T.dot(v['W'][sum(self.n_hidden_q[sv:sv+hi]):sum(self.n_hidden_q[sv:sv+hi+1]),:].T, hidden)
res += T.dot(v['W'][lenw-1:lenw,:].T, A)
return res
predy = T.argmax(activate(), axis=0)
# function for distribution q(z|x)
theanofunc = lazytheanofunc('warn', mode='FAST_RUN')
self.dist_qz['z'] = theanofunc([x['x'], x['mean_prior']] + [A], [q_mean, q_logvar])
self.dist_qz['hidden'] = theanofunc([x['x'], x['mean_prior']] + [A], hidden_q[1:])
self.dist_qz['predy'] = theanofunc([x['x'], x['mean_prior']] + [A], predy)
# compute cost (posterior regularization).
true_resp = (activate() * x['y']).sum(axis=0, keepdims=True)
T.addbroadcast(true_resp, 0)
cost = self.param_c * (ell * (1-x['y']) + activate() - true_resp).max(axis=0).sum() \
+ self.Lambda * (v['W'] * v['W']).sum()
# compute the sparsity penalty
sparsity_penalty = 0
for i in range(1, len(hidden_q_s)):
sparsity_penalty += (a_a*T.log(a_a/(hidden_q_s[i].mean(axis=1))) + (1-a_a)*T.log((1-a_a)/(1-(hidden_q_s[i].mean(axis=1))))).sum(axis=0)
sparsity_penalty *= s_w
# Compute virtual sample
eps = rng.normal(size=q_mean.shape, dtype='float32')
_z = q_mean + T.exp(0.5 * q_logvar) * eps
# Compute log p(x|z)
hidden_p = [_z]
for i in range(len(self.n_hidden_p)):
hidden_p.append(nonlinear_p(T.dot(w['w'+str(i)], hidden_p[-1]) + T.dot(w['b'+str(i)], A)))
if self.dropout:
hidden_p[-1] *= 2. * (rng.uniform(size=hidden_p[-1].shape, dtype='float32') > .5)
if self.type_px == 'bernoulli':
p = T.nnet.sigmoid(T.dot(w['out_w'], hidden_p[-1]) + T.dot(w['out_b'], A))
_logpx = - T.nnet.binary_crossentropy(p, x['x'])
self.dist_px['x'] = theanofunc([_z] + [A], p)
elif self.type_px == 'gaussian':
x_mean = T.dot(w['out_w'], hidden_p[-1]) + T.dot(w['out_b'], A)
x_logvar = T.dot(w['out_logvar_w'], hidden_p[-1]) + T.dot(w['out_logvar_b'], A)
_logpx = ap.logpdfs.normal2(x['x'], x_mean, x_logvar)
self.dist_px['x'] = theanofunc([_z] + [A], [x_mean, x_logvar])
elif self.type_px == 'bounded01':
x_mean = T.nnet.sigmoid(T.dot(w['out_w'], hidden_p[-1]) + T.dot(w['out_b'], A))
x_logvar = T.dot(w['out_logvar_b'], A)
_logpx = ap.logpdfs.normal2(x['x'], x_mean, x_logvar)
# Make it a mixture between uniform and Gaussian
w_unif = T.nnet.sigmoid(T.dot(w['out_unif'], A))
_logpx = T.log(w_unif + (1-w_unif) * T.exp(_logpx))
self.dist_px['x'] = theanofunc([_z] + [A], [x_mean, x_logvar])
else: raise Exception("")
# Note: logpx is a row vector (one element per sample)
logpx = T.dot(shared32(np.ones((1, self.n_x))), _logpx) # logpx = log p(x|z,w)
# log p(z) (prior of z)
if self.type_pz == 'gaussianmarg':
if not self.train_residual:
logpz = -0.5 * (np.log(2 * np.pi * self.sigma_square) + ((q_mean-x['mean_prior'])**2 + T.exp(q_logvar))/self.sigma_square).sum(axis=0, keepdims=True)
else:
logpz = -0.5 * (np.log(2 * np.pi * self.sigma_square) + (q_mean**2 + T.exp(q_logvar))/self.sigma_square).sum(axis=0, keepdims=True)
elif self.type_pz == 'gaussian':
logpz = ap.logpdfs.standard_normal(_z).sum(axis=0, keepdims=True)
elif self.type_pz == 'mog':
pz = 0
for i in range(self.n_mixture):
pz += T.exp(ap.logpdfs.normal2(_z, T.dot(w['mog_mean'+str(i)], A), T.dot(w['mog_logvar'+str(i)], A)))
logpz = T.log(pz).sum(axis=0, keepdims=True) - self.n_z * np.log(float(self.n_mixture))
elif self.type_pz == 'laplace':
logpz = ap.logpdfs.standard_laplace(_z).sum(axis=0, keepdims=True)
elif self.type_pz == 'studentt':
logpz = ap.logpdfs.studentt(_z, T.dot(T.exp(w['logv']), A)).sum(axis=0, keepdims=True)
else:
raise Exception("Unknown type_pz")
# loq q(z|x) (entropy of z)
if self.type_qz == 'gaussianmarg':
logqz = - 0.5 * (np.log(2 * np.pi) + 1 + q_logvar).sum(axis=0, keepdims=True)
elif self.type_qz == 'gaussian':
logqz = ap.logpdfs.normal2(_z, q_mean, q_logvar).sum(axis=0, keepdims=True)
else: raise Exception()
# [new part] Fisher divergence of latent variables
if self.var_smoothing > 0:
dlogq_dz = T.grad(logqz.sum(), _z) # gives error when using gaussianmarg instead of gaussian
dlogp_dz = T.grad((logpx + logpz).sum(), _z)
FD = 0.5 * ((dlogq_dz - dlogp_dz)**2).sum(axis=0, keepdims=True)
# [end new part]
logqz -= self.var_smoothing * FD
# Note: logpv and logpw are a scalars
if True:
def f_prior(_w, prior_sd=self.prior_sd):
return ap.logpdfs.normal(_w, 0, prior_sd).sum()
else:
def f_prior(_w, prior_sd=self.prior_sd):
return ap.logpdfs.standard_laplace(_w / prior_sd).sum()
return logpx, logpz, logqz, cost, sparsity_penalty
# Generate epsilon from prior
def gen_eps(self, n_batch):
z = {'eps': np.random.standard_normal(size=(self.n_z, n_batch)).astype('float32')}
return z
# Generate variables
def gen_xz_prior(self, x, z, mean_prior, sigma_square, n_batch):
x, z = ndict.ordereddicts((x, z))
A = np.ones((1, n_batch)).astype(np.float32)
for i in z: z[i] = z[i].astype(np.float32)
for i in x: x[i] = x[i].astype(np.float32)
tmp = np.random.standard_normal(size=(self.n_z, n_batch)).astype(np.float32)
z['z'] = tmp * np.sqrt(sigma_square) + mean_prior
if self.type_px == 'bernoulli':
x['x'] = self.dist_px['x'](*([z['z']] + [A]))
elif self.type_px == 'bounded01' or self.type_px == 'gaussian':
x_mean, x_logvar = self.dist_px['x'](*([z['z']] + [A]))
if not x.has_key('x'):
x['x'] = np.random.normal(x_mean, np.exp(x_logvar/2))
if self.type_px == 'bounded01':
x['x'] = np.maximum(np.zeros(x['x'].shape), x['x'])
x['x'] = np.minimum(np.ones(x['x'].shape), x['x'])
else: raise Exception("")
return x
# Generate variables
def gen_xz(self, x, z, n_batch):
x, z = ndict.ordereddicts((x, z))
A = np.ones((1, n_batch)).astype(np.float32)
for i in z: z[i] = z[i].astype(np.float32)
for i in x: x[i] = x[i].astype(np.float32)
_z = {}
# If x['x'] was given but not z['z']: generate z ~ q(z|x)
if x.has_key('x') and not z.has_key('z'):
q_mean, q_logvar = self.dist_qz['z'](*([x['x'], x['mean_prior']] + [A]))
q_hidden = self.dist_qz['hidden'](*([x['x'], x['mean_prior']] + [A]))
predy = self.dist_qz['predy'](*([x['x'], x['mean_prior']] + [A]))
_z['mean'] = q_mean
_z['logvar'] = q_logvar
_z['hidden'] = q_hidden
_z['predy'] = predy
# Require epsilon
if not z.has_key('eps'):
eps = self.gen_eps(n_batch)['eps']
z['z'] = q_mean + np.exp(0.5 * q_logvar) * eps
elif not z.has_key('z'):
if self.type_pz in ['gaussian','gaussianmarg']:
z['z'] = np.random.standard_normal(size=(self.n_z, n_batch)).astype(np.float32)
elif self.type_pz == 'laplace':
z['z'] = np.random.laplace(size=(self.n_z, n_batch)).astype(np.float32)
elif self.type_pz == 'studentt':
z['z'] = np.random.standard_t(np.dot(np.exp(self.w['logv'].get_value()), A)).astype(np.float32)
elif self.type_pz == 'mog':
i = np.random.randint(self.n_mixture)
loc = np.dot(self.w['mog_mean'+str(i)].get_value(), A)
scale = np.dot(np.exp(.5*self.w['mog_logvar'+str(i)].get_value()), A)
z['z'] = np.random.normal(loc=loc, scale=scale).astype(np.float32)
else:
raise Exception('Unknown type_pz')
# Generate from p(x|z)
if self.type_px == 'bernoulli':
p = self.dist_px['x'](*([z['z']] + [A]))
_z['x'] = p
if not x.has_key('x'):
x['x'] = np.random.binomial(n=1,p=p)
elif self.type_px == 'bounded01' or self.type_px == 'gaussian':
x_mean, x_logvar = self.dist_px['x'](*([z['z']] + [A]))
_z['x'] = x_mean
if not x.has_key('x'):
x['x'] = np.random.normal(x_mean, np.exp(x_logvar/2))
if self.type_px == 'bounded01':
x['x'] = np.maximum(np.zeros(x['x'].shape), x['x'])
x['x'] = np.minimum(np.ones(x['x'].shape), x['x'])
else: raise Exception("")
return x, z, _z
def gen_xz_prior11(self, x, z, mean_prior, sigma_square, n_batch):
x, z = ndict.ordereddicts((x, z))
A = np.ones((1, n_batch)).astype(np.float32)
z['z'] = mean_prior.astype(np.float32)
if self.type_px == 'bernoulli':
x['x'] = self.dist_px['x'](*([z['z']] + [A]))
elif self.type_px == 'bounded01' or self.type_px == 'gaussian':
x_mean, x_logvar = self.dist_px['x'](*([z['z']] + [A]))
if not x.has_key('x'):
x['x'] = np.random.normal(x_mean, np.exp(x_logvar/2))
if self.type_px == 'bounded01':
x['x'] = np.maximum(np.zeros(x['x'].shape), x['x'])
x['x'] = np.minimum(np.ones(x['x'].shape), x['x'])
else: raise Exception("")
return x
def variables(self):
z = {}
# Define observed variables 'x'
x = {'x': T.fmatrix('x'), 'mean_prior': T.fmatrix('mean_prior'), 'y': T.fmatrix('y'), }
#x = {'x': T.fmatrix('x'), 'y': T.fmatrix('y'), }
return x, z
def init_w(self, std=1e-2):
def rand(size):
if len(size) == 2 and size[1] > 1:
return np.random.normal(0, 1, size=size) / np.sqrt(size[1])
return np.random.normal(0, std, size=size)
v = {}
#v['scale0'] = np.ones((self.n_hidden_q[0], 1))
#v['scale1'] = np.ones((self.n_hidden_q[0], 1))
v['w0'] = rand((self.n_hidden_q[0], self.n_x))
v['b0'] = rand((self.n_hidden_q[0], 1))
for i in range(1, len(self.n_hidden_q)):
v['w'+str(i)] = rand((self.n_hidden_q[i], self.n_hidden_q[i-1]))
v['b'+str(i)] = rand((self.n_hidden_q[i], 1))
v['mean_w'] = rand((self.n_z, self.n_hidden_q[-1]))
v['mean_b'] = rand((self.n_z, 1))
if self.type_qz in ['gaussian','gaussianmarg']:
v['logvar_w'] = np.zeros((self.n_z, self.n_hidden_q[-1]))
v['logvar_b'] = np.zeros((self.n_z, 1))
w = {}
if self.type_pz == 'mog':
for i in range(self.n_mixture):
w['mog_mean'+str(i)] = rand((self.n_z, 1))
w['mog_logvar'+str(i)] = rand((self.n_z, 1))
if self.type_pz == 'studentt':
w['logv'] = np.zeros((self.n_z, 1))
if len(self.n_hidden_p) > 0:
w['w0'] = rand((self.n_hidden_p[0], self.n_z))
w['b0'] = rand((self.n_hidden_p[0], 1))
for i in range(1, len(self.n_hidden_p)):
w['w'+str(i)] = rand((self.n_hidden_p[i], self.n_hidden_p[i-1]))
w['b'+str(i)] = rand((self.n_hidden_p[i], 1))
w['out_w'] = rand((self.n_x, self.n_hidden_p[-1]))
w['out_b'] = np.zeros((self.n_x, 1))
if self.type_px == 'gaussian':
w['out_logvar_w'] = rand((self.n_x, self.n_hidden_p[-1]))
w['out_logvar_b'] = np.zeros((self.n_x, 1))
if self.type_px == 'bounded01':
w['out_logvar_b'] = np.zeros((self.n_x, 1))
w['out_unif'] = np.zeros((self.n_x, 1))
else:
w['out_w'] = rand((self.n_x, self.n_z))
w['out_b'] = np.zeros((self.n_x, 1))
if self.type_px == 'gaussian':
w['out_logvar_w'] = rand((self.n_x, self.n_z))
w['out_logvar_b'] = np.zeros((self.n_x, 1))
if self.type_px == 'bounded01':
w['out_logvar_b'] = np.zeros((self.n_x, 1))
w['out_unif'] = np.zeros((self.n_x, 1))
return v, w
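# --- Hedged construction sketch (added for illustration; not part of the original code) ---
# The layer sizes and hyper-parameters below are hypothetical; they only
# illustrate the constructor signature defined above. `get_optimizer` must be
# supplied by the caller.
def _example_build_model(get_optimizer):
    return GPUVAE_MM_Z_X(get_optimizer,
                         n_x=784, n_y=10,
                         n_hidden_q=(500, 500), n_z=50, n_hidden_p=(500, 500),
                         nonlinear_q='softplus', nonlinear_p='softplus',
                         type_px='bernoulli', type_qz='gaussianmarg',
                         type_pz='gaussianmarg', prior_sd=1)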
|
Pizza Hut Just Hired a Robot Waiter. Should You be Concerned?
Next time you have these interactions at Pizza Hut, you just might be chatting with a robot — by which I mean an actual, mechanized humanoid machine, not a really bored college student.
SoftBank is bringing its (admittedly adorable) humanoid bot, Pepper, to Asian Pizza Hut storefronts by the end of 2016. Using MasterCard’s MasterPass technology, Pepper will be able to process customers’ full orders, from greeting to checkout.
What’s more, Pepper will provide customized recommendations and remind patrons about ongoing specials and deals. The robot’s even got human-like hand gestures and a friendly intonation to help make the whole interaction a little less creepy.
Pizza Hut is by no means the only company outsourcing its minimum-wage jobs to technology.
Last month, China’s Taste and Aroma restaurant unveiled a fleet of robot waiters — although they’re much less high-tech than Pepper, only carrying predefined orders along set paths.
Hotels in Japan and Belgium employ robotic staff, too, and TEDxSydney welcomed a tiny robot usher to the famous Opera House.
And this kind of technological outsourcing isn’t just happening abroad.
You’re probably already familiar with the new(ish) self-checkout lanes in many American grocery stores. And in response to recent minimum wage hikes, Wendy’s and McDonald’s are planning to install labor-reducing, self-serve kiosks in U.S. locations.
After all, it’s simple math: Although a robot’s one-time setup expense may be high, it’s quickly more cost-effective than paying a flesh-and-blood human being $15 per hour.
Plus, robots are infinitely more reliable than people. They never wake up with a headache and arrive late to work, and they don’t require health care, time off or incentives to do the best job they can.
Whether or not you’d be comfortable giving your order to a humanoid automaton, we’re willing to bet you’re not comfortable handing over your job to one.
Servers at nice restaurants might be safe for a little while longer. Some Chinese robot waitstaff have shown an unfortunate tendency to deliver patrons’ orders… straight into their laps. Not exactly service with a smile.
But with big-name companies making concrete plans to roll out automation technology this year, it’s not unreasonable for fast-food workers to start getting concerned about their jobs.
Fortunately, you probably didn’t want to sling fries for the rest of your life anyway — minimum-wage food industry jobs tend to be pretty boring and thankless, in general.
If you’re looking for ways to prepare for the coming robot takeover, here are some resources for you to consider.
It might be time to go back to school, which can be expensive. Check out these 100 college scholarships to help you get educated on the cheap (or free) — and don’t forget that even prestigious schools might offer generous need-based aid.
If you really don’t like the idea of a traditional, four-year degree, check out these six alternative certifications with gainful prospects.
Or, skip the schooling entirely and strike out on your own. Here’s our guide to getting your dream freelance business off the ground.
And even if none of these options are on the table for you right now, you can look into other high-paying retail jobs. Luckily, it looks like they’re still working out some pretty major kinks on robotic clothes-folding technology.
Your Turn: Are you prepared for the coming robot takeover? What other jobs are threatened by automation?
Jamie Cattanach (@jamiecattanach) is a staff writer at The Penny Hoarder. Her creative writing has been featured in DMQ Review, Sweet: A Literary Confection and elsewhere. She’s really glad that robots can’t write good articles or poetry… yet. |
#Python 2.3 or higher required
from xml import sax
from xml.dom import EMPTY_NAMESPACE as NULL_NAMESPACE
from xml.dom import EMPTY_PREFIX as NULL_PREFIX
from xml.dom import XML_NAMESPACE
from xml.dom import Node
from Ft.Xml import Domlette
START_DOCUMENT = 1
END_DOCUMENT = 2
START_ELEMENT = 3
END_ELEMENT = 4
CHARACTER_DATA = 10
COMMENT = 11
PI = 12
#
# namespace_mixin is a utility that helps manage namespace prefix mappings
#
class namespace_mixin:
def __init__(self):
self._ns_prefix = {XML_NAMESPACE: [u'xml'], NULL_NAMESPACE: [NULL_PREFIX]}
self._prefix_ns = {u'xml': [XML_NAMESPACE], NULL_PREFIX: [NULL_NAMESPACE]}
return
def startPrefixMapping(self, prefix, uri):
self._ns_prefix.setdefault(uri, []).append(prefix)
self._prefix_ns.setdefault(prefix, []).append(uri)
return
def endPrefixMapping(self, prefix):
uri = self._prefix_ns[prefix].pop()
prefix = self._ns_prefix[uri].pop()
#assert prefix == uri
return
def name_to_qname(self, name):
#print self._ns_prefix
#print self._prefix_ns
uri, local = name
prefix = self._ns_prefix[uri][-1]
qname = ( prefix and ( prefix + u':' ) or '') + local
return qname
#
# Tenorsax framework: helps linerarize SAX logic
#
class tenorsax(namespace_mixin, sax.ContentHandler):
def __init__(self, consumer):
namespace_mixin.__init__(self)
self.consumer = consumer
self.dispatcher = consumer.top_dispatcher
self.curr_gen = None
return
def startElementNS(self, name, qname, attributes):
(ns, local) = name
qname = self.name_to_qname(name)
#print "Start element", (name, qname)
self.consumer.event = (START_ELEMENT, ns, local)
self.consumer.params = attributes
self.curr_gen = tenorsax.event_loop_body(self.dispatcher, self.curr_gen, self.consumer.event)
return
def endElementNS(self, name, qname):
(ns, local) = name
qname = self.name_to_qname(name)
#print "end element", (name, qname)
self.consumer.event = (END_ELEMENT, ns, local)
self.consumer.params = None
self.curr_gen = tenorsax.event_loop_body(self.dispatcher, self.curr_gen, self.consumer.event)
return
def characters(self, text):
#print "characters", text
self.consumer.event = (CHARACTER_DATA,)
self.consumer.params = text
self.curr_gen = tenorsax.event_loop_body(self.dispatcher, self.curr_gen, self.consumer.event)
return
def event_loop_body(dispatcher, curr_gen, event):
if curr_gen:
curr_gen = tenorsax.execute_delegate(curr_gen)
else:
curr_gen = tenorsax.check_for_delegate(dispatcher, event)
return curr_gen
event_loop_body = staticmethod(event_loop_body)
def execute_delegate(curr_gen):
try:
curr_gen.next()
except StopIteration:
curr_gen = None
return curr_gen
execute_delegate = staticmethod(execute_delegate)
def check_for_delegate(dispatcher, event):
if event[0] == START_ELEMENT:
end_condition = (END_ELEMENT,) + event[1:]
else:
end_condition = None
curr_gen = None
delegate_generator = dispatcher.get(event)
if delegate_generator:
#Fire up the generator
curr_gen = delegate_generator(end_condition)
try:
curr_gen.next()
except StopIteration:
print "immediate end"
#Immediate generator termination
curr_gen = None
return curr_gen
check_for_delegate = staticmethod(check_for_delegate)
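# --- Hedged usage sketch (added for illustration; not part of the original code) ---
# tenorsax resumes a delegate generator once per SAX event until it finishes.
# The consumer below, and the <item> element it dispatches on, are hypothetical:
# it collects the character data of each item element. Wire it up with
# parser.setContentHandler(tenorsax(_example_consumer())).
class _example_consumer:
    def __init__(self):
        def item_handler(end_condition):
            text = []
            while self.event != end_condition:
                if self.event[0] == CHARACTER_DATA:
                    text.append(self.params)
                yield None
            self.items.append(u''.join(text))
        self.top_dispatcher = {(START_ELEMENT, None, u'item'): item_handler}
        self.items = []
        self.event = None
        self.params = None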
#
#
#
from xml import sax
from xml.dom import XML_NAMESPACE, XMLNS_NAMESPACE
from xml.dom import EMPTY_NAMESPACE as NULL_NAMESPACE
from xml.dom import EMPTY_PREFIX as NULL_PREFIX
from Ft.Xml.Xslt import parser as XPatternParser
from Ft.Xml.Xslt.XPatterns import Patterns
from Ft.Xml.Xslt.XPatterns import Pattern
from Ft.Xml.Xslt.XPatterns import DocumentNodeTest
from Ft.Xml.XPath.ParsedNodeTest import LocalNameTest
from Ft.Xml.XPath.ParsedNodeTest import NamespaceTest
from Ft.Xml.XPath.ParsedNodeTest import QualifiedNameTest
from Ft.Xml.XPath.ParsedNodeTest import PrincipalTypeTest
DUMMY_DOCELEM = u'dummy'
START_STATE = 0
TOP = -1
ANY = '?'
#Used to figure out whether a wildcard event is user-specified,
#Or added internally
EXPLICIT, IMPLICIT = (True, False)
class xpattern_state_machine:
"""
A simple state machine that interprets XPatterns
A state is "live" when it represents the successful completion
of an XPattern.
"""
PARSER = XPatternParser.new()
def __init__(self, repr_xp, xp, nss):
self._state_table = {START_STATE: {}}
self._live_states = {}
self._ignored_subtree_states = []
self._substate_depth = 0
newest_state = START_STATE
last_state = START_STATE
for subpat in xp.patterns:
steps = subpat.steps[:]
steps.reverse()
for (step_count, (axis_type, node_test, ancestor)) in enumerate(steps):
if isinstance(node_test, DocumentNodeTest):
start_event = (1, None, None)
end_event = (0, None, None)
elif isinstance(node_test, LocalNameTest):
if node_test.nodeType == Node.ELEMENT_NODE:
start_event = (1, None, node_test._name)
end_event = (0, None, node_test._name)
else:
continue
elif isinstance(node_test, QualifiedNameTest):
if node_test.nodeType == Node.ELEMENT_NODE:
ns = nss[node_test._prefix]
start_event = (1, ns, node_test._localName)
end_event = (0, ns, node_test._localName)
else:
continue
elif isinstance(node_test, PrincipalTypeTest):
if node_test.nodeType == Node.ELEMENT_NODE:
start_event = (1, ANY, EXPLICIT)
end_event = (0, ANY, EXPLICIT)
else:
continue
elif isinstance(node_test, NamespaceTest):
if node_test.nodeType == Node.ELEMENT_NODE:
ns = nss[node_test._prefix]
start_event = (1, ns, ANY)
end_event = (0, ns, ANY)
else:
continue
else:
import sys; print >> sys.stderr, "Pattern step not supported:", (axis_type, node_test, ancestor), "Node test class", node_test.__class__
continue
if self._state_table[last_state].has_key(start_event):
top_state = self._state_table[last_state][start_event]
else:
newest_state += 1
top_state = newest_state
self._state_table[top_state] = {}
self._state_table[last_state][start_event] = top_state
self._state_table[top_state][end_event] = last_state
last_state = top_state
complete_state = top_state #The state representing completion of an XPattern
if step_count and not ancestor:
#Insert a state, which handles any child element
#Not explicitly matching some other state (so that
#/a/b/c is not a mistaken match for XPattern /a/c)
start_event = (1, ANY, IMPLICIT)
end_event = (0, ANY, IMPLICIT)
newest_state += 1
self._state_table[newest_state] = {}
self._state_table[parent_start_element_event][start_event] = newest_state
self._state_table[newest_state][end_event] = parent_start_element_event
self._ignored_subtree_states.append(newest_state)
parent_start_element_event = top_state
self._live_states[top_state] = repr_xp
#print self._state_table
#print self._live_states
self._state = START_STATE
self.entering_xpatterns = []
self.leaving_xpatterns = []
self.current_xpatterns = []
self.tree_depth = 0
self.depth_marks = []
return
def event(self, is_start, ns, local):
"""
        Register an event and effect any state transitions
found in the state table
"""
#We only have a chunk ready for the handler in
#the explicit case below
self.entering_xpatterns = []
self.leaving_xpatterns = []
self.tree_depth += is_start and 1 or -1
#print "event", (is_start, ns, local), self._state, self.tree_depth, self.depth_marks
#An end event is never significant unless we know we're expecting it
if not is_start and self.depth_marks and self.tree_depth != self.depth_marks[-1]:
return self._state
lookup_from = self._state_table[self._state]
#FIXME: second part should be an element node test "*", should not match, say, start document
if not lookup_from.has_key((is_start, ns, local)) and (ns, local) == (None, None):
return self._state
if lookup_from.has_key((is_start, ns, local)) or lookup_from.has_key((is_start, ns, ANY)):
try:
new_state = lookup_from[(is_start, ns, local)]
except KeyError:
new_state = lookup_from[(is_start, ns, ANY)]
if (new_state in self._live_states):
#Entering a defined XPattern chunk
self.entering_xpatterns.append(self._live_states[new_state])
self.current_xpatterns.append(self._live_states[new_state])
elif (self._state in self._live_states):
#Leaving a defined XPattern chunk
self.leaving_xpatterns.append(self.current_xpatterns.pop())
if is_start:
self.depth_marks.append(self.tree_depth - 1)
else:
self.depth_marks.pop()
self._state = new_state
elif lookup_from.has_key((is_start, ANY, EXPLICIT)):
new_state = lookup_from[(is_start, ANY, EXPLICIT)]
if (new_state in self._live_states):
#Entering a defined XPattern chunk
self.entering_xpatterns.append(self._live_states[new_state])
self.current_xpatterns.append(self._live_states[new_state])
elif (self._state in self._live_states):
#Leaving a defined XPattern chunk
self.leaving_xpatterns.append(self.current_xpatterns.pop())
self._state = new_state
if is_start:
self.depth_marks.append(self.tree_depth - 1)
else:
self.depth_marks.pop()
elif lookup_from.has_key((is_start, ANY, IMPLICIT)):
new_state = lookup_from[(is_start, ANY, IMPLICIT)]
self._state = new_state
if is_start:
self.depth_marks.append(self.tree_depth - 1)
else:
self.depth_marks.pop()
#print self.entering_xpatterns,self.leaving_xpatterns,self.current_xpatterns
return self._state
def status(self):
"""
1 if currently within an XPattern, 0 if not
Calling code might also want to just check
self.current_xpatterns directly
"""
return not not self.current_xpatterns
class xpattern_state_manager:
"""
    An aggregation of multiple state machines, one for each registered pattern
"""
PARSER = XPatternParser.new()
def __init__(self, xpatterns, nss):
if not hasattr(xpatterns[0], "match"):
self._xpatterns = [ (p, self.PARSER.parse(p)) for p in xpatterns ]
else:
            self._xpatterns = [ (repr(xp), xp) for xp in xpatterns ]
self._machines = [ xpattern_state_machine(repr_xp, xp, nss) for repr_xp, xp in self._xpatterns ]
return
def event(self, is_start, ns, local):
for machine in self._machines:
machine.event(is_start, ns, local)
#FIXME: Slow and clumsy
self.entering_xpatterns = []
self.leaving_xpatterns = []
self.current_xpatterns = []
for m in self._machines:
self.entering_xpatterns.extend(m.entering_xpatterns)
self.leaving_xpatterns.extend(m.leaving_xpatterns)
self.current_xpatterns.extend(m.current_xpatterns)
#print "manager event", (self.entering_xpatterns, self.leaving_xpatterns, self.current_xpatterns)
return
def status(self):
"""
1 if currently within an XPattern, 0 if not
Calling code might also want to just check
self.current_xpatterns directly
"""
return not not self.current_xpatterns
class sax2dom_chunker(namespace_mixin, sax.ContentHandler):
"""
Note: Ignores nodes prior to the document element, such as PIs and
text nodes. Collapses CDATA sections into plain text
Only designed to work if you set the feature
sax.handler.feature_namespaces
to 1 on the parser you use.
xpatterns - list of XPatterns. Only portions of the
tree within these patterns will be instantiated as DOM (as
chunks fed to chunk_consumer in sequence)
If None (the default, a DOM node will be created representing
the entire tree.
nss - a dictionary of prefix -> namespace name mappings used to
interpret XPatterns
chunk_consumer - a callable object taking a DOM node. It will be
invoked as each DOM chunk is prepared.
domimpl - DOM implemention to build, e.g. mindom (the default)
or cDomlette or pxdom (if you have the right third-party
packages installed).
owner_doc - for advanced uses, if you want to use an existing
DOM document object as the owner of all created nodes.
"""
def __init__(self,
xpatterns=None,
nss=None,
chunk_consumer=None,
domimpl=Domlette.implementation,
owner_doc=None,
):
namespace_mixin.__init__(self)
nss = nss or {}
#HINT: To use minidom
#domimpl = xml.dom.minidom.getDOMImplementation()
self._impl = domimpl
if isinstance(xpatterns, str) or isinstance(xpatterns, unicode) :
xpatterns = [xpatterns]
#print xpatterns
if owner_doc:
self._owner_doc = owner_doc
else:
try:
dt = self._impl.createDocumentType(DUMMY_DOCELEM, None, u'')
except AttributeError:
#Domlette doesn't need createDocumentType
dt = None
self._owner_doc = self._impl.createDocument(
DUMMY_DOCELEM, DUMMY_DOCELEM, dt)
#Create a docfrag to hold all the generated nodes.
root_node = self._owner_doc.createDocumentFragment()
self._nodeStack = [ root_node ]
self.state_machine = xpattern_state_manager(xpatterns, nss)
self._chunk_consumer = chunk_consumer
return
def get_root_node(self):
"""
Only useful if the user does not register trim paths
If so, then after SAX processing the user can call this
method to retrieve resulting DOM representing the entire
document
"""
return self._nodeStack[0]
#Overridden ContentHandler methods
def startDocument(self):
self.state_machine.event(1, None, None)
return
def endDocument(self):
self.state_machine.event(0, None, None)
return
def startElementNS(self, name, qname, attribs):
(ns, local) = name
qname = self.name_to_qname(name)
self.state_machine.event(1, ns, local)
if not self.state_machine.status():
return
new_element = self._owner_doc.createElementNS(ns, qname or local)
#No, "for aname in attributes" not possible because
        #AttributeListImpl doesn't play by those rules :-(
for ((attr_ns, lname), value) in attribs.items():
if attr_ns is not None:
attr_qname = attribs.getQNameByName((attr_ns, lname))
else:
attr_qname = lname
attr = self._owner_doc.createAttributeNS(
attr_ns, attr_qname)
attr.value = value
new_element.setAttributeNodeNS(attr)
self._nodeStack.append(new_element)
return
def endElementNS(self, name, qname):
(ns, local) = name
qname = self.name_to_qname(name)
self.state_machine.event(0, ns, local)
if not self.state_machine.status():
if (self._chunk_consumer and
self.state_machine.leaving_xpatterns):
#Complete the element being closed because it
#Is the last bit of a DOM to be fed to the consumer
new_element = self._nodeStack[TOP]
del self._nodeStack[TOP]
self._nodeStack[TOP].appendChild(new_element)
#Feed the consumer
self._chunk_consumer(self._nodeStack[0])
#Start all over with a new doc frag so the old
#One's memory can be reclaimed
root_node = self._owner_doc.createDocumentFragment()
self._nodeStack = [ root_node ]
return
new_element = self._nodeStack[TOP]
del self._nodeStack[TOP]
self._nodeStack[TOP].appendChild(new_element)
return
def processingInstruction(self, target, data):
if self.state_machine.status():
pi = self._owner_doc.createProcessingInstruction(
target, data)
self._nodeStack[TOP].appendChild(pi)
return
def characters(self, chars):
if self.state_machine.status():
new_text = self._owner_doc.createTextNode(chars)
self._nodeStack[TOP].appendChild(new_text)
return
#Overridden LexicalHandler methods
def comment(self, text):
if self.state_machine.status():
new_comment = self._owner_doc.createComment(text)
self._nodeStack[TOP].appendChild(new_comment)
return
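# --- Hedged usage sketch (added for illustration; not part of the original code) ---
# Drives the chunker with a namespace-aware SAX parser. The XPattern, the
# consumer and the input file name are hypothetical.
def _example_chunked_parse():
    def consume(docfrag):
        # docfrag is a DocumentFragment holding one matched /doc/item subtree
        print docfrag
    parser = sax.make_parser()
    parser.setFeature(sax.handler.feature_namespaces, True)
    handler = sax2dom_chunker(xpatterns=u'/doc/item', chunk_consumer=consume)
    parser.setContentHandler(handler)
    parser.parse('input.xml')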
from xml.sax.saxutils import XMLFilterBase
#FIXME: Set up to use actual PyXML if available
from harpia.amara.pyxml_standins import *
class normalize_text_filter(XMLFilterBase, LexicalHandler):
"""
SAX filter to ensure that contiguous white space nodes are
delivered merged into a single node
"""
def __init__(self, *args):
XMLFilterBase.__init__(self, *args)
self._accumulator = []
return
def _complete_text_node(self):
if self._accumulator:
XMLFilterBase.characters(self, ''.join(self._accumulator))
self._accumulator = []
return
def startDocument(self):
XMLFilterBase.startDocument(self)
return
def endDocument(self):
XMLFilterBase.endDocument(self)
return
def startElement(self, name, attrs):
self._complete_text_node()
XMLFilterBase.startElement(self, name, attrs)
return
def startElementNS(self, name, qname, attrs):
self._complete_text_node()
#A bug in Python 2.3 means that we can't just defer to parent, which is broken
#XMLFilterBase.startElementNS(self, name, qname, attrs)
self._cont_handler.startElementNS(name, qname, attrs)
return
def endElement(self, name):
self._complete_text_node()
XMLFilterBase.endElement(self, name)
return
def endElementNS(self, name, qname):
self._complete_text_node()
XMLFilterBase.endElementNS(self, name, qname)
return
def processingInstruction(self, target, body):
self._complete_text_node()
XMLFilterBase.processingInstruction(self, target, body)
return
def comment(self, body):
self._complete_text_node()
#No such thing as an XMLFilterBase.comment :-(
#XMLFilterBase.comment(self, body)
self._cont_handler.comment(body)
return
def characters(self, text):
self._accumulator.append(text)
return
def ignorableWhitespace(self, ws):
        self._accumulator.append(ws)
return
#Must be overridden because of a bug in Python 2.0 through 2.4
#And even still in PyXML 0.8.4. Missing "return"
def resolveEntity(self, publicId, systemId):
return self._ent_handler.resolveEntity(publicId, systemId)
# Enhancement suggested by James Kew:
# Override XMLFilterBase.parse to connect the LexicalHandler
# Can only do this by setting the relevant property
# May throw SAXNotSupportedException
def parse(self, source):
#import inspect; import pprint; pprint.pprint(inspect.stack())
self._parent.setProperty(property_lexical_handler, self)
# Delegate to XMLFilterBase for the rest
XMLFilterBase.parse(self, source)
return
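# --- Hedged usage sketch (added for illustration; not part of the original code) ---
# The filter wraps an ordinary parser so that downstream handlers (such as the
# sax2dom_chunker above) receive contiguous text as a single characters() event.
# The XPattern and file name are hypothetical.
def _example_filtered_parse(chunk_consumer):
    filtered = normalize_text_filter(sax.make_parser())
    filtered.setFeature(sax.handler.feature_namespaces, True)
    filtered.setContentHandler(sax2dom_chunker(xpatterns=u'/doc/item',
                                               chunk_consumer=chunk_consumer))
    filtered.parse('input.xml')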
#
# From xml.dom
#
#ELEMENT_NODE = 1
#ATTRIBUTE_NODE = 2
#TEXT_NODE = 3
#CDATA_SECTION_NODE = 4
#ENTITY_REFERENCE_NODE = 5
#ENTITY_NODE = 6
#PROCESSING_INSTRUCTION_NODE = 7
#COMMENT_NODE = 8
#DOCUMENT_NODE = 9
#DOCUMENT_TYPE_NODE = 10
#DOCUMENT_FRAGMENT_NODE = 11
#NOTATION_NODE = 12
|
This document serves as a set of guidelines to inform decision making within VPSB Information Technology Services (ITS) as well as a guide for community members to better understand the support services provided by ITS. The document is limited in scope in that it sets official policy for providing help through the VPSB Help Desk & Support Center (ITSHD).
By setting down these policies and channeling requests for help through ITSHD, we hope to provide better service, have an accurate profile of user training needs, and demonstrate the appropriate level of staff needed to handle the volume of requests. Through tracking the kinds of questions received at Support Center, ITS plans to develop and offer timely and appropriate training sessions as well as a knowledgebase for reference by our users.
This agreement is intended to cover all VPSB departments, units, and programs.
The Support Center acts as a central point of contact for all technical support, including hardware and software questions and consulting, installations, networking, network connection requests, and troubleshooting. Faculty and staff should submit requests online or if the issue is critical by calling 337-239-1675. Please note that due to staffing limitations phone support may not be immediately available at all times.
Assistance will normally be available from 8:00 AM to 2:30 PM, Monday through Friday, except when the District is closed due to holidays, administrative closings, or inclement weather. Phone coverage may not be available at all times due to staffing limitations. Requests can be submitted online, 24 hours a day.
The IRM number from the device or, if the device has no IRM number, the IRM number from your computer.
A clear and specific description of the problem or request, including information regarding any error messages you may have received, screen shots, time of event, user logged in at the time.
Information Technology Services, using tracking software, will maintain records of incoming requests as they are received. If a user reaches a person by telephone, a Support Center staff member will try to determine the nature of the problem and refer the query to the appropriate specialist. Requests will be answered based on the “Priorities and Response Times” statement below; however, a Support Center staff member will make personal contact with the requestor within seven working days.
The Support Center will use the following guidelines in prioritizing requests and will strive to begin working on the problem within the target timeframe. Actual response times may be shorter or longer depending on the volume of requests at any one time.
Building or District-wide network or server outage.
Security breach affecting the well-being of staff or students.
Testing: Testing issues facing the whole district, a number of schools, or a whole school building.
Support Center will contact technicians for immediate response.
Incident that halts instruction/productivity where a functional workaround is not available.
Initial response within 4 working hours.
Incident may disrupt instruction/productivity where a functional workaround is available.
Hardware or software upgrade requests.
Changes to existing, working systems.
Initial response within five working days.
Initial response within ten working days.
* Target Response Time is defined as the time between receipt of the call and the time that a Support Center member begins working on the problem. Due to the wide diversity of problems that can occur and the methods needed to resolve them, response time is NOT defined as the time between the receipt of a call and problem resolution.
The target replacement cycle for district-owned computers is 4 years. This includes all administrative, staff, faculty, and student computers.
Computer equipment (e.g., iPads, iPods, and document cameras) originally acquired through grant funding or direct departmental funds will be replaced with suitable recycled systems and treated as secondary-placement equipment, if additional grant money or departmental funding is available.
Addition, transfer, or replacement of computer equipment within buildings must be requested through the appropriate building administrator or department head and is dependent upon funding availability.
Equipment must remain in the classroom and school to which it was originally assigned.
Requests to add new computing equipment to our existing inventory must be submitted to the Technology Administrator, through the appropriate building administrator or department director. These requests must explain where the funding for the request will come from. All approvals are contingent on an existing, approved, funding source.
In an effort to improve support and keep costs down, ITS selects computer configurations and models that will meet the computing needs of the average user for the term of the replacement cycle. ITS will upgrade components of the system as appropriate to keep it at a reasonable level of functionality for the term of the replacement cycle.
ITS will not assist in installing, using, or troubleshooting any products not approved or provided by the district. ITS reserves the right to remove any unsupported software and/or hardware if it is suspected the software and/or hardware causes a recurring problem on a district-owned system that ITS supports.
By clicking on this link and accessing the Helpdesk/IRM system, I agree that I have read and understand the policies outlined above. |
__author__ = 'Artur Barseghyan <[email protected]>'
__copyright__ = 'Copyright (c) 2013 Artur Barseghyan'
__license__ = 'GPL 2.0/LGPL 2.1'
__all__ = (
    'BaseVideoWidget', 'Video1x1Widget', 'Video2x2Widget', 'Video3x3Widget',
    'Video4x4Widget', 'Video5x5Widget'
)

from django.template.loader import render_to_string

from dash.base import BaseDashboardPluginWidget

# **********************************************************************
# *********************** Base Video widget plugin *********************
# **********************************************************************


class BaseVideoWidget(BaseDashboardPluginWidget):
    """
    Base video plugin widget.
    """
    media_css = (
        'css/dash_plugin_video.css',
    )

    def render(self, request=None):
        context = {'plugin': self.plugin}
        return render_to_string('video/render.html', context)

# **********************************************************************
# ************************** Specific widgets **************************
# **********************************************************************


class Video1x1Widget(BaseVideoWidget):
    """
    Video plugin 1x1 widget.
    """
    plugin_uid = 'video_1x1'


class Video2x2Widget(BaseVideoWidget):
    """
    Video plugin 2x2 widget.
    """
    plugin_uid = 'video_2x2'
    cols = 2
    rows = 2


class Video3x3Widget(BaseVideoWidget):
    """
    Video plugin 3x3 widget.
    """
    plugin_uid = 'video_3x3'
    cols = 3
    rows = 3


class Video4x4Widget(BaseVideoWidget):
    """
    Video plugin 4x4 widget.
    """
    plugin_uid = 'video_4x4'
    cols = 4
    rows = 4


class Video5x5Widget(BaseVideoWidget):
    """
    Video plugin 5x5 widget.
    """
    plugin_uid = 'video_5x5'
    cols = 5
    rows = 5
|
Another day of nothing, another day of emptiness. I’m only glad that you were able to reschedule for tomorrow. I firmly believe you’ll find it at least enlightening.
Thursday. It meant a Program Board meeting. A rather stressful meeting at that, one that lasted from 6:30 to 9pm. Honestly, it’s too much to handle sometimes. But I love it still.
You’re lying in bed next to me. Anything I need to say to you can be addressed directly to you at this moment in time. |
#!/usr/bin/python
# -*- coding: utf-8 -*-


class ForbRecord:
    """A counter record serialized as 'hit|key|ref1 ref2 ...'."""

    def __init__(self, line='', hit=0, key='', ref=''):
        if line != '':
            # Parse a serialized record: hit count, key, space-separated refs.
            (h, self.key, ref) = line.split('|', 2)
            self.hit = int(h)
            self.ref = set(ref.split())
            return
        if key != '':
            self.key = key
            if ref != '':
                self.ref = set(ref.split())
                if hit == 0:
                    self.hit = 1
                else:
                    self.hit = hit
                return
            else:
                self.ref = set()
                self.hit = hit
                return
        # Default: empty record.
        self.key = ''
        self.hit = 0
        self.ref = set()

    def __repr__(self):
        line = str(self.hit) + "|" + self.key + "|"
        if len(self.ref) > 0:
            for ref in self.ref:
                line += ref + " "
            return line[:-1]
        else:
            return line

    def __add__(self, other):
        result = ForbRecord()
        if self.key == '':
            if other.key == '':
                return result
        result.key = other.key
        if self.key != other.key:
            pass  # fixme: serious error (mismatched keys), but I don't know how to propagate it properly yet
        result.hit = self.hit + other.hit
        result.ref = self.ref | other.ref
        # print(self)
        # print(other)
        # print(result)
        # print("--------")
        return result

    def __cmp__(self, other):
        # Python 2 style ordering by hit count (cmp() does not exist in Python 3).
        return cmp(self.hit, other.hit)


if __name__ == '__main__':
    r1 = ForbRecord('173|laclef|nico fred')
    r2 = ForbRecord(key='laclef', ref='nico toto')
    print(r1)
    print(r2)
    print(r1 + r2)
|
I’m frequently asked to advise young people trying to break into the PR agency world. Given the extremely collaborative, service-oriented nature of our business, a high degree of emotional and social intelligence is a true asset for any aspiring agency pro.
Wondering if you’ve got the chops to function in a fast-paced environment where clients call the shots, colleagues test your patience, and elusive media stonewall your pitching attempts? I’ve devised this quick quiz to help assess your agency potential. You can thank me later.
If you answered “3” to all the questions, you clearly have the kind of emotional intelligence, collaborative spirit, and service orientation that makes a great PR agency professional. As a matter of fact, I might even want to hire you. Good luck on your career path, and don’t let the tools get you down.
PR Mama (a.k.a. Stephanie Smirnov) works at a mid-sized consumer PR firm in NYC. She chronicles the challenges of balancing life as a PR executive with being a wife and mother at her personal blog. You can connect with her on Twitter. |
# coding: utf-8
'''
python3
md_split2ulysses_xcall.py
2017-04-07 at 09:55 EDT
Will take a Markdown file as input, or read from the clipboard, and split it into
Ulysses sheets at the chosen heading level.
A new Ulysses group for these sheets will be created at root level.
'''
import subprocess
import json
import sys
import re
import urllib.parse

SPLIT_LEVEL = 2
ACCESS_TOKEN = "5a5887277e324caabb4e9229c79141b3"
XCALLAPP = "/Applications/xcall.app/Contents/MacOS/xcall"
X_NEW_GROUP = "ulysses://x-callback-url/new-group?name={name}"
X_NEW_SHEET = "ulysses://x-callback-url/new-sheet?groupId={id}&index={index}&text={text}"

arg1 = ""
if len(sys.argv) > 1:
    if sys.argv[1] != "":
        arg1 = sys.argv[1]


def main():
    if arg1 == "":
        md_text = read_from_clipboard()
    else:
        md_text = read_file(arg1)
    md_lines = md_text.splitlines()
    title = re.sub(r'^#+ ', '', md_lines[0])
    group_id = new_ul_group(title)
    pattern = '#' + '#?' * (SPLIT_LEVEL - 1) + " "
    break_points = []
    index = 1
    for line in md_lines[1:]:
        if re.match(pattern, line):
            print(line)
            break_points.append(index)
        index += 1
    break_points.append(len(md_lines))
    start = 0
    index = 0
    for end in break_points:
        print(str(index) + '. ' + md_lines[start])
        new_sheet(md_lines[start:end], group_id, index)
        start = end
        index += 1
    # View result group in Ulysses:
    url = "ulysses://x-callback-url/open-group?id=" + group_id
    subprocess.call(["open", url])
    # NOTE! subprocess.call() with "open" does not wait! So it should only be used as the last command!


def new_ul_group(title):
    print('New group = Splitted: ' + title)
    gr_name = urllib.parse.quote('Splitted: ' + title[:20])
    url = X_NEW_GROUP.format(name=gr_name)
    args = XCALLAPP + ' -url "' + url + '"'
    output = subprocess.check_output(args, shell=True)
    ret_dict = json.loads(output.decode('UTF-8'))
    return ret_dict['targetId']


def new_sheet(chunk, group_id, index):
    md_text = '\n'.join(chunk)
    url = X_NEW_SHEET.format(id=group_id, index=index, text=urllib.parse.quote(md_text))
    args = XCALLAPP + ' -url "' + url + '"'
    ret_code = subprocess.call(args, shell=True)
    if ret_code != 0:
        print('Error: ' + str(ret_code) + ': ' + chunk[1])


def read_from_clipboard():
    return subprocess.check_output(
        'pbpaste', env={'LANG': 'en_US.UTF-8'}).decode('utf-8')


def read_file(filename):
    # Not present in the original script, though main() calls it; a minimal
    # assumed implementation that reads the whole Markdown file as UTF-8 text.
    with open(filename, encoding='utf-8') as f:
        return f.read()


if __name__ == '__main__':
    main()
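# Usage sketch (assumptions: the file is saved as md_split2ulysses_xcall.py, Ulysses
# and the xcall helper exist at the XCALLAPP path above, and the clipboard fallback
# relies on macOS pbpaste):
#
#     python3 md_split2ulysses_xcall.py notes.md   # split a Markdown file
#     python3 md_split2ulysses_xcall.py            # split the current clipboard contents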
|
Use the best Lavish Alice discount codes and deals for a big discount when you check out at Lavish Alice. Catch the chance to spend much less with Lavish Alice promo codes. Today you can enjoy savings of up to 76% off by using our 96 Lavish Alice promo codes. All the coupon codes are valid for a limited time only. Try now! We are sure you will save both time and money!
Grab your savings today when you apply Lavish Alice best coupon code . The most groundbreaking shopping experience you are going to have, try it today.
Use this Lavish Alice best promo codes and save money at lavishalice.com . This sale will end soon . It's all about you and what you can get for the dollar.
Use lavishalice.com coupon codes, select your dream products and save more . Trust us when we say right now is the best time to buy . Bargains at these amazingly low prices won't last long.
Check out the popular deals when using this Lavish Alice best coupon code . Enter code at checkout.
Save big when you apply lavishalice.com coupons at lavishalice.com today . Thank you for being our loyal fans.
Use Lavish Alice best coupon codes, select your favorite items and save more at Lavish Alice . They are yours only if you want it.
To save more, use our Lavish Alice coupons . Thank you for shopping with us.
Use this lavishalice.com warehouse best coupon codes and save money at lavishalice.com . Your amazing products are waiting for you at check-out.
Shop and enjoy amazing discounts at lavishalice.com with the lavishalice.com promo code . Great stores . If you've been eyeing it for a while, now is the time to buy.
Receive 10% on your order by applying lavishalice.com promo codes . Time to go shopping.
Apply lavishalice.com discount codes for promotions on popular items . Don't eye it any longer.
Shop and enjoy amazing discounts with lavishalice.com coupons . Sale for limited time only . We have your interest at heart.
Enjoy amazing savings with lavishalice.com coupon at lavishalice.com . Hot specials don't last forever.
Shop and enjoy shocking discounts with our lavishalice.com discount codes . Extraordinary savings, only for a limited time . Our customers love good bargains and we know you do too.
Discount and lavishalice.com free shipping, the best of both worlds . Buy now, because these are the final days to save.
Check out the steep discounts at lavishalice.com with this lavishalice.com best coupon code . Be the first to shop before the deal ends . Be the first.
These promo codes won't last long, select your favorite items and use lavishalice.com best promo codes . Take action and make an excellent deal now.
Are you ready to get 30% off with a lavishalice.com coupon? Take action now . Such quality and price are hard to come by.
Use Lavish Alice best coupon codes at check out and get great deals . Makes you feel like shopping.
Save when using lavishalice.com best coupon codes while supplies last . Trust us when we say right now is the best time to buy.
For limited time only, lavishalice.com is offering lavishalice.com discount code to help you save big . Get yours now.
Choose your favorite items from lavishalice.com and use Lavish Alice best coupon . Click through to shop . Feel-good shopping.
Apply Lavish Alice best discount code for awesome deals at lavishalice.com . Remember to close the deal before it's too late . Goundbreaking sale with never before seen prices.
Use Lavish Alice best coupon, save huge on your order . Hurry before the deals are gone.
Apply Lavish Alice best discount codes at Lavish Alice . Get yours now.
Deals end soon, apply lavishalice.com best promo codes today . This deal expires soon, so check out now . Stack coupons for maximum savings.
Receive lavishalice.com coupon in the mail when you spend $1 . before this great sale ends.
With lavishalice.com promo codes, you can shop happy and worry less about your wallet . Be the first to discover the amazing bargains . Best sellers will be the first to go.
lavishalice.com promo codes is required for this promotion . This is shopping as it should be . Apply code at checkout to receive this offer.
Be budget savvy with this amazing deal using the Lavish Alice best promo code . More stores.
Use Lavish Alice best coupon codes while this promo lasts . Get it now or never . The time to make your purchase is now.
Be budget savvy with lavishalice.com coupon codes . Grab it now . Prices like this won't stay long, add to your cart now.
Find amazing items at hard-to-beat prices when you apply Lavish Alice best coupon codes . Be the first to know, first to shop, and first to save.
Apply lavishalice.com coupon to Lavish Alice . You will only find the best deals here . Act now while offer lasts.
For limited time only, lavishalice.com is offering lavishalice.com coupon codes to help you save big . Remember to finish your transaction . Your place to shop and discover amazing deals.
Click and enjoy 10% with this lavishalice.com coupon codes . Prices may vary daily.
Save big by using a lavishalice.com coupon at lavishalice.com . Make them yours now.
Enjoy 30% when you use our lavishalice.com promo codes for today only . Take action and make an excellent deal now . Created with your shopping experience in mind.
Use the lavishalice.com coupon code, be budget savvy at this lavishalice.com promotion . We help you find amazing deals so you can shop without a worry . Experience the lifestyle when you shop with us.
It's time to select and purchase your favorite items with Lavish Alice best coupon code . More value . Look no further than here for the most amazing deals.
Act Now . At lavishalice.com, it is currently at its best price ever . Check out now before this deal expires.
lavishalice.com coupon codes is needed for this deal . Big savings while they last . Hurry before the deals are gone.
Experience major savings with this great deal using this lavishalice.com promo codes . for today only . Check out now before this deal expires.
Get amazing savings on select items using our Lavish Alice best coupons . Don't hesitate any longer . At our site, you will always find the most amazing discounts.
Use lavishalice.com discount codes on your favorite items at lavishalice.com . Surround yourself with a world of happiness once you check out . Remember to close the deal before it's too late.
Choose your favorite items and apply Lavish Alice best coupon . surf today.
Applying this Lavish Alice best coupon codes, receive huge price discounts during this sale at lavishalice.com . Your place to shop and discover amazing deals.
It's time to select and purchase your favorite items with Lavish Alice best coupon code . This bargain is guaranteed to make you a happy customer.
Enjoy great deals with lavishalice.com best coupon at lavishalice.com . Sensational deals that you can only find on our site.
Lavish Alice best coupon could be used storewide at lavishalice.com . Be the first to discover the amazing bargains.
Use Lavish Alice best coupon codes and be on your way to great savings . today and take advantage of huge savings.
Use our lavishalice.com coupons to enjoy great savings . Remember to check out . This price is at its lowest ever.
It's time to buy your favorite items with Lavish Alice best coupons . These deals won't last, so make the purchase today . Final days to save.
Make your dream selections and apply Lavish Alice best promo code before the sale ends . Extraordinary savings, only today.
Get great Lavish Alice deals . This is where shopping meets fun . Take action now.
Take this budget-friendly offer and save money with our lavishalice.com coupon . Nothing feel as good as when you check out . Be the first to enjoy savings at unbeatable prices.
Excellent savings at Lavish Alice . Final days to save.
Great chance to save money with lavishalice.com coupons . Expect the unexpected . Check out now before this deal expires.
Save big by using a lavishalice.com coupon at lavishalice.com, because sale season is here.
Save 20% right now with Lavish Alice best coupons . Live life on full.
Check out Promos & Deals at Lavish Alice . Sale for limited time only . Thank you for trusting us.
Check out the popular deals when using this lavishalice.com online best coupon codes . Prices like this won't stay long, add to your cart now . Last chance to buy.
Check out the steep discounts at lavishalice.com with this lavishalice.com best promo codes . You will only find the best deals here . If you've been eyeing it for a while, now is the time to buy.
Take this great opportunity to save a ton of money using lavishalice.com coupon code . An exciting place for the whole family to shop.
Shop through lavishalice.com with this lavishalice.com best promo codes and enjoy this attractive price discounts . enjoy big savings . Seasonal sale for an extended time only.
Select your favorite products and save more with Lavish Alice best coupon codes . Bargains at these amazingly low prices won't last long.
With lavishalice.com coupon codes, you can shop happy and worry less about your wallet . This is where families shop.
Grab your savings today when you apply lavishalice.com promo code . Discover your favorite place to shop . Make them yours now.
It's time to select and purchase your favorite items with lavishalice.com best promo codes . Affordable and highly recommended by users, make your purchase today . It's now or never.
Experience major savings with this great deal at lavishalice.com . Remember, you have only a day left to redeem this offer . Thanks for choosing us to help you discovery amazing bargains.
lavishalice.com is now offering great discounts . This deal expires soon, so check out now.
Select your favorite products and save more with lavishalice.com discount codes . It is currently at its best price ever . When is the best time? Now.
Receive 20% select items when you use our lavishalice.com coupon . Created with your shopping experience in mind.
Treat yourself and your loved ones by using Lavish Alice best discount codes today . Experience the lifestyle when you shop with us . Click through to shop.
Make purchases on top sale items at Lavish Alice . It's now or never.
These promo codes won't last long, apply lavishalice.com best promo codes to your purchases before they expire . Don't hesitate any longer . Come and check it out.
Treat yourself when you use Lavish Alice best coupon codes . save money . Discover the difference at our website.
Save big by using lavishalice.com coupon code at lavishalice.com . Don't wait any longer . Get yours now.
Select your favorite products and save more with Lavish Alice . When is the best time? Now.
Select your favorite products and save more with lavishalice.com best promo codes . Remember that good deals are hard to come by.
Select your favorite items using Lavish Alice best coupon code while this promo lasts . Thank you for being our loyal fans.
Click and get 30% with our lavishalice.com coupon today . Enter code at checkout . If you've been eyeing it for a while, now is the time to buy.
For today time only, lavishalice.com is offering bargains at never-before-seen prices . Beat the crowd and start saving . Shop for what you want and we will help you close the deal.
Become budget savvy with Lavish Alice best coupons . Check out now before this deal expires . Act immediately before the sale ends.
Take advantage of the great deals and save even more using this lavishalice.com best coupon codes . The most groundbreaking shopping experience you are going to have, try it today . Serious shopping for serious shoppers.
Take this great opportunity to save big with lavishalice.com best coupon . Buy now, because these are the final days to save.
Enjoy great savings when you use lavishalice.com coupon code today . Apply code at checkout to receive this offer . They are yours only if you want it.
Great chance to save money with lavishalice.com pro shop best coupons . Buy now, instead of regret later.
With lavishalice.com promo codes, you can shop happy and worry less about your wallet . Beat the crowd and buy now . Feel-good shopping.
How to find and use Lavish Alice coupon?
TIPS: Watch the banner ads at Lavish Alice to find a list of all their current promotional offers. You can also sign up for the Lavish Alice email list to have special offers and coupons sent to your inbox as they become available. Additional promotions and coupons from Lavish Alice can be found on their social media pages. In addition you can find the best deals at Lavish Alice in the Clearance and Deal of The Day sections of their website.
STEP 1: Pick up your favorite items and add to your shopping cart at Lavish Alice. Make sure they meet the requirements of the Lavish Alice coupon prior to application.
STEP 2: Copy the Lavish Alice coupon code you want to use and paste the Lavish Alice promo code in the coupon box. Then click the "Apply" or "Submit" button.
STEP 3: If the Lavish Alice coupon code meets the requirements, your discount will be reflected in your order subtotal immediately. Be sure all discounts have been applied before continuing to the payment process. |
from datetime import datetime
from dino.exceptions import NoSuchRoomException
from flask import request
import logging
from dino.utils import b64e
from dino.utils.decorators import timeit
from dino.rest.resources.base import BaseResource
from dino import environ

logger = logging.getLogger(__name__)


class RoomsForUsersResource(BaseResource):
    def __init__(self):
        super(RoomsForUsersResource, self).__init__()
        self.last_cleared = datetime.utcnow()
        self.request = request

    def _do_get(self, user_id):
        output = list()
        channel_ids = dict()
        channel_names = dict()
        rooms = environ.env.db.rooms_for_user(user_id)
        for room_id, room_name in rooms.items():
            try:
                if room_id in channel_ids:
                    channel_id = channel_ids[room_id]
                else:
                    channel_id = environ.env.db.channel_for_room(room_id)
                    channel_ids[room_id] = channel_id
                if channel_id in channel_names:
                    channel_name = channel_names[channel_id]
                else:
                    channel_name = environ.env.db.get_channel_name(channel_id)
                    channel_names[channel_id] = channel_name
                output.append({
                    'room_id': room_id,
                    'room_name': b64e(room_name),
                    'channel_id': channel_id,
                    'channel_name': b64e(channel_name)
                })
            except NoSuchRoomException:
                # can ignore, already deleted or old cache value
                pass
        return output

    def do_get_with_params(self, user_id):
        return self._do_get(user_id)

    @timeit(logger, 'on_rest_rooms_for_users')
    def do_get(self):
        is_valid, msg, json = self.validate_json(self.request, silent=False)
        if not is_valid:
            logger.error('invalid json: %s' % msg)
            return dict()
        if 'users' not in json:
            return dict()
        logger.debug('GET request: %s' % str(json))
        output = dict()
        for user in json['users']:
            output[user] = self.do_get_with_params(user)
        return output

    def _get_lru_method(self):
        return self.do_get_with_params

    def _get_last_cleared(self):
        return self.last_cleared

    def _set_last_cleared(self, last_cleared):
        self.last_cleared = last_cleared
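# Illustrative request/response shape, inferred only from do_get() above rather than
# from dino's documented REST API: the resource expects a JSON body such as
#
#     {"users": ["1234", "5678"]}
#
# and returns, per user id, a list of dicts containing room_id, room_name (base64),
# channel_id, and channel_name (base64); rooms whose channel can no longer be
# resolved are silently skipped.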
|
Cynthia Coffman is a dedicated public servant, attorney, and leader. Her legal career began 22 years ago in the Georgia Attorney General’s Office. As a courtroom attorney, Cynthia defended the state’s juvenile justice system and public health department. A few years later, the opportunity to work as a lawyer for the 1996 Centennial Olympic Games in Atlanta turned into a life-changing experience. Following her initial assignment in Finance & Management Services, Cynthia went on to work in risk management during the Games. On July 27, 1996, a domestic terrorist detonated pipe bombs in Olympic Park during a celebratory concert. One innocent spectator was killed and scores more were badly injured. Cynthia acted as the primary liaison with the victims and their families, demonstrating her compassion and resolve to fight for justice. In 1997, on a post-Olympics visit to the Rocky Mountains, Cynthia decided to pursue her dream of moving to Colorado. She has been a proud and happy resident of the Centennial State for the last fifteen years.
Cynthia’s first job in Denver was working for the Colorado General Assembly’s Office of Legislative Council. She staffed the Senate Judiciary Committee and assisted with a study of the state’s adult parole system. After a brief time in private practice, Cynthia was recruited by Jane Norton to join her senior management team at the Colorado Department of Public Health and Environment. Cynthia served first as the agency’s Director of Legal & Regulatory Affairs and later as its Deputy Executive Director. She gained extensive experience with environmental issues related to air and water quality, hazardous waste regulation, and environmental health. On the public health side of the house, Cynthia worked on statewide bioterrorism and emergency planning efforts, disease control and prevention, maternal and child health programs, and hospital and health facility regulation.
In 2004, Cynthia moved to the State Capitol, where she had the honor of serving Colorado Governor Bill Owens as his Chief Counsel. Then, in March of 2005, newly appointed Attorney General John Suthers selected Cynthia as his Chief Deputy. She has proudly filled this role for the past 8 ½ years, acting as chief of staff and chief operating officer for the largest law firm in the State of Colorado.
Cynthia also serves as vice chairman of the Safe2Tell™ board of directors. Safe2Tell™ is a nonprofit organization that gives students in all Colorado schools the ability to prevent and report violence through anonymous phone, web, and text reports. She is currently a member of the Colorado Supreme Court Chief Justice Commission on the Legal Profession. In September 2012, Colorado Law Week recognized Cynthia’s accomplishments by naming her Best Public Sector Lawyer.
A native Missourian, Cynthia graduated from the University of Missouri-Columbia. She worked in development for children’s hospitals and pediatric research for several years before completing law school at Georgia State University in Atlanta. Cynthia is married to Colorado U.S. Representative Mike Coffman. |