#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# Imports
#
from future import standard_library
standard_library.install_aliases()
from builtins import object
import os
import requests
import sys
import urllib.request, urllib.parse, urllib.error
import xbmcgui
import xbmcplugin
import json

from roosterteeth_const import RESOURCES_PATH, HEADERS, LANGUAGE, convertToUnicodeString, log, \
    FIRST_MEMBER_ONLY_VIDEO_TITLE_PREFIX, ROOSTERTEETH_BASE_URL


#
# Main class
#
class Main(object):
    def __init__(self):
        # Get the command line arguments
        # Get the plugin url in plugin:// notation
        self.plugin_url = sys.argv[0]

        # Get the plugin handle as an integer number
        self.plugin_handle = int(sys.argv[1])

        # log("ARGV", repr(sys.argv))

        # Parse parameters...
        self.url = urllib.parse.parse_qs(urllib.parse.urlparse(sys.argv[2]).query)['url'][0]
        self.next_page_possible = urllib.parse.parse_qs(urllib.parse.urlparse(sys.argv[2]).query)[
            'next_page_possible'][0]
        self.show_serie_name = urllib.parse.parse_qs(urllib.parse.urlparse(sys.argv[2]).query)[
            'show_serie_name'][0]
        # log("self.next_page_possible", self.next_page_possible)

        # Make the next page url
        if self.next_page_possible == 'True':
            # Determine current item number, next item number, next_url
            pos_of_page = self.url.rfind('page=')
            # log("pos_of_page", pos_of_page)
            if pos_of_page >= 0:
                page_number_str = str(
                    self.url[pos_of_page + len('page='):pos_of_page + len('page=') + len('000')])
                page_number = int(page_number_str)
                self.page_number_next = page_number + 1
                if self.page_number_next >= 100:
                    page_number_next_str = str(self.page_number_next)
                elif self.page_number_next >= 10:
                    page_number_next_str = '0' + str(self.page_number_next)
                else:
                    page_number_next_str = '00' + str(self.page_number_next)
                self.next_url = self.url.replace('page=' + page_number_str,
                                                 'page=' + page_number_next_str)
                # log("self.next_url", self.next_url)

        #
        # Get the videos...
        #
        self.getVideos()

    #
    # Get videos...
    #
    def getVideos(self):
        #
        # Init
        #
        # Create a list for our items.
        listing = []

        #
        # Get HTML page
        #
        response = requests.get(self.url, headers=HEADERS)
        html_source = response.text
        html_source = convertToUnicodeString(html_source)
        # log("html_source", html_source)

        try:
            json_data = json.loads(html_source)
        except (ValueError, KeyError, TypeError):
            xbmcgui.Dialog().ok(LANGUAGE(30000), LANGUAGE(30109))
            exit(1)

        for item in json_data['data']:
            # log("item", item)
            episode_title = item['attributes']['title']
            caption = item['attributes']['caption']
            length = item['attributes']['length']
            channel_slug = item['attributes']['channel_slug']

            # the url should be something like:
            # https://svod-be.roosterteeth.com/api/v1/episodes/ffc530d0-464d-11e7-a302-065410f210c4/videos
            # or even
            # https://svod-be.roosterteeth.com/api/v1/episodes/lets-play-2011-2/videos
            technical_episode_url_last_part = item['links']['videos']
            technical_episode_url = ROOSTERTEETH_BASE_URL + technical_episode_url_last_part
            technical_url = technical_episode_url
            log("technical_url", technical_url)

            functional_episode_url_middle_part = item['links']['self']
            functional_url = ROOSTERTEETH_BASE_URL + functional_episode_url_middle_part + '/videos'
            log("functional_url", functional_url)

            thumb = item['included']['images'][0]['attributes']['thumb']
            serie_title = item['attributes']['show_title']
            original_air_date = item['attributes']['original_air_date']
            original_air_date = original_air_date[0:10]

            # The backend still calls it sponsor instead of first member
            is_first_member_only = item['attributes']['is_sponsors_only']

            # let's put some more info in the title of the episode
            if self.show_serie_name == "True":
                title = serie_title + ' - ' + episode_title
            else:
                title = episode_title

            if is_first_member_only:
                title = FIRST_MEMBER_ONLY_VIDEO_TITLE_PREFIX + ' ' + title

            title = convertToUnicodeString(title)
            thumbnail_url = thumb
            plot = caption
            duration_in_seconds = length
            studio = channel_slug
            studio = convertToUnicodeString(studio)
            studio = studio.replace("-", " ")
            studio = studio.capitalize()

            # Add to list...
            list_item = xbmcgui.ListItem(title)
            list_item.setInfo("video", {"title": title, "studio": studio, "mediatype": "video",
                                        "plot": plot + '\n' + LANGUAGE(30318) + ' ' + original_air_date,
                                        "aired": original_air_date, "duration": duration_in_seconds})
            list_item.setArt({'thumb': thumbnail_url, 'icon': thumbnail_url,
                              'fanart': os.path.join(RESOURCES_PATH, 'fanart-blur.jpg')})
            list_item.setProperty('IsPlayable', 'true')

            # let's remove any non-ascii characters from the title, to prevent errors with
            # urllib.parse.parse_qs of the parameters
            title = title.encode('ascii', 'ignore')

            parameters = {"action": "play", "functional_url": functional_url,
                          "technical_url": technical_url, "title": title,
                          "is_first_member_only": is_first_member_only,
                          "next_page_possible": "False"}
            plugin_url_with_parms = self.plugin_url + '?' + urllib.parse.urlencode(parameters)
            is_folder = False
            # Add refresh option to context menu
            list_item.addContextMenuItems([('Refresh', 'Container.Refresh')])
            # Add our item to the listing as a 3-element tuple.
            listing.append((plugin_url_with_parms, list_item, is_folder))

        # Make a next page item, if a next page is possible
        total_pages_str = json_data['total_pages']
        total_pages = int(total_pages_str)
        if self.page_number_next <= total_pages:
            # Next page entry
            if self.next_page_possible == 'True':
                list_item = xbmcgui.ListItem(LANGUAGE(30200))
                list_item.setArt({'thumb': os.path.join(RESOURCES_PATH, 'next-page.png'),
                                  'fanart': os.path.join(RESOURCES_PATH, 'fanart-blur.jpg')})
                list_item.setProperty('IsPlayable', 'false')
                parameters = {"action": "list-episodes", "url": str(self.next_url),
                              "next_page_possible": self.next_page_possible,
                              "show_serie_name": self.show_serie_name}
                url = self.plugin_url + '?' + urllib.parse.urlencode(parameters)
                is_folder = True
                # Add refresh option to context menu
                list_item.addContextMenuItems([('Refresh', 'Container.Refresh')])
                # Add our item to the listing as a 3-element tuple.
                listing.append((url, list_item, is_folder))

        # Add our listing to Kodi.
        # Large lists and/or slower systems benefit from adding all items at once via
        # addDirectoryItems instead of adding one by one via addDirectoryItem.
        xbmcplugin.addDirectoryItems(self.plugin_handle, listing, len(listing))

        # Set initial sorting
        xbmcplugin.addSortMethod(handle=self.plugin_handle, sortMethod=xbmcplugin.SORT_METHOD_DATEADDED)

        # Finish creating a virtual folder.
        xbmcplugin.endOfDirectory(self.plugin_handle)
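For readers not familiar with Kodi plumbing, the constructor above leans on the convention that Kodi passes three command-line arguments to a video add-on. A minimal standalone sketch of that handshake, using an invented plugin name and API URL purely for illustration:

import urllib.parse

# Kodi invokes a video add-on roughly as follows (the values below are made up):
#   sys.argv[0] -> plugin base URL in plugin:// notation
#   sys.argv[1] -> integer handle passed back to the xbmcplugin calls
#   sys.argv[2] -> query string carrying the routing parameters
example_argv = [
    "plugin://plugin.video.example/",
    "1",
    "?url=https%3A%2F%2Fexample.com%2Fapi%2Fv1%2Fepisodes%3Fpage%3D001"
    "&next_page_possible=True&show_serie_name=False",
]

params = urllib.parse.parse_qs(urllib.parse.urlparse(example_argv[2]).query)
print(params['url'][0])                 # the decoded episode-list URL, ending in page=001
print(params['next_page_possible'][0])  # "True", so the next-page logic kicks in

This is also why the plugin keeps page numbers zero-padded to three digits: the constructor slices exactly three characters after 'page=' when it computes the next page URL.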
TEUM ---> It is under insane momentum and, as we can see in the weekly chart above, a break above $1.76 could easily boost it to the $3.00s! Long and strong. LENS ---> After shares rose more than 17% on high volume today, this bottom player looks ready for higher prices! With technical indicators giving strong buy signals, a break above its 20 Moving Average could give us a nice squeeze up to its 200 Moving Average as an initial target. Worth watching. MBII ---> Steady action today on volume! Technical indicators show that buyers are in control, so the upside move should continue. We could get a nice trade here up to its 200 Moving Average. RSYS ---> Very interesting relative strength today! I'll try a trade with a buy on the break above its 50 Moving Average, with $1.23 as the initial target. AIRI ---> Nice action today on strong volume and buying pressure! Watching closely for the breakout ahead! Could be intense. MICT ---> Broke above its 200 and 20 Moving Averages on decent volume! I'll be watching for a test of the $1.15 level. A break on volume could easily boost it to the $1.50 area. Stay tuned. AQMS ---> We have a great bottom here and a low-float stock with about 14% short float. It had a very nice session on high volume and its technical indicators are showing positive divergences. We may have a runner candidate here, and its 20 Moving Average could be the next target! On radar. MYOS ---> Another low-float stock with high short interest that came up on my screen with high relative strength. A big squeeze could happen over $1.56.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from datetime import datetime
from datetime import timedelta
import eventlet
import functools

from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import timeutils

from karbor import exception
from karbor.i18n import _
from karbor.services.operationengine.engine import triggers
from karbor.services.operationengine.engine.triggers.timetrigger import utils

CONF = cfg.CONF
LOG = logging.getLogger(__name__)


class TriggerOperationGreenThread(object):
    def __init__(self, first_run_time, function):
        super(TriggerOperationGreenThread, self).__init__()
        self._is_sleeping = True
        self._pre_run_time = None
        self._running = False
        self._thread = None

        self._function = function

        self._start(first_run_time)

    def kill(self):
        self._running = False
        if self._is_sleeping:
            self._thread.kill()

    @property
    def running(self):
        return self._running

    @property
    def pre_run_time(self):
        return self._pre_run_time

    def _start(self, first_run_time):
        self._running = True

        now = datetime.utcnow()
        initial_delay = 0 if first_run_time <= now else (
            int(timeutils.delta_seconds(now, first_run_time)))

        self._thread = eventlet.spawn_after(
            initial_delay, self._run, first_run_time)
        self._thread.link(self._on_done)

    def _on_done(self, gt, *args, **kwargs):
        self._is_sleeping = True
        self._pre_run_time = None
        self._running = False
        self._thread = None

    def _run(self, expect_run_time):
        while self._running:
            self._is_sleeping = False
            self._pre_run_time = expect_run_time

            expect_run_time = self._function(expect_run_time)
            if expect_run_time is None or not self._running:
                break

            self._is_sleeping = True

            now = datetime.utcnow()
            idle_time = 0 if expect_run_time <= now else int(
                timeutils.delta_seconds(now, expect_run_time))
            eventlet.sleep(idle_time)


class TimeTrigger(triggers.BaseTrigger):
    TRIGGER_TYPE = "time"
    IS_ENABLED = (CONF.scheduling_strategy == 'default')

    def __init__(self, trigger_id, trigger_property, executor):
        super(TimeTrigger, self).__init__(
            trigger_id, trigger_property, executor)

        self._trigger_property = self.check_trigger_definition(
            trigger_property)

        self._greenthread = None

    def shutdown(self):
        self._kill_greenthread()

    def register_operation(self, operation_id, **kwargs):
        if operation_id in self._operation_ids:
            msg = (_("The operation_id(%s) is exist") % operation_id)
            raise exception.ScheduledOperationExist(msg)

        if self._greenthread and not self._greenthread.running:
            raise exception.TriggerIsInvalid(trigger_id=self._id)

        self._operation_ids.add(operation_id)
        if self._greenthread is None:
            self._start_greenthread()

    def unregister_operation(self, operation_id, **kwargs):
        if operation_id not in self._operation_ids:
            return

        self._operation_ids.remove(operation_id)
        if 0 == len(self._operation_ids):
            self._kill_greenthread()

    def update_trigger_property(self, trigger_property):
        valid_trigger_property = self.check_trigger_definition(
            trigger_property)

        if valid_trigger_property == self._trigger_property:
            return

        timer = self._get_timer(valid_trigger_property)
        first_run_time = self._compute_next_run_time(
            datetime.utcnow(), trigger_property['end_time'], timer)
        if not first_run_time:
            msg = (_("The new trigger property is invalid, "
                     "Can not find the first run time"))
            raise exception.InvalidInput(msg)

        if self._greenthread is not None:
            pre_run_time = self._greenthread.pre_run_time
            if pre_run_time:
                end_time = pre_run_time + timedelta(
                    seconds=self._trigger_property['window'])
                if first_run_time <= end_time:
                    msg = (_("The new trigger property is invalid, "
                             "First run time%(t1)s must be after %(t2)s") %
                           {'t1': first_run_time, 't2': end_time})
                    raise exception.InvalidInput(msg)

        self._trigger_property = valid_trigger_property

        if len(self._operation_ids) > 0:
            # Restart greenthread to take the change of trigger property
            # effect immediately
            self._kill_greenthread()
            self._create_green_thread(first_run_time, timer)

    def _kill_greenthread(self):
        if self._greenthread:
            self._greenthread.kill()
            self._greenthread = None

    def _start_greenthread(self):
        # Find the first run time; we don't know in advance when this
        # trigger will be used for the first time.
        timer = self._get_timer(self._trigger_property)
        first_run_time = self._compute_next_run_time(
            datetime.utcnow(), self._trigger_property['end_time'], timer)
        if not first_run_time:
            raise exception.TriggerIsInvalid(trigger_id=self._id)

        self._create_green_thread(first_run_time, timer)

    def _create_green_thread(self, first_run_time, timer):
        func = functools.partial(
            self._trigger_operations,
            trigger_property=self._trigger_property.copy(),
            timer=timer)

        self._greenthread = TriggerOperationGreenThread(
            first_run_time, func)

    def _trigger_operations(self, expect_run_time, trigger_property, timer):
        """Trigger operations once.

        returns: wait time for next run
        """

        # Just for robustness, actually expect_run_time always <= now
        # but, if the scheduling of eventlet is not accurate, then we
        # can do some adjustments.
        entry_time = datetime.utcnow()
        if entry_time < expect_run_time and (
                int(timeutils.delta_seconds(entry_time, expect_run_time)) > 0):
            return expect_run_time

        # The self._executor.execute_operation may have I/O operation.
        # If it is, this green thread will be switched out during looping
        # operation_ids. In order to avoid changing self._operation_ids
        # during the green thread is switched out, copy self._operation_ids
        # as the iterative object.
        operation_ids = self._operation_ids.copy()
        sent_ops = set()
        window = trigger_property.get("window")
        end_time = expect_run_time + timedelta(seconds=window)

        for operation_id in operation_ids:
            if operation_id not in self._operation_ids:
                # Maybe, when traversing this operation_id, it has been
                # removed by self.unregister_operation
                LOG.warning("Execute operation %s which is not exist, "
                            "ignore it", operation_id)
                continue

            now = datetime.utcnow()
            if now >= end_time:
                LOG.error("Can not trigger operations to run. Because it is "
                          "out of window time. now=%(now)s, "
                          "end time=%(end_time)s, expect run time=%(expect)s,"
                          " wating operations=%(ops)s",
                          {'now': now, 'end_time': end_time,
                           'expect': expect_run_time,
                           'ops': operation_ids - sent_ops})
                break

            try:
                self._executor.execute_operation(
                    operation_id, now, expect_run_time, window)
            except Exception:
                LOG.exception("Submit operation to executor failed, operation"
                              " id=%s", operation_id)

            sent_ops.add(operation_id)

        next_time = self._compute_next_run_time(
            expect_run_time, trigger_property['end_time'], timer)
        now = datetime.utcnow()
        if next_time and next_time <= now:
            LOG.error("Next run time:%(next_time)s <= now:%(now)s. Maybe the "
                      "entry time=%(entry)s is too late, even exceeds the end"
                      " time of window=%(end)s, or it was blocked where "
                      "sending the operation to executor.",
                      {'next_time': next_time, 'now': now,
                       'entry': entry_time, 'end': end_time})
        return next_time

    @classmethod
    def check_trigger_definition(cls, trigger_definition):
        return utils.check_trigger_definition(trigger_definition)

    @classmethod
    def _compute_next_run_time(cls, start_time, end_time, timer):
        return utils.compute_next_run_time(start_time, end_time, timer)

    @classmethod
    def _get_timer(cls, trigger_property):
        return utils.get_timer(trigger_property)

    @classmethod
    def check_configuration(cls):
        utils.check_configuration()
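Stripped of eventlet and the Karbor plumbing, the contract between TriggerOperationGreenThread and the callback it wraps is simply "run once, then return the next run time, or None to stop". Below is a minimal, self-contained sketch of that rescheduling loop using only the standard library; the two-second interval and the three-run limit are invented purely for illustration:

from datetime import datetime, timedelta
import time


def make_callback(total_runs):
    # Plays the role of TimeTrigger._trigger_operations: do this slot's work,
    # then hand back the next expected run time (or None when finished).
    state = {"left": total_runs}

    def callback(expect_run_time):
        print("fired for slot", expect_run_time)
        state["left"] -= 1
        if state["left"] <= 0:
            return None
        return expect_run_time + timedelta(seconds=2)

    return callback


def run_loop(first_run_time, function):
    # Simplified stand-in for TriggerOperationGreenThread._run, with
    # time.sleep in place of eventlet.sleep and no kill() handling.
    expect_run_time = first_run_time
    while expect_run_time is not None:
        idle = max(0.0, (expect_run_time - datetime.utcnow()).total_seconds())
        time.sleep(idle)
        expect_run_time = function(expect_run_time)


run_loop(datetime.utcnow(), make_callback(3))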
I love the oxide inks and have a set. They are easy to work with, especially for the women I teach at church. We just started a class and so far I have had about 10 to 12 women a session. I consider card making and, of course, sending cards a ministry, and am glad to share it with others. It's one of those “no fail” techniques. You are my fav artist I track online. In fact, I am ordering the Fortune Cookie and So Matcha sets to make programs for our Women’s Club spring program. So thankful for your clear, practical and creative blogs! Jennifer, I have never used any Distress Oxide Inks. Watching your videos, however, has awakened my interest in this unique ink and I would love to win a set. My daughter has caught my addiction to your videos and came over the other day and we had to make top edge die cut cards immediately! When we were done she told me she bought the micro glaze based on the beautiful backgrounds you made. Thank you in advance and for your kindness campaign! I have not yet tried the new colors of Oxide Ink Pads, but I sure would like to! I love the ones I have from the earlier colors! I have not tried the Distress Oxide inks yet but am loving the way they look on all the card examples. The technique used on your cards in this blog post is awesome! I love the distress oxides and they have become my go-to inks. So far I have the first set, so it would be fun to get more colors. Thanks for thinking of us. Wow, it says you have 997 comments already! You are such an inspiration to all of us! I LOVE these backgrounds! I have played with Distress Oxides – I was inspired by you to purchase the first set! I love them and I can’t wait to try this technique! Thank you for the wonderful giveaway and for taking the time to make your inspirational videos! I have not tried the distress oxide inks yet, but would like to try them. Thanks for the chance to win. I have several distress oxide ink colors and I love them all! Don’t have any of the new colors – thanks for the opportunity to win some! I have a grand total of 3 oxide inks. I have played a little with them but after watching your video, I definitely have more playing to do. I do like that they’re easier to blend for me, less sponge marks on the paper 🙂 Thanks for the chance to win this awesome prize. I love seeing instructions with products that I am not familiar with. I have only two colors and am just trying to play around to find good techniques with the distress oxide inks. Thank you for your beautiful cards and instructions, they are very well done. You inspire all of us. I would love to have more colors to use and learn. Thanks for all your instruction. You come up with the best techniques and designs, Jennifer. These scenes are so realistic looking and colorful. No, I haven’t taken the plunge yet on Distress Oxide inks but would love to try them. I’m in the market for better impression inks anyway. Thanks for the generous giveaway! But I would love to have some of my very own to try! Especially love how the flowers look! I haven’t yet tried the distress oxide inks, but look forward to utilizing some of the techniques you have shared. I have tried the distress oxide inks. I made some amazing night sky and outer space backgrounds using them. So easy and fun to use! Ur designs are an inspiration and make me wanna try whatever u make…so distress oxides investment here I come….. I have made several backgrounds with this technique and buffed the paper after it was dry to get a shine and set all the inks.
I also tried some perfect pearls in water and used that as the water sprayed on the inks. It gives the perfect finishing touch with a little glitter. Can’t wait to use these on cards. I have not used these as of yet. Fairly new to the stamping world. I am being very inspired by your blog & YouTube channel. Looking forward to more stamping in my future. Thanks for the chance to win. Yes I have tried them and thank you for more ideas on how to use them. Waiting to get my 3rd set, have the first and second ones and enjoy working with them. Ooohh…I love how you used the inks!! I have one Oxide ink pad…the Black Soot. I used it on a web stencil and I loved how it turned out!!! I’d love to get more colors. These ink pads are perfect for blending. I love all these videos that you post and I have bought some Distress Oxide inks. I haven’t put in enough time to play with them yet but would love more colors to try all these techniques that you show us. They are all so inspiring! I just started buying these recently after seeing one of your videos! I am pretty new to stamps and card making but I LOVE the way they stamp and blend!!! I actually use the couple I have more than any other ink I own!! Thx for all the work you do! You are truly an inspiration. Love the Distress Oxides! Now to play! Thank You! I received one, Broken China, as a Christmas gift, and love it. I wish I had other colors though. Love the look of these pieces you have created. I’ve been trying to stock up my stash of backgrounds lately and these would be beautiful to add. I haven’t tried the oxide inks yet, but sure would love to. SIGH…. maybe someday. Thanks for all the instruction and inspiration. I bought the original set of distress oxides and love them. I added the black when I saw one of your videos. I love your creativity. Thank you for sharing your ideas. I have not tried these inks. The effects are amazing. Would love to try them!!! I have tried them and love them. I have all the other sets and so want to get this one too. Would love to win. Thank you for your generosity and your wonderful videos. I bought ALL the Distress Oxide ink pads (no refills yet) but I didn’t know about the Tim Holtz Distress Micro Glaze. What a difference that makes! That’s on my “to-buy” list. Yes I have used the distress oxide inks, love them. Can’t wait to try these new techniques with them. Thanks for sharing! Fabulous cards – great video! I have five colors of the distress oxide inks. I haven’t had a chance to use them much – will have to try this technique. Thank you for the chance to win! So excited to give them a try! I have! Still trying them out. You make it look easy. I just got 2 more, so I have 5 now. I loved the videos you have done. In the first video…the colors are outstanding, and the beautiful combo of blue, green and teals, …“I love you to the moon and back” and the balloons with “wish” and the scraps… all were over the top. I think I have watched each of them 5 times. I want to ink oxide something…and not just paper! Thank you for being such a helpful and outstanding teacher! Distress Oxides have opened up a new set of options. I haven’t tried them yet, but as I have been inspired by your videos, I purchased six oxide inks and they just arrived today. I can’t wait to try them out. Thanks for your videos, they are very helpful for newbies like me. I have only tried one distress oxide ink, love it, I look forward to trying more! I only have one color of the Distress Oxide inks. I am so glad to see how you used it.
I would love to have a set of more colors. How in the world do you figure out what to do every post??? Something new each time. You are the one YouTube channel that I won’t miss watching, and I look forward to each new one. Thank you for sharing your talent. I love the oxide inks! I would love to get more colors. They are so fun to play with! I love the look of Distress Oxide inks, but have not tried them yet. I am anxious to try them. I haven’t had a chance to try them yet but sure would like to win a set!! I have all the Distress, so now I need the Oxides! I’ve not had a chance to use the Distress Oxide inks as I live in a country where they are not available. I’ve watched many videos and would love to try them out! Beautiful cards, I would love to win these Oxide inks.
from a10sdk.common.A10BaseClass import A10BaseClass


class Stats(A10BaseClass):

    """This class does not support CRUD Operations please use parent.

    :param nat_resp: {"description": "(NAT) No. of responses", "format": "counter", "type": "number", "oid": "8", "optional": true, "size": "2"}
    :param slb_resp_no_match: {"description": "No. of requests with no response", "format": "counter", "type": "number", "oid": "5", "optional": true, "size": "2"}
    :param nat_xid_reused: {"description": "(NAT) No. of requests reusing a transaction id", "format": "counter", "type": "number", "oid": "13", "optional": true, "size": "2"}
    :param slb_req: {"description": "No. of requests", "format": "counter", "type": "number", "oid": "1", "optional": true, "size": "2"}
    :param slb_no_resp: {"description": "No. of resource failures", "format": "counter", "type": "number", "oid": "3", "optional": true, "size": "2"}
    :param nat_req: {"description": "(NAT) No. of requests", "format": "counter", "type": "number", "oid": "7", "optional": true, "size": "2"}
    :param slb_req_rexmit: {"description": "No. of request retransmits", "format": "counter", "type": "number", "oid": "4", "optional": true, "size": "2"}
    :param nat_no_resource: {"description": "(NAT) No. of resource failures", "format": "counter", "type": "number", "oid": "12", "optional": true, "size": "2"}
    :param nat_no_resp: {"description": "(NAT) No. of resource failures", "format": "counter", "type": "number", "oid": "9", "optional": true, "size": "2"}
    :param nat_req_rexmit: {"description": "(NAT) No. of request retransmits", "format": "counter", "type": "number", "oid": "10", "optional": true, "size": "2"}
    :param nat_resp_no_match: {"description": "(NAT) No. of requests with no response", "format": "counter", "type": "number", "oid": "11", "optional": true, "size": "2"}
    :param slb_no_resource: {"description": "No. of resource failures", "format": "counter", "type": "number", "oid": "6", "optional": true, "size": "2"}
    :param slb_resp: {"description": "No. of responses", "format": "counter", "type": "number", "oid": "2", "optional": true, "size": "2"}
    :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`

    """

    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        self.b_key = "stats"
        self.DeviceProxy = ""
        self.nat_resp = ""
        self.slb_resp_no_match = ""
        self.nat_xid_reused = ""
        self.slb_req = ""
        self.slb_no_resp = ""
        self.nat_req = ""
        self.slb_req_rexmit = ""
        self.nat_no_resource = ""
        self.nat_no_resp = ""
        self.nat_req_rexmit = ""
        self.nat_resp_no_match = ""
        self.slb_no_resource = ""
        self.slb_resp = ""

        for keys, value in kwargs.items():
            setattr(self, keys, value)


class Dns(A10BaseClass):

    """Class Description::
    Statistics for the object dns.

    Class dns supports CRUD Operations and inherits from `common/A10BaseClass`.
    This class is the `"PARENT"` class for this module.

    :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`

    URL for this object::
    `https://<Hostname|Ip address>//axapi/v3/slb/dns/stats`.

    """

    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        self.required = []
        self.b_key = "dns"
        self.a10_url = "/axapi/v3/slb/dns/stats"
        self.DeviceProxy = ""
        self.stats = {}

        for keys, value in kwargs.items():
            setattr(self, keys, value)
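Since the generated __init__ methods above simply copy keyword arguments onto attributes with setattr, here is a small illustration of how these wrapper classes are populated; the counter values are invented, and actually talking to a device would additionally require a configured DeviceProxy from the a10sdk package:

# Any keyword argument becomes an attribute; unset fields keep their defaults.
stats = Stats(slb_req="1024", slb_resp="1019", nat_req="0")
print(stats.b_key)        # "stats"
print(stats.slb_req)      # "1024"
print(stats.slb_no_resp)  # "" (default, never set)

dns = Dns(stats={"slb_req": "1024", "slb_resp": "1019"})
print(dns.a10_url)        # "/axapi/v3/slb/dns/stats"
print(dns.stats)          # the dict passed in, stored as-is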
2009 Chrysler Town & Country photo gallery: captions cover the LX, Touring and Limited trims, the 25th Anniversary edition, a Dodge Caravan minivan comparison, and first-drive coverage from Car and Driver.
#!/usr/bin/env python2.7

from pydhcplib.dhcp_packet import *
from pydhcplib.dhcp_network import *

netopt = {'client_listen_port': "68",
          'server_listen_port': "67",
          'listen_address': "0.0.0.0"}

goodservers_str = ['192.168.251.2', '192.168.251.3']
goodservers = [[int(n) for n in s.split('.')] for s in goodservers_str]


class Server(DhcpServer):
    def __init__(self, options):
        DhcpServer.__init__(self, options["listen_address"],
                            options["client_listen_port"],
                            options["server_listen_port"])

    def HandleDhcpDiscover(self, packet):
        pass

    def HandleDhcpRequest(self, packet):
        server_identifier = packet.GetOption('server_identifier')
        if server_identifier is not None:
            if server_identifier != []:
                if server_identifier not in goodservers:
                    with open('badpackets.txt', 'a') as f:
                        f.write(packet.str())
                        f.write('-----------------------------------------------------\n\n')
                    print packet.str()

    def HandleDhcpDecline(self, packet):
        pass

    def HandleDhcpRelease(self, packet):
        pass

    def HandleDhcpInform(self, packet):
        pass


def main():
    server = Server(netopt)
    while True:
        server.GetNextDhcpPacket()


if __name__ == "__main__":
    main()
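The filter above relies on pydhcplib handing back the server identifier as a list of integer octets, which is why goodservers is pre-converted from dotted-quad strings before the membership check. A standalone sketch of just that comparison, with a made-up rogue address standing in for packet.GetOption('server_identifier'):

goodservers_str = ['192.168.251.2', '192.168.251.3']
goodservers = [[int(n) for n in s.split('.')] for s in goodservers_str]

# Roughly what GetOption('server_identifier') would return for an unknown server.
server_identifier = [192, 168, 251, 9]

if server_identifier and server_identifier not in goodservers:
    print('unexpected DHCP server: %s' % '.'.join(str(o) for o in server_identifier))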
Online shopping is now routine; we buy everything from food to electronics over the internet. But when it comes to an item like a mattress, which we are used to trying out in a store, many of us hesitate, and understandably so: a mattress is a substantial purchase, and returning one can be a hassle if you choose badly. Here are a few things worth knowing before you buy a mattress online.

Are you looking for mattress sales to give your bed a new life? The mattress market is simple at heart but is often presented in a complicated way. Sales are becoming more and more frequent, and some of the top manufacturers run regular offers on standard and limited-time products. These sales suit everyone: for sellers they bring marketing exposure and quick cash flow, while for buyers they mean real savings, and shoppers on a budget can often make a better-quality purchase than they otherwise could.

As a customer on the important mission of buying a mattress, stick to a few principles. Think before you click the "order" button, because much of today's marketing is designed as a trap. It is not wise to end up with an inferior purchase just to save a few dollars; study after study has concluded that sleep is critically important to physical and mental health, so be careful about what you buy from a sale. Fortunately there is no rocket science involved: stay with trusted vendors, whether online or in traditional stores, and that is most of the battle. Established names rarely put their reputation at risk by selling poor or outdated products. And never buy an unsuitable mattress simply because it is offered well below its regular price; leave that one for someone else.

Everyone learns from their own experience, but if you are still unsure what will work best for you, take advantage of the free consultations offered here and there. Anyone already suffering from insomnia or chronic aches should consult an orthopedist or another specialist, since a simple change in sleep gear or sleep habits can sometimes bring great relief. Soft mattresses, for example, tend to suit people who sleep on their sides, while the firmer mattresses that many vendors produce are generally better for people who sleep on their back or stomach. You have to choose carefully here, because it is a matter of sleeping well or not sleeping at all. Trying something new for a couple of days, especially when products come with free trials, is not a bad idea. We are not here to discourage the smaller players in the business, but staying with well-known brands is usually the safer route; the big names earned their reputation through substantial effort and investment.

By type, mattresses fall into three broad groups. Memory foam mattresses are the most popular: they offer a long lifespan, are available at a reasonable price, and their durability ratings are better than those of their counterparts. Latex foam mattresses are also very popular both online and offline; they come in natural and synthetic versions, usually built on a poly foam base with a core of several latex layers for extra comfort. The third main type is the innerspring (or spring) mattress, once standard in the USA and Europe, which uses mechanical springs to give the sleeper a degree of lift; today's buyers increasingly pass them over. Our habits are changing with time: as members of an internet-based society we have new problems to solve, and our lifestyle is changing our sleep habits as well.

Now that you know buying a mattress online will not leave you with regrets, here is how to make your choice as good as possible. Some mattresses are better for back sleepers than for side sleepers, so know exactly what you need and then start searching for the mattress that meets all of your requirements. As mentioned above, one of the biggest advantages of shopping online is access to thousands of user reviews. If the reviewers are verified, the chances are they will discuss a product's weaknesses more than its strengths, which is natural for us and useful for a new buyer trying to rule out products that will not suit them. Read at least twenty reviews from previous users before making a final decision; the more reviews you can read, the better. It is rare for a product to be much better than its reviewers describe, and most of the time credible reviews give you an insight into what living with the mattress is actually like that you cannot get anywhere else.
"""Entry point and related functionality""" __author__ = "Stephan Sokolow (deitarion/SSokolow)" __license__ = "GNU GPL 2.0 or later" import errno, logging, os, subprocess, sys from ConfigParser import RawConfigParser try: import pygtk pygtk.require('2.0') except ImportError: pass # Apparently Travis-CI's build environment doesn't add this import gtk, wnck import gtkexcepthook gtkexcepthook.enable() from . import commands, layout from .util import fmt_table, XInitError from .version import __version__ from .wm import WindowManager #: Location for config files (determined at runtime). XDG_CONFIG_DIR = os.environ.get('XDG_CONFIG_HOME', os.path.expanduser('~/.config')) #: Default content for the config file DEFAULTS = { 'general': { # Use Ctrl+Alt as the default base for key combinations 'ModMask': '<Ctrl><Alt>', 'UseWorkarea': True, 'ColumnCount': 3 }, 'keys': { "KP_Enter": "monitor-switch", "KP_0": "maximize", "KP_1": "bottom-left", "KP_2": "bottom", "KP_3": "bottom-right", "KP_4": "left", "KP_5": "middle", "KP_6": "right", "KP_7": "top-left", "KP_8": "top", "KP_9": "top-right", "V": "vertical-maximize", "H": "horizontal-maximize", "C": "move-to-center", } } KEYLOOKUP = { ',': 'comma', '.': 'period', '+': 'plus', '-': 'minus', } #: Used for resolving certain keysyms wnck.set_client_type(wnck.CLIENT_TYPE_PAGER) # pylint: disable=no-member class QuickTileApp(object): """The basic Glib application itself.""" keybinder = None dbus_name = None dbus_obj = None def __init__(self, winman, commands, keys=None, modmask=None): """Populate the instance variables. @param keys: A dict mapping X11 keysyms to L{CommandRegistry} command names. @param modmask: A modifier mask to prefix to all keybindings. @type winman: The L{WindowManager} instance to use. @type keys: C{dict} @type modmask: C{GdkModifierType} """ self.winman = winman self.commands = commands self._keys = keys or {} self._modmask = modmask or '' def run(self): """Initialize keybinding and D-Bus if available, then call C{gtk.main()}. @returns: C{False} if none of the supported backends were available. @rtype: C{bool} @todo 1.0.0: Retire the C{doCommand} name. (API-breaking change) """ # Attempt to set up the global hotkey support try: from . import keybinder except ImportError: logging.error("Could not find python-xlib. Cannot bind keys.") else: self.keybinder = keybinder.init( self._modmask, self._keys, self.commands, self.winman) # Attempt to set up the D-Bus API try: from . import dbus_api except ImportError: logging.warn("Could not load DBus backend. " "Is python-dbus installed?") else: self.dbus_name, self.dbus_obj = dbus_api.init( self.commands, self.winman) # If either persistent backend loaded, start the GTK main loop. if self.keybinder or self.dbus_obj: try: gtk.main() # pylint: disable=no-member except KeyboardInterrupt: pass return True else: return False def show_binds(self): """Print a formatted readout of defined keybindings and the modifier mask to stdout. 
@todo: Look into moving this into L{KeyBinder} """ print "Keybindings defined for use with --daemonize:\n" print "Modifier: %s\n" % (self._modmask or '(none)') print fmt_table(self._keys, ('Key', 'Action')) def main(): """setuptools entry point""" from optparse import OptionParser, OptionGroup parser = OptionParser(usage="%prog [options] [action] ...", version="%%prog v%s" % __version__) parser.add_option('-d', '--daemonize', action="store_true", dest="daemonize", default=False, help="Attempt to set up global " "keybindings using python-xlib and a D-Bus service using dbus-python. " "Exit if neither succeeds") parser.add_option('-b', '--bindkeys', action="store_true", dest="daemonize", default=False, help="Deprecated alias for --daemonize") parser.add_option('--debug', action="store_true", dest="debug", default=False, help="Display debug messages") parser.add_option('--no-workarea', action="store_true", dest="no_workarea", default=False, help="Overlap panels but work better with " "non-rectangular desktops") help_group = OptionGroup(parser, "Additional Help") help_group.add_option('--show-bindings', action="store_true", dest="show_binds", default=False, help="List all configured keybinds") help_group.add_option('--show-actions', action="store_true", dest="show_args", default=False, help="List valid arguments for use " "without --daemonize") parser.add_option_group(help_group) opts, args = parser.parse_args() # Hook up grep to filter out spurious libwnck error messages that we # can't filter properly because PyGTK doesn't expose g_log_set_handler() if not opts.debug: glib_log_filter = subprocess.Popen( ['grep', '-v', 'Unhandled action type _OB_WM'], stdin=subprocess.PIPE) # Redirect stderr through grep os.dup2(glib_log_filter.stdin.fileno(), sys.stderr.fileno()) # Set up the output verbosity logging.basicConfig(level=logging.DEBUG if opts.debug else logging.INFO, format='%(levelname)s: %(message)s') # Load the config from file if present # TODO: Refactor all this cfg_path = os.path.join(XDG_CONFIG_DIR, 'quicktile.cfg') first_run = not os.path.exists(cfg_path) config = RawConfigParser() config.optionxform = str # Make keys case-sensitive # TODO: Maybe switch to two config files so I can have only the keys in the # keymap case-sensitive? config.read(cfg_path) dirty = False if not config.has_section('general'): config.add_section('general') # Change this if you make backwards-incompatible changes to the # section and key naming in the config file. 
config.set('general', 'cfg_schema', 1) dirty = True for key, val in DEFAULTS['general'].items(): if not config.has_option('general', key): config.set('general', key, str(val)) dirty = True mk_raw = modkeys = config.get('general', 'ModMask') if ' ' in modkeys.strip() and '<' not in modkeys: modkeys = '<%s>' % '><'.join(modkeys.strip().split()) logging.info("Updating modkeys format:\n %r --> %r", mk_raw, modkeys) config.set('general', 'ModMask', modkeys) dirty = True # Either load the keybindings or use and save the defaults if config.has_section('keys'): keymap = dict(config.items('keys')) else: keymap = DEFAULTS['keys'] config.add_section('keys') for row in keymap.items(): config.set('keys', row[0], row[1]) dirty = True # Migrate from the deprecated syntax for punctuation keysyms for key in keymap: # Look up unrecognized shortkeys in a hardcoded dict and # replace with valid names like ',' -> 'comma' transKey = key if key in KEYLOOKUP: logging.warn("Updating config file from deprecated keybind syntax:" "\n\t%r --> %r", key, KEYLOOKUP[key]) transKey = KEYLOOKUP[key] dirty = True if dirty: cfg_file = file(cfg_path, 'wb') config.write(cfg_file) cfg_file.close() if first_run: logging.info("Wrote default config file to %s", cfg_path) ignore_workarea = ((not config.getboolean('general', 'UseWorkarea')) or opts.no_workarea) # TODO: Rearchitect so this hack isn't needed commands.cycle_dimensions = commands.commands.add_many( layout.make_winsplit_positions(config.getint('general', 'ColumnCount')) )(commands.cycle_dimensions) try: winman = WindowManager(ignore_workarea=ignore_workarea) except XInitError as err: logging.critical(err) sys.exit(1) app = QuickTileApp(winman, commands.commands, keymap, modmask=modkeys) if opts.show_binds: app.show_binds() if opts.show_args: print commands.commands if opts.daemonize: if not app.run(): logging.critical("Neither the Xlib nor the D-Bus backends were " "available") sys.exit(errno.ENOENT) # FIXME: What's the proper exit code for "library not found"? elif not first_run: if args: winman.screen.force_update() for arg in args: commands.commands.call(arg, winman) while gtk.events_pending(): # pylint: disable=no-member gtk.main_iteration() # pylint: disable=no-member elif not opts.show_args and not opts.show_binds: print commands.commands print "\nUse --help for a list of valid options." sys.exit(errno.ENOENT) if __name__ == '__main__': main() # vim: set sw=4 sts=4 expandtab :
The Chairman of the Senate Committee on Local and Foreign Debt, Shehu Sani, has announced his intention to contest against Governor Nasir El-Rufai in the 2019 governorship election in Kaduna State. The lawmaker made this known in an exclusive interview with The Guardian. The human rights activist also condemned the outcome of the local government elections in Kaduna State. He said: “It was very clear that the results of the recently conducted Local Council elections in Kaduna State did not reflect what happened. “There was not even a Ward Congress in the State; they just wrote a list of names and submitted it, and this situation has worsened the internal crisis among those who used their money to buy forms.
from __future__ import with_statement
import pytest

from redis._compat import unichr, u, unicode
from .conftest import r as _redis_client


class TestEncoding(object):
    @pytest.fixture()
    def r(self, request):
        return _redis_client(request=request, decode_responses=True)

    def test_simple_encoding(self, r):
        unicode_string = unichr(3456) + u('abcd') + unichr(3421)
        r['unicode-string'] = unicode_string
        cached_val = r['unicode-string']
        assert isinstance(cached_val, unicode)
        assert unicode_string == cached_val

    def test_list_encoding(self, r):
        unicode_string = unichr(3456) + u('abcd') + unichr(3421)
        result = [unicode_string, unicode_string, unicode_string]
        r.rpush('a', *result)
        assert r.lrange('a', 0, -1) == result

    def test_object_value(self, r):
        unicode_string = unichr(3456) + u('abcd') + unichr(3421)
        r['unicode-string'] = Exception(unicode_string)
        cached_val = r['unicode-string']
        assert isinstance(cached_val, unicode)
        assert unicode_string == cached_val


class TestCommandsAndTokensArentEncoded(object):
    @pytest.fixture()
    def r(self, request):
        return _redis_client(request=request, charset='utf-16')

    def test_basic_command(self, r):
        r.set('hello', 'world')
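The decode_responses=True fixture is the whole point of TestEncoding: with it, redis-py decodes replies to text before they reach the assertions. A quick sketch of the same switch on a plain client, assuming a Redis server is listening on localhost:6379 (db 15 is an arbitrary choice for a scratch database):

import redis

raw = redis.StrictRedis(host='localhost', port=6379, db=15)
decoded = redis.StrictRedis(host='localhost', port=6379, db=15,
                            decode_responses=True)

raw.set('greeting', 'hello')
print(type(raw.get('greeting')))      # bytes on Python 3: replies come back raw
print(type(decoded.get('greeting')))  # str: the client decodes using its configured encoding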
Patients with personality disorders need targeted treatments which are able to deal with the specific aspects of the core pathology and to tackle the challenges they present to the treating clinicians. Such patients, however, are often difficult to engage, are prone to ruptures in the therapeutic alliance, and have difficulty adhering to a manualized treatment. Giancarlo Dimaggio, Antonella Montano, Raffaele Popolo and Giampaolo Salvatore aim to change this, and have developed a practical and systematic manual for the clinician, using Metacognitive Interpersonal Therapy (MIT) and including detailed procedures for dealing with a range of personality disorders. The book is divided into two parts, Pathology and Treatment, and provides precise instructions on how to move from the basic steps of forming an alliance, drafting a therapy contract and promoting self-reflection, to the more advanced steps of promoting change and helping the patient move toward health and adaptation. With clinical examples, summaries of therapies, and excerpts of session transcripts, Metacognitive Interpersonal Therapy for Personality Disorders will be welcomed by psychotherapists, clinical psychologists and other mental health professionals involved in the treatment of personality disorders. "A new, inspiring, reader-friendly, clear, concise book written to help clinicians deal better with the most difficult-to-treat patients in their caseloads." Contents: Introduction. Personality Disorder Psychopathology: Form and Contents of Subjective Experience. Personality Disorder Psychopathology: Functions. Assessment and Case Formulation in Metacognitive Interpersonal Therapy. Step-By-Step Formalized Procedures. Therapeutic Relationship. Shared Formulation of Functioning: Enriching Autobiographical Memory, Improving Access to Inner States and Reconstructing Schemas. Promoting Differentiation. Construction of New Self-Aspects: Access to Healthy Parts, Exploration, Increase in Agency, Overcoming Avoidances. Promoting the Understanding of the Other’s Mind and Integration. Treating Symptoms and Promoting Mastery of Relational Problems. Comparison of Metacognitive Interpersonal Therapy and Cognitive-Behavioral Therapies. Appendix: Diagnostic Instruments Usually Adopted in MIT. Giancarlo Dimaggio is a co-founding member of the Center for Metacognitive Interpersonal Therapy. He is the author of four books and numerous articles. Antonella Montano is a founding member and director of the psychotherapy school A.T. Beck Institute for Cognitive Behavior Therapy. She is a teacher and supervisor of the Associazione Italiana Analisi e Modificazione del Comportamento (AIAMC), a certified trainer/consultant/speaker/supervisor of the Academy of Cognitive Therapy (ACT), a member of the International Association for Cognitive Psychotherapy (IACP) and a member of the International Society for Sexual Medicine (ISSM). Raffaele Popolo is a co-founding member of the Center for Metacognitive Interpersonal Therapy, a trainer at the Società Italiana di Terapia Comportamentale e Cognitiva (SITCC) and a trainer at the psychotherapy school ‘Studi Cognitivi’. Giampaolo Salvatore is a co-founding member of the Center for Metacognitive Interpersonal Therapy and a trainer at the A.T. Beck Institute for Cognitive Behavior Therapy.
#!/usr/bin/env python3

from subprocess import Popen, PIPE, call


class windo:
    def __init__(self, windata):
        self.windata = windata

    @staticmethod
    def getwindata():
        # Ask wmctrl for the window list with geometry, squeezing repeated spaces.
        with Popen(['wmctrl -lG | tr -s " "'], shell=True, stdout=PIPE,
                   universal_newlines=True) as wmctrlg:
            winout = wmctrlg.stdout.read().splitlines()

        wincontainer = []
        for line in winout:
            winline = line.split(' ')
            windict = {}
            windict['hexid'] = winline[0]
            windict['desktop'] = winline[1]
            windim = {}
            windim['xpos'] = winline[2]
            windim['ypos'] = winline[3]
            windim['width'] = winline[4]
            windim['height'] = winline[5]
            windict['dimensions'] = windim
            wincontainer.append(windict)
        return wincontainer

    @staticmethod
    def movewin(windata, newsizestrng):
        winhxid = windata['hexid']
        call(['wmctrl', '-i', '-r', winhxid, '-e', newsizestrng])

    @staticmethod
    def sortwindos(screendictlist, shift):
        listlen = len(screendictlist)
        movedwinlist = []


def get_active_screen():
    with Popen(['wmctrl', '-d'], stdout=PIPE, universal_newlines=True) as wmctrld:
        wmctrlout = wmctrld.stdout.read().splitlines()

    for line in wmctrlout:
        if "*" in line:
            values = line.split(' ')
            deskid = values[0]
            screensize = values[11]
            try:
                screenx, screeny = screensize.split('x')
                return deskid, screenx, screeny
            except ValueError:
                print('Not running an EWMH compliant window manager')
                continue


AllScreenDictList = windo.getwindata()
ActiveDeskNum, ScreenXDim, ScreenYDim = get_active_screen()

for win in AllScreenDictList:
    if win['desktop'] == ActiveDeskNum:
        print(win['hexid'])
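For reference, the geometry string that movewin() hands to wmctrl -e has a fixed five-field layout, gravity,x,y,width,height. A small helper for building it; gravity 0 tells wmctrl to keep the window's current gravity, and the sample numbers are arbitrary:

def geometry_string(xpos, ypos, width, height, gravity=0):
    # wmctrl -i -r <hexid> -e expects "gravity,x,y,width,height".
    return '%d,%d,%d,%d,%d' % (gravity, xpos, ypos, width, height)


# Move a window to the top-left quarter of a 1920x1080 screen:
print(geometry_string(0, 0, 960, 540))  # "0,0,0,960,540"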
This topic provides links to examples in this documentation. Example: Add Labels to a Map - How to manually add labels to a map. Example: Spectacular Images and Data from Web Servers - A must see topic providing a gallery of views illustrating how Manifold can use web servers such as image servers and other free resources to provide a seemingly endless selection of spectacular background maps, satellite images and GIS data with nearly zero effort. Example: An Imageserver Tutorial - An extensive tutorial showing step by step how to add new data sources that are image servers, how to show them as layers in a map, how to create a new drawing that matches the projection of the map and how to trace over what is seen in an image server layer to create an area object in the drawing. Example: Change Point Style - Using new Style panel controls to change point style, either very rapidly one property at a time, or using the total Style button to compose a new style with changes to several properties at once. Example: Bounded Areas - Given line objects, the Bounded Areas transform template creates areas in regions entirely enclosed by overlapping or otherwise touching lines. A quick look at how the Transform panel in the Contents pane helps create new drawings. Example: Trace an Area in a Map over an Image Background - In a map with a drawing layer above an image layer (served dynamically by an image server), create an area object in the drawing by tracing over the outlines of something seen in the image layer below. Example: Transfer DEM Terrain Heights to Areas in a Drawing - Given a map with an image layer that shows terrain heights taken from a DEM, and a drawing layer that contains areas, using a small SQL query we transfer the average terrain height within each area to that area as a Height attribute for the area. Easy! Example: Create a Map Showing OSM Use by Country - A start-to-finish real life example of map creation that combines various Manifold capabilities. Copying a table of numbers from a web site, we create a map that is thematically colored to show usage of OpenStreetMap by country in proportion to the population of that country. Example: Compare Sizes of Countries - An example that combines multiple facilities within Manifold to create a presentation that allows comparison of the relative sizes of different countries. Example: Create a Table and Add a Record - Create a table with required fields and then add a record with value for those fields. Creates the OGR-required table to prepare a Manifold project for use by OGR as detailed in the Example: Connect to Radian from QGIS topic. Example: Add a Spatial Index to a Table - A typical use of an index is to provide a spatial index on a geom field in a table, so the geom data can be visualized in a drawing. This example shows how to add a spatial index using a geom field in a table. Example: Create a Geocoded Table from a Drawing - A partner example to Example: Create a Drawing from a Geocoded Table A geocoded table has records with a latitude and longitude for each record. This example starts with a table for a drawing of points where the geom field in the table contains geometry information for each point. We extract the Y and X locations for each point from the geom field to create latitude and longitude fields in the table for each record. Example: Street Address Geocoding - Geocode a table of street addresses using the Google geocoder. 
SQL Example: Using the mfd_id Field and mfd_id_x Index - A sequence of SQL examples of working with the mfd_id field and mfd_id_x index. Example: Export the Results of a Query - How to export the results table of a query for use in other applications. SQL Example: Miscellaneous SQL Functions - A variety of small examples using SQL functions. SQL Example: GeomOverlayAdjacent Function - Using the GeomOverlayAdjacent function, an example that shows how this function and similar functions such as GeomOverlayContained, GeomOverlayContaining, GeomOverlayIntersecting and GeomOverlayTouching operate. SQL Example: GeomOverlayTopologyUnion Function - A continuation of the SQL Example: GeomOverlayAdjacent Function example, using the GeomOverlayTopologyUnion function, an example that shows how this function and similar functions such as GeomOverlayTopologyIdentity, GeomOverlayTopologyIntersect and GeomOverlayTopologyUpdate operate. Example: Create a New Data Source from a Manifold Image Server - Manifold image server modules are snippets of code which use the Manifold Image Server Interface (ISI) to automatically fetch image tiles from popular image servers like Virtual Earth, Wikimapia, Yahoo!, Google Maps, Yandex and many others. Image servers can provide street maps, overhead satellite imagery, combinations of streets and satellite imagery and other data as well. Using Manifold Image Servers is one of the most popular Manifold features. Example: Create a New Data Source from a MAP File - Create a new data source from an existing Manifold .map project file. This is the classic way to nest projects, one calling another, to create libraries of data and projects. Access to nested projects has effectively zero performance loss and the links within projects take up effectively zero space so we can create huge constellations of data at our fingertips. Example: Create a Data Source within an Existing Data Source - When a data source is writable, for example, if the data source is a Manifold .map file, we can work within that data source as if it were at the top level of our project. For example, we can create a new data source that is nested within the existing data source. This example shows how. Example: Create and Use New Data Source using an MDB Database - This example Illustrates the step-by-step creation of a new data source using an .mdb file database, followed by use of SQL. Although now deprecated in favor of the more current Access Database Engine formats, .mdb files are ubiquitous in the Microsoft world, one of the more popular file formats in which file databases are encountered. Example: Modify GPKG Geometry with SQL then Add Drawing - This topic provides a "Hello, World" example that shows a simple, but typical, task involving spatial data. We will take a country-sized data set in GeoPackage (GPKG) format and change all areas in the data to the boundary lines for those areas and then save those boundary lines as a new table. We add a spatial index to the table and create a new drawing to visualize the new table. Example: Select a Range of Records in a Table - We can select or deselect a range of records between any two records in a table window using fast and easy mouse commands. Example: Combining Selections using the Select Panel - How to use selections to solve a one-off problem casually that would be more difficult to solve rigorously with SQL. The objective in the example's scenario is to visit a point of tourist interest while travelling. 
We will use the Transform panel as well as the Select panel. Example: Using the Select Panel Saved Tab - How to save selections and then use them later. Example: Repair a Wrong Edit using a Backup - How to quickly make a backup table and to then copy and paste geometry from that table to repair errors made when editing objects. This technique is a life-saver when edits go astray. Example: Create a Drawing from a Geocoded Table - A partner example to Example: Create a Geocoded Table from a Drawing A geocoded table has records with a latitude and longitude for each record. This example starts with a table containing a list of cities with a latitude and longitude field for the location of each city. We create a geom from the latitude and longitude fields using a template in the Transform panel and then we create a drawing that shows the cities as points. This example shows all the infrastructure steps involved. Example: Create a Drawing from Read-Only Geocoded CSV File - A detailed example using a mix of dialogs and SQL to create a drawing that shows data from an external, read-only geocoded CSV file that is linked into the project. Example: Create a Drawing from a Query - Everybody knows we can create a drawing from a table, but we can also create a drawing from a query. When the query reports different results the drawing changes too. This example show step by step how to create a query and then how to create a drawing from that query. We show how to command Manifold to write a query for us that grabs a selection, and then how to create a drawing based on that new query. This example duplicates the Drawings from Queries video using the Mexico_queries.mxb sample project. SQL Example: Kriging - We use SQL functions to create a raster terrain elevation image from vector contour lines in a drawing, using SQL functions for Kriging interpolation. Example: Formatting Tricks - The visualization capabilities of Manifold using Style can be exploited to provide many visual effects. This topic provides some examples of how to use Style in unexpected ways to create a range of more elaborate effects. Example: Add, Delete and Edit Thematic Formatting Intervals - This topic provides a step by step example of adding, deleting and editing intervals in the Style dialog that are used for thematic formatting. Example: Style Overrides - Working with style overrides to individually style areas, to use or not use style overrides, to find all records using style overrides and to clear style overrides. Example: Complex Point Style using a Circle Box - This example creates a complex point style, which uses a variety of different colors within the different effects tabs in the Point Style dialog. Example: Point Style using Move and Rotate - The Move parameter for symbols allows us to move symbols in an angular direction even as we rotate them. This example shows how to create point symbols that are clock faces with hands, using Move and Rotate. Example: Line Style with Multiple Effects - We can use effects from all of the Line Style dialog tabs to create a more complex line style. This example shows how to create a line style with an arrowhead symbol at the end of the line, a symbol at the beginning of the line and accessory lines in different colors to the left and right of the main line. Example: Fill Areas with Bitmap Images - We can use bitmap images as "fill" symbology for areas, including for the fill of the area itself, or as fill for Inner or Outer effects. 
In this example we use Style Overrides to fill different areas in a map of provinces with a different bitmap image pattern. Example: Use Repeating Images to Fill Areas - Areas are often filled with bitmap images that form a seamless pattern when tiled. If we like, we can use any bitmap image that can be used as a symbol, which will repeat within the area. Example: Inner and Outer Effects using a Bitmap - The Inner and Outer effects with area styles can use bitmap images for fills. We first illustrate an Outer effect using a bitmap, and then add an Inner effect. Example: Style Properties in the mfd_meta Table - Style properties for drawings such as colors for areas are stored in human readable JSON values as properties in the mfd_meta system table. This example shows how we can copy formatting from one drawing to another by simply copying values between records in the mfd_meta table. Example: Change the Contrast of an Image - In this example we use the Edit - Style dialog to change the contrast of an image. Example: Set Image Transparency using Alpha - The A row in the Style dialog allows us to specify what transparency we want to apply to the image, either by applying the same value for A for all pixels or by using one of the other channels to also control the A value. Example: Autocontrast and Hill Shading Images using Style - This example shows how the Edit - Style dialog can hill shade an image using the values of pixels as heights and generating shadows as if the Sun were located at the specified azimuth and altitude. This capability is used most frequently with raster images to give an impression of three dimensionality in cases where the values of pixels represent terrain elevations. Example: Style Applied to an Image Server Image - Because the Edit - Style dialog simply changes the way an image is displayed and not the data, it can operate on read-only data served by various web servers such as WMS REST servers. In this example we look at every detail of creating a data source using a WMS REST server and then manipulating the appearance of the display with Style. We will connect to a WMS server that provides LiDAR data in various forms, including as terrain elevation. SQL Example: Process Images with 3x3 Filters - Shows a step-by-step example of developing an SQL query that takes a query written by the Edit Query button and then modifies that query into a general purpose query that can apply any 3x3 filter. This makes it easy to use matrix filters we find on the web for custom image processing. We extend the query by using parameters and adding a function, and then show how it can be adapted to use a 5x5 filter. SQL Example: Process Images using Dual 3x3 Filters - A continuation of the above topic, extending the example query to utilize two filters for processing, as commonly done with Sobel and Prewitt two filter processing. SQL Example: Process RGB Images using Matrix Filters - A continuation of the above two topics, extending the example query to process three channel, RGB images. SQL Example: Create NDVI Displays - How to create a query that creates an NDVI display from a four-band NAIP image, with tips and tricks on how to copy and paste existing information to get the result we want. Example: Enhance Terrain with Curvatures - We enhance a terrain showing Crater Lake, Oregon, by using mean curvature calculation to bring out details. The example uses a 4 GB project containing a large terrain elevation surface. 
Using a point-and-click dialog with no SQL, we apply automatic CPU parallelism and GPU parallelism to absolutely crush a task in two and a half minutes that would take non-parallel software days. Example: Layout Properties - Editing properties which appear in the mfd_meta table for a layout changes the content of that layout. We can exploit that effect to create standardized layouts which are then re-cycled for different content. Example: Import an ESRI ADF File and Apply Style and Hillshading - In this example we import an ESRI ADF format file that provides a raster terrain elevation data set for the region near Yosemite National Park in the United States. The data set imports as an image. We then use Style to style the image and to provide hillshading. Example: Import BIL and Combine 3 Bands - A very technical example in which we import a BIL file that produces three images, each using one band from the BIL. We then create an image that uses the three bands for R, G and B. Example: Import CSV and Create a Drawing - Import a CSV file by creating a New Data Source, copying and pasting the table conveyed by the CSV, prepping the table for use as a geocoded table and then creating a drawing. Example: Import DDF SDTS DEM Raster File - We import a raster data terrain elevation surface from USGS SDTS format using DDF files. Example: Import DDF SDTS DLG Vector File - We import a vector data roads drawing from USGS SDTS format using DDF files. We use the Select panel to quickly reduce the data to line objects only, excluding points and area objects. Example: Import E00 and Rearrange - An intricate example showing how to use Manifold tools to adapt legacy data imported from E00 format into useful, modern form. Example: Import or Link ECW - Step by step examples showing how to link an image from an ECW file into a project and also how to import an image from an ECW file into a project. Example: Import GeoJSON / JSON File - Import vector footprints for all buildings in the District of Columbia, using a GeoJSON file published as open data by Microsoft. Example: Import GIRAS vector LULC File and Style - A companion topic to the Example: Import CTG Grid Cell File and Style topic. We import a USGS land use file in GIRAS vector format and then we use Style to provide a more understandable display. Example: Import GML / OS Open Map Local - Import an Ordnance Survey, UK, vector map in GML format from the OS Open Map - Local series. Example: Link GPKG and Save Style - A companion topic to the GPKG topic. How to link a GPKG, open a drawing, Style it and then save so the styling is retained within the GPKG file. Example: Import KML with GroundOverlay Image - KML files can contain GroundOverlay references to images that are stored in other files and which are intended to be used as part of the KML. Manifold will automatically import GroundOverlay images that are stored on the local machine or which are accessible through local network references as part of the KML import. Example: Import Multiple MapInfo TAB Images - MapInfo TAB format can store images as well as vector drawings. This example shows how multiple images at the same time can be imported from TAB format. Example: Link NLCD using Scan Raw Binary File - Use the Scan Raw Binary File tool to scan and to prepare a configuration file, which we use to link an NLCD raw binary file providing land cover data for Delaware as a raster image. 
We use a standard palette to color the land cover data and then we assign a projection to the newly imported image so it can be used as a correctly georegistered layer in maps. Example: Import ERDAS GIS with GDAL and Assign Coordinate System - A multistep example that covers import of an unwieldy format using GDAL, followed by use of Assign Initial Coordinate System and Repair Initial Coordinate System to deal quickly with real-life complications. Example: Transform Elevation Image to Flatten Bathymetry to Zero - Using the Transform panel with an image, which contains a single data channel for terrain elevation data for land together bathymetry data for oceans, we use the Expression tab of the Transform panel to reset all pixel values less than zero to zero. This takes all below-zero elevations and sets them to zero, in effect removing bathymetry effects so that ocean areas are represented with zero elevation. Example: Zoom In to See Transform Previews for Big Images - A short example showing how previews for the Transform panel will appear in large images only when zoomed in far enough so computation of the preview does not cause objectionable delays. Example: Parallel Speed Increase in an Image Transform - A short example illustrating how checking the Allow parallel execution option (on by default) increases speed by a factor of four in a simple use of the Transform panel to modify an image. Example: Transform Template Units - Templates in the Transform panel often include parameter boxes using units of measure. This example uses the Buffer drawing template to show how to change units of measure quickly to whatever units are desired. Example: Transform Field Values using an Expression in the Transform Panel - How the Expressions tab of the Transform panel may be used to change the values of fields. Example: Overlay Contained - A frequent use of overlays is to sum the values of many points that fall within an area and to transfer that sum to a new field for an area. In this example we take a drawing that has cities in the US with a population value for each city. We use Overlay Contained to sum the population of each city within a state and to transfer that sum to a total population for the state. Example: Overlay Containing - One of the most common uses of overlays is to transfer fields from areas to points that are contained in those areas. Tasks such as transferring a census block group number or zip code number from a drawing of areas to points that fall within each area are extremely common. In this example we transfer the name of a French region to the points that represent cities which fall within each region. Example: Overlay Topology Intersect - In this example we use the Overlay Topology, Intersect template in the Transform panel to trim a drawing of points so that all points which do not fall within areas in a second drawing are deleted. The drawing of points we trim will become the US cities drawing that is used in the Example: Overlay Contained topic. Example: Easy Guide to Diversity and Other Transfer Rules - Illustrated examples using Overlay Touching with easily-understood visual diagrams to show the results produced by Average, Diversity, Diversity Index, Major, Maximum, Median, Minimum, and Sum transfer rules. No more troubles understanding Diversity! Example: Union Areas - Combine multiple area objects into a single area. A drawing of French regions shows some regions as more than one area. 
We would like each region to be one area so the table of regions has one record per region. Example: Construct JSON String using Select and Transform - Use the Select panel and Transform panel to manually construct a JSON string using values from other fields in a table. Shows how we can manipulate text to build desired contents in a field. Example: Edit a Drawing with Transform Templates - In this example we open a drawing and edit objects in the drawing using Transform panel templates. Includes examples of using the Add Component button and also the Edit Query button. SQL Example: Custom Contour Intervals - This example builds on the Example: Contour Areas and Contour Lines topic, using the Edit Query button in the Transform panel for images to learn how to slightly alter the generated SQL to create contour areas or lines on whatever intervals we like, and not just evenly spaced intervals like the default transform creates. It's incredibly easy. Example: Trace Vector Areas from Raster Pixels - This example follows the Trace Vector Areas from Raster Pixels video on the Gallery page. We use the Trace Areas template in the Transform panel for images to create a drawing with vector areas covering regions of similarly-colored pixels. Next, we use a simple query to add classification codes from a USGS table of classes to the resulting drawing, using a simple INNER JOIN SQL statement. Example: Connect to a Shapefile using GDAL/OGR - Instead of using Manifold's native ability to connect to shapefiles, use the Manifold GDAL dataport to connect to a shapefile.
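The NDVI topic listed above relies on the standard formula NDVI = (NIR - Red) / (NIR + Red). As a language-neutral illustration of that arithmetic (this is plain NumPy, not Manifold SQL, and treating band 4 of a four-band NAIP image as near-infrared is an assumption):

import numpy as np

def ndvi(red, nir):
    # red and nir are 2-D arrays of pixel values from the red and near-infrared bands.
    red = red.astype(np.float64)
    nir = nir.astype(np.float64)
    denom = nir + red
    out = np.zeros_like(denom)
    # Leave pixels with red + nir == 0 at 0 instead of dividing by zero.
    np.divide(nir - red, denom, out=out, where=denom != 0)
    return out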
#!/usr/bin/env python import tornado.ioloop import tornado.web import tornado.gen import tornado.websocket import tornado.auth import tornado.escape import hmac import hashlib import functools import os import momoko import urlparse import time import datetime import parsedatetime import prettytable import ascii_graph import logging from tornado.options import define, options define("port", default=8888, help="run on the given port", type=int) define("host", default='localhost:8888', help="server host", type=str) define("db_host", default="localhost", help="database hostname", type=str) define("db_port", default=5432, help="database port", type=int) define("db_name", default="callum", help="database name", type=str) define("db_user", default="callum", help="database username", type=str) define("db_pass", default="", help="database password", type=str) class HumanDateParser(object): def __init__(self): self.calendar = parsedatetime.Calendar() def parse(self, str): return datetime.datetime.fromtimestamp( time.mktime(self.calendar.parse(str)[0])) def get_psql_credentials(): try: urlparse.uses_netloc.append("postgres") url = urlparse.urlparse(os.getenv("DATABASE_URL")) credentials = {"host": url.hostname, "port": url.port, "dbname": url.path[1:], "user": url.username, "password": url.password} except: credentials = {"host": options.db_host, "port": options.db_port, "dbname": options.db_name, "user": options.db_user, "password": options.db_pass} return credentials def _get_secret(filename, envvar): try: with open(os.path.join(os.path.dirname(__file__), filename)) as f: return f.read().strip() except IOError: return os.getenv(envvar) get_hmac_secret = \ functools.partial(_get_secret, ".hmac_secret", "ITTF_HMAC_SECRET") get_cookie_secret = \ functools.partial(_get_secret, ".cookie_secret", "ITTF_COOKIE_SECRET") get_google_secret = \ functools.partial(_get_secret, ".google_secret", "ITTF_GOOGLE_SECRET") def hmac_authenticated(method): @functools.wraps(method) def wrapper(self, *args, **kwargs): hash = hmac.new( self.settings["hmac_secret"], self.get_argument("data"), hashlib.sha256 ) if self.get_argument("token") != hash.hexdigest(): raise tornado.web.HTTPError(401, "Invalid token") return method(self, *args, **kwargs) return wrapper def bool2str(boolean): return "yes" if boolean else "no" class HasFreeWebSocketHandler(tornado.websocket.WebSocketHandler): connections = set() def open(self): HasFreeWebSocketHandler.connections.add(self) def on_message(self, message): pass def on_close(self): HasFreeWebSocketHandler.connections.remove(self) class BaseHandler(tornado.web.RequestHandler): @property def db(self): return self.application.db def get_current_user(self): return self.get_secure_cookie("ittf_user") @tornado.gen.coroutine def has_free_toilet(self): cursor = yield self.db.callproc("any_are_free") raise tornado.gen.Return(cursor.fetchone()[0]) @tornado.gen.coroutine def has_free_shower(self): cursor = yield self.db.execute( "SELECT is_free FROM latest_events() WHERE toilet_id = 2") raise tornado.gen.Return(cursor.fetchone()[0]) class GoogleLoginHandler(BaseHandler, tornado.auth.GoogleOAuth2Mixin): @tornado.gen.coroutine def get(self): if self.get_argument("code", False): access = yield self.get_authenticated_user( redirect_uri=self.settings["login_url"], code=self.get_argument("code")) user = yield self.oauth2_request( "https://www.googleapis.com/oauth2/v1/userinfo", access_token=access["access_token"]) if user["email"].endswith("@madebymany.co.uk") or \ 
user["email"].endswith("@madebymany.com"): self.set_secure_cookie("ittf_user", user["email"]) self.redirect("/stats") else: self.redirect("/") else: yield self.authorize_redirect( redirect_uri=self.settings["login_url"], client_id=self.settings["google_oauth"]["key"], scope=["profile", "email"], response_type="code", extra_params={"approval_prompt": "auto"}) class MainHandler(BaseHandler): @tornado.gen.coroutine def get(self): has_free = bool2str((yield self.has_free_toilet())) self.render("index.html", has_free_toilet=has_free) @hmac_authenticated @tornado.gen.coroutine def post(self): values = yield [self.db.mogrify( "(%(toilet_id)s, %(is_free)s, %(timestamp)s)", t) for t in tornado.escape.json_decode(self.get_argument("data")) ] yield self.db.execute( "INSERT INTO events (toilet_id, is_free, recorded_at) " "VALUES %s;" % ", ".join(values)) self.notify_has_free() self.finish() @tornado.gen.coroutine def notify_has_free(self): has_free = bool2str((yield self.has_free_toilet())) for connected in HasFreeWebSocketHandler.connections: try: connected.write_message({ "hasFree": has_free }) except: logging.error("Error sending message", exc_info=True) class ShowerHandler(BaseHandler): @tornado.gen.coroutine def get(self): has_free = bool2str((yield self.has_free_shower())) self.render("shower.html", has_free_shower=has_free) class StatsHandler(BaseHandler): @tornado.web.authenticated @tornado.gen.coroutine def get(self): parser = HumanDateParser() text = None op = None where = "" and_where = "" start = self.get_argument("from", None) end = self.get_argument("to", None) if start and end: parsed_start = parser.parse(start) parsed_end = parser.parse(end) text = "Showing from %s to %s" % (parsed_start, parsed_end) op = ("WHERE recorded_at BETWEEN %s AND %s", (parsed_start, parsed_end)) elif start: parsed_start = parser.parse(start) text = "Showing from %s onward" % parsed_start op = ("WHERE recorded_at >= %s", (parsed_start,)) elif end: parsed_end = parser.parse(end) text = "Showing from %s backward" % parsed_end op = ("WHERE recorded_at <= %s", (parsed_end,)) if op: where = yield self.db.mogrify(*op) and_where = where.replace("WHERE", "AND", 1) queries = [ ("Number of visits", "SELECT toilet_id, count(*) " "AS num_visits FROM visits %(where)s " "GROUP BY toilet_id ORDER BY toilet_id;"), ("Average visit duration", "SELECT toilet_id, avg(duration) " "AS duration_avg FROM visits %(where)s " "GROUP BY toilet_id ORDER BY toilet_id;"), ("Minimum visit duration", "SELECT toilet_id, min(duration) " "AS duration_min FROM visits %(where)s " "GROUP BY toilet_id ORDER BY toilet_id;"), ("Maximum visit duration", "SELECT toilet_id, max(duration) " "AS duration_max FROM visits %(where)s " "GROUP BY toilet_id ORDER BY toilet_id;"), ("Visits by hour", "SELECT s.hour AS hour_of_day, count(v.hour) " "FROM generate_series(0, 23) s(hour) " "LEFT OUTER JOIN (SELECT recorded_at, " "EXTRACT('hour' from recorded_at) " "AS hour FROM visits %(where)s) v on s.hour = v.hour " "GROUP BY s.hour ORDER BY s.hour;"), ("Visits by day", "SELECT s.dow AS day_of_week, count(v.dow) " "FROM generate_series(0, 6) s(dow) " "LEFT OUTER JOIN (SELECT recorded_at, " "EXTRACT('dow' from recorded_at) " "AS dow FROM visits %(where)s) v on s.dow = v.dow " "GROUP BY s.dow ORDER BY s.dow;") ] results = yield [self.db.execute(q % {"where": where, "and_where": and_where}) for _, q in queries] cursor = yield self.db.execute(( "SELECT (s.period * 10) AS seconds, count(v.duration) " "FROM generate_series(0, 500) s(period) " "LEFT OUTER JOIN (SELECT 
EXTRACT(EPOCH from duration) " "AS duration FROM visits) v on s.period = FLOOR(v.duration / 10) " "GROUP BY s.period HAVING s.period <= 36 ORDER BY s.period;" )) graph = "\n".join(ascii_graph.Pyasciigraph() .graph("Frequency graph", cursor.fetchall())) self.render("stats.html", text=text, start=start, end=end, tables=[(queries[i][0], prettytable.from_db_cursor(r)) for i, r in enumerate(results)], frequency_graph=graph) class APIHandler(BaseHandler): @tornado.gen.coroutine def get(self): response = tornado.escape.json_encode({ "has_free_toilet": (yield self.has_free_toilet()) }) callback = self.get_argument("callback", None) if callback: response = "%s(%s)" % (callback, response) self.set_header("content-type", "application/json") self.write(response) if __name__ == "__main__": tornado.options.parse_command_line() app = tornado.web.Application( [(r"/login", GoogleLoginHandler), (r"/", MainHandler), (r"/shower", ShowerHandler), (r"/stats", StatsHandler), (r"/api", APIHandler), (r"/hasfreesocket", HasFreeWebSocketHandler)], template_path=os.path.join(os.path.dirname(__file__), "templates"), hmac_secret=get_hmac_secret(), cookie_secret=get_cookie_secret(), login_url="http://%s/login" % options.host, google_oauth=dict(key=os.getenv("ITTF_GOOGLE_KEY"), secret=get_google_secret()) ) ioloop = tornado.ioloop.IOLoop.instance() app.db = momoko.Pool( dsn=" ".join(["%s=%s" % c for c in get_psql_credentials().iteritems()]), size=6, ioloop=ioloop ) future = app.db.connect() ioloop.add_future(future, lambda f: ioloop.stop()) ioloop.start() future.result() app.listen(options.port) try: ioloop.start() except KeyboardInterrupt: pass
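The MainHandler.post endpoint above is guarded by the hmac_authenticated decorator: the sender must supply the JSON payload in data and an HMAC-SHA256 digest of it in token, computed with the shared secret read from .hmac_secret. A hypothetical sender could look like the sketch below (the URL, secret handling and event fields are assumptions drawn from the handler code, not part of the project):

import hashlib
import hmac
import json
import urllib.parse
import urllib.request

def post_events(secret, events, url='http://localhost:8888/'):
    # events is a list of dicts with toilet_id, is_free and timestamp keys,
    # matching the placeholders used by mogrify() in MainHandler.post.
    data = json.dumps(events)
    token = hmac.new(secret.encode(), data.encode(), hashlib.sha256).hexdigest()
    body = urllib.parse.urlencode({'data': data, 'token': token}).encode()
    return urllib.request.urlopen(url, body)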
Receive free embroidery on your next purchase. Value= 1-Hit/Location, Up to 4000 stitches. Digitizing is not included. This does not apply to customer supplied items. Apparel must be purchased through us and/or our suppliers. Do you have questions about our products? Fill out the form below and we’ll get back to you. If you are interested in Embroidery or Custom Apparel for your business or sports team in Moncton, let us know! Contact Us Today!
# -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst """ Handles the "Unicode" unit format. """ from . import console, utils class Unicode(console.Console): """ Output-only format to display pretty formatting at the console using Unicode characters. For example:: >>> import astropy.units as u >>> print(u.bar.decompose().to_string('unicode')) kg 100000 ──── m s² """ _times = "×" _line = "─" @classmethod def _get_unit_name(cls, unit): return unit.get_format_name('unicode') @classmethod def format_exponential_notation(cls, val): m, ex = utils.split_mantissa_exponent(val) parts = [] if m: parts.append(m.replace('-', '−')) if ex: parts.append("10{0}".format( cls._format_superscript(ex))) return cls._times.join(parts) @classmethod def _format_superscript(cls, number): mapping = { '0': '⁰', '1': '¹', '2': '²', '3': '³', '4': '⁴', '5': '⁵', '6': '⁶', '7': '⁷', '8': '⁸', '9': '⁹', '-': '⁻', '−': '⁻', # This is actually a "raised omission bracket", but it's # the closest thing I could find to a superscript solidus. '/': '⸍', } output = [] for c in number: output.append(mapping[c]) return ''.join(output)
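The digit-to-superscript mapping in _format_superscript can also be expressed with str.maketrans; the snippet below is a standalone sketch of the same idea, not part of the astropy API:

SUPERSCRIPTS = str.maketrans('0123456789-', '⁰¹²³⁴⁵⁶⁷⁸⁹⁻')

def superscript(number):
    # e.g. '10' + superscript('-2') renders as 10⁻²
    return str(number).translate(SUPERSCRIPTS)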
Being one of the oldest and most treasured forms of currency in the world, much has been said about gold. Here are 10 compelling quotes about gold that will help you appreciate the yellow metal and its cultural importance. If these quotes have inspired you to own gold, call American Bullion today at 1-800-326-9598 or request your Free Gold Guide by submitting the form at the top of this page. There is no obligation. Our precious metal brokers can assist you with purchasing physical gold coins and bars for secure home delivery, or even help you add gold to your retirement account. We will handle all of the details for you. Invest in something real!
import types def is_valid_signed(bits, val): if type(val) not in (types.IntType, types.LongType): return False e = bits - 1 min_val = -(2 ** e) if val < min_val: return False max_val = (2 ** e) - 1 if val > max_val: return False return True def is_valid_unsigned(bits, val): if type(val) not in (types.IntType, types.LongType): return False if val < 0: return False if val > 2 ** bits: return False return True def is_valid_string(val): if val is None: return True return type(val) in (types.StringType, types.UnicodeType) class Type(object): def __init__(self, def_val): self.c_type_override = None self.set_val(def_val) def set_val(self, val): assert self.is_valid_val(val), "%s is not a valid value of %s" % (str(self.val), str(self)) self.val = val def c_type(self): if self.c_type_override is not None: return self.c_type_override return self.c_type_class def get_type_typ_enum(self): return self.type_enum def is_struct(self): return isinstance(self, Struct) def is_array(self): return isinstance(self, Array) class Bool(Type): c_type_class = "bool" type_enum = "TYPE_BOOL" def __init__(self, def_val): super(Bool, self).__init__(def_val) def is_valid_val(self, val): return val in (True, False) class U16(Type): c_type_class = "uint16_t" type_enum = "TYPE_U16" def is_valid_val(self, val): return is_valid_unsigned(16, val) class I32(Type): c_type_class = "int32_t" type_enum = "TYPE_I32" def __init__(self, def_val=0): super(I32, self).__init__(def_val) def is_valid_val(self, val): return is_valid_signed(32, val) class U32(Type): c_type_class = "uint32_t" type_enum = "TYPE_U32" def is_valid_val(self, val): return is_valid_unsigned(32, val) class U64(Type): c_type_class = "uint64_t" type_enum = "TYPE_U64" def is_valid_val(self, val): return is_valid_unsigned(64, val) # behaves like uint32_t, using unique name to signal intent class Color(U32): type_enum = "TYPE_COLOR" class String(Type): c_type_class = "const char *" type_enum = "TYPE_STR" def is_valid_val(self, val): return is_valid_string(val) class WString(Type): c_type_class = "const WCHAR *" type_enum = "TYPE_WSTR" def is_valid_val(self, val): return is_valid_string(val) class Float(Type): c_type_class = "float" type_enum = "TYPE_FLOAT" def is_valid_val(self, val): return type(val) in (types.IntType, types.LongType, types.FloatType) # struct is just a base class # subclasses should have class instance fields which is a list of tuples: # defining name and type of the struct members: # fields = [ ("boolField", Bool(True), ("u32Field", U32(32))] # # TODO: implement struct inheritance i.e. 
a subclass should inherit all # fields from its parent class Struct(Type): c_type_class = "" type_enum = "TYPE_STRUCT_PTR" fields = [] def __init__(self, *vals): # fields must be a class variable in Struct's subclass self.values = [Field(f.name, f.typ, f.flags) for f in self.fields] self.c_type_override = "%s *" % self.name() self.offset = None for i in range(len(vals)): self.values[i].set_val(vals[i]) def is_valid_val(self, val): return issubclass(val, Struct) def name(self): return self.__class__.__name__ def as_str(self): s = str(self) + "\n" for v in self.values: if isinstance(v, Field): s += "%s: %s\n" % (v.name, str(v.val)) return s def __setattr__(self, name, value): # special-case self.values, which we refer to if name == "values": object.__setattr__(self, name, value) return for field in self.values: if field.name == name: field.set_val(value) return object.__setattr__(self, name, value) class Array(Type): c_type_class = "" type_enum = "TYPE_ARRAY" def __init__(self, typ, values): # TODO: we don't support arrays of primitve values, just structs assert issubclass(typ, Struct) self.typ = typ self.values = values for v in values: assert self.is_valid_val(v) self.c_type_override = "Vec<%s*> *" % typ.__name__ self.offset = None def is_valid_val(self, val): return isinstance(val, self.typ) def name(self): try: return self.typ.__name__ except: print(self.typ) raise # those are bit flags NoStore = 1 Compact = 2 class Field(object): def __init__(self, name, typ_val, flags=0): self.name = name self.typ = typ_val self.flags = flags if self.is_no_store(): assert not self.is_compact() if self.is_compact(): to_test = typ_val if typ_val.is_array(): to_test = typ_val.typ else: assert to_test.is_struct() for field in to_test.fields: assert not field.is_struct() if typ_val.is_struct(): # TODO: support NULL values for the struct, represented by using # class for typ_val self.val = typ_val elif typ_val.is_array(): self.val = typ_val else: self.val = typ_val.val def c_type(self): return self.typ.c_type() def is_struct(self): return self.typ.is_struct() def is_signed(self): return type(self.typ) == I32 def is_unsigned(self): return type(self.typ) in (Bool, U16, U32, U64, Color) def is_bool(self): return type(self.typ) == Bool def is_color(self): return type(self.typ) == Color def is_string(self): return type(self.typ) in (String, WString) def is_float(self): return type(self.typ) == Float def is_no_store(self): return self.flags & NoStore == NoStore def is_compact(self): return self.flags & Compact == Compact def is_array(self): return type(self.typ) == Array def set_val(self, val): # Note: we don't support this for struct or arrays assert not (self.is_struct() or self.is_array()) assert self.typ.is_valid_val(val) self.val = val def get_typ_enum(self, for_bin=False): type_enum = self.typ.get_type_typ_enum() # binary doesn't have a notion of compact storage is_compact = self.is_compact() and not for_bin if self.is_no_store() or is_compact: s = "(Type)(" + type_enum if self.is_no_store(): s = s + " | TYPE_NO_STORE_MASK" if self.is_compact(): s = s + " | TYPE_STORE_COMPACT_MASK" return s + ")" return type_enum
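The comments above describe the intended usage: a Struct subclass declares a class-level fields list and is then instantiated with per-field values. A hypothetical subclass (all names invented for illustration, using the Field-based form the constructor actually expects) might look like this:

class WindowPos(Struct):
    # Hypothetical settings struct; field names are invented for illustration.
    fields = [
        Field("x", I32(0)),
        Field("y", I32(0)),
        Field("isMaximized", Bool(False)),
    ]

win_pos = WindowPos(10, 20, True)   # positional values follow the fields order
win_pos.y = 40                      # __setattr__ routes this to the matching Field
print(win_pos.as_str())             # dumps each field name with its current value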
My pleasure! Is this yr picture? so i can say-you are beautiful from outside as well as inside-Rare combination! I want to see your photo! Haha. I will let you know when I post my picture. Great! But I love you for yr inner beauty. That’s a huge compliment and especially coming from you, makes it priceless. Thank you! No priya! I am only being honest with you! This is deep and dope. 👍👍👍. Love this one. That is so true. A story is not over until the life ends, but even when the life ends a new story will begin. For on the next the faces still reveal sister…. When the sequel finishes, the story still continues. It’s just that the author had ostensibly ended it for the readers. The rest of the story becomes a sequel. Sequel too comes to an end at some point of time. What do you think, does the story end after that? There can always be #3. Check with Sylvester Stallone for more details. The story never ends…Does death kill it however ?Maybe,maybe not…I can’t tell…. I believe that is O so well stated.Awesome! Anytime Zealous lady.You are welcome. Always a pleasure to have you here! No better place than around genuine friends and warmth. Hahahahahah mischievous students always pass their tests…then after they have time to be mischievous. Hahaha. I actually don’t. Cause here, hitting a student isn’t allowed. Oooooh then PePa is a happy go lucky today. Hahaha. What’s your zodiac sign? Am a bull sweerie…what of you? Gemini…let me check that out in a bit….I love constellations literally…Read the blog “Eat and be merry” you will see that. Oh. Nice area of interest! I’ll surely read that blog. Can you send me the link? It is just a portion from the whole read but am sure you will enjoy it. Yes, I would surely read it! Oooh cool I hope you will.Good afternoon to you? Gemini,I now know it actually means twins,18beautiful stars and I must admit it looks like two beautiful girls side by side….quite a sight I must thus look for.Did you know that Gemini and Taurus lie almost side by side in the sky? I have no idea about these zodiac signs and its constellations. Thanks for the information! Something new to know! I knoww..Haha most of us never know what they actually mean but I am quite a curious one and the skies and heavens have always amazed me.You are welcome. Hahahaha awesome Teacher Ms Zealous.Am ready for it. Yes… Like next part starts with one ends..
# -*- coding: utf-8 -*- from __future__ import absolute_import from . import base class Section(base.SectionBase): @base.returns_single_item def add(self, peer, *peers, **kwargs): """Adds peers to the bootstrap list. Parameters ---------- peer : str IPFS MultiAddr of a peer to add to the list Returns ------- dict """ args = (peer,) + peers return self._client.request('/bootstrap/add', args, decoder='json', **kwargs) @base.returns_single_item def list(self, **kwargs): """Returns the addresses of peers used during initial discovery of the IPFS network. Peers are output in the format ``<multiaddr>/<peerID>``. .. code-block:: python >>> client.bootstrap.list() {'Peers': [ '/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYER … uvuJ', '/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRa … ca9z', '/ip4/104.236.179.241/tcp/4001/ipfs/QmSoLPppuBtQSGwKD … KrGM', … '/ip4/178.62.61.185/tcp/4001/ipfs/QmSoLMeWqB7YGVLJN3p … QBU3' ]} Returns ------- dict +-------+-------------------------------+ | Peers | List of known bootstrap peers | +-------+-------------------------------+ """ return self._client.request('/bootstrap', decoder='json', **kwargs) @base.returns_single_item def rm(self, peer, *peers, **kwargs): """Removes peers from the bootstrap list. Parameters ---------- peer : str IPFS MultiAddr of a peer to remove from the list Returns ------- dict """ args = (peer,) + peers return self._client.request('/bootstrap/rm', args, decoder='json', **kwargs)
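Assuming this Section is exposed as client.bootstrap on an ipfshttpclient-style client (as the request paths suggest), typical interactive use would look roughly like the sketch below; the connection defaults are assumptions:

import ipfshttpclient

with ipfshttpclient.connect() as client:          # assumes a local IPFS daemon
    peers = client.bootstrap.list()['Peers']
    print(len(peers), 'bootstrap peers configured')
    # client.bootstrap.add(peer_multiaddr)        # add a peer by its multiaddress
    # client.bootstrap.rm(peer_multiaddr)         # remove it again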
So Your Station Submitted a Successful Incentive Auction Bid, Now What? The $88.4 Billion Question – What’s Next in the Incentive Auction? The Auction Block: It’s March 29 – Now What?
""" Django settings for walladog project. Generated by 'django-admin startproject' using Django 1.8.6. For more information on this file, see https://docs.djangoproject.com/en/1.8/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.8/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '=e8m1h9)0lazq)%7bcho@a9w^_kfd)_plf_teg8_jp^ax9&k!p' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', #proyect apps 'categories', 'images', 'products', 'races', 'saveserches', 'states', 'transactions', 'users', 'walladog', #Rest_framework 'rest_framework', 'oauth2_provider', 'corsheaders', 'django.contrib.gis', ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'corsheaders.middleware.CorsMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.security.SecurityMiddleware' ) ROOT_URLCONF = 'walladog.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'walladog.wsgi.application' # Database # https://docs.djangoproject.com/en/1.8/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.mysql', 'NAME': 'walladog_dev', 'USER': 'walladog', 'PASSWORD': 'Keepcoding123', 'HOST': 'mysql.develjitsu.com', 'PORT': '3306', }, # 'prod': { # 'ENGINE': 'django.db.backends.mysql', # 'NAME': 'walladog', # 'USER': 'walladog', # 'PASSWORD': 'xxx', # 'HOST': 'mysql.develjitsu.com', # 'PORT': '3306', # }, } # POR SI QUEREIS TRABAJAR EN LOCAL CON SQLITE # DATABASES = { # 'default': { # 'ENGINE': 'django.db.backends.sqlite3', # 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), # } # } # Internationalization # https://docs.djangoproject.com/en/1.8/topics/i18n/ LANGUAGE_CODE = 'es-es' # TIME_ZONE = 'UTC' # Europe/Madrid TIME_ZONE = 'Europe/Madrid' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.8/howto/static-files/ # REST FRAMEWORK REST_FRAMEWORK = { 'EXCEPTION_HANDLER': 'rest_framework.views.exception_handler', 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination', 'DEFAULT_AUTHENTICATION_CLASSES': ( 'oauth2_provider.ext.rest_framework.OAuth2Authentication', ), 'DEFAULT_PERMISSION_CLASSES': ( 'rest_framework.permissions.IsAuthenticated', ), 'DEFAULT_METADATA_CLASS': 'rest_framework.metadata.SimpleMetadata' } OAUTH2_PROVIDER = { # 
this is the list of available scopes 'SCOPES': { 'read': 'Read scope', 'write': 'Write scope', 'groups': 'Access to your groups'} } # CORS # APPEND_SLASH = False CORS_ORIGIN_ALLOW_ALL = True CORS_ORIGIN_WHITELIST = ( 'http://localhost:9000', 'http://www.walladog.com', 'walladog.com' ) # CORS_URLS_REGEX = r'^/api/.*$' STATIC_URL = '/static/' # DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage' # # AWS_ACCESS_KEY_ID = 'AKIAJYDV7TEBJS6JWEEQ' # AWS_SECRET_ACCESS_KEY = '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' # AWS_STORAGE_BUCKET_NAME = 'walladog' # AWS_QUERYSTRING_AUTH = False # AWS_ACCESS_KEY_ID = os.environ['AKIAJYDV7TEBJS6JWEEQ'] # AWS_SECRET_ACCESS_KEY = os.environ['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'] # AWS_STORAGE_BUCKET_NAME = os.environ['walladog'] # MEDIA_URL = 'http://%s.s3.amazonaws.com/' % AWS_STORAGE_BUCKET_NAME # DEFAULT_FILE_STORAGE = "storages.backends.s3boto.S3BotoStorage"
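A common alternative to keeping credentials such as the database password or the commented-out AWS keys directly in this settings module is to read them from environment variables at startup; a minimal sketch, with variable names that are assumptions rather than part of the project:

import os

# Hypothetical environment-based overrides; the variable names are assumptions.
DATABASES['default']['PASSWORD'] = os.environ.get('WALLADOG_DB_PASSWORD',
                                                  DATABASES['default']['PASSWORD'])
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID', '')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY', '')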
On the one hand we hear that we need each other, that it’s not good to be alone, that friends are forever. On the other hand we hear that all we need is Yeshua, He is all-sufficient and provides all our needs. Both of these are true, and so we are faced with something of a paradox on the topic of friendship. As believers in Yeshua, it is through our friendships that we are can be connected not only in the flesh or through emotions but also in the Spirit. A good example of this is the way that John the Baptist responded with excitement to Yeshua while they were still in their mothers’ wombs. So it can be with us when we see someone we know and love. The spirit within us gets excited when we come in contact with each other. Why is this? It can be emotional, physical and spiritual. It may be because we are in relationship; we have something in common and we understand each other. This is what the congregation is built on; this is what it means to connect. In Hebrew the word connect or connection comes from the same root as the word for friend (Chaver). A friend takes our focus off self and puts it on to someone else. We are connected and concerned for each other. Yeshua said, “No longer do I call you slaves, for the slave does not know what his master is doing; but I have called you friends, for all things that I have heard from My Father I have made known to you” (John 15:15). Why is it so important to be a friend? A friend in this context does not control or manipulate but is genuinely concerned. Friendship like this knows no selfishness or jealousy but its purpose is to build up the other person out of a desire to truly care for one another. The solution to the friendship paradox is the love God revealed when he came and walked among us. All we need is Yeshua. Yeshua is our example of the ultimate faithful friend who sacrificed everything for us and in spite of our failings, loves us to the end. And … if the focus of our lives is a personal relationship with God through Yeshua, then we will also be empowered to share His quality of love with others, becoming a true friend to those around us. In July the “Branch of the Galilee” Congregation in Nazareth (a ministry of Return to Zion), organized a kids day-camp together with the neighboring congregation “Zur Yeshuati” and a group of youth from America. Forty children ages 7-15 speaking Russian, Hebrew and Arabic gathered in unity as one team! This is the fifth year in a row this camp has been conducted. The camp lasted eight days, most of which were spent at the congregational facility. The children and youth engaged in various activities and enjoyed cooked meals. On the eighth day they went to a water park on the shores of the Sea of Galilee. The delight of the children and the joy of their parents were very touching. We praise the Lord for His protection. No one was hurt, and all were blessed by these special days together. Each day we started and finished with prayer, and throughout the day the children were taught lessons from the Word of God. Through the children, their parents also learned about Scriptures. We believe the seeds that were sown in the children will sprout and bear good fruit! We have just finished a week that was a little taste of heaven, and a test flight into the joint destiny of Israel and the nations. How could all that happen while conducting a camp for 60 Israeli children? Tents of Mercy Congregation organized and sponsored the logistics for the Israeli end of the camp. 
As has happened every year for the last eleven years, a large team of selfless volunteer servants of God came from Beltway Park Church in Abilene, TX, to intercede, serve in every way imaginable, and in general help shine God’s love into the hearts of the kids. The children, along with their selfless young Israeli counselors came from our northern Tikkun-related network of congregations. The inspiration for our camp theme came from the almost magical way a hot air balloon rises from the ground and travels through the air, moved along by the wind, something we cannot even see with our physical eyes (bear in mind that in Hebrew the word spirit and the word wind are one and the same – “ruach”). We desire to see our kids lifted off the ground and flying in life – powered by God’s Spirit and not our/their own strength. Hence camp week was a kind of test flight. Of course there was lots of fun and games, and food and water to cool off the hot days. Then came the evening worship and impartation of encouragement to live life in dependence on God’s spirit. We saw a new level of breakthrough in worship, in prayer and in the general atmosphere. Children heard God speak to them, and we could see they were truly touched. And come to think of it we did have a taste of joint destiny in “Acts 2:44 style” community – which also happened 2000 years ago through the power of Gods spirit. In a small way, during a week of kids’ camp, Israel and the Nations were becoming one. This happens in mutual service and in relying on God’s Spirit – the only source that can empower us to live lives of witness.
from collections import defaultdict from numba.core import config class Rewrite(object): '''Defines the abstract base class for Numba rewrites. ''' def __init__(self, state=None): '''Constructor for the Rewrite class. ''' pass def match(self, func_ir, block, typemap, calltypes): '''Overload this method to check an IR block for matching terms in the rewrite. ''' return False def apply(self): '''Overload this method to return a rewritten IR basic block when a match has been found. ''' raise NotImplementedError("Abstract Rewrite.apply() called!") class RewriteRegistry(object): '''Defines a registry for Numba rewrites. ''' _kinds = frozenset(['before-inference', 'after-inference']) def __init__(self): '''Constructor for the rewrite registry. Initializes the rewrites member to an empty list. ''' self.rewrites = defaultdict(list) def register(self, kind): """ Decorator adding a subclass of Rewrite to the registry for the given *kind*. """ if kind not in self._kinds: raise KeyError("invalid kind %r" % (kind,)) def do_register(rewrite_cls): if not issubclass(rewrite_cls, Rewrite): raise TypeError('{0} is not a subclass of Rewrite'.format( rewrite_cls)) self.rewrites[kind].append(rewrite_cls) return rewrite_cls return do_register def apply(self, kind, state): '''Given a pipeline and a dictionary of basic blocks, exhaustively attempt to apply all registered rewrites to all basic blocks. ''' assert kind in self._kinds blocks = state.func_ir.blocks old_blocks = blocks.copy() for rewrite_cls in self.rewrites[kind]: # Exhaustively apply a rewrite until it stops matching. rewrite = rewrite_cls(state) work_list = list(blocks.items()) while work_list: key, block = work_list.pop() matches = rewrite.match(state.func_ir, block, state.typemap, state.calltypes) if matches: if config.DEBUG or config.DUMP_IR: print("_" * 70) print("REWRITING (%s):" % rewrite_cls.__name__) block.dump() print("_" * 60) new_block = rewrite.apply() blocks[key] = new_block work_list.append((key, new_block)) if config.DEBUG or config.DUMP_IR: new_block.dump() print("_" * 70) # If any blocks were changed, perform a sanity check. for key, block in blocks.items(): if block != old_blocks[key]: block.verify() # Some passes, e.g. _inline_const_arraycall are known to occasionally # do invalid things WRT ir.Del, others, e.g. RewriteArrayExprs do valid # things with ir.Del, but the placement is not optimal. The lines below # fix-up the IR so that ref counts are valid and optimally placed, # see #4093 for context. This has to be run here opposed to in # apply() as the CFG needs computing so full IR is needed. from numba.core import postproc post_proc = postproc.PostProcessor(state.func_ir) post_proc.run() rewrite_registry = RewriteRegistry() register_rewrite = rewrite_registry.register
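A rewrite is added to the registry by decorating a Rewrite subclass with register_rewrite and one of the two kinds; the class below is a do-nothing illustration of the registration pattern, not an actual Numba pass:

@register_rewrite('before-inference')
class NoopRewrite(Rewrite):
    # Illustrative only: never matches, so apply() is never called.
    def match(self, func_ir, block, typemap, calltypes):
        return False

    def apply(self):
        raise NotImplementedError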
This is an exclusive business profile of Special Purpose Machines, located in Wadhwan. From this page, you can directly contact Special Purpose Machines using the enquiry form provided on the right. On the left you can find the verified mobile number of Special Purpose Machines – feel free to call us to know more about our products and services. We will soon update our catalogue, which you can download to get the latest information about all our products and services and the latest deals and offers from Special Purpose Machines. Do check out our Job Openings section to learn about all the vacancies at Special Purpose Machines. You can also check out our Photo Gallery section to see the latest photos of Special Purpose Machines. Before leaving, do not forget to give us your review and rating if you have been a customer of Special Purpose Machines in the past. Do you want to receive special deals and offers from Special Purpose Machines?
import datetime import os import unittest import docker import pytest from docker.constants import ( DEFAULT_DOCKER_API_VERSION, DEFAULT_TIMEOUT_SECONDS, DEFAULT_MAX_POOL_SIZE, IS_WINDOWS_PLATFORM ) from docker.utils import kwargs_from_env from . import fake_api try: from unittest import mock except ImportError: import mock TEST_CERT_DIR = os.path.join(os.path.dirname(__file__), 'testdata/certs') POOL_SIZE = 20 class ClientTest(unittest.TestCase): @mock.patch('docker.api.APIClient.events') def test_events(self, mock_func): since = datetime.datetime(2016, 1, 1, 0, 0) mock_func.return_value = fake_api.get_fake_events()[1] client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) assert client.events(since=since) == mock_func.return_value mock_func.assert_called_with(since=since) @mock.patch('docker.api.APIClient.info') def test_info(self, mock_func): mock_func.return_value = fake_api.get_fake_info()[1] client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) assert client.info() == mock_func.return_value mock_func.assert_called_with() @mock.patch('docker.api.APIClient.ping') def test_ping(self, mock_func): mock_func.return_value = True client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) assert client.ping() is True mock_func.assert_called_with() @mock.patch('docker.api.APIClient.version') def test_version(self, mock_func): mock_func.return_value = fake_api.get_fake_version()[1] client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) assert client.version() == mock_func.return_value mock_func.assert_called_with() def test_call_api_client_method(self): client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) with pytest.raises(AttributeError) as cm: client.create_container() s = cm.exconly() assert "'DockerClient' object has no attribute 'create_container'" in s assert "this method is now on the object APIClient" in s with pytest.raises(AttributeError) as cm: client.abcdef() s = cm.exconly() assert "'DockerClient' object has no attribute 'abcdef'" in s assert "this method is now on the object APIClient" not in s def test_call_containers(self): client = docker.DockerClient( version=DEFAULT_DOCKER_API_VERSION, **kwargs_from_env()) with pytest.raises(TypeError) as cm: client.containers() s = cm.exconly() assert "'ContainerCollection' object is not callable" in s assert "docker.APIClient" in s @pytest.mark.skipif( IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux' ) @mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool") def test_default_pool_size_unix(self, mock_obj): client = docker.DockerClient( version=DEFAULT_DOCKER_API_VERSION ) mock_obj.return_value.urlopen.return_value.status = 200 client.ping() base_url = "{base_url}/v{version}/_ping".format( base_url=client.api.base_url, version=client.api._version ) mock_obj.assert_called_once_with(base_url, "/var/run/docker.sock", 60, maxsize=DEFAULT_MAX_POOL_SIZE ) @pytest.mark.skipif( not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows' ) @mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool") def test_default_pool_size_win(self, mock_obj): client = docker.DockerClient( version=DEFAULT_DOCKER_API_VERSION ) mock_obj.return_value.urlopen.return_value.status = 200 client.ping() mock_obj.assert_called_once_with("//./pipe/docker_engine", 60, maxsize=DEFAULT_MAX_POOL_SIZE ) @pytest.mark.skipif( IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux' ) @mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool") def test_pool_size_unix(self, mock_obj): client = 
docker.DockerClient( version=DEFAULT_DOCKER_API_VERSION, max_pool_size=POOL_SIZE ) mock_obj.return_value.urlopen.return_value.status = 200 client.ping() base_url = "{base_url}/v{version}/_ping".format( base_url=client.api.base_url, version=client.api._version ) mock_obj.assert_called_once_with(base_url, "/var/run/docker.sock", 60, maxsize=POOL_SIZE ) @pytest.mark.skipif( not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows' ) @mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool") def test_pool_size_win(self, mock_obj): client = docker.DockerClient( version=DEFAULT_DOCKER_API_VERSION, max_pool_size=POOL_SIZE ) mock_obj.return_value.urlopen.return_value.status = 200 client.ping() mock_obj.assert_called_once_with("//./pipe/docker_engine", 60, maxsize=POOL_SIZE ) class FromEnvTest(unittest.TestCase): def setUp(self): self.os_environ = os.environ.copy() def tearDown(self): os.environ = self.os_environ def test_from_env(self): """Test that environment variables are passed through to utils.kwargs_from_env(). KwargsFromEnvTest tests that environment variables are parsed correctly.""" os.environ.update(DOCKER_HOST='tcp://192.168.59.103:2376', DOCKER_CERT_PATH=TEST_CERT_DIR, DOCKER_TLS_VERIFY='1') client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) assert client.api.base_url == "https://192.168.59.103:2376" def test_from_env_with_version(self): os.environ.update(DOCKER_HOST='tcp://192.168.59.103:2376', DOCKER_CERT_PATH=TEST_CERT_DIR, DOCKER_TLS_VERIFY='1') client = docker.from_env(version='2.32') assert client.api.base_url == "https://192.168.59.103:2376" assert client.api._version == '2.32' def test_from_env_without_version_uses_default(self): client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) assert client.api._version == DEFAULT_DOCKER_API_VERSION def test_from_env_without_timeout_uses_default(self): client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) assert client.api.timeout == DEFAULT_TIMEOUT_SECONDS @pytest.mark.skipif( IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux' ) @mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool") def test_default_pool_size_from_env_unix(self, mock_obj): client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) mock_obj.return_value.urlopen.return_value.status = 200 client.ping() base_url = "{base_url}/v{version}/_ping".format( base_url=client.api.base_url, version=client.api._version ) mock_obj.assert_called_once_with(base_url, "/var/run/docker.sock", 60, maxsize=DEFAULT_MAX_POOL_SIZE ) @pytest.mark.skipif( not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows' ) @mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool") def test_default_pool_size_from_env_win(self, mock_obj): client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) mock_obj.return_value.urlopen.return_value.status = 200 client.ping() mock_obj.assert_called_once_with("//./pipe/docker_engine", 60, maxsize=DEFAULT_MAX_POOL_SIZE ) @pytest.mark.skipif( IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux' ) @mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool") def test_pool_size_from_env_unix(self, mock_obj): client = docker.from_env( version=DEFAULT_DOCKER_API_VERSION, max_pool_size=POOL_SIZE ) mock_obj.return_value.urlopen.return_value.status = 200 client.ping() base_url = "{base_url}/v{version}/_ping".format( base_url=client.api.base_url, version=client.api._version ) mock_obj.assert_called_once_with(base_url, "/var/run/docker.sock", 60, maxsize=POOL_SIZE ) 
@pytest.mark.skipif( not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows' ) @mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool") def test_pool_size_from_env_win(self, mock_obj): client = docker.from_env( version=DEFAULT_DOCKER_API_VERSION, max_pool_size=POOL_SIZE ) mock_obj.return_value.urlopen.return_value.status = 200 client.ping() mock_obj.assert_called_once_with("//./pipe/docker_engine", 60, maxsize=POOL_SIZE )
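For context, the knob these pool-size tests exercise is what an API consumer would set when constructing a client. A minimal sketch (not part of the test suite; it assumes a reachable local Docker daemon at the default Unix socket):

import docker

# max_pool_size bounds the transport's urllib3/npipe connection pool,
# which is exactly what the mocked assertions above check without a live daemon.
client = docker.DockerClient(base_url='unix://var/run/docker.sock', max_pool_size=20)
print(client.ping())  # True if the daemon answered the /_ping endpoint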
RUSSELL HOTEL has immediate employment opportunities. R.H. intends to invite experienced individuals/expatriates or a consultancy firm capable of rendering expert services in various fields of Business Management, M.B.A. (Finance), Administration and Commissioning. R.H. has immediate employment vacancies in the following designations: Accountant, Duty Manager, Front Manager, Sales Manager, Receptionist, Driver, Bar Boy, Cook, Food & Beverage Cost Controller/Manager, Customer Manager, Computer Applicant, Software/Hardware Engineer, Electrical Engineer, Auto Engineer, etc. Experience is preferred, but we're willing to train an eager individual who wants to learn all aspects of pastry.
import sys from math import sqrt, ceil import logging logger = logging.getLogger(__name__) class NoNegativeTestException(Exception): pass def ochiai(executed_passing, executed_failing, total_passing, total_failing): if not total_failing > 0: raise NoNegativeTestException() if executed_failing + executed_passing == 0: return 0 return executed_failing / sqrt(total_failing * (executed_passing + executed_failing)) def jaccard(executed_passing, executed_failing, total_passing, total_failing): if not total_failing > 0: raise NoNegativeTestException() return executed_failing / (total_failing + executed_passing) def tarantula(executed_passing, executed_failing, total_passing, total_failing): if not total_failing > 0: raise NoNegativeTestException() if executed_failing + executed_passing == 0: return 0 return ((executed_failing / total_failing) / ((executed_failing / total_failing) + (executed_passing / total_passing))) class Localizer: def __init__(self, config, lines): self.lines = lines self.config = config def __call__(self, test_suite, all_positive, all_negative): ''' test_suite: tests under consideration all_positive, all_negative: (test * trace) list trace: expression list computes config['suspicious']/config['group_size'] groups each consisting of config['group_size'] suspicious expressions ''' group_size = self.config['group_size'] suspicious = self.config['suspicious'] if self.config['localization'] == 'ochiai': formula = ochiai elif self.config['localization'] == 'jaccard': formula = jaccard elif self.config['localization'] == 'tarantula': formula = tarantula # first, remove irrelevant information: positive = [] negative = [] if not self.config['invalid_localization']: for test, trace in all_positive: if test in test_suite: positive.append((test, trace)) for test, trace in all_negative: if test in test_suite: negative.append((test, trace)) else: positive = all_positive negative = all_negative all = set() for _, trace in positive: all |= set(trace) for _, trace in negative: all |= set(trace) # update suspcious if self.config['localize_only']: suspicious = len(all) logger.info('trace size: {}'.format(suspicious)) executed_positive = dict() executed_negative = dict() for e in all: executed_positive[e] = 0 executed_negative[e] = 0 for _, trace in positive: executed = set(trace) for e in executed: executed_positive[e] += 1 for _, trace in negative: executed = set(trace) for e in executed: executed_negative[e] += 1 with_score = [] def is_selected(expr): return expr[0] in self.lines if self.lines is not None: filtered = filter(is_selected, all) all = list(filtered) for e in all: try: score = formula(executed_positive[e], executed_negative[e], len(positive), len(negative)) if not (score == 0.0): # 0.0 mean not executed by failing test with_score.append((e, score)) except NoNegativeTestException: logger.info("No negative test exists") exit(0) ranking = sorted(with_score, key=lambda r: r[1], reverse=True) if self.config['group_by_score']: top = ranking[:suspicious] else: if self.config['localize_from_bottom']: top = sorted(ranking[:suspicious], key=lambda r: r[0][0], reverse=True) # sort by location backward else: top = sorted(ranking[:suspicious], key=lambda r: r[0][0]) # sort by location groups_with_score = [] for i in range(0, ceil(suspicious / group_size)): if len(top) == 0: break group = [] total_score = 0 for j in range(0, group_size): if len(top) == 0: break expr, score = top.pop(0) total_score += score group.append(expr) groups_with_score.append((group, total_score)) sorted_groups = 
sorted(groups_with_score, key=lambda r: r[1], reverse=True) groups = [] for (group, score) in sorted_groups: groups.append(group) logger.info("selected expressions {} with group score {:.5} ".format(group, score)) return groups
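As a quick illustration of what the suspiciousness formulas above compute, here is a self-contained sketch; the coverage counts and expression labels are invented, and the Ochiai definition is repeated inline so the snippet runs on its own:

from math import sqrt

# expression -> (passing tests that executed it, failing tests that executed it)
spectrum = {'line 10': (5, 3), 'line 42': (0, 3), 'line 77': (6, 0)}
total_passing, total_failing = 6, 3

def ochiai(ep, ef, tp, tf):
    # ef / sqrt(tf * (ep + ef)), as defined above; 0 when the expression was never executed
    return 0.0 if ep + ef == 0 else ef / sqrt(tf * (ep + ef))

ranking = sorted(spectrum, key=lambda e: ochiai(*spectrum[e], total_passing, total_failing), reverse=True)
print(ranking)  # ['line 42', 'line 10', 'line 77'] -- code touched only by failing tests ranks first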
Let the vivacious diva in you get a new breath of life with this pair of belly shoes by Glameous. Crafted using polyvinyl chloride, the sole will keep your feet comfortable all day long. Team them perfectly with a midi skirt and a floral crop top for the ultimate look.
# Copyright 2016 Paul Balanca. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """test code to make sure the preprocessing works all right""" import numpy as np import tensorflow as tf from datasets import dataset_factory from preprocessing import ssd_vgg_preprocessing from tf_extended import seglink as tfe_seglink import util slim = tf.contrib.slim # =========================================================================== # # I/O and preprocessing Flags. # =========================================================================== # tf.app.flags.DEFINE_integer( 'num_readers', 8, 'The number of parallel readers that read data from the dataset.') tf.app.flags.DEFINE_integer( 'num_preprocessing_threads', 4, 'The number of threads used to create the batches.') # =========================================================================== # # Dataset Flags. # =========================================================================== # tf.app.flags.DEFINE_string( 'dataset_name', 'synthtext', 'The name of the dataset to load.') tf.app.flags.DEFINE_string( 'dataset_split_name', 'train', 'The name of the train/test split.') tf.app.flags.DEFINE_string( 'dataset_dir', '~/dataset/SSD-tf/SynthText', 'The directory where the dataset files are stored.') tf.app.flags.DEFINE_string( 'model_name', 'ssd_vgg', 'The name of the architecture to train.') tf.app.flags.DEFINE_integer( 'batch_size', 2, 'The number of samples in each batch.') tf.app.flags.DEFINE_integer( 'train_image_size', 512, 'Train image size') tf.app.flags.DEFINE_integer('max_number_of_steps', None, 'The maximum number of training steps.') FLAGS = tf.app.flags.FLAGS # =========================================================================== # # Main training routine. # =========================================================================== # def main(_): if not FLAGS.dataset_dir: raise ValueError('You must supply the dataset directory with --dataset_dir') tf.logging.set_verbosity(tf.logging.DEBUG) batch_size = FLAGS.batch_size; with tf.Graph().as_default(): # Select the dataset. dataset = dataset_factory.get_dataset( FLAGS.dataset_name, FLAGS.dataset_split_name, FLAGS.dataset_dir) util.proc.set_proc_name(FLAGS.model_name + '_' + FLAGS.dataset_name) # =================================================================== # # Create a dataset provider and batches. # =================================================================== # with tf.device('/cpu:0'): with tf.name_scope(FLAGS.dataset_name + '_data_provider'): provider = slim.dataset_data_provider.DatasetDataProvider( dataset, num_readers=FLAGS.num_readers, common_queue_capacity=20 * batch_size, common_queue_min=10 * batch_size, shuffle=True) # Get for SSD network: image, labels, bboxes. 
[image, shape, gignored, gbboxes, x1, x2, x3, x4, y1, y2, y3, y4] = provider.get(['image', 'shape', 'object/ignored', 'object/bbox', 'object/oriented_bbox/x1', 'object/oriented_bbox/x2', 'object/oriented_bbox/x3', 'object/oriented_bbox/x4', 'object/oriented_bbox/y1', 'object/oriented_bbox/y2', 'object/oriented_bbox/y3', 'object/oriented_bbox/y4' ]) gxs = tf.transpose(tf.stack([x1, x2, x3, x4])) #shape = (N, 4) gys = tf.transpose(tf.stack([y1, y2, y3, y4])) image = tf.identity(image, 'input_image') # Pre-processing image, labels and bboxes. image_shape = (FLAGS.train_image_size, FLAGS.train_image_size) image, gignored, gbboxes, gxs, gys = \ ssd_vgg_preprocessing.preprocess_image(image, gignored, gbboxes, gxs, gys, out_shape=image_shape, is_training = True) gxs = gxs * tf.cast(image_shape[1], gxs.dtype) gys = gys * tf.cast(image_shape[0], gys.dtype) gorbboxes = tfe_seglink.tf_min_area_rect(gxs, gys) image = tf.identity(image, 'processed_image') with tf.Session() as sess: coord = tf.train.Coordinator() threads = tf.train.start_queue_runners(sess=sess, coord=coord) i = 0 while i < 2: i += 1 image_data, label_data, bbox_data, xs_data, ys_data, orbboxes = \ sess.run([image, gignored, gbboxes, gxs, gys, gorbboxes]) image_data = image_data + [123., 117., 104.] image_data = np.asarray(image_data, np.uint8) h, w = image_data.shape[0:-1] bbox_data = bbox_data * [h, w, h, w] I_bbox = image_data.copy() I_xys = image_data.copy() I_orbbox = image_data.copy() for idx in range(bbox_data.shape[0]): def draw_bbox(): y1, x1, y2, x2 = bbox_data[idx, :] util.img.rectangle(I_bbox, (x1, y1), (x2, y2), color = util.img.COLOR_WHITE) def draw_xys(): points = zip(xs_data[idx, :], ys_data[idx, :]) cnts = util.img.points_to_contours(points); util.img.draw_contours(I_xys, cnts, -1, color = util.img.COLOR_GREEN) def draw_orbbox(): orbox = orbboxes[idx, :] import cv2 rect = ((orbox[0], orbox[1]), (orbox[2], orbox[3]), orbox[4]) box = cv2.cv.BoxPoints(rect) box = np.int0(box) cv2.drawContours(I_orbbox, [box], 0, util.img.COLOR_RGB_RED, 1) draw_bbox() draw_xys(); draw_orbbox(); print util.sit(I_bbox) print util.sit(I_xys) print util.sit(I_orbbox) print 'check the images and make sure that bboxes in difference colors are the same.' coord.request_stop() coord.join(threads) if __name__ == '__main__': tf.app.run()
Tired of bad audio on your smartphone videos? Weak lighting, and boring shots? The SmartCine is an all-in-one kit for adding video production superpowers to any smartphone. It’s incredibly compact, effective, and inexpensive.
import uuid from django.db import models from django.template import Context, Template from django.urls import reverse from django.utils.translation import ugettext_lazy as _ from jsonfield import JSONField from lbutils import create_instance, get_or_none from lbworkflow import settings from lbworkflow.core.helper import safe_eval class ProcessCategory(models.Model): uuid = models.UUIDField(unique=True, default=uuid.uuid4, editable=False) name = models.CharField(_("Name"), max_length=255, db_index=True) oid = models.IntegerField(_("Order"), default=999) is_active = models.BooleanField(_("Is active"), default=True) class Meta: ordering = ["oid"] def __str__(self): return self.name def natural_key(self): return (self.uuid,) def get_can_apply_processes(self, user, force_fetch=False): processes = getattr(self, "__cache__can_apply_processes", None) if processes and not force_fetch: return processes return self.process_set.all() def get_report_links(self): return ProcessReportLink.objects.filter(category=self) def get_all_process(self): return self.process_set.order_by("oid") class ProcessReportLink(models.Model): uuid = models.UUIDField(unique=True, default=uuid.uuid4, editable=False) category = models.ForeignKey( ProcessCategory, blank=True, null=True, on_delete=models.SET_NULL, verbose_name="Category", ) name = models.CharField("Name", max_length=255) url = models.CharField("URL", max_length=255) open_in_new_window = models.BooleanField( "Open in new window", default=False ) perm = models.CharField( "Permission", max_length=255, blank=True, help_text="Permission to view this report", ) oid = models.IntegerField("Order", default=999) is_active = models.BooleanField("Is active", default=True) class Meta: ordering = ["oid"] def __str__(self): return self.name def get_url(self): try: url_component_list = self.url.split(" ") return reverse(url_component_list[0], args=url_component_list[1:]) except Exception: return self.url def natural_key(self): return (self.uuid,) class Process(models.Model): """ A process holds the map that describes the flow of work. 
""" code = models.CharField( "Code", max_length=100, unique=True, help_text="A unique code to identify process", ) prefix = models.CharField( "Prefix", max_length=8, default="", blank=True, help_text="prefix for process NO.", ) name = models.CharField( "Name", max_length=255, help_text="Name for this process" ) category = models.ForeignKey( ProcessCategory, blank=True, null=True, on_delete=models.SET_NULL, verbose_name="Category", ) note = models.TextField("Note", blank=True) oid = models.IntegerField("Order", default=999) is_active = models.BooleanField("Is active", default=True) ext_data = JSONField(default="{}") class Meta: verbose_name = "Process" ordering = ["oid"] permissions = (("sft_mgr_process", "workflow - Config"),) def natural_key(self): return (self.code,) def __str__(self): return self.name def get_draft_active(self): return get_or_none(Node, process=self, status="draft") def get_rejected_active(self): return get_or_none(Node, process=self, status="rejected") def get_given_up_active(self): return get_or_none(Node, process=self, status="given up") def get_rollback_transition(self, in_node, out_node): transition = Transition( name="Rollback", code="rollback", process=self, is_agree=False, can_auto_agree=False, input_node=in_node, output_node=out_node, ) return transition def get_give_up_transition(self, in_node): output = self.get_given_up_active() transition = Transition( name="Give up", code="give up", process=self, is_agree=False, can_auto_agree=False, input_node=in_node, output_node=output, ) return transition def get_back_to_transition(self, in_node, out_node=None): transition = Transition( name="Back to", code="back to", process=self, is_agree=False, can_auto_agree=False, input_node=in_node, output_node=out_node, ) return transition def get_reject_transition(self, in_node): transition = Transition( name="Reject", code="reject", process=self, is_agree=False, can_auto_agree=False, input_node=in_node, output_node=self.get_rejected_active(), ) return transition def get_add_assignee_transition(self, in_node): transition = Transition( name="Add assignee", code="add assignee", process=self, is_agree=False, can_auto_agree=False, input_node=in_node, output_node=in_node, ) return transition class Node(models.Model): """ Node is the states of an instance. 
""" STATUS_CHOICES = ( ("draft", "Draft"), ("given up", "Given up"), ("rejected", "Rejected"), ("in progress", "In Progress"), ("completed", "Completed"), ) AUDIT_PAGE_TYPE_CHOICES = ( ("view", "view"), ("edit", "Edit"), ) TYPE_CHOICES = ( ("node", "Node"), ("router", "Router"), ) uuid = models.UUIDField(unique=True, default=uuid.uuid4, editable=False) process = models.ForeignKey( "Process", on_delete=models.CASCADE, verbose_name="Process" ) name = models.CharField("Name", max_length=255) code = models.CharField("Code", max_length=255, blank=True) step = models.IntegerField( "Step", default=0, help_text="", ) status = models.CharField( "Status", max_length=16, default="in progress", choices=STATUS_CHOICES ) audit_page_type = models.CharField( "Audit page type", max_length=64, choices=AUDIT_PAGE_TYPE_CHOICES, help_text="If this node can edit, will auto goto edit mode when audit.", default="view", ) node_type = models.CharField( "Status", max_length=16, default="node", choices=TYPE_CHOICES ) can_edit = models.BooleanField("Can edit", default=False) can_reject = models.BooleanField("Can reject", default=True) can_give_up = models.BooleanField("Can give up", default=True) operators = models.TextField("Audit users", blank=True) notice_users = models.TextField("Notice users", blank=True) share_users = models.TextField("Share users", blank=True) note = models.TextField("Note", blank=True) # if not audit after xx day send a remind is_active = models.BooleanField("Is active", default=True) ext_data = JSONField(default="{}") def __str__(self): return self.name def natural_key(self): return (self.uuid,) def is_submitted(self): return self.status in ["in progress", "completed"] def get_operators(self, owner, operator, instance=None): return create_instance( settings.USER_PARSER, self.operators, instance, operator, owner ).parse() def get_notice_users(self, owner, operator, instance=None): return create_instance( settings.USER_PARSER, self.notice_users, instance, operator, owner ).parse() def get_share_users(self, owner, operator, instance=None): return create_instance( settings.USER_PARSER, self.share_users, instance, operator, owner ).parse() def get_users(self, owner, operator, instance=None): operators = self.get_operators(owner, operator, instance) notice_users = self.get_notice_users(owner, operator, instance) share_users = self.get_share_users(owner, operator, instance) return operators, notice_users, share_users class Transition(models.Model): """ A Transition connects two node: a From and a To activity. """ ROUTING_RULE_CHOICES = ( ("split", "split"), ("joint", "Joint"), ) uuid = models.UUIDField(unique=True, default=uuid.uuid4, editable=False) process = models.ForeignKey( "Process", on_delete=models.CASCADE, verbose_name="Process" ) name = models.CharField( "Name", max_length=100, default="Agree", help_text="It also the action's name, like: Agree/Submit", ) code = models.CharField( # 'back to', 'rollback' "Code", max_length=100, blank=True, ) is_agree = models.BooleanField( "Is agree", default=True, help_text="User only need agree one time in one workflow", ) can_auto_agree = models.BooleanField( "If can auto agree", default=True, help_text="If user agreed in previous steps will auto agree", ) routing_rule = models.CharField( "Routing rule", max_length=16, default="split", choices=ROUTING_RULE_CHOICES, help_text="joint: do transition after all work item finished. 
joint: do transition immediately", ) input_node = models.ForeignKey( Node, verbose_name="Input node", null=True, on_delete=models.SET_NULL, related_name="input_transitions", help_text="", ) output_node = models.ForeignKey( Node, verbose_name="Output node", null=True, on_delete=models.SET_NULL, related_name="output_transitions", help_text="", ) app = models.ForeignKey( "App", verbose_name="Application to perform", null=True, on_delete=models.SET_NULL, blank=True, ) app_param = models.CharField( max_length=100, verbose_name="Param for application", null=True, blank=True, help_text="Depend on App config", ) condition = models.TextField( "Condition", blank=True, help_text="Uses the Python syntax.ex: `o.leave_days>3`", ) # eval(t.condition, {'o': obj}) note = models.TextField("Note", blank=True) oid = models.IntegerField("Order", default=999) is_active = models.BooleanField("Is active", default=True) ext_data = JSONField(default="{}") def __str__(self): return "%s - %s" % (self.process.name, self.name) def natural_key(self): return (self.uuid,) def is_match_condition(self, wf_obj): if not self.condition.strip(): return True return safe_eval(self.condition, {"o": wf_obj}) def get_condition_descn(self): return self.condition.split("#")[-1].strip() def as_simple_agree_transition(self): self.pk = None if not self.code: self.code = "agree" def get_app_url(self, task): return self.app.get_url(task, self) class App(models.Model): """ An application is a python view that can be called by URL. """ APP_TYPE_CHOICES = (("url", "URL"),) uuid = models.UUIDField(unique=True, default=uuid.uuid4, editable=False) name = models.CharField(max_length=100, help_text="Name") app_type = models.CharField( "Type", max_length=255, choices=APP_TYPE_CHOICES, default="url" ) action = models.CharField( max_length=255, blank=True, help_text="URL: It can be url or django's url name. If it's blank will use transition's app param", ) note = models.TextField(blank=False) def __str__(self): return self.name def natural_key(self): return (self.uuid,) def get_url(self, task, transition): def render(templ_str, ctx): return Template(templ_str).render(Context(ctx)) ts_id = transition.pk or transition.code ctx = { "wi": task, "wf_code": transition.process.code, "ts": transition, "ts_id": ts_id, "in": task.instance, "o": task.instance.content_object, } url = "wf_process" if self.action: url = self.action elif transition.app_param: url = transition.app_param try: url_component_list = url.split(" ") url_param = [render(e, ctx) for e in url_component_list[1:]] url = reverse(url_component_list[0], args=url_param) except Exception: pass if "?" not in url: url = "%s?a=" % url url = "%s&ts_id={{ts_id}}&wi_id={{wi.id}}" % url return render(url, ctx)
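A minimal sketch of wiring these models into a trivial two-node flow inside a configured Django project (the 'leave' code, node names and the operators expression are illustrative; how operators is interpreted depends on settings.USER_PARSER, and the import path is assumed to be the app's models module):

from lbworkflow.models import ProcessCategory, Process, Node, Transition  # assumed import path

category = ProcessCategory.objects.create(name='HR')
process = Process.objects.create(code='leave', name='Leave request', category=category)
draft = Node.objects.create(process=process, name='Draft', status='draft')
approval = Node.objects.create(process=process, name='Manager approval', operators='[my manager]')
Transition.objects.create(process=process, name='Submit', input_node=draft, output_node=approval)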
Hi crafty peeps! I am back today to share one last Valentine's Day card in honor of the day today. I added a beautiful band and bow made from ribbon that can be found at Really Reasonable Ribbon. I used several shades of mauve and red along with Bronze shimmer watercolors from my Gansai Tambi watercolor set to create an artful background. I then die cut a heart from 40 lb vellum and heat embossed my sentiment and hearts onto it with bright white embossing powder by WOW. I then attached the vellum heart to the watercolor panel. I mounted the watercolor panel to an A2 white base that has 7/8" White Satin with Grosgrain Edge Ribbon wrapped around it. I also wrapped Washi tape from my collection around the ribbon as well. I created a double looped bow with the same 7/8" White Satin with Grosgrain Edge Ribbon and adhered that to the panel. As a final touch I added some gold glitter enamel dots and clear rhinestones. The whole thing was then foam mounted to an A2 card base. I hope you enjoy my card and have a wonderful Valentine's Day! Aww, so sweet and charming, beautiful design and fab colors. Thanks for visiting and leaving the sweet luv. I am 31 years old, and a loving wife to a wonderful husband. I am a mother to 3 crazy cats (Jasmine, Basil, and Felix). I am an avid scrapbooker, stamper, and overall craft enthusiast. I love to create smiles on loved ones' faces with my handcrafted creations. I am Standard Copic Certified! Come join our group of talented crafters every quarter for an inspiration-filled blog hop! 12 inspirational full-color tutorials delivered to your inbox monthly! I was featured on All That Scraps!
from django.conf import settings from django.db import connections from django.http import HttpResponse from rest_framework import permissions, schemas from rest_framework.permissions import AllowAny from rest_framework.renderers import JSONRenderer from rest_framework.response import Response from rest_framework.views import APIView from rest_framework_swagger.renderers import OpenAPIRenderer, SwaggerUIRenderer def handle_internal_server_error(_request): """Notify the client that an error occurred processing the request without providing any detail.""" return _handle_error(500) def handle_missing_resource_error(_request, exception=None): # pylint: disable=unused-argument """Notify the client that the requested resource could not be found.""" return _handle_error(404) def _handle_error(status_code): info = { 'status': status_code } renderer = JSONRenderer() content_type = '{media}; charset={charset}'.format(media=renderer.media_type, charset=renderer.charset) return HttpResponse(renderer.render(info), content_type=content_type, status=status_code) class SwaggerSchemaView(APIView): """ Renders the swagger schema for the documentation regardless of permissions. """ permission_classes = [AllowAny] renderer_classes = [ OpenAPIRenderer, SwaggerUIRenderer ] def get(self, _request): generator = schemas.SchemaGenerator(title='Analytics API') return Response(generator.get_schema()) class StatusView(APIView): """ Simple check to determine if the server is alive Return no data, a simple 200 OK status code is sufficient to indicate that the server is alive. This endpoint is public and does not require an authentication token to access it. """ permission_classes = (permissions.AllowAny,) def get(self, request, *args, **kwargs): # pylint: disable=unused-argument return Response({}) class AuthenticationTestView(APIView): """ Verifies that the client is authenticated Returns HTTP 200 if client is authenticated, HTTP 401 if not authenticated """ def get(self, request, *args, **kwargs): # pylint: disable=unused-argument return Response({}) class HealthView(APIView): """ A more comprehensive check to see if the system is fully operational. This endpoint is public and does not require an authentication token to access it. The returned structure contains the following fields: - overall_status: Can be either "OK" or "UNAVAILABLE". - detailed_status: More detailed information about the status of the system. - database_connection: Status of the database connection. Can be either "OK" or "UNAVAILABLE". """ permission_classes = (permissions.AllowAny,) def get(self, request, *args, **kwargs): # pylint: disable=unused-argument OK = 'OK' UNAVAILABLE = 'UNAVAILABLE' overall_status = UNAVAILABLE db_conn_status = UNAVAILABLE try: connection_name = getattr(settings, 'ANALYTICS_DATABASE', 'default') cursor = connections[connection_name].cursor() try: cursor.execute("SELECT 1") cursor.fetchone() overall_status = OK db_conn_status = OK finally: cursor.close() except Exception: # pylint: disable=broad-except pass response = { "overall_status": overall_status, "detailed_status": { 'database_connection': db_conn_status } } return Response(response, status=200 if overall_status == OK else 503)
Not enough senior- and middle-management executives in the St. Louis area possess fully up-to-date leadership skills – and without those skills, their companies won’t be able to take fullest advantage of the opportunities the surging business environment affords. Leadership USA(TM) St. Louis is a newly formed regional organization that aims to solve this dilemma by bringing nationally recognized leadership gurus to local business executives. St. Louis, MO, November 13, 2017 --(PR.com)-- Leadership USA(TM) St. Louis said today that the national economy appears ready to roar, but some St. Louis-area CEOs haven’t yet popped the champagne corks in celebration. Reason: according Leadership USA St. Louis, these CEOs fret that not enough of their senior- and middle-management executives possess fully up-to-date leadership skills – and without those skills, their companies won’t be able to take fullest advantage of the opportunities the surging business environment affords. Leadership USA St. Louis is a newly formed regional organization that aims to solve this dilemma by bringing nationally recognized leadership gurus to local business executives. “We’re dedicated to helping St. Louis companies develop strategic decision-makers who can create cutting-edge business practices,” says Kathy Cooperman, regional director of Leadership USA St. Louis. “If you are responsible for leadership development for your company and want easy access to the latest and best insights for more efficiently attracting or creating outstanding decision-makers, then Leadership USA St. Louis is the group for you,” she added. Cooperman said the group debuted last month, hosting a kickoff breakfast attended by representatives of a number of key local companies. She indicated Leadership USA St. Louis will soon begin conducting monthly full-day leadership seminars featuring notable professional presenters with deep leadership expertise. The dates and locations of these seminars will be announced in coming weeks. “The presenters we’ve already scheduled will talk on a variety of leadership topics, but they all have an incredible ability to cultivate the best in the people who’ll be participating,” Cooperman assured. In addition to the monthly seminars, Leadership USA St. Louis will offer quarterly webinars, an online learning portal, a women-in-leadership program, and other valuable benefits of membership. Leadership USA St. Louis is a region of Leadership USA(TM), a national membership-based leadership education organization offering high-quality development seminars for member companies via live, monthly events featuring world-class instructors with expertise on diverse topics. The offerings of Leadership USA St. Louis are designed to supplement member companies’ established leadership programs and provide to an outsourced leadership development resource for small-to-medium-sized companies.
import os
import numpy as np
from skimage import morphology
from plantcv.plantcv import params
from plantcv.plantcv import print_image
from plantcv.plantcv import plot_image
from plantcv.plantcv import fatal_error


def closing(gray_img, kernel=None):
    """Wrapper for scikit-image closing functions. Closing can remove small dark spots (i.e. pepper).

    Inputs:
    gray_img = input image (grayscale or binary)
    kernel   = optional neighborhood, expressed as an array of 1s and 0s. If None, use cross-shaped structuring element.

    :param gray_img: ndarray
    :param kernel: ndarray
    :return filtered_img: ndarray
    """
    params.device += 1

    # Make sure the image is binary/grayscale
    if len(np.shape(gray_img)) != 2:
        fatal_error("Input image must be grayscale or binary")

    # If image is binary use the faster method
    if len(np.unique(gray_img)) == 2:
        bool_img = morphology.binary_closing(image=gray_img, selem=kernel)
        filtered_img = np.copy(bool_img.astype(np.uint8) * 255)
    # Otherwise use method appropriate for grayscale images
    else:
        filtered_img = morphology.closing(gray_img, kernel)

    if params.debug == 'print':
        print_image(filtered_img, os.path.join(params.debug_outdir, str(params.device) + '_closing' + '.png'))
    elif params.debug == 'plot':
        plot_image(filtered_img, cmap='gray')

    return filtered_img
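A short usage sketch for the wrapper above; the toy image and cross-shaped kernel are made up, and the import path mirrors the package's own imports:

import numpy as np
from plantcv.plantcv import closing  # assumed import path for the wrapper above

# A white square with a small dark hole ("pepper") that closing should fill
binary = np.zeros((20, 20), dtype=np.uint8)
binary[5:15, 5:15] = 255
binary[9:11, 9:11] = 0

kernel = np.array([[0, 1, 0],
                   [1, 1, 1],
                   [0, 1, 0]], dtype=np.uint8)  # cross-shaped structuring element

filled = closing(binary, kernel)  # hole filled in; bright regions preserved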
The more this guy is around, the more I like him. Part of the reason I left the Catholic church was the blatant hypocrisy of the prior popes. So far, so good, Francis. Rainmaker never understands why anyone leaves the One Holy Catholic and Apostolic Church. It's like quitting the New York Yankees to go play for the Wichita Wingnuts or beer-league softball. NomSayin?
from scrapy.spider import Spider from scrapy.selector import Selector from scrapy.http import Request from admision.items import IngresanteItem class IngresantesSpider(Spider): name = "ingresantes" allowed_domains = ["admision.uni.edu.pe"] start_urls = [ "http://www.admision.uni.edu.pe/resultado_adm.php" ] def parse(self, response): base_url = response.url paginas = range(1,271) for pagina in paginas: url = '%s?pagina=%s' % (base_url, pagina) yield Request(url, callback=self.parse_page) def parse_page(self, response): sel = Selector(response) codigos = sel.xpath('/html/body/table/tr/td[2]/div/table/tr[2]/td/table/tr/td/table/tr/td/table[3]/tr[position()>1]') items = [] for codigo in codigos: item = IngresanteItem() item['codigo'] = codigo.xpath('td[2]/text()').extract() item['nombres'] = codigo.xpath('td[3]/text()').extract() item['p1'] = codigo.xpath('td[4]/text()').extract() item['p2'] = codigo.xpath('td[5]/text()').extract() item['p3'] = codigo.xpath('td[6]/text()').extract() item['acumulado'] = codigo.xpath('td[7]/text()').extract() item['vocacional'] = codigo.xpath('td[8]/text()').extract() item['cne'] = codigo.xpath('td[9]/text()').extract() item['arq'] = codigo.xpath('td[10]/text()').extract() item['final'] = codigo.xpath('td[11]/text()').extract() item['ingreso'] = codigo.xpath('td[12]/text()').extract() item['merito_modalidad'] = codigo.xpath('td[13]/text()').extract() item['modalidad_ingreso'] = codigo.xpath('td[14]/text()').extract() item['especialidad_ingreso'] = codigo.xpath('td[15]/text()').extract() item['obs'] = codigo.xpath('td[16]/text()').extract() items.append(item) return items
Applicant must be a rising senior student majoring in a health-related profession. Minimum 3.0 GPA required. Selection based upon academic honors, leadership positions, community service, and work experience during college.
#!flask/bin/python from flask import Blueprint, jsonify, abort, request, make_response from Authentication.Authentication import * from util import * api_stocks = Blueprint('api_stocks', __name__) @api_stocks.route("/stocks", methods=['GET']) @auth.login_required def get_stocks(): documentStocks = db.stocks stocks = [] for stock in documentStocks.find({}): stock.pop("_id") stocks.append(stock) return jsonify({'stocks': [make_public_stock(s) for s in stocks]}) @api_stocks.route('/stocks/<stock_id>', methods=['GET']) @auth.login_required def get_stock(stock_id): documentStocks = db.stocks stock = documentStocks.find_one({'stock':stock_id}) if not stock: abort(404) stock.pop("_id") return jsonify({'stocks':[make_public_stock(stock)]}) @api_stocks.route('/stocks', methods=['PUT']) @auth.login_required def create_or_update_stock(): if not request.json: abort(400) stocksSize = len(request.json) documentStocks = db.stocks #matchedCount = 0 #modifiedCount = 0 for stock in request.json: #result = documentStocks.replace_one({"$and":[{"market": {'$eq':stock['market']}},{"stock": {'$eq':stock['stock']}}]}, stock, True) #matchedCount = matchedCount + result.matched_count #modifiedCount = modifiedCount + result.modified_count result = documentStocks.find_and_modify(query={"$and":[{"market": stock['market']},{"stock": stock['stock']}]}, update=stock, new=True, upsert=True) #return make_response(jsonify({'stocks':[{ "stocks_inserted" : stocksSize - modifiedCount},{ "stocks_modified" : modifiedCount}]}), 200) return make_response(jsonify({'stocks':[{ "result" : "ok"}]}), 200)
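A sketch of how a client could drive the upsert endpoint above; host, port and credentials are placeholders, and the exact authentication scheme depends on the Authentication module, which is not shown here:

import requests

stocks = [
    {'market': 'NASDAQ', 'stock': 'AAPL', 'price': 171.30},
    {'market': 'NYSE', 'stock': 'IBM', 'price': 142.05},
]
resp = requests.put('http://localhost:5000/stocks', json=stocks, auth=('user', 'secret'))
print(resp.status_code, resp.json())  # 200 {'stocks': [{'result': 'ok'}]}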
Ms. Nin Desai, CEO, is an experienced fund manager and a technology geek. At Alpha Capital Partners (or Alpha), Ms. Desai was responsible for the identification and evaluation of potential investments across a range of industries. At Alpha, she successfully completed the sale of Sircon (a portfolio company), a software-as-a-service company, to Vertafore. Prior to Alpha, she was with Pacific Crest Securities (or PCS), a technology investment banking firm. Her experience spans all facets of mergers and acquisitions and corporate finance, including public offerings and private placements. Her PCS corporate finance transactions include RACK, LOOP, LQDT, DBTK, AMIS, SLRY, VOCS, OWW and others. Her M&A deal sheet includes the sale of Financial Profiles to EISI, Buyseasons to Liberty Media, and others. She started her professional career at Merrill Lynch. She is a Microsoft Certified Systems Engineer (MCSE), has a technical diploma in e-commerce from IBM, holds Series 7 and 63 licenses from NASD, and holds a B.B.A. and an M.B.A. in Finance/International Business from Loyola University of Chicago. She recently attended a leadership program in Private Equity and Venture Capital at Harvard Business School. Ms. Desai also chairs the Harvard Business School Private Equity and Venture Capital Group of Chicago and is a member of the Women's Association of Venture and Equity (WAVE). She also sits on the Illinois Venture Capital Association (IVCA) Legislative/IVCA PAC Board and Events Committee. She enjoys golf and piloting light aircraft in her spare time.
# Calliope # Copyright (C) 2018 Sam Thursfield <[email protected]> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import logging import sys import calliope log = logging.getLogger(__name__) def diff(playlist1, playlist2): items1 = {item.id(): item for item in playlist1} items2 = {item.id(): item for item in playlist2} ids1 = set(items1.keys()) ids2 = set(items2.keys()) diff = sorted(ids1.difference(ids2)) diff_items = [items1[i] for i in diff] return diff_items
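A minimal sketch of calling diff() above; the Item class is only a stand-in for calliope's playlist items, which expose an id() method, and the track data is invented:

class Item(dict):
    # stand-in for a calliope playlist item
    def id(self):
        return '{}.{}'.format(self.get('creator'), self.get('title'))

a = [Item(creator='Low', title='Monkey'), Item(creator='Can', title='Vitamin C')]
b = [Item(creator='Can', title='Vitamin C')]
print(diff(a, b))  # -> the Low track, present in the first playlist but not the second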
At VaporFi, we love menthol-flavored e-liquids. They offer a taste so cool, it’s like you’re breathing ice. One hit of these vape juices has your mouth feeling minty fresh right away. These products are clean, just like menthol is supposed to be, and at VaporFi, we offer a wide variety of menthol e-juice to choose from. These e-liquids are crisp and loaded with flavor. You can enjoy them as they are or have them blended to your own personal specifications. Menthol cigarette smokers will particularly enjoy our menthol juice, as it reminds them of their favorite smokes! From just slightly minty to 'pure menthol', these vape juices give users a plethora of options.
# Add the upper directory (where the nodebox module is) to the search path. import os, sys; sys.path.insert(0, os.path.join("..","..")) from nodebox.graphics import * # Blend modes are used to combine the pixels of two images, # in different ways than standard transparency. # NodeBox supports the most common blend modes as filters: # add(), subtract(), darken(), lighten(), multiply(), screen(), overlay(), hue(). # These can be used to adjust the lighting in an image # (by blending it with a copy of itself), # or to obtain many creative texturing effects. img1 = Image("creature.png") img2 = Image("creature.png") def draw(canvas): canvas.clear() # Press the mouse to compare the blend to normal ("source over") mode: if not canvas.mouse.pressed: image( # Try changing this to another blend filter: multiply(img1, img2, # All blend modes (and mask()) have optional dx and dy parameters # that define the offset of the blend layer. dx = canvas.mouse.x - img1.width/2, dy = canvas.mouse.y - img1.height/2)) else: image(img1) image(img2, x = canvas.mouse.x - img1.width/2, y = canvas.mouse.y - img1.height/2) # Start the application: canvas.fps = 30 canvas.size = 500, 500 canvas.run(draw)
Water divining is an ancient and often mysterious method used for thousands of years to locate water underground. For eons, gifted 'diviners' have been used to assist land owners more effectively maximise their property's water resources. Click here to read more about this fascinating topic by 'diviner' David Byrne. You can also read articles about David by going to the Media Page. As the driest continent on the planet, much of Australia relies on bore water. However, all bore water situations are unique to their environment and its source. In a natural environment the earth is the best filter of all for water, yet it cannot filter out contaminants from a highly contaminated environment, whether those contaminants are caused by natural conditions or man made causes. Depending on the environmental differences, bore water qualities can vary dramatically from one bore to the next, so attempting to offer bore water as a blanket national or indeed, a universal sustainable water solution would be folly. Aquifers or aquifer flow channels (underground water channels or streams) are what a bore taps into. The flow of these aquifers can change or stop altogether particularly in drying climatic conditions. Some of these flow lines have multi-year cycles that can be as long as 50/100 years. Many Australian communities have recently become very concerned as their water supplies are drying up from such sources. . Also, many bores have been sunk that have not returned water, whilst others have returned water that is not drinkable and some that did return water dried up at a later time. While some bore water can be of very high quality much of it is not, and some of it that is consumed as town water in Australia is 'sick water'. For example, most bore water I have come across whilst travelling in Central Australia I do not regard as drinkable. Yet many Australians use Bore Water as their only source of water, while some combine Rain Water for human consumption and Bore Water for everything else. It is quite possible to change unhealthy sick water into valuable high energy and healing water. However this cannot be done as is presently the case, by the addition of all kinds of harmful chemicals that 'authorities' add to water for so-called 'public health'. Water has memory and its memory can be changed by what we do to and with it. As the work of Dr. Masaru Emoto has demonstrated, it can be transformed from something stale and sick into water that has high vibrational resonance. Esoteric knowledge and science is beginning to emerge that show us numerous ways in which to do this. Water can also be used in conjunction with our intent to create self-healing from serious disease processes. Here as Eslin we have begun to explore WATER at the metaphysical level and hope to have more to offer on this subject in due course. This vast underground aquifer which covers much of northern and central Australia might be likened to an underground sea. It may contain fresh water that may have fallen as rain from a time long before the dinosaurs. Access to this water is usually at a much deeper level than that from which most bore water is extracted. The artesian basin, which was once enormous, has lost some of its water through decades of misuse. There has been concern for many years now that continued removal of water from the basin could have a catastrophic effect in the form of massive land subsidence to fill the void where once there was water. Additionally, the pressure of this water has also been reducing. 
It may also be that this vast multi-million year old water has become contaminated from mining activities, I would have concern in particular with the uranium mining that loads its acid uranium contaminated wastes back down the extraction bores in places where the crust is thin and the GAB water close to it. I have not seen any advocacy from government or the science community to use this water in dramatic drought conditions; one might wonder why. As we're still building this site, please come back often for more FAQs. In fact, if YOU have a question regarding sustainable living, we'd love to hear from you.
import csv import os import copy import json from decimal import Decimal from scrapy.spider import BaseSpider from scrapy.selector import HtmlXPathSelector from scrapy.http import Request, HtmlResponse, FormRequest from scrapy.utils.response import get_base_url from scrapy.utils.url import urljoin_rfc from scrapy.http.cookies import CookieJar from pricecheck import valid_price from product_spiders.items import Product, ProductLoaderWithNameStrip as ProductLoader HERE = os.path.abspath(os.path.dirname(__file__)) KEYS = ('AIzaSyByntiQdJrTyFw78jaVS6-IdMqMuISqX5Y', 'AIzaSyBK8RtRt-v1JHYhbPszQDv2LlAbIZHuyMo', 'AIzaSyDbmM13l-e_f7bpJH3D6bynBhedKfwszYo') FILTER_DOMAINS = ('ldmountaincentre', 'ebay') class GoogleSpider(BaseSpider): name = 'ldmountaincentre-googleapis.com' allowed_domains = ['googleapis.com'] def start_requests(self): with open(os.path.join(HERE, 'product_skus.csv')) as f: reader = csv.DictReader(f) for i, row in enumerate(reader): sku = row['sku'] query = (row['name']).replace(' ', '+') url = 'https://www.googleapis.com/shopping/search/v1/public/products' + \ '?key=%s&country=GB&' + \ 'q=%s&restrictBy=condition=new' yield Request(url % (KEYS[i % len(KEYS)], query), meta={'sku': sku, 'price': row['price'].replace('$', '')}) def _get_item(self, data, i, response): if i >= len(data.get('items', [])): return item = data['items'][i] pr = Product() pr['name'] = (item['product']['title'] + ' ' + item.get('product', {}).get('author', {}).get('name', '')).strip() pr['url'] = item['product']['link'] pr['price'] = Decimal(str(data['items'][i]['product']['inventories'][0]['price'])) pr['sku'] = response.meta['sku'] pr['identifier'] = response.meta['sku'] return pr, item def parse(self, response): data = json.loads(response.body) i = 0 lowest = None while True: res = self._get_item(data, i, response) if not res: break pr = res[0] item = res[1] invalid_domain = any([self._check_domain(domain, pr['url']) for domain in FILTER_DOMAINS]) if invalid_domain: i += 1 else: if valid_price(response.meta['price'], pr['price']) and \ (lowest is None or lowest['price'] > pr['price']): lowest = pr i += 1 if lowest: yield lowest def _check_domain(self, domain, url): if domain in url: return True
We found James to be honest and reliable and an excellent communicator. He was proactive and took the time to understand our requirements (as buyers) and worked hard to locate suitable properties. It was a pleasure working with him and we would have no hesitation in recommending him to buyers or sellers. A Real Estate you can TRUST! Once again James and the Sails team have shown savvy and enthusiasm. Sails are a hard working team, they provide 'open' house opportunities which really work. I highly recommend them for their competitive edge and professional services. Property sold in 4 days. I found James to be extremely professional. I would recommend James on any property dealings.
#! /usr/pkg/bin/python #-*- coding: utf-8 -*- import requests from bs4 import BeautifulSoup import os import re class Tutsplus: login_url= 'https://tutsplus.com/sign_in' login_post = 'https://tutsplus.com/sessions' home_url = 'https://tutsplus.com' def __init__(self, username, password): self.username = username self.password = password self.login() # Return the html source for a specified url def get_source(self, url): r = self.s.get(url) return r.content # It logs in and store the session for the future requests def login(self): self.s = requests.session() soup = BeautifulSoup(self.get_source(self.login_url)) self.token = soup.find(attrs={"name":"csrf-token"})['content'] data = { "session[login]": self.username, "session[password]": self.password, "authenticity_token": self.token, "utf8": "✓" } self.s.post(self.login_post, data = data) return True # remove special characters for windows users def sanitize_filename(self, name): if os.name == "nt": return re.sub('[<>:"/\\|?*]+', '', name) else: return name.replace('/','-') # Download all video from a course url def download_course(self, url): # Variable needed to increment the video number video_number = 1 # get source source = self.get_source(url) # update csrf token for each course soup = BeautifulSoup(source) self.token = soup.find(attrs={"name":"csrf-token"})['content'] # the course's name course_title = self.sanitize_filename(soup.select('h1')[0].string.encode("utf-8")) print "######### " + course_title + " #########" if not os.path.exists(course_title) : os.makedirs(course_title) # store course page with open(course_title + '/course.html', 'w') as fid: fid.write(source) # if the course includes sourcefiles download them first sourcefile = soup.select('.course-actions__download-button') if sourcefile: print "[+] Downloading source files" filename = course_title + '/sources.zip' link = sourcefile[0]['href'] self.download_file(link, filename) # array who stores the information about a course course_info = self.get_info_from_course(soup) for video in course_info: print "[+] Downloading " + video['titolo'].encode("utf-8") filename = course_title + '/[' + str(video_number).zfill(2) + '] ' + self.sanitize_filename(video['titolo']) + '.mp4' self.download_video(video['link'], filename) video_number = video_number + 1 def download_courses(self, courses): for course in courses: self.download_course(course) def download_video(self, url, filename): # the trick for video links is not to follow the redirect, but to fetch the download link manually # otherwise we'll get an SignatureDoesNotMatch error from S3 data = { "authenticity_token": self.token, "_method": 'post' } soup = BeautifulSoup(self.s.post(url, data = data, allow_redirects=False).content) url = soup.find_all('a')[0]['href'] self.download_file(url, filename) # Function who downloads the file itself def download_file(self, url, filename): r = self.s.get(url, stream=True) if not os.path.isfile(filename) : with open(filename, 'wb') as f: for chunk in r.iter_content(chunk_size=1024): if chunk: # filter out keep-alive new chunks f.write(chunk) f.flush() # return an array with all the information about a video (title, url) def get_info_from_course(self, soup): arr = [] videos = soup.select('.lesson-index__lesson') for video in videos: titolo = video.select('.lesson-index__lesson-title')[0].string link = video.select('a')[0]['href'] info = { "titolo": titolo, "link": link, } arr.append(info) return arr
Desirable Community with many luxurious homes has 4 huge lots left to build. Lots range in size from .5 acres to 1.5 acres. Well priced and possible to purchase all 4. Beautiful area and Great Community! All homes must be stick built. Contact agent for building restrictions.
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2009 Zuza Software Foundation # Copyright 2013-2014 Evernote Corporation # # This file is part of Pootle. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, see <http://www.gnu.org/licenses/>. from django import template from django.contrib.auth import get_user_model from pootle_app.models.directory import Directory from pootle_app.models.permissions import check_user_permission from pootle_notifications.models import Notice register = template.Library() @register.inclusion_tag('notifications/_latest.html', takes_context=True) def render_latest_news(context, path, num): try: directory = Directory.objects.get(pootle_path='/%s' % path) user = context['user'] User = get_user_model() can_view = check_user_permission(User.get(user), "view", directory) if not can_view: directory = None except Directory.DoesNotExist: directory = None if directory is None: return {'news_items': None} news_items = Notice.objects.filter(directory=directory)[:num] return {'news_items': news_items}
5 Most Kawaii Instagram Spots in Harajuku! | YumeTwins: The Monthly Kawaii Subscription Box Straight from Tokyo to Your Door! Harajuku is inarguably one of the most kawaii districts in Tokyo! Iconic street fashion, millions of crepe shops, Asian makeup stores, and adorable themed cafes make this area the symbolic district that it is today. With so many places to visit within Harajuku, it might be difficult to decide which photo spots will be worthy of making it onto your Instagram page! We have previously created a list of the 10 most Instagram-worthy spots in Tokyo, so here is a list of the 5 most KAWAII Instagram spots in Harajuku! You have probably already seen this all over Instagram! You haven’t been to Harajuku if you haven’t gotten the massive rainbow cotton candy from Totti Candy Factory. Right in the heart of the famous shopping street Takeshita dori, you will see dozens of tourists and locals alike taking pictures with their colorful, oversized desserts! Style Nanda is a fashion and beauty store originally from South Korea. They opened a store on Takeshita dori and it is well known for its all-pink and girly interior. On some of the floors they have photo sets perfect for sprucing up your Instagram page! Moshi Moshi Box is a massive, colorful clock designed by Sebastian Masuda that captures J-pop culture in a bold and fun way. Embodying all that is kawaii, this makes for the perfect photo background! This clothing shop is known for its cute pastel theme! If you go upstairs, you will find their recycled clothing shop that is decorated to the max. With vintage toys flooding the small shop, it makes for the most magical and nostalgic atmosphere. There are 2 charming photo sets towards the front and back end of the shop! Last but not least, we had to include at least one themed cafe in the list! The Pom Pom Purin Cafe is undeniably kawaii because, hello? The entire cafe revolves around Pom Pom Purin! After a long day of shopping and capturing Insta-worthy shots around Harajuku, this cafe is the perfect last stop to enjoy adorable food and snap some last pics! Where do you want to take some pics? Let us know in the comments!
# -*- coding: utf-8 -*- # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import copy import logbook import logbook.more import os import re import subprocess import sys import yaml REGEXP_YAML_FILE = '.*\.(yaml|yml)$' REGEXP_INVALID_FILE_NAME_CHARS = '[^-_.A-Za-z0-9]' MAX_RECURSION_DEPTH = 30 def figure_out_log_level(given_level): if isinstance(given_level, str): return logbook.lookup_level(given_level.strip().upper()) else: return given_level def verbosity_level_to_log_level(verbosity): if int(verbosity) == 0: return 'warning' elif int(verbosity) == 1: return 'info' return 'debug' def init_logging_stderr(log_level='notset', bubble=False): handler = logbook.more.ColorizedStderrHandler(level=figure_out_log_level(log_level), bubble=bubble) handler.format_string = '{record.time:%Y-%m-%dT%H:%M:%S.%f} ' \ '{record.level_name} {record.channel}: {record.message}' handler.push_application() def get_logger(logger_name="magine-services"): return logbook.Logger(logger_name) LOG = get_logger() def read_yaml(file_path, out=sys.stdout): try: return yaml.load(open(file_path).read()) except FileNotFoundError: raise FileNotFoundError("Oops! That was no file in {file_path}.".format(**locals())) except yaml.scanner.ScannerError: raise yaml.scanner.ScannerError("Oops! File {file_path} is not a valid yaml.".format(**locals())) def call_shell(work_dir, shell_cmd, print_output=True): output_lines = [] LOG.info("Calling shell in dir '{}':\n{}", work_dir, shell_cmd) proc = subprocess.Popen(shell_cmd, shell=True, cwd=work_dir, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) # Poll process for new output until finished while True: next_line = proc.stdout.readline().decode('utf-8') if next_line == '' and proc.poll() is not None: break output_lines.append(next_line) if print_output: sys.stdout.write(next_line) sys.stdout.flush() if proc.returncode != 0: LOG.warn("Running shell failed with return code: {}", str(proc.returncode)) return proc.returncode, output_lines def read_and_combine_yamls_in_dir(the_dir): LOG.debug("Loading variables in YAML files from directory: {}", the_dir) all_vars = {} if os.path.isdir(the_dir): for file_path in files_in_dir(the_dir, REGEXP_YAML_FILE): all_vars.update(read_yaml(file_path)) else: LOG.info("Directory does not exist: {}", the_dir) return all_vars def files_in_dir(the_dir, filter_regexp=None): for file_name in sorted(os.listdir(the_dir)): if filter_regexp is None or re.match(filter_regexp, file_name): file_path = os.path.join(the_dir, file_name) yield file_path def list_files_not_seen(source_dir, seen_file_names): file_paths = [] if os.path.isdir(source_dir): for x in os.listdir(source_dir): x_path = os.path.join(source_dir, x) if os.path.isfile(x_path) and x not in seen_file_names: seen_file_names.add(x) file_paths.append(x_path) return file_paths class RecursionError(StandardError): pass def recursive_replace_vars(all_vars, require_all_replaced=True, comment_begin='#', template_prefix='${{', template_suffix='}}'): result = copy.deepcopy(all_vars) for key in all_vars.keys(): try: result[key] = 
substitute_vars_until_done( str(result[key]), all_vars, require_all_replaced, comment_begin, template_prefix, template_suffix) except RecursionError as err: LOG.error("Failed substituting key '{}'. {}", key, err) raise err return result def substitute_vars_until_done(data, all_vars, require_all_replaced, comment_begin, template_prefix, template_suffix): iterations = 0 has_changed = True while has_changed: iterations += 1 data, has_changed = substitute_vars(data, all_vars, require_all_replaced, comment_begin, template_prefix, template_suffix) if iterations > MAX_RECURSION_DEPTH: raise RecursionError("Too many iterations replacing template variables. Check your " "variables for reference loops, or increase max recursion depth.") return data def substitute_vars(data, all_vars, require_all_replaced, comment_begin, template_prefix, template_suffix): """Just simple string template substitution, like Python string templates etc. Provides also line numbers for missing variables so they can be highlighted. """ output = [] missing_vars_with_lines = [] replaced_variables = [] has_changed = False line_num = 0 for line in data.split('\n'): line_num += 1 if not comment_begin or not line.strip().startswith(comment_begin): i, j = 0, -1 while 0 <= i < len(line): i = tag_begin = line.find(template_prefix, i) if tag_begin >= 0: i = tag_begin + len(template_prefix) j = line.find(template_suffix, i) if j > i: var_name = line[i:j].strip() i = j + len(template_suffix) if var_name not in all_vars: if require_all_replaced: missing_vars_with_lines.append((line_num, var_name)) else: var_value = all_vars.get(var_name) replaced_variables.append(var_name) line = line[0:tag_begin] + str(var_value) + line[i:] has_changed = True output.append(line) if replaced_variables: LOG.debug("Variables substituted: {}", replaced_variables) if missing_vars_with_lines: raise KeyError("Cannot replace key(s) in template (line, key_name): {}" .format(missing_vars_with_lines)) return '\n'.join(output), has_changed def parse_filename_var(file_name, all_vars, template_prefix='___', template_suffix='___'): while template_prefix in file_name: LOG.debug("Parsing string template variable in file name: {}", file_name) i = file_name.find(template_prefix) j = file_name.find(template_suffix, i + len(template_prefix)) if j > i: filename_var = file_name[i + len(template_prefix):j] if filename_var not in all_vars: raise ValueError("Invalid file name variable '{}' in file name: {}".format( filename_var, file_name)) substitute = all_vars[filename_var] if re.search(REGEXP_INVALID_FILE_NAME_CHARS, substitute): raise ValueError("Invalid file name substitute (var {}): {}" .format(filename_var, substitute)) file_name = file_name[:i] + substitute + file_name[j + len(template_suffix):] LOG.debug("File name after parsing: {}", file_name) else: LOG.info("Did not find file name template suffix for parsing: {}", file_name) break return file_name
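The helpers above resolve ${{var}} placeholders recursively against a variables dictionary and expand ___var___ markers embedded in file names. One caveat: the RecursionError class above derives from StandardError, which only exists in Python 2; on Python 3 it would need to derive from Exception instead. A minimal usage sketch under that assumption, with the module name (config_utils) and all variable names invented for illustration:

# Hedged example: assumes the code above is saved as config_utils.py and runs on Python 3
# with RecursionError deriving from Exception; 'region', 'bucket' and 'path' are made-up keys.
from config_utils import recursive_replace_vars, parse_filename_var

all_vars = {
    'region': 'eu-west-1',              # plain value
    'bucket': 'backups-${{region}}',    # references another variable
    'path': '${{bucket}}/daily',        # resolved over two substitution passes
}

resolved = recursive_replace_vars(all_vars)
print(resolved['path'])                 # backups-eu-west-1/daily

file_name = parse_filename_var('config-___region___.yaml', resolved)
print(file_name)                        # config-eu-west-1.yaml

Missing variables raise a KeyError (when require_all_replaced is left True), and reference loops are cut off with a RecursionError after MAX_RECURSION_DEPTH substitution passes.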
A global IT services and solutions company with 15,000 customers worldwide. We help companies around the world put technology at the forefront of business transformation, improving the delivery and consumption of digital services. Sydney-based role, with internal customers across APAC. As part of a small team spread across different time zones, your attitude, enthusiasm and attention to detail will be key attributes in supporting a busy sales support team. The role is responsible for contracts administration, including generation and distribution of pre-contract documentation, collection of information to facilitate contract approval, and monitoring progress through the signing process. An important part of this role will be on-call work every quarter to complete a complex order process; time can be taken off in lieu. If you are looking for the next step in your career and wish to join a global company with endless opportunity, please apply now. Candidates with a second language would be highly regarded; English written and verbal skills must be excellent.
import unittest from graphserver.core import * from graphserver import util import pickle class TestTimezonePeriod(unittest.TestCase): def test_basic(self): tzp = TimezonePeriod(0, 100, -10) assert tzp assert tzp.begin_time == 0 assert tzp.end_time == 100 assert tzp.utc_offset == -10 def test_dict(self): tzp = TimezonePeriod(3, 7, -11) assert tzp.__getstate__() == (3, 7, -11) ss = pickle.dumps( tzp ) laz = pickle.loads( ss ) assert laz.begin_time == 3 assert laz.end_time == 7 assert laz.utc_offset == -11 def test_time_since_midnight(self): tzp = TimezonePeriod(0, 24*3600*256, -8*3600) assert tzp.time_since_midnight( 8*3600 ) == 0 summer_tzp = TimezonePeriod( util.TimeHelpers.localtime_to_unix( 2008,6,1,0,0,0, "America/Los_Angeles" ), util.TimeHelpers.localtime_to_unix( 2008,9,1,0,0,0, "America/Los_Angeles" ), -7*3600 ) assert summer_tzp.time_since_midnight( util.TimeHelpers.localtime_to_unix( 2008, 7,1,0,0,0,"America/Los_Angeles" ) ) == 0 assert summer_tzp.time_since_midnight( util.TimeHelpers.localtime_to_unix( 2008, 7, 2, 2, 0, 0, "America/Los_Angeles" ) ) == 3600*2 winter_tzp = TimezonePeriod( util.TimeHelpers.localtime_to_unix( 2008,1,1,0,0,0, "America/Los_Angeles" ), util.TimeHelpers.localtime_to_unix( 2008,4,1,0,0,0, "America/Los_Angeles" ), -8*3600 ) assert winter_tzp.time_since_midnight( util.TimeHelpers.localtime_to_unix( 2008, 2,1,0,0,0,"America/Los_Angeles" ) ) == 0 assert winter_tzp.time_since_midnight( util.TimeHelpers.localtime_to_unix( 2008, 2, 2, 2, 0, 0, "America/Los_Angeles" ) ) == 3600*2 if __name__ == '__main__': tl = unittest.TestLoader() suite = tl.loadTestsFromTestCase(TestTimezonePeriod) unittest.TextTestRunner(verbosity=2).run(suite)
Sovetsky Sport is a daily newspaper first published by the USSR State Committee for Physical Culture and Sport in 1924, and now published by the Russian Olympic Committee. In its original incarnation, the paper provided coverage of international sporting events, interviews with athletes, and other articles which advocated for the type of healthy lifestyle that was highly prized by Soviet ideology. At its height, just before the end of the Soviet Union, the newspaper had a circulation of five million and was distributed in over 100 countries. The Dickinson Archives has a complete run of the paper from January 1, 1954 to December 29, 1955. There were 156 editions published each year, spanning from no. 2269 to no. 2580. Soviet Union: Komitet po fizicheskoi kul’ture i sportu, 1954-55.
#!/usr/bin/env python # Copyright (c) 2015, Harrison Erd # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # Neither the name of the Harrison Erd nor the names of its contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " # AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS # BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF # THE POSSIBILITY OF SUCH DAMAGE. import os import simplejson def load(location, option): '''Return a pickledb object. location is the path to the json file.''' return pickledb(location, option) class pickledb(object): def __init__(self, location, option): '''Creates a database object and loads the data from the location path. 
If the file does not exist it will be created on the first update.''' self.load(location, option) def load(self, location, option): '''Loads, reloads or changes the path to the db file.''' location = os.path.expanduser(location) self.loco = location self.fsave = option if os.path.exists(location): self._loaddb() else: self.db = {} return True def dump(self): '''Force dump memory db to file.''' self._dumpdb(True) return True def set(self, key, value): '''Set the (string,int,whatever) value of a key''' self.db[key] = value self._dumpdb(self.fsave) return True def get(self, key): '''Get the value of a key''' try: return self.db[key] except KeyError: return None def getall(self): '''Return a list of all keys in db''' return self.db.keys() def rem(self, key): '''Delete a key''' del self.db[key] self._dumpdb(self.fsave) return True def lcreate(self, name): '''Create a list''' self.db[name] = [] self._dumpdb(self.fsave) return True def ladd(self, name, value): '''Add a value to a list''' self.db[name].append(value) self._dumpdb(self.fsave) return True def lextend(self, name, seq): '''Extend a list with a sequence''' self.db[name].extend(seq) self._dumpdb(self.fsave) return True def lgetall(self, name): '''Return all values in a list''' return self.db[name] def lget(self, name, pos): '''Return one value in a list''' return self.db[name][pos] def lrem(self, name): '''Remove a list and all of its values''' number = len(self.db[name]) del self.db[name] self._dumpdb(self.fsave) return number def lpop(self, name, pos): '''Remove one value in a list''' value = self.db[name][pos] del self.db[name][pos] self._dumpdb(self.fsave) return value def llen(self, name): '''Returns the length of the list''' return len(self.db[name]) def append(self, key, more): '''Add more to a key's value''' tmp = self.db[key] self.db[key] = ('%s%s' % (tmp, more)) self._dumpdb(self.fsave) return True def lappend(self, name, pos, more): '''Add more to a value in a list''' tmp = self.db[name][pos] self.db[name][pos] = ('%s%s' % (tmp, more)) self._dumpdb(self.fsave) return True def dcreate(self, name): '''Create a dict''' self.db[name] = {} self._dumpdb(self.fsave) return True def dadd(self, name, pair): '''Add a key-value pair to a dict, "pair" is a tuple''' self.db[name][pair[0]] = pair[1] self._dumpdb(self.fsave) return True def dget(self, name, key): '''Return the value for a key in a dict''' return self.db[name][key] def dgetall(self, name): '''Return all key-value pairs from a dict''' return self.db[name] def drem(self, name): '''Remove a dict and all of its pairs''' del self.db[name] self._dumpdb(self.fsave) return True def dpop(self, name, key): '''Remove one key-value pair in a dict''' value = self.db[name][key] del self.db[name][key] self._dumpdb(self.fsave) return value def dkeys(self, name): '''Return all the keys for a dict''' return self.db[name].keys() def dvals(self, name): '''Return all the values for a dict''' return self.db[name].values() def dexists(self, name, key): '''Determine if a key exists or not''' if self.db[name][key] is not None: return 1 else: return 0 def deldb(self): '''Delete everything from the database''' self.db= {} self._dumpdb(self.fsave) return True def _loaddb(self): '''Load or reload the json info from the file''' self.db = simplejson.load(open(self.loco, 'rb')) def _dumpdb(self, forced): '''Write/save the json dump into the file''' if forced: simplejson.dump(self.db, open(self.loco, 'wt'))
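A short usage sketch of the key/value, list, and dict helpers defined above; the database file name and the stored keys are invented for illustration, and the simplejson package must be installed. Note that dexists above raises a KeyError, rather than returning 0, when the key is missing from the dict.

# Hedged example: assumes the code above is saved as pickledb.py; 'example.db' is an arbitrary path.
import pickledb

db = pickledb.load('example.db', True)   # True: write the file on every update

db.set('greeting', 'hello')
print(db.get('greeting'))                # hello

db.lcreate('fruits')
db.ladd('fruits', 'apple')
db.ladd('fruits', 'banana')
print(db.lgetall('fruits'))              # ['apple', 'banana']

db.dcreate('scores')
db.dadd('scores', ('alice', 10))         # pair is a (key, value) tuple
print(db.dget('scores', 'alice'))        # 10

db.rem('greeting')
db.dump()                                # force a final write to disk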
Marie MAINGUET, daughter of Jean MAINGUET and Suzanne Anne CHAT, was born between 1751 and 1753. She married François SOULAR in 1775. He was born between 1705 and 1761. Pierre MAINGUET, cardeur de laine, son of Jean MAINGUET and Suzanne Anne CHAT, was born on December 29, 1755 in , Nantillé, 17256, Charente Maritime, Poitou-Charentes, France and died on January 26, 1822 in Vill du Pin, Nantillé, 17256, Charente Maritime, Poitou-Charentes, France at the age of 66 years. He married Marie GAUTIER, daughter of Jacques Jean GAUTIER and Marie JONCHÈRE, on November 24, 1776 in , Nantillé, 17256, Charente Maritime, Poitou-Charentes, France. She was born between 1744 and 1761 and died on April 8, 1824 in Vill le Pain, Nantillé, 17256, Charente Maritime, Poitou-Charentes, France. Jeanne MAINGUET, daughter of Pierre MAINGUET and Marie GAUTIER, was born in 1779 and died in 1839 at the age of 60 years. She married X X. He was born between 1749 and 1784. Jeanne MAINGUET, daughter of Pierre MAINGUET and Marie GAUTIER, was born on November 29, 1779 in Vill le Pain, Nantillé, 17256, Charente Maritime, Poitou-Charentes, France. She married Lorent GUINGUENAUD, son of Jean GUINGUENAUD and Marie GERFAU, on December 28, 1822 in , Nantillé, 17256, Charente Maritime, Poitou-Charentes, France. He was born on February 6, 1767 in , Nantillé, 17256, Charente Maritime, Poitou-Charentes, France. Pierre MAINGUET, cardeur, son of Pierre MAINGUET and Marie GAUTIER, was born on December 1, 1783 in , Nantillé, 17256, Charente Maritime, Poitou-Charentes, France. He married Marie Anne JACQUES, daughter of Jean JACQUE and Marguerite MERZEAU, on January 19, 1814 in , Bercloux, 17042, Charente Maritime, Poitou-Charentes, France. She was born about August 20, 1791 in , Bercloux, 17042, Charente Maritime, Poitou-Charentes, France and died before October 15, 1843 in , Nantillé, 17256, Charente Maritime, Poitou-Charentes, France. Pierre MAINGUET, cardeur de laine, son of Pierre MAINGUET and Marie GAUTIER, was born on July 8, 1788 in Vill du Pin, Nantillé, 17256, Charente Maritime, Poitou-Charentes, France and died in Vill du Pin, , , , ,. He married Marguerite EMIER, daughter of Jean EMIER and Marie GILLARDEAU, on July 23, 1849 in , Bercloux, 17042, Charente Maritime, Poitou-Charentes, France. She was born on July 29, 1794 in Vill Chez Drouillard, Écoyeux, 17147, Charente Maritime, Poitou-Charentes, France. Marie BUREAU, daughter of Pierre BUREAU and Marie-Anne POURTEAU, was born in 1820 in Écoyeux, 17147, Charente Maritime, Poitou-Charentes, France. She married Jean GODIN on November 4, 1840 in Écoyeux, 17147, Charente Maritime, Poitou-Charentes, France. Jacques POURTEAU, vigneron, son of François POURTEAU and Marie TUFFRAUD, was born on September 11, 1805 in , Dompierre-sur-Charente, 17141, Charente Maritime, Poitou-Charentes, France and died on June 6, 1870 in , Saint-Césaire, 17314, Charente Maritime, Poitou-Charentes, France at the age of 64 years. He married Marie DOMITILE on June 18, 1833 in , Saint-Sauvant, 17395, Charente Maritime, Poitou-Charentes, France. She was born in 1815 in , Saint-Sauvant, 17395, Charente Maritime, Poitou-Charentes, France. Jean POURTEAU, son of François POURTEAU and Marie TUFFRAUD, was born on May 19, 1811 in , Saint-Sauvant, 17395, Charente Maritime, Poitou-Charentes, France. He married Jeanne MERLET on June 30, 1834 in , Saint-Sauvant, 17395, Charente Maritime, Poitou-Charentes, France. She was born on May 22, 1809 in , La Chapelle-des-Pots, 17089, Charente Maritime, Poitou-Charentes, France. 
Marie MAINGUET, daughter of Pierre MAINGUET and Marie Anne JACQUES, was born on January 28, 1816 in , Nantillé, 17256, Charente Maritime, Poitou-Charentes, France. She married Louis DAMON, son of Jean DAMON and Marie SAUVION, on February 25, 1844 in , Nantillé, 17256, Charente Maritime, Poitou-Charentes, France. He was born on November 24, 1816 in , Nantillé, 17256, Charente Maritime, Poitou-Charentes, France. Catherine MAINGUET, daughter of Pierre MAINGUET and Marie Anne JACQUES, was born on May 1, 1818 in , Nantillé, 17256, Charente Maritime, Poitou-Charentes, France. She married Jacques NEXIRON, son of Jean NEXIRON and Catherine GARNAUD, on June 13, 1837 in , Nantillé, 17256, Charente Maritime, Poitou-Charentes, France. He was born on December 5, 1804 in Vill au Treuil, Blanzac-lès-Matha, 17048, Charente Maritime, Poitou-Charentes, France. Pierre MAINGUET, cultivateur ; cardeur, son of Pierre MAINGUET and Marie Anne JACQUES, was born on April 6, 1821 in Village le Pin, Nantillé, 17256, Charente Maritime, Poitou-Charentes, France. He married Marguerite Thérèse GIRAUD, daughter of Jean GIRAUD and Marie Marguerite EMIER, on December 22, 1848 in , Bercloux, 17042, Charente Maritime, Poitou-Charentes, France. She was born on October 22, 1819 in Chez Drouillard, Bercloux, 17042, Charente Maritime, Poitou-Charentes, France. Victor GODIN, son of Jean GODIN and Marie BUREAU, was born in 1843 and died in 1926 at the age of 83 years. He married Marie DORSON on August 1, 1877 in Authon-Ébéon, 17026, Charente Maritime, Poitou-Charentes, France. Marie POURTEAU, daughter of Jacques POURTEAU and Marie DOMITILE, was born in 1837. She married Pierre BOURBON on November 6, 1860 in , Saint-Césaire, 17314, Charente Maritime, Poitou-Charentes, France. He was born between 1807 and 1845. Joseph POURTEAU, son of Jacques POURTEAU and Marie DOMITILE, was born in 1841 in , Saint-Sauvant, 17395, Charente Maritime, Poitou-Charentes, France. He married Elisa CHANCELIER on April 29, 1867 in , Saint-Sauvant, 17395, Charente Maritime, Poitou-Charentes, France. She was born between 1811 and 1852. André POURTEAU, son of Jean POURTEAU and Jeanne MERLET, was born in 1840 in , Saint-Sauvant, 17395, Charente Maritime, Poitou-Charentes, France and died in 1921 in , Salignac-sur-Charente, 17418, Charente Maritime, Poitou-Charentes, France at the age of 81 years. He married Amélie CHARRIER. She was born in 1844 in , Montils, 17242, Charente Maritime, Poitou-Charentes, France and died in 1922 at the age of 78 years. Jean POURTEAU, son of Jean POURTEAU and Jeanne MERLET, was born in 1843 in , Saint-Sauvant, 17395, Charente Maritime, Poitou-Charentes, France. He married Alida POUSSARD on January 22, 1866 in , Pérignac, 17273, Charente Maritime, Poitou-Charentes, France. She was born between 1813 and 1851. Mélanie ANDRÉ, daughter of Pierre ANDRÉ and Jeanne FOUGERE, was born in 1866 in Vill les Fosses, Mesnac, 16218, Charente, Poitou-Charentes, France and died in 1945 in , Boutiers-Saint-Trojan, 16058, Charente, Poitou-Charentes, France at the age of 79 years. She married Louis LESTRADE on August 5, 1882 in , Boutiers-Saint-Trojan, 16058, Charente, Poitou-Charentes, France. He was born in 1856 and died in 1927 at the age of 71 years. Pierre MAINGUET, son of Pierre MAINGUET and Marguerite Thérèse GIRAUD, was born on December 23, 1848 in Vill Chez Drouillard, Bercloux, 17042, Charente Maritime, Poitou-Charentes, France. 
He married Marie Hélène POMMIER, daughter of Jean Etienne POMMIER and Angélique CONTANT, on September 27, 1867 in , Bercloux, 17042, Charente Maritime, Poitou-Charentes, France. She was born between 1818 and 1852. Marie MACAUD, daughter of Jean MACAUD and Marie HUGUET, was born on March 14, 1853 in , Tonnay-Charente, 17449, Charente Maritime, Poitou-Charentes, France. She married Louis PITARD on May 11, 1875 in , Tonnay-Charente, 17449, Charente Maritime, Poitou-Charentes, France. He was born on August 17, 1848 in , Tonnay-Charente, 17449, Charente Maritime, Poitou-Charentes, France. Alexandre RÉ, son of Jacques François RÉ and Rose TOURNEUR, was born on March 31, 1873 in Vill Azay, Saint-Crépin, 17321, Charente Maritime, Poitou-Charentes, France. He married Louise MICHEL, daughter of François MICHEL and Magdeleine PETIT, on November 5, 1900 in , Saint-Crépin, 17321, Charente Maritime, Poitou-Charentes, France. She was born on November 20, 1878 in , Saint-Crépin, 17321, Charente Maritime, Poitou-Charentes, France and died in , Saint-Crépin, 17321, Charente Maritime, Poitou-Charentes, France. Marie Mathilde MAINGUET, daughter of Auguste MAINGUET and Emilie BESSON, was born on December 27, 1868 in Genouillé, Genouillé, 17174, Charente Maritime, Poitou-Charentes, France and died on August 27, 1943 in , La Rochelle, 17300, Charente Maritime, Poitou-Charentes, France at the age of 74 years. She married … X. Auguste MAINGUET, cultivateur, son of Auguste MAINGUET and Emilie BESSON, was born on November 29, 1873 in , Archingeay, 17017, Charente Maritime, Poitou-Charentes, France and died in Vill la Coudre, , , , ,. He married Helise COGNET on October 1, 1898 in , Lussant, 17216, Charente Maritime, Poitou-Charentes, France. She was born on November 28, 1874 in , Saint clément, , , ,. Eglantine GIRAUD, daughter of Louis GIRAUD and Angelique MINGUET, was born on June 30, 1871 in , Lussant, 17216, Charente Maritime, Poitou-Charentes, France and died in 1952 in , Lussant, 17216, Charente Maritime, Poitou-Charentes, France at the age of 80 years. She married André ARDOUIN. He was born between 1841 and 1885. Emile Philippe MASSON, son of Jean MASSON and Magdelaine MAINGUET, was born on October 23, 1862 in , Puy-du-Lac, 17292, Charente Maritime, Poitou-Charentes, France. He married Marie Adeline TRIAU on May 25, 1886 in , Saint-Coutant-le-Grand, 17320, Charente Maritime, Poitou-Charentes, France. She was born on May 21, 1866 in , Saint-Coutant-le-Grand, 17320, Charente Maritime, Poitou-Charentes, France. René Pierre MAINGUET, commis principal de la Marine, son of Pierre MAINGUET and Joséphine THIBAUD, was born on February 27, 1879 in , Montendre, 17240, Charente Maritime, Poitou-Charentes, France and died on August 18, 1935 in , Rochefort, 17299, Charente Maritime, Poitou-Charentes, France at the age of 56 years. He married Irma FOISSAUD. Thelem MAINGUET, ajusteur, son of Pierre MAINGUET and Joséphine THIBAUD, was born on February 3, 1881 in Vill la Jarrie, Puy-du-Lac, 17292, Charente Maritime, Poitou-Charentes, France and was buried on November 24, 1946 in , Rochefort, 17299, Charente Maritime, Poitou-Charentes, France at the age of 65 years. He married Helene Armande Eugénie BOUTINET, daughter of Théophile BOUTINET and Berthe Louise CHASSELOUP. 
Auguste MAINGUET, agent technique principal de la marine, son of Pierre MAINGUET and Joséphine THIBAUD, was born on June 28, 1886 in Vill la Vacherie, Rochefort, 17299, Charente Maritime, Poitou-Charentes, France and died on June 8, 1964 in , Rochefort, 17299, Charente Maritime, Poitou-Charentes, France at the age of 77 years. He married 2 times. The first time he married Juliette Marie FEROUX, daughter of Honoré FEROUX and Marie Augustine SORLET, on September 25, 1909 in , Rochefort, 17299, Charente Maritime, Poitou-Charentes, France. She was born between 1856 and 1894. The second time he married Yvonne CHARBONNIER, daughter of Louis “Paul” CHARBONNIER and Marie Louise IMBERT, on April 25, 1914 in , Rochefort, 17299, Charente Maritime, Poitou-Charentes, France. She was born on December 19, 1893 in , Rochefort, 17299, Charente Maritime, Poitou-Charentes, France and died on November 28, 1975 in , Rochefort, 17299, Charente Maritime, Poitou-Charentes, France at the age of 81 years. Marie Adele MAINGUET, daughter of Auguste MAINGUET and Marie Adele ROUTURIER, was born on September 28, 1879 in , Saint-Vaize, 17412, Charente Maritime, Poitou-Charentes, France and died on September 27, 1970 in , Le Douhet, 17143, Charente Maritime, Poitou-Charentes, France at the age of 90 years. She married Anatole Abel COUPRIE, son of Joseph COUPRIE and Juliette JEAN, on November 26, 1900 in , Juicq, 17198, Charente Maritime, Poitou-Charentes, France. He was born on May 15, 1875 in Vill Chez Talvard, Vénérand, 17462, Charente Maritime, Poitou-Charentes, France. Corine MAINGUET, daughter of Auguste MAINGUET and Marie Adele ROUTURIER, was born on February 9, 1882 in , Bussac-sur-Charente, 17073, Charente Maritime, Poitou-Charentes, France and died on February 25, 1964 in , Juicq, 17198, Charente Maritime, Poitou-Charentes, France at the age of 82 years. She married Gustave Emmanuel BON, son of Daniel BON and Éléonore BENJAMIN. Marguerite MAINGUET, daughter of Auguste MAINGUET and Marie Adele ROUTURIER, was born on March 29, 1885 in , Bussac-sur-Charente, 17073, Charente Maritime, Poitou-Charentes, France and died on March 25, 1963 in , Les Nouillers, 17266, Charente Maritime, Poitou-Charentes, France at the age of 77 years. She married Lodoïs Armand TARDY, son of Firmin TARDY and Pauline VIAUD. Marcel LEZINEAU, son of Henri LEZINEAU and Honorine MAINGUET, was born on June 23, 1879 in , Puy-du-Lac, 17292, Charente Maritime, Poitou-Charentes, France. He married Albertine RICHAUDEAU, daughter of Henri RICHAUDEAU and Louise MAINGUET, on February 7, 1903 in , Puy-du-Lac, 17292, Charente Maritime, Poitou-Charentes, France. She was born on October 25, 1882 in , Puy-du-Lac, 17292, Charente Maritime, Poitou-Charentes, France. Arsene LEZINEAU, charpentier, son of Henri LEZINEAU and Honorine MAINGUET, was born on November 26, 1882 in , Puy-du-Lac, 17292, Charente Maritime, Poitou-Charentes, France and died on February 26, 1963 in , Saintes, 17415, Charente Maritime, Poitou-Charentes, France at the age of 80 years. He married 2 times. The first time he married Camille BOUGNON on September 17, 1904 in , Puy-du-Lac, 17292, Charente Maritime, Poitou-Charentes, France. She was born in 1885 in , Tonnay-Boutonne, 17448, Charente Maritime, Poitou-Charentes, France and died in 1951 in , Saintes, 17415, Charente Maritime, Poitou-Charentes, France at the age of 66 years. The second time he married Valentine TURPIN on January 12, 1909 in , Surgères, 17434, Charente Maritime, Poitou-Charentes, France. 
She was born between 1852 and 1894. Maurice Pierre LEZINEAU, son of Henri LEZINEAU and Honorine MAINGUET, was born on September 22, 1885 in , Puy-du-Lac, 17292, Charente Maritime, Poitou-Charentes, France. He married Jeanne TOURRA... on September 29, 1906 in , Surgères, 17434, Charente Maritime, Poitou-Charentes, France. She was born between 1855 and 1891. Albertine RICHAUDEAU, daughter of Henri RICHAUDEAU and Louise MAINGUET, was born on October 25, 1882 in , Puy-du-Lac, 17292, Charente Maritime, Poitou-Charentes, France. She married Marcel LEZINEAU, son of Henri LEZINEAU and Honorine MAINGUET, on February 7, 1903 in , Puy-du-Lac, 17292, Charente Maritime, Poitou-Charentes, France. He was born on June 23, 1879 in , Puy-du-Lac, 17292, Charente Maritime, Poitou-Charentes, France. Henriette GIRAUD, daughter of Charles Sylvain GIRAUD and Victorine DESBROUSSE, was born on March 13, 1880 in , Saint-Crépin, 17321, Charente Maritime, Poitou-Charentes, France and died on November 2, 1973 in , Saint-Laurent-de-la-Barrière, 17352, Charente Maritime, Poitou-Charentes, France at the age of 93 years. She married Clement Adrien RAUD in , Saint-Crépin, 17321, Charente Maritime, Poitou-Charentes, France. He was born on November 6, 1874 in , Genouillé, 17174, Charente Maritime, Poitou-Charentes, France and died on August 26, 1960 in , Saint-Laurent-de-la-Barrière, 17352, Charente Maritime, Poitou-Charentes, France at the age of 85 years. Angèle GIRAUD, daughter of François Alexis Giraud GIRAUD and Eugenie CLAIRGEAUD, was born in 1881 in , Saint-Savinien, 17397, Charente Maritime, Poitou-Charentes, France and died in 1960 in , Taillebourg, 17436, Charente Maritime, Poitou-Charentes, France at the age of 79 years. She married Victor MENET on April 14, 1903 in , Taillebourg, 17436, Charente Maritime, Poitou-Charentes, France. He was born in 1880 and died in 1956 at the age of 76 years. Isidore GODIN, son of Victor GODIN and Marie DORSON, was born on February 21, 1887 in Authon-Ébéon, 17026, Charente Maritime, Poitou-Charentes, France and died on December 17, 1951 in Rochefort, 17299, Charente Maritime, Poitou-Charentes, France at the age of 64 years. He married Marguerite ORIEUX. Ludomir POURTAUD, son of André POURTEAU and Amélie CHARRIER, was born in 1867 in , Pérignac, 17273, Charente Maritime, Poitou-Charentes, France and died before 1928. He married Henriette Joséphine PAYEN, daughter of François Eugène PAYEN and Joséphine DELAJUS. Maurice PORTAUD, coordonnier, son of André POURTEAU and Amélie CHARRIER, was born in 1871 in , Pérignac, 17273, Charente Maritime, Poitou-Charentes, France and died in 1899 in , Salignac-sur-Charente, 17418, Charente Maritime, Poitou-Charentes, France at the age of 28 years. He married Louise NOURISSON. She was born between 1841 and 1882. Fernand PORTEAUD, son of André POURTEAU and Amélie CHARRIER, was born in 1873 in , Pérignac, 17273, Charente Maritime, Poitou-Charentes, France and died in 1955 in , Pérignac, 17273, Charente Maritime, Poitou-Charentes, France at the age of 82 years. He married Marie Clémence BONNAUD on November 7, 1896 in , Salignac-sur-Charente, 17418, Charente Maritime, Poitou-Charentes, France. She was born in 1878 in , Salignac-sur-Charente, 17418, Charente Maritime, Poitou-Charentes, France and died in 1976 in , Salignac-sur-Charente, 17418, Charente Maritime, Poitou-Charentes, France at the age of 98 years. Jeanne LESTRADE, daughter of Louis LESTRADE and Mélanie ANDRÉ, was born between 1881 and 1884. Jeanne LESTRADE had 2 children. 
Melanie Amelie LESTRADE, daughter of Louis LESTRADE and Mélanie ANDRÉ, was born in 1883 and died in 1969 at the age of 86 years. She married Jean AUTEXIER on February 16, 1899 in , Boutiers-Saint-Trojan, 16058, Charente, Poitou-Charentes, France. He was born in 1879 and died in 1956 at the age of 77 years. Charles MAINGUET, son of … X and Marie Mathilde MAINGUET, was born on August 25, 1905 in , Genouillé, 17174, Charente Maritime, Poitou-Charentes, France and died on November 23, 1963 in , La Rochelle, 17300, Charente Maritime, Poitou-Charentes, France at the age of 58 years. He married Paulette-Therese POITOU, daughter of Paul POITOU and Marie Eugenie CAILEAU. Charles MAINGUET and Paulette-Therese POITOU had 5 children. Clovis Albert ARDOUIN, elève garde mobile, son of André ARDOUIN and Eglantine GIRAUD, was born in 1902 in , Lussant, 17216, Charente Maritime, Poitou-Charentes, France and died in 1932 in , Bastia, 2B033, Corse du nord, Haute-Corse, France at the age of 30 years. He married Jeanne Marguerite JAMMET. Clovis Albert ARDOUIN and Jeanne Marguerite JAMMET had 1 child. Fernand MAINGUET, son of Thelem MAINGUET and Helene Armande Eugénie BOUTINET, was born in 1904 in , Rochefort, 17299, Charente Maritime, Poitou-Charentes, France and died in 1991 in , Toulon, 83137, Var, Provence-Alpes-Côte-d'Azur, France at the age of 87 years. He married Louise Colette VIANELLO on September 13, 1927 in , Hyères, 83069, Var, Provence-Alpes-Côte-d'Azur, France. She was born on December 9, 1905 in , Hyères, 83069, Var, Provence-Alpes-Côte-d'Azur, France and died in 2006 in , Toulon, , , , at the age of 100 years. Pierre Paul Auguste Henri MAINGUET, technicien principal de la Marine, son of Auguste MAINGUET and Yvonne CHARBONNIER, was born on February 8, 1915 in , Rochefort, 17299, Charente Maritime, Poitou-Charentes, France and died on May 10, 1986 in , Rochefort, 17299, Charente Maritime, Poitou-Charentes, France at the age of 71 years. He married Jeanine “Ginette” Léona Florine Marie MAUPILIER, daughter of Raoul Alexandre Marie MAUPILIER and Léona DAVID. Louisette MAINGUET, employée au grand bazar charentais, daughter of Auguste MAINGUET and Yvonne CHARBONNIER, was born in 1917 in , Rochefort, 17299, Charente Maritime, Poitou-Charentes, France and died on March 4, 1953 in , Rochefort, 17299, Charente Maritime, Poitou-Charentes, France at the age of 36 years. Louisette MAINGUET had 2 children. Olympe BON, daughter of Gustave Emmanuel BON and Corine MAINGUET, was born on March 14, 1905 in , Juicq, 17198, Charente Maritime, Poitou-Charentes, France and was buried in 2001 in , Juicq, 17198, Charente Maritime, Poitou-Charentes, France at the age of 95 years. Maurice Maximilien LEZINEAU, charpentier, son of Arsene LEZINEAU and Camille BOUGNON, was born on March 3, 1915 in , Surgères, 17434, Charente Maritime, Poitou-Charentes, France and died on September 21, 1983 in , Touvérac, 16224, Charente, Poitou-Charentes, France at the age of 68 years. Clémenr Roger RAUD, son of Clement Adrien RAUD and Henriette GIRAUD, was born in 1900 in , Saint-Crépin, 17321, Charente Maritime, Poitou-Charentes, France and died in 1964 in , Saint-Laurent-de-la-Barrière, 17352, Charente Maritime, Poitou-Charentes, France at the age of 64 years. He married Emma GABORIT. Clémenr Roger RAUD and Emma GABORIT had 2 children. 
Odette MENET, daughter of Victor MENET and Angèle GIRAUD, was born in 1904 in , Taillebourg, 17436, Charente Maritime, Poitou-Charentes, France and died in 1985 in , Saintes, 17415, Charente Maritime, Poitou-Charentes, France at the age of 81 years. She married Georges PEYRONDET. Odette MENET and Georges PEYRONDET had 1 child. Raymond GODIN, son of Isidore GODIN and Marguerite ORIEUX. He married Solange PEMBA. Raymond GODIN and Solange PEMBA had 1 child. Maurice POURTAUD, son of Ludomir POURTAUD and Henriette Joséphine PAYEN, was born in 1889 and died in 1973 at the age of 84 years. Maurice POURTAUD had 1 child. Gaston POURTAUD, son of Ludomir POURTAUD and Henriette Joséphine PAYEN, was born in 1894 in , Bois-Colombes, 92009, Hauts de Seine, Île-de-France, France and died in 1977 in , Saint-Rémy-de-Provence, 13100, Bouches du Rhône, Provence-Alpes-Côte-d'Azur, France at the age of 83 years. Gaston POURTAUD had 1 child. Marcelle POURTAUD, daughter of Ludomir POURTAUD and Henriette Joséphine PAYEN, was born in 1896 and died in 1986 at the age of 90 years. Marcelle POURTAUD had 1 child. Suzanne POURTAUD, daughter of Ludomir POURTAUD and Henriette Joséphine PAYEN, was born in 1906 and died in 1975 at the age of 69 years. Andréa Amélie PORTAUD, daughter of Maurice PORTAUD and Louise NOURISSON, was born in 1896 in , Salignac-sur-Charente, 17418, Charente Maritime, Poitou-Charentes, France and died in 1962 in , Cognac, 16102, Charente, Poitou-Charentes, France at the age of 66 years. Gabriel PORTEAUD, son of Fernand PORTEAUD and Marie Clémence BONNAUD, was born in 1897 and died in 1971 at the age of 74 years. André PORTEAUD, son of Fernand PORTEAUD and Marie Clémence BONNAUD, was born in 1901 and died in 1984 at the age of 83 years. Renée Melanie AUTEXIER, daughter of Jean AUTEXIER and Melanie Amelie LESTRADE, was born in 1904 in , Boutiers-Saint-Trojan, 16058, Charente, Poitou-Charentes, France and died in 1991 in , Touvérac, 16224, Charente, Poitou-Charentes, France at the age of 87 years. She married Mathieu TEISSEIRE. Renée Melanie AUTEXIER and Mathieu TEISSEIRE had 7 children. Jean MAINGUET, son of Fernand MAINGUET and Louise Colette VIANELLO, was born about 1928 and died in 1996. He married Régine MOULET. Jean MAINGUET and Régine MOULET had 1 child. Jacques FICHET, son of Private and Private, was born between 1887 and 1980 and died in August 1980 in , Hyères, 83069, Var, Provence-Alpes-Côte-d'Azur, France. Jacques FICHET had 1 child. Jacques Teddy Christian MAINGUET, commandant Police Nationale, son of Pierre Paul Auguste Henri MAINGUET and Jeanine “Ginette” Léona Florine Marie MAUPILIER, was born on June 18, 1946 in , Toulon, 83137, Var, Provence-Alpes-Côte-d'Azur, France and died on June 8, 2012 in La Gripperie Saint Symphorien, Rochefort, 17299, Charente Maritime, Poitou-Charentes, France at the age of 65 years. Jacques Teddy Christian MAINGUET had 2 children. Régis ANDRES, son of Private and Private, was born on April 21, 1967 in , Niort, 79191, Deux Sèvres, Poitou-Charentes, France and died in April 2016 at the age of 48 years. Régis ANDRES had 2 children.
#! /usr/bin/env python3 # -*- coding: utf-8 -*- ''' @author: Mindaugas Greibus ''' import re import wave import os def transform_transcription_file(transcription_path, output_file_map, total_duration_map, repo_type): with open(transcription_path, 'r') as input_file: for line in input_file: line=re.sub(r'<sil[\+\w]+>',r'',line) line=re.sub(r'<s> *<sil>',r'<s>',line) line=re.sub(r'<sil> *</s>',r'</s>',line) line=re.sub(r'( *<sil>)+',r'',line) line=re.sub(r'(\s+)',r' ',line) text ="" file_name="" m = re.search('<s>(.*)</s>\s*\((.*)\)',line) if(not m): print(">>>> " + line) raise ValueError('Cannot parse the line: ' + line) #line text text = m.group(1) ## find correct file path file_name = m.group(2) m = re.search('-(.+)$',file_name) if(not m): print(">>>> " + line) raise ValueError('Dir not found: ' + file_name) dir_name=m.group(1) wav_name = "../{}_repo/{}/{}.wav".format(repo_type,dir_name, file_name) ##Calculate duration audio = wave.open(wav_name) duration = float(audio.getnframes()) / audio.getframerate() audio.close() kaldi_path = "./liepa_audio/{}/{}/{}.wav".format(repo_type,dir_name, file_name) total_duration_map["total"] += duration out = '{},{},{}\n'.format(duration, text, kaldi_path) print(out) if(duration>1): #if shorter as given time training is crashing is code dump total_duration_map[repo_type] += duration output_file_map[repo_type].write(out) else: total_duration_map["short"] += duration src_dir = "../" test_transcription_path = os.path.join(src_dir, "./target/liepa_test.transcription") train_transcription_path = os.path.join(src_dir, "./target/liepa_train.transcription") with open('./target/liepa_test.csv', 'w') as test_repo_csv, \ open('./target/liepa_train.csv', 'w') as train_repo_csv: output_file_map = {"test":test_repo_csv, "train": train_repo_csv} total_duration_map = { "test":0, "train":0,"short":0, "total":0} transform_transcription_file(test_transcription_path, output_file_map, total_duration_map, "test") transform_transcription_file(train_transcription_path, output_file_map, total_duration_map, "train") print(total_duration_map) # cp sphinx_files/etc/liepa.dic ./target/data/local/dict/lexicon.txt # inserto to ./target/data/local/dict/lexicon.bak "!SIL sil\n<UNK> spn\n"
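For reference, the script above expects Sphinx-style transcription lines of the form <s> text </s> (file_id), and it recovers the audio directory from the part of the file id after the hyphen. A small sketch of just that parsing step, using an invented file id and the train repository:

# Hedged example: 'D10_123-D10' is an invented file id; real ids come from the LIEPA transcription files.
import re

line = '<s> labas rytas </s> (D10_123-D10)'

m = re.search(r'<s>(.*)</s>\s*\((.*)\)', line)
text = m.group(1).strip()                              # 'labas rytas'
file_name = m.group(2)                                 # 'D10_123-D10'

dir_name = re.search(r'-(.+)$', file_name).group(1)    # 'D10'
wav_name = '../{}_repo/{}/{}.wav'.format('train', dir_name, file_name)
print(text, wav_name)                                  # labas rytas ../train_repo/D10/D10_123-D10.wav

The script then opens each wav file with the wave module to compute its duration, and only clips longer than one second are written to the train/test CSVs; shorter clips are tallied under the "short" key of the duration map.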
Initially, PLRB collects your name, contact details, professional credentials, and information volunteered by you in your resume or curriculum vitae and in a cover letter. PLRB also collects writing samples and all communications with you at this initial stage. PLRB staff makes notes and evaluations regarding applicants, which are kept. Next, if contacted for an in-person interview at our offices or via video conference (e.g. Skype), PLRB will ask you to complete a job application which requires a signature from you verifying the truthfulness of statements and qualifications listed. During the interview, PLRB staff often makes notes which are stored. Part of the interview may include an example work assignment that candidates are asked to complete and return to PLRB. PLRB stores the work and the related communications. For candidates in consideration for an offer, PLRB may contact references, verify licenses with licensing authorities, and verify educational degrees. For positions involving access to banking and financial matters, criminal background checks may be performed. Records of these activities and results are maintained by PLRB. Finally, if employment is offered and accepted, PLRB collects information required by U.S. Citizenship and Immigration Services; the Internal Revenue Service; other federal and state government programs; health, dental, vision, life, and disability benefit providers; the retirement plan benefit provider; payroll processing; and internal policy acknowledgements required for operations. For temporary employees, PLRB also receives information about you from your employment agency. During the evaluation and hiring stage, information provided by prospective employees, data created by PLRB staff about prospective employees, and the related communications are shared only with human resources staff, the hiring manager, assistants to the hiring manager, and the president of PLRB. Names of visitors to PLRB for interviews are shared with building security. PLRB will only share information with others as necessary to check references and verify credentials. After employment begins, pertinent data is provided to the government and benefits providers. Furthermore, the name, credentials, and a professional bio approved by the applicant are shared publicly. Our purpose for processing applicant information is to assess your suitability for a role for which you are being considered. For those offered employment, the purposes for processing are employment, benefits, business operations, and legal requirements. The legal basis for processing an applicant’s personal data is GDPR, Article 6(1)(b) - contract. If any data is special category data, such as health, religious or ethnic information, it falls under GDPR, Article 9(2)(b), which relates to obligations in employment and the safeguarding of your fundamental rights, and Article 9(2)(h) for assessing your work capacity as an employee. After employment, processing of data is based on GDPR, Article 6(1)(c) - legal obligation, in addition to contract. Creation of an employee number, testing, using data to provide benefits, and making you available for business purposes are accomplished under GDPR, Article 6(1)(f) - legitimate interests in operating a company. For resumes and cover letters submitted, records are kept by law at least one year and up to three years. For those receiving and participating in interviews, records are kept from one to five years.
Some applicant records are kept more than one year in case new openings arise. Records for employees are maintained for the duration of employment plus additional time pursuant to statutes, including statutes of limitations, as well as for the time needed for processing retirement benefits. Certain records, such as building identification, business cards, and similar items, are discarded at the end of employment. Payroll and expense reimbursement records are maintained for seven years. With regard to matters related to employment, you have rights of access, rectification, restriction, and portability. After employment, you have rights to object and potentially rights to be forgotten if no overriding matter requires retention.
from __future__ import print_function ''' basic fel calculations ''' #from pylab import * import numpy as np import numpy.fft as fft import scipy.special as sf from ocelot.common.globals import m_e_eV, epsilon_0, speed_of_light, q_e, h_eV_s, lambda_C_r, I_Alfven, ro_e import logging from scipy.optimize import fmin from copy import deepcopy _logger = logging.getLogger(__name__) #from matplotlib.figure import Figure #from mpl_toolkits.mplot3d import Axes3D #import fel class FelParameters: def __init__(self): self.qf = 0 self.inaccurate = False # True if fitting formulas do not promise good accuracy pass def eval(self, method='mxie'): _logger.debug('Calculating FEL parameters') if np.size(self.I) > 1: tdp=True else: tdp=False if not hasattr(self, 'hn'): self.hn=1 #harmonic number if np.any(self.betax <= 0) or np.any(self.betay <= 0): _logger.warning('betax or betay <= 0, returning lg3=np.nan') self.lg3 = np.nan return self.rxbeam = np.sqrt(self.betax * self.emitx / self.gamma0) self.rybeam = np.sqrt(self.betay * self.emity / self.gamma0) if np.any(self.rxbeam <= 0) or np.any(self.rybeam <= 0): _logger.warning('rxbeam or rybeam <= 0, returning lg3=np.nan') self.lg3 = np.nan return self.deta = self.delgam / self.gamma0 if np.isnan(self.aw0): _logger.warning('aw0 is nan') self.inaccurate = True self.lambda0 = self.xlamd / (2.0 * self.gamma0**2) * (1.0 + self.aw0**2) # resonant wavelength if np.any(self.lambda0 < 0): _logger.error('wavelength is not reachable with und_period {} gamma {} and K {}'.format(self.xlamd,self.gamma0,self.aw0)) self.inaccurate = True self.lambdah = self.lambda0 / self.hn self.k0 = 2 * np.pi / self.lambda0 # self.Ia = I_Alfven #remove if self.iwityp == 0: #planar undulator ja = self.aw0**2 / (2*(1 + self.aw0**2)) self.fc = sf.j0(ja) - sf.j1(ja) # if self.hn != 1: jah = self.hn * self.aw0**2 / (2*(1 + self.aw0**2)) self.fch = sf.jv((self.hn-1)/2, jah) - sf.jv((self.hn+1)/2, jah) else: #helical undulator self.fc = 1 if self.hn !=1: _logger.warning('harmonic number != 1 and undulator is helical. Not implemented! Retunrning zero coupling at harmonic!') self.inaccurate = True self.fch = 0 else: self.fch = 1 self.Pb = self.gamma0 * self.I * m_e_eV# beam power [Reiche] # import first, ro_e * m_e_eV = 1.4399643147059695e-09 # self.N = self.I * self.lambda0 / 1.4399644850445153e-10 # self.sigb = 0.5 * (self.rxbeam + self.rybeam) # average beam size emit_n = np.sqrt(self.emitx * self.emity) # h_eV_s * speed_of_light / self.lambda0 self.emit_nn = 2 * np.pi * emit_n / self.lambdah / self.gamma0 ## emittance normalized as in Eq.6, 10.1103/PhysRevSTAB.15.080702 if (np.any(self.emit_nn < 1) or np.any(self.emit_nn) > 5): self.inaccurate = True if tdp: _logger.warning('1 <! min(emittance) {} <! 5, SSY approx. might be incorrect'.format(np.nanmin(self.emit_nn))) else: _logger.warning('1 <! emittance {} <! 5, SSY approx. might be incorrect'.format(self.emit_nn)) #Eq.6, DOI:10.1103/PhysRevSTAB.15.080702 if self.qf == 1: #account for quantum fluctuations if self.iwityp == 0: #planar undulator F_aw = 1.7 * self.aw0 + 1 / (1 + 1.88 * self.aw0 + 0.8 * self.aw0**2) #eq.B2, DOI:10.1103/PhysRevSTAB.15.080702, #eq.11 DOI:10.1016/j.optcom.2004.02.071 else: #helical undulator F_aw = 1.42 * self.aw0 + 1 / (1 + 1.5 * self.aw0 + 0.95 * self.aw0**2) if method == 'mxie': ''' M. Xie, “Exact and variational solutions of 3D eigenmodes in high gain FELs,” Nucl. Instruments Methods Phys. Res. Sect. A Accel. Spectrometers, Detect. Assoc. Equip., vol. 445, no. 1–3, pp. 59–66, 2000. 
''' # if self.hn != 1: # _logger.warning('MXie estimation not implemented for harmonic radaition') self.rho1 = (0.5 / self.gamma0) * np.power( (self.aw0 * self.fc * self.xlamd / 2 / np.pi )**2 / (self.rxbeam * self.rybeam) * self.I / I_Alfven, 1.0/3.0) #self.power = 6.0 * np.sqrt(np.pi) * self.rho1**2 * self.Pb / (self.N * np.log(self.N / self.rho1) ) # shot noise power [W] [Reiche] self.lg1 = self.xlamd / (4*np.pi * np.sqrt(3) * self.rho1) #power gain length [Xie] self.zr = 4 * np.pi * self.rxbeam * self.rybeam / self.lambda0 a = [None, 0.45, 0.57, 0.55, 1.6, 3.0, 2.0, 0.35, 2.9, 2.4, 51.0, 0.95, 3.0, 5.4, 0.7, 1.9, 1140.0, 2.2, 2.9, 3.2] self.xie_etad = self.lg1 / (2 * self.k0 * self.rxbeam * self.rybeam) #self.xie_etae = 4 * pi * self.lg1 / (self.betax*2*pi) * self.k0 * (self.emitx / self.gamma0) self.xie_etae = 4 * np.pi * self.lg1 * (self.emitx * self.emity) / self.lambda0 / (self.rxbeam * self.rybeam) / self.gamma0**2 * ((self.fc/self.fch)**2 / self.hn)**(1/3) / self.hn # expressed via average x-y beam size self.xie_etagamma = self.deta / (self.rho1 * np.sqrt(3)) if self.hn !=1: self.xie_etad *= ((self.fc/self.fch)**2 / self.hn)**(1/3) / self.hn self.xie_etae *= ((self.fc/self.fch)**2 / self.hn)**(1/3) * self.hn self.xie_etagamma *= ((self.fc/self.fch)**2 / self.hn)**(1/3) * self.hn #eq C2+ DOI:10.1103/PhysRevSTAB.15.080702 self.delta = (a[1] * self.xie_etad ** a[2] + a[3] * self.xie_etae ** a[4] + a[5] * self.xie_etagamma ** a[6] + a[7] * self.xie_etae ** a[8] * self.xie_etagamma ** a[9] + a[10] * self.xie_etad ** a[11] * self.xie_etagamma ** a[12] + a[13] * self.xie_etad ** a[14] * self.xie_etae ** a[15] + a[16] * self.xie_etad ** a[17] * self.xie_etae ** a[18] * self.xie_etagamma ** a[19]) # self.lg3 = self.lg1 * (1 + self.xie_lscale) self.method = 'mxie' elif method == 'ssy_opt': ''' E. L. Saldin, E. A. Schneidmiller, and M. V. Yurkov, “Design formulas for short-wavelength FELs,” Opt. Commun., vol. 235, no. 4–6, pp. 415–420, May 2004. ''' self.lg1 = 0.5 * 1.67 * np.sqrt(I_Alfven / self.I) * (emit_n * self.xlamd)**(5/6) / self.lambdah**(2/3) * (1 + self.aw0**2)**(1/3) / (self.hn**(5/6) * self.aw0 * self.fch) #eq.4, DOI:10.1103/PhysRevSTAB.15.080702 # it is power gain length = 0.5 * field gain length self.delta = 131 * (I_Alfven / self.I) * emit_n**(5/4) / (self.lambdah * self.xlamd**9)**(1/8) * self.hn**(9/8) * self.delgam**2 / (self.aw0 * self.fch)**2 / (1 + self.aw0**2)**(1/8) #eq.5, DOI:10.1103/PhysRevSTAB.15.080702 # if hasattr(self, 'qf'): # if self.qf==1: # self.lg3 = self.lg1 * (1 + self.delta_eff) self.method = 'ssy_opt' else: _logger.error('method should be in ["mxie", "ssy_opt"]') raise ValueError('method should be in ["mxie", "ssy_opt"]') if self.qf == 1: self.delta_q = 5.5e4 * (I_Alfven / self.I)**(3/2) * lambda_C_r * ro_e * emit_n**2 / self.lambda0**(11/4) / self.xlamd**(5/4) * (1 + self.aw0**2)**(9/4) * F_aw / (self.aw0 * self.fch**3 * self.hn**(5/3)) if np.any(self.delta_q >= 1): _logger.warning('quantum fluctuation effect exceeds 1, estimation not applicable anymore') self.delta_q = 0.999 self.inaccurate = True else: self.delta_q = 0 self.delta_eff = (self.delta + self.delta_q) / (1 - self.delta_q) self.delta_criterion = 2.5 * (1 - np.exp(-0.5 * self.emit_nn**2)) if np.any(self.delta_eff > self.delta_criterion): if tdp: _logger.warning('delta_eff > delta_criterion; SSY approx. might be incorrect') else: _logger.warning('delta_eff {} > {}; SSY approx. 
might be incorrect'.format(self.delta_eff, self.delta_criterion)) self.inaccurate = True #Eq.7, DOI:10.1103/PhysRevSTAB.15.080702 #Eq.14+text, DOI:10.1016/j.optcom.2004.02.071 self.beta_opt_calc = 11.2 * (I_Alfven / self.I)**(1/2) * (emit_n**3 * self.xlamd)**(1/2) / (self.lambdah* self.hn**(1/2) * self.aw0 * self.fch) / (1 + 8 * self.delta_eff)**(1/3) self.lg3 = self.lg1 * (1 + self.delta_eff) self.lg3 *= self.Lg_mult if self.Lg_mult != 1: _logger.info('lg3 multiplied by Lg_mult ({})'.format(self.Lg_mult)) self.rho3 = self.xlamd / (4*np.pi * np.sqrt(3) * self.lg3) self.Nc = self.I / (q_e * self.rho3 * self.k0 * speed_of_light) # self.P_sn = (3 * self.rho1 * self.Pb) / (self.Nc * np.sqrt(np.pi * np.log(self.Nc))) # shot noise power [W] self.P_sn = (3 * self.rho3 * self.Pb) / (self.Nc * np.sqrt(np.pi * np.log(self.Nc))) # shot noise power [W] self.z_sat_norm = 3 + 1/np.sqrt(3) * np.log(self.Nc) # normalized saturation length for slices self.z_sat_magn = self.z_sat_norm * np.sqrt(3) * self.lg3 # magnetic length to reach saturation self.theta_c = np.sqrt(self.lambdah / self.lg3) #critical angle # _logger.debug('L_sat_norm = {}'.format(self.z_sat_norm)) self.z_sat_min = np.nanmin(self.z_sat_magn) def beta_opt(self, method='mxie', apply=False, **kwargs): if method == 'mxie': beta_orig_x, beta_orig_y = self.betax, self.betay beta_orig = np.mean([beta_orig_x, beta_orig_y]) fel_copy = deepcopy(self) def f(x, method=method): fel_copy.betax = fel_copy.betay = x fel_copy.eval(method=method) return fel_copy.lg3 err_dict = np.geterr() np.seterr(all='ignore') beta_opt = fmin(f, beta_orig, disp=0, **kwargs) np.seterr(**err_dict) elif method == 'ssy_opt': beta_opt = self.beta_opt_calc else: _logger.error('method should be in ["mxie", "ssy_opt"]') raise ValueError('method should be in ["mxie", "ssy_opt"]') if apply: self.betax = beta_opt self.betay = beta_opt self.eval(method) else: return beta_opt[0] def log(self, type='debug'): if type == 'debug': _log_func = _logger.debug elif type == 'info': _log_func = _logger.info elif type == 'log': _log_func = _logger.log elif type == 'print': _log_func = print _log_func('undulator period = {}'.format(self.xlamd)) _log_func('undulator K (rms) = {}'.format(self.aw0)) if self.iwityp == 0: _log_func('undulator type - planar') else: _log_func('undulator type - helical') # _log_func('beam E GeV = {}'.format(beam.E)) _log_func('beam gamma = {}'.format(self.gamma0)) _log_func('beam dgamma= {}'.format(self.delgam)) _log_func('beam current = {}'.format(self.I)) _log_func('beam power = {}'.format(self.Pb)) # _log_func('beam alphax = {}'.format(self.alphax)) # _log_func('beam alphay = {}'.format(self.alphay)) _log_func('beam betax = {}'.format(self.betax)) _log_func('beam betay = {}'.format(self.betay)) _log_func('beam emitx_norm = {}'.format(self.emitx)) _log_func('beam emity_norm = {}'.format(self.emity)) # _log_func('beam x = {}'.format(self.xbeam)) # _log_func('beam y = {}'.format(self.ybeam)) # _log_func('beam px = {}'.format(self.pxbeam)) # _log_func('beam py = {}'.format(self.pybeam)) _log_func('beam rx = {}'.format(self.rxbeam)) _log_func('beam ry = {}'.format(self.rybeam)) _log_func('') _log_func('Estimation results') _log_func('Rho 1D = {}'.format(self.rho1)) _log_func('FEL_wavelength = {} m'.format(self.lambda0)) _log_func('FEL_E_photon = {} eV'.format(h_eV_s * speed_of_light / self.lambda0)) _log_func('Lg 1D = {} m'.format(self.lg1)) _log_func('Z_Rayl = {} m'.format(self.zr)) _log_func('xie_eta_d = {}'.format(self.xie_etad)) _log_func('xie_eta_e = 
{}'.format(self.xie_etae)) _log_func('xie_eta_gamma = {}'.format(self.xie_etagamma)) # _log_func('xie_scaling_tot = {}'.format(self.xie_lscale)) _log_func('Lg 3D = {}'.format(self.lg3)) _log_func('Rho 3D = {}'.format(self.rho3)) _log_func('P_shnoise = {}'.format(self.P_sn)) _log_func('L_sat_magn = {}'.format(self.z_sat_magn)) _log_func('L_sat_min = {}'.format(self.z_sat_min)) _log_func('Theta_critical = {} rad'.format(self.theta_c)) def P(self, z=None): ''' returns sase power at distance z unfinished ''' # Nc = self.Ip / (q_e * rho * self.k0 * speed_of_light) # z_sat = 3 + 1/np.sqrt(3) * np.log(Nc) # Psn = (3 * rho * self.Pb) / (Nc * np.sqrt(np.pi * np.log(Nc))) if z is None: zn = self.z_sat_min / (np.sqrt(3) * self.lg3) elif z == 0: return np.array(np.size(self.P_sn)*(np.NaN,)) else: if np.size(z) > 1: z = z[:,np.newaxis] if (z > self.z_sat_min).any(): _logger.warning('Estimation applicable up to z_sat_min=%.2fm, limiting power to saturation level' %(self.z_sat_min)) idx = z > self.z_sat_min[:,np.newaxis] z[idx] = self.z_sat_min[:,np.newaxis][idx] else: if (z > self.z_sat_min): _logger.warning('Estimation applicable up to z_sat_min=%.2fm, while z=%.2fm requested, returning saturation power' %(self.z_sat_min, z)) z = self.z_sat_min zn = z / (np.sqrt(3) * self.lg3) Pz = self.P_sn * (1 + 1/9 * np.exp(np.sqrt(3) * zn) / np.sqrt(np.pi * zn)) # Pz = self.P_sn * (1 + 1/9 * np.exp(np.sqrt(3) * zn)) #Pz = p.P_sn * (1 + 1/9 * np.exp(np.sqrt(3) * zn)) if hasattr(self,'P_mult'): if self.P_mult is not None: Pz *= self.P_mult return Pz def E(self, z=None): P = self.P(z) P[np.isnan(P)] = 0 return np.trapz(P, self.s / speed_of_light) def tcoh(self,z=None): #check if z is None: z = self.z_sat_min elif z > self.z_sat_min: _logger.warning('estimation applicable up to z_sat_min=%.2fm, while z=%.2fm requested' %(z_sat_min, z)) tcoh = self.lambda0 / (6 * self.rho3 * speed_of_light ) * np.sqrt(z / (2 * np.pi * self.lg3)) return tcoh def P_sat(self): return self.P(self.z_sat_min) @property def phen0(self): return h_eV_s * speed_of_light / self.lambda0 @property def phenh(self): return h_eV_s * speed_of_light / self.lambdah def spectrogram(self, z=None): #fast spectrogram evaluation if z is None: z = self.z_sat_min Psat = self.P(z) Psat[np.isnan(Psat)]=0 idx = self.idx phen0 = self.phen0 dphen = phen0 * self.rho3 dp = dphen[idx] / 10 s_arr = self.s phen_arr = np.arange(np.amin(phen0 - 3 * dphen), np.amax(phen0 + 3 * dphen), dp) spec = np.zeros((s_arr.size, phen_arr.size)) for i in range(s_arr.size): if dphen[i] != 0: spec[i] = np.exp(-(phen_arr - phen0[i])**2 / 2 / dphen[i]**2) / np.sqrt(2 * np.pi * dphen[i]**2) spec = spec * Psat[:, np.newaxis] return (s_arr, phen_arr, spec.T) def spectrum(self, z=None): #fast total spectrum evaluation s_arr, phen_arr, spectrogram = self.spectrogram(z = z) spectrum = np.sum(spectrogram, axis=1) return phen_arr, spectrum class FelParametersArray(FelParameters): def __init__(self): super().__init__() @property def idx(self): try: idx = self.I.argmax() except AttributeError: idx = None return idx def calculateFelParameters(input, array=False, method='mxie'): if array: p = FelParametersArray() else: p = FelParameters() p.iwityp = input.iwityp # undulator type: 0 == planar, other == helical p.gamma0 = input.gamma0 p.delgam = input.delgam p.xlamd = input.xlamd # undulator period p.betax = input.betax p.betay = input.betay p.emitx = input.emitx #normalized emittance p.emity = input.emity if hasattr(input,'hn'): p.hn = input.hn if hasattr(input,'qf'): p.qf = input.qf p.Lg_mult = 1 if 
hasattr(input,'Lg_mult'): if input.Lg_mult is not None: p.Lg_mult = input.Lg_mult p.P_mult = 1 if hasattr(input,'P_mult'): if input.P_mult is not None: p.P_mult = input.P_mult # p.rxbeam = input.rxbeam # p.rybeam = input.rybeam p.aw0 = input.aw0 # rms undulator parameter K p.I = input.curpeak p.eval(method) # if array: # p.log('log') # else: # pass # p.log('debug') # if not array: # try: # p.idx = p.I.argmax() # except AttributeError: # p.idx = 0 return p def beam2fel(beam, lu, K_peak, iwityp=0, method='mxie', hn=1, qf=0): ''' tmp function to estimate fel parameters slice-wise hn = harmonic number qf = account for quantum fluctuations ''' if beam.len() == 0: raise ValueError('Beam length should not be zero') class tmp(): pass tmp.hn=hn tmp.qf=qf tmp.gamma0 = beam.g tmp.delgam = beam.dg tmp.xlamd = lu # undulator period tmp.iwityp = iwityp tmp.emitx = beam.emit_xn tmp.emity = beam.emit_yn if hasattr(beam,'beta_x_eff') and hasattr(beam,'beta_y_eff'): tmp.betax = beam.beta_x_eff tmp.betay = beam.beta_y_eff else: # print('use update_effective_beta() to increase estimation accuracy') tmp.betax = beam.beta_x tmp.betay = beam.beta_y if K_peak == 0: print('Warning, undulator K=0') if iwityp == 0: #planar tmp.aw0 = K_peak / np.sqrt(2) else: #other tmp.aw0 = K_peak tmp.curpeak = beam.I fel=calculateFelParameters(tmp, array=True, method=method) fel.s = beam.s return (fel) def printFelParameters(p): #print (input.parameters) print ('******** FEL Parameters ********') print ('ex=', p.emitx) print ('ey=', p.emity) print ('rxbeam=', p.rxbeam, ' [m]') print ('rybeam=', p.rybeam, ' [m]') print ('rel energy spread deta=', p.deta, ' [m]') print ('xlamd=', p.xlamd) print ('aw0=', p.aw0) print ('coupling parameter fc=', p.fc) print ('gamma0=', p.gamma0) print ('Ip=', p.I, ' beam peak current [A]') print ('lambda0=', p.lambda0) print ('Pb= %.3e beam power [W]'%(p.Pb)) # print ('N=', p.N) print ('rho (1D)=', p.rho1) print ('gain length estimate lg (1D)=', p.lg1) # print ('power=', p.power, ' equivalent shot noise power [W]') print ('Rayleigh length estimate zr=', p.zr) print ('') print ('Ming Xie gain reduction estimates:') print ('diffraction parameter eta_d=', p.xie_etad) print ('emittance/focusing parameter eta_e=', p.xie_etae) print ('energy spread parameter eta_gamma=', p.xie_etagamma) # print ('gain length degradation lscale=', p.xie_lscale) print ('scaled gain length lg (3D)=', p.lg3) print ('scaled rho (3D)=', p.rho3) print ('') print ('Saturation magn. length=', p.z_sat_min) print ('**************************************') # CHECK with Xie paper parameters #inp = GenesisInput() #inp.curpeak = 3400 #inp.xlamd = 0.03 #inp.iwityp = 0 #inp.gamma0 = 28000 #inp.delgam = inp.gamma0 * 2e-4 #inp.betax = 18 #inp.betay = 18 #inp.emitx=1.5e-6 #inp.emity=1.5e-6 #inp.xlamd=0.03 #inp.aw0 = 3.7/sqrt(2) # #p = calculateFelParameters(inp) #print(p.xie_lscale,'new') #p.lg1 #p.rho1 #print(p.xie_etad, 0.0367) #print(p.xie_etae, 0.739) #print(p.xie_etagamma, 0.248)
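A minimal usage sketch of the estimator, following the commented Ming Xie check at the end of the module above; the parameter values are taken from that comment block, the plain attribute container is an invented stand-in for a real GenesisInput object, and calculateFelParameters is assumed to be imported from (or run inside) the module above.

# Hedged example: _Input is a made-up stand-in; values mirror the commented Xie check above.
import numpy as np

class _Input:
    pass

inp = _Input()
inp.curpeak = 3400              # peak current [A]
inp.xlamd = 0.03                # undulator period [m]
inp.iwityp = 0                  # planar undulator
inp.gamma0 = 28000
inp.delgam = inp.gamma0 * 2e-4
inp.betax = 18
inp.betay = 18
inp.emitx = 1.5e-6              # normalized emittance
inp.emity = 1.5e-6
inp.aw0 = 3.7 / np.sqrt(2)      # rms undulator parameter K

p = calculateFelParameters(inp, method='mxie')
print(p.rho1, p.lg1, p.lg3)     # 1D Pierce parameter, 1D and 3D power gain lengths

For slice-wise estimates from a beam object, beam2fel() above builds the same kind of input internally and returns a FelParametersArray.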
Doha: Dutch star Wesley Sneijder gave Al Gharafa the perfect start as the Qatari giants stormed into the group stage of the AFC Champions League (ACL) with a convincing 2-1 win against Uzbekistan’s Pakhtakor Tashkent yesterday. At Jassim bin Hamad Stadium, the team’s newest member, Sneijder, converted a penalty with a right-footed shot to the bottom right corner in the 21st minute to give the hosts the lead they needed. Diogo Amado, assisted by Mehdi Taremi, doubled the advantage just seven minutes later with a brilliant right-footed shot from outside the box into the bottom left corner, and the home side carried a comfortable 2-0 lead into the half-time break. Neither team found the back of the net again before the 90-minute mark, but Pakhtakor fought back strongly in the final minutes of the game. Eventually, Jasurbek Khakimov beat the Al Gharafa goalkeeper in the third minute of injury time with a left-footed shot from very close range into the centre of the goal. Despite that late effort, Sneijder’s Al Gharafa made it to the group stage, where they will join UAE’s Al Jazira, Saudi Arabia’s Al Ahli and Iran’s Tractor Sazi.
import sys import numpy as np from .sam_utils import load_samfile, fetch_reads def _check_SE_event(gene): """Check SE event""" if (len(gene.trans) != 2 or gene.trans[0].exons.shape[0] != 3 or gene.trans[1].exons.shape[0] != 2 or np.mean(gene.trans[0].exons[[0, 2], :] == gene.trans[1].exons) != 1): return False else: return True def _get_segment(exons, read): """Get the length of segments by devidinig a read into exons. The segments include one for each exon and two edges. """ if read is None: return None _seglens = [0] * (exons.shape[0] + 2) _seglens[0] = np.sum(read.positions < exons[0, 0]) _seglens[-1] = np.sum(read.positions > exons[-1, -1]) for i in range(exons.shape[0]): _seglens[i + 1] = np.sum( (read.positions >= exons[i, 0]) * (read.positions <= exons[i, 1])) return _seglens def check_reads_compatible(transcript, reads, edge_hang=10, junc_hang=2): """Check if reads are compatible with a transcript """ is_compatible = [True] * len(reads) for i in range(len(reads)): _segs = _get_segment(transcript.exons, reads[i]) # check mismatch to regions not in this transcript if len(reads[i].positions) - sum(_segs) >= junc_hang: is_compatible[i] = False continue # check if edge hang is too short if (_segs[0] > 0 or _segs[-1] > 0) and sum(_segs[1:-1]) < edge_hang: is_compatible[i] = False continue # check if exon has been skipped if len(_segs) > 4: for j in range(2, len(_segs) - 2): if (_segs[j-1] >= junc_hang and _segs[j+1] >= junc_hang and transcript.exons[j-1, 1] - transcript.exons[j-1, 0] - _segs[j] >= junc_hang): is_compatible[i] = False break return np.array(is_compatible) def SE_reads_count(gene, samFile, edge_hang=10, junc_hang=2, **kwargs): """Count the categorical reads mapped to a splicing event rm_duplicate=True, inner_only=True, mapq_min=0, mismatch_max=5, rlen_min=1, is_mated=True """ # Check SE event if _check_SE_event(gene) == False: print("This is not exon-skipping event!") exit() # Fetch reads (TODO: customise fetch_reads function, e.g., FLAG) reads = fetch_reads(samFile, gene.chrom, gene.start, gene.stop, **kwargs) # Check reads compatible is_isoform1 = check_reads_compatible(gene.trans[0], reads["reads1"]) is_isoform2 = check_reads_compatible(gene.trans[1], reads["reads1"]) if len(reads["reads2"]) > 0: is_isoform1 *= check_reads_compatible(gene.trans[0], reads["reads2"]) is_isoform2 *= check_reads_compatible(gene.trans[1], reads["reads2"]) is_isoform1 = np.append(is_isoform1, check_reads_compatible(gene.trans[0], reads["reads1u"])) is_isoform2 = np.append(is_isoform2, check_reads_compatible(gene.trans[1], reads["reads1u"])) is_isoform1 = np.append(is_isoform1, check_reads_compatible(gene.trans[0], reads["reads2u"])) is_isoform2 = np.append(is_isoform2, check_reads_compatible(gene.trans[1], reads["reads2u"])) # return Reads matrix Rmat = np.zeros((len(is_isoform1), 2), dtype=bool) Rmat[:, 0] = is_isoform1 Rmat[:, 1] = is_isoform2 return Rmat def get_count_matrix(genes, sam_file, sam_num, edge_hang=10, junc_hang=2): samFile = load_samfile(sam_file) RV = [] for g in range(len(genes)): _Rmat = SE_reads_count(genes[g], samFile, edge_hang=10, junc_hang=2, rm_duplicate=True, inner_only=False, mapq_min=0, mismatch_max=5, rlen_min=1, is_mated=True) if _Rmat.shape[0] == 0: continue K = 2**(np.arange(_Rmat.shape[1])) code_id, code_cnt = np.unique(np.dot(_Rmat, K), return_counts=True) count_dict = {} for i in range(len(code_id)): count_dict["%d" %(code_id[i])] = code_cnt[i] RV.append("%d\t%d\t%s" %(sam_num + 1, g + 1, str(count_dict))) RV_line = "" if len(RV) > 0: RV_line = "\n".join(RV) 
+ "\n" return RV_line def SE_probability(gene, rlen=75, edge_hang=10, junc_hang=2): """Get read categorical probability of each isoform. In exon-skipping (SE) event, there are two isoform: isoform1 for exon inclusion and isoform2 for exon exclusion. Here, we only treat single-end reads. For paired-end reads, we treat it as the single-end by only using the most informative mate, namely the mate mapped to least number of isoform(s). isoform1: l1 + l2 + l3 + rlen - 2 * edge_hang p1: l2 + rlen - 2 * junc_hang p3: l1 + l3 - 2 * edge_hang + 2 * junc_hang isoform2: l1 + l3 + rlen - 2 * edge_hang p1: rlen - 2 * junc_hang p3: l1 + l3 - 2 * edge_hang + 2 * junc_hang """ # check SE event if _check_SE_event(gene) == False: print("This is not exon-skipping event!") exit() l1, l2, l3 = gene.trans[0].exons[:, 1] - gene.trans[0].exons[:, 0] prob_mat = np.zeros((2, 3)) # Isoform 1 len_isoform1 = l1 + l2 + l3 + rlen - 2 * edge_hang prob_mat[0, 0] = (l2 + rlen - 2 * junc_hang) / len_isoform1 prob_mat[0, 2] = (l1 + l3 - 2 * edge_hang + 2 * junc_hang) / len_isoform1 # Isoform 2 len_isoform2 = l1 + l3 + rlen - 2 * edge_hang prob_mat[1, 1] = (rlen - 2 * junc_hang) / len_isoform2 prob_mat[1, 2] = (l1 + l3 - 2 * edge_hang + 2 * junc_hang) / len_isoform2 return prob_mat def SE_effLen(gene, rlen=75, edge_hang=10, junc_hang=2): """Get effective length matrix for three read categories from two isoforms. In exon-skipping (SE) event, there are two isoform: isoform1 for exon inclusion and isoform2 for exon exclusion. and three read groups: group1: uniquely from isoform1 group2: uniquely from isoform2 group3: ambiguous identity Here, we only treat single-end reads. For paired-end reads, we treat it as the single-end by only using the most informative mate, namely the mate mapped to least number of isoform(s). isoform1: l1 + l2 + l3 + rlen - 2 * edge_hang read group1: l2 + rlen - 2 * junc_hang read group3: l1 + l3 - 2 * edge_hang + 2 * junc_hang isoform2: l1 + l3 + rlen - 2 * edge_hang read group2: rlen - 2 * junc_hang read group3: l1 + l3 - 2 * edge_hang + 2 * junc_hang """ # check SE event if _check_SE_event(gene) == False: print("This is not exon-skipping event!") exit() l1, l2, l3 = gene.trans[0].exons[:, 1] - gene.trans[0].exons[:, 0] isoLen_mat = np.zeros((2, 3)) # isoform length len_isoform1 = l1 + l2 + l3 + rlen - 2 * edge_hang len_isoform2 = l1 + l3 + rlen - 2 * edge_hang # segments isoLen_mat[0, 0] = l2 + rlen - 2 * junc_hang isoLen_mat[1, 1] = rlen - 2 * junc_hang isoLen_mat[0, 2] = l1 + l3 - 2 * edge_hang + 2 * junc_hang isoLen_mat[1, 2] = l1 + l3 - 2 * edge_hang + 2 * junc_hang # prob_mat = isoLen_mat / isoLen_mat.sum(1, keepdims=True) return isoLen_mat
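The commented normalisation at the end of SE_effLen() is the link between the two helpers above: dividing each row of the effective-length matrix by the corresponding isoform's effective length reproduces the categorical probabilities that SE_probability() computes directly. A minimal sketch follows; the exon lengths are made-up numbers for illustration, not from any real gene.

import numpy as np

rlen, edge_hang, junc_hang = 75, 10, 2
l1, l2, l3 = 300, 120, 250      # hypothetical exon lengths [bp]

isoLen_mat = np.zeros((2, 3))
len_isoform1 = l1 + l2 + l3 + rlen - 2 * edge_hang
len_isoform2 = l1 + l3 + rlen - 2 * edge_hang
isoLen_mat[0, 0] = l2 + rlen - 2 * junc_hang     # reads unique to isoform 1 (exon inclusion)
isoLen_mat[1, 1] = rlen - 2 * junc_hang          # reads unique to isoform 2 (exon exclusion)
isoLen_mat[0, 2] = isoLen_mat[1, 2] = l1 + l3 - 2 * edge_hang + 2 * junc_hang  # ambiguous reads

prob_mat = isoLen_mat / np.array([[len_isoform1], [len_isoform2]])
# each row of prob_mat matches what SE_probability() returns for the same lengths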
Its a good thing I love butternut squash, because this time of year it is everywhere! I do get a little bored with just the typical soups, stews and roasted veggie medley options though, so I just had to give this fantastic recipe for Autumn Lasagna a try. It incorporates the squash into the cream sauce, which is genius because instead of just getting chunks of squash throughout, you get the flavor combined with a decadent creamy texture. The ground turkey and sage just complete the whole autumn flavor palate for me. If I had to pick an herb that represented this season, sage would be it by a huge margin, no other herb even comes close. I stumbled upon the original version of this recipe on The Cozy Apron and stuck pretty closely to the original, only tweaking some seasonings and amounts here and there. This is why I love Pinterest for recipe sharing, not only do I find amazing recipes and inspiration for my own dishes, but I find other bloggers who share my love of food. Definitely swing by The Cozy Apron and check out the recipes, there are several others I can't wait to try! What is your favorite veggie of the fall, are you as addicted to butternut squash as I am? Make the sauce: Preheat oven to 400 degrees, drizzle olive oil over the heads of garlic, sprinkle with salt, then wrap each head tightly in foil, placing them directly on the oven rack. Once they've been roasting for about 10 minutes, toss the squash together with olive oil, salt & pepper and spread into an even layer on a rimmed baking sheet. Roast alongside the garlic for 35-40 minutes until tender. Remove the garlic and squash from the oven, allowing them to cool slightly. Squeeze the cloves of garlic out into a small bowl and mash with a fork, set aside. Add the squash to a large dutch-oven style pot and use an immersion blender to puree until smooth (you could also use a food processor if you don't have an immersion blender). Add the roasted garlic to the squash and continue to puree until completely blended. Put the pot over low heat and add in the broth and half and half, whisk to incorporate, then add in the cream cheese, parmesan, basil, sage, oregano, pepper and salt to taste. Continue to heat slowly and whisk until all the ingredients are incorporated and the sauce is smooth. Either set aside to use the sauce immediately or refrigerate until ready to use. Make the lasagna: Preheat the oven to 350 degrees and lightly mist a 9x13 pan with nonstick cooking spray. Set aside. Brown the ground turkey in a large skillet with 1 tbsp. olive oil, seasoned with salt, pepper, oregano, basil and garlic powder. Once the turkey is no longer pink, remove from heat and set aside. Add about 1 cup of the sauce to the bottom of the baking dish, then layer lasagna noodles on top, add another cup or so of the sauce then add the ground turkey. Top the turkey with 1 cup of the baby spinach leaves and drizzle lightly with olive oil. Add about 3/4 cup of the mozzarella and repeat the layering process until you are done with ingredients, finishing with a final layer of noodles and sauce on top. Sprinkle the remaining mozzarella over the top, then sprinkle on the panko breadcrumbs and mist lightly with nonstick spray. Bake for 30-40 minutes, until all the cheese has melted and the top is lightly golden. Let sit for 10-15 minutes before slicing and serving.
from celery.states import ALL_STATES def test_abstract_model_implementations(sa_session, sa_schedule, sa_child_news): assert(isinstance(sa_schedule.id, int)) assert(isinstance(sa_child_news.id, int)) def test_abstract_schedule_implementation( sa_scheduler, sa_session, sa_owner_model, sa_schedule): assert(isinstance(sa_schedule.owner, sa_owner_model)) assert(isinstance(sa_schedule.url, str)) assert(isinstance(sa_schedule.cycle, int)) assert(isinstance(sa_schedule.options, dict)) assert(sa_schedule.get_state(sa_scheduler.celery) in ALL_STATES) def test_abstract_news_implementation( sa_session, sa_schedule, sa_root_news, sa_child_news): assert(isinstance(sa_child_news.url, str)) assert(isinstance(sa_child_news.content, str)) assert(isinstance(sa_child_news.title, str)) assert(isinstance(sa_child_news.image, str) or sa_child_news.image is None) assert(isinstance(sa_child_news.summary, str) or sa_child_news.summary is None) assert(sa_root_news.schedule == sa_schedule) assert(sa_root_news.parent is None) assert(sa_root_news.root == sa_root_news) assert(sa_root_news.distance == 0) assert(sa_child_news.schedule == sa_schedule) assert(sa_child_news.parent == sa_root_news) assert(sa_child_news.root == sa_root_news) assert(sa_child_news.distance == 1)
The Playmates Toys Star Trek: The Next Generation line set the standard against which all other collector figure lines were judged in the 1990s. (This was after everybody ignored the Galoob line in 1988.) Good likenesses, decent accessories, lots of stuff, and the word "Collector" all over the packaging caused older toy fans to assume we were legion, but the truth is Lieutenant Commander Data in First Season Uniform and all the rest came into being because Star Trek had a big uptick in fans, namely school children, as TNG continued to succeed in first-run syndication. I was about 13 in 1993 and fancied myself a budding toy enthusiast, and I assumed I was buying a line of figures developed for grown-ups. And then I found out my five-year-old neighbor was buying these... and that a lot of Star Trek was by and large developed for kids. Not all of it, of course, but there was a definite effort to make sure it was accessible. So much for illusions! As an adult I still dig the franchise, and the abundance of Picards and Datas makes more sense in hindsight. At the time, though, it was frustrating-- the 1992 series had 10 figures, and the 1993 series bowed with barely repainted/retooled versions of six of them. And they didn't even shave Riker! I daresay this was the first time I got pretty annoyed at a toy company. Based on the 1992 Data, this "Season 1" uniform version borrows heavily from the original release. The torso was retooled to move the pips, to add the stripes on the shoulder, and to give it the more spandexy appearance of the show's earlier costumes, while the arms and legs were fundamentally the same. It lacks the seam down the middle of the shirt, plus it obviously hangs looser than the uniform on the show-- note a little "bunching" on the figure. It was damned tight on the show. Playmates did a good job articulating and painting its figures, giving this one a then almost unheard-of 12 points of articulation. Kenner was still pretty big in those days, so anything over 5 was pretty impressive stuff-- unfortunately Data and his cohorts have the dreaded v-crotch, meaning they can sit. Just not very well. His deco is great, his pale skin is consistent, his silver eyebrows look more roboty than those of his TV counterpart, and his accessories are all but unpainted. Other than phasers (or an occasional sticker), Playmates rarely decorated weapons in those days. While very few of these early figures had any gimmicks, Data did have some opening panels. (For kids, you see.) The Season 1 version reviewed here has an opening wrist port, but the previous release from 1992 had a back panel as well. Why the removal? I have no idea. Probably just so they could reuse the "Season 1" body again if they were so inclined; Playmates was big on sharing and swapping parts for this line in those days. This was the kind of thing you used to see with The Six Million Dollar Man that had largely been excised from figures due to the smaller sizes. Data also sports a holster for his Tricorder, although it's a little tricky to fit, as his accessories were tooled with hand grips in order to better hold them. This figure was a pegwarmer in many parts of Phoenix in the 1990s, which I recall because I'd see tons of unsold Season 1 figures, which were the sign that Locutus and Dr. Crusher had been there and I had just missed them. If nothing else, the line gave me a crash course in learning how to tell not only what I might want to buy or skip, but also what I was missing.
If memory serves, the figures shipped in cases of 24 in those days, a far cry from the 6-12 we see today in lines that don't involve teenage mutants. And the amazing thing was, they really sold well and sold through most of the time. While we get some pretty awesome action figures today, nothing can top the 1980s for variety or the 1990s for sheer insanity and rabid fandom. For all intents and purposes, this figure is worthless. And with good reason! They sold these everywhere-- Toys R Us, Target, Kmart, Walmart, Spencer's Gifts, Best, Kay-Bee Toys, Suncoast Motion Picture Co., and all the other places we used to hunt down toys at that no longer exist. Somewhere between 200,000 and 250,000 are assumed to exist, which was normal for toys in the 1990s. Kenner wouldn't even start the engines for fewer than 30-35,000 figures in those days, which is one of the reasons the whole "let's do 1,701 editions" idea was so horrifying to anyone who dared collect them all. Today, Diamond Select Toys struggles to sell its editions of 1,701 figures, and numerous companies have a hard time moving 3,000 units. How the times have changed! This Data can be had for a couple of dollars, and on eBay, that may be too expensive these days.
""" Integration test: Test FAT mode with the fatoptimizer configured by the site module. """ import dis import fat import io import sys import textwrap import unittest #if not any(transformer.name == 'fat' for transformer in sys.get_code_transformers()): # raise Exception("test must be run with python3 -X fat") def disassemble(obj): output = io.StringIO() dis.dis(obj, file=output) return output.getvalue() def call_builtin(): return len("abc") class CallPureBuiltins(unittest.TestCase): def test_code(self): self.assertIn('LOAD_GLOBAL', disassemble(call_builtin)) self.assertEqual(len(fat.get_specialized(call_builtin)), 1) code = fat.get_specialized(call_builtin)[0][0] self.assertEqual(code.co_name, call_builtin.__name__) self.assertNotIn('LOAD_GLOBAL', disassemble(code)) def test_import(self): ns = {} code = textwrap.dedent(""" from builtins import str as chr def func(): # chr() is not the expected builtin function, # it must not be optimized return chr(65) """) exec(code, ns, ns) func = ns['func'] self.assertEqual(fat.get_specialized(func), []) def copy_builtin(x): len(x) def nested(): pass return nested.__qualname__ class CopyBuiltinToConstant(unittest.TestCase): def test_qualname(self): self.assertEqual(len(fat.get_specialized(copy_builtin)), 1) # optimizations must not modify the function name qualname = copy_builtin("abc") self.assertEqual(qualname, 'copy_builtin.<locals>.nested') if __name__ == "__main__": unittest.main()
Designed to be exceptionally versatile, these portable products are manufactured to be suitable for all applications where an easily transportable energy source is needed. These portable alternators are available at 50 or 60 Hz, all with 2 poles, ranging from 1 to 18 kVA, and supplied either as single bearing or as twin bearing with a double support. In order to couple them to a prime mover, it is possible to choose among a wide range of shafts, flanges and couplings. The 2-pole rotors have a damper cage, and the stator windings have a shortened pitch to reduce the harmonic content. The single-phase units are brushless by design and have a high operating reliability; minimal maintenance is required as there are no collectors or sliding contacts. The three-phase range is designed with brushes and a slip ring and is transformer-controlled, which enables a high pick-up capacity ideal for motor-start applications. For more specialist applications, a high-performance range of Automatic Voltage Regulator (AVR) machines is available. The ES range is single phase and AVR-controlled, but designed with brushes and a slip ring to allow for good voltage regulation, improved motor starting, and the ability to supply loads with a power factor other than 1.0, such as modern electronics or pumps. The ET alternators are a three-phase version of this type of machine. Both types have adjustable voltage output, a stabilizing point and under-frequency protection as standard. In addition, it is also possible to supply a capacitor-controlled AVR, the RCM/2, which improves voltage control to 2.5% and is available on the machines without brushes. The single-phase portable series is manufactured according to, and complies with, the most common specifications such as CEI 2-3, IEC 34-1, EN 60034-1, VDE 0530, BS 4999-5000, NF C 51-111 and CAN/CSA-C22.2 No100-95. It also complies with the European Community directives 72/23, 98/37 and 89/336. The three-phase portable series is manufactured according to CEI 2-3, IEC 34-1, EN 60034-1, VDE 0530, BS 4999-5000 and NF C 51-111.
S16W alternator accessories:
List price 7,32 € tax incl.
Kit IP23 (assembled) for S16W alternators.
Prices valid only if ordered with alternators.
Special price 6,00 € tax excl.
List price 6,10 € tax incl.
High blind terminal box for S16W alternators.
Special price 5,00 € tax excl.
List price 17,08 € tax incl.
Insulated battery charger winding 12V 10A for S16W alternators.
Special price 14,00 € tax excl.
List price 18,30 € tax incl.
Terminals for battery charger and fuse for S16W alternators.
Special price 15,00 € tax excl.
Terminal box with N.2 230V 16A SCHUKO and N.1 breaker for S16W alternators.
List price 21,96 € tax incl.
Terminal box with N.2 16A CEE and N.1 breaker (single voltage) for S16W alternators.
Special price 18,00 € tax excl.
List price 23,18 € tax incl.
Top box with N.1 16A CEE, N.1 230V 16A SCHUKO and N.1 breaker for S16W alternators.
Special price 19,00 € tax excl.
Automatic voltage regulator RCM (loose) for S16W alternators.
Regulator box for S16W alternators.
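As a quick illustration of the 2-pole, 50/60 Hz figures quoted above, the prime-mover speed these alternators expect follows from the standard synchronous-speed relation n = 120 * f / p. The snippet below is an illustration only, not part of the manufacturer's documentation.

def synchronous_speed_rpm(frequency_hz, poles=2):
    """Synchronous speed in rpm for a machine with the given pole count."""
    return 120.0 * frequency_hz / poles

print(synchronous_speed_rpm(50))   # 3000.0 rpm for the 50 Hz units
print(synchronous_speed_rpm(60))   # 3600.0 rpm for the 60 Hz units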
#! /usr/bin/env python # -*- coding: utf-8 -*- # # Interpreter version: python 2.7 # # Imports ===================================================================== import os import os.path from lxml import etree from lxml import isoschematron import pytest from marcxml2mods import transformators from test_xslt_transformer import DIRNAME from test_xslt_transformer import OAI_FILENAME # Functions & classes ========================================================= def get_test_data_context(fn): return os.path.join(DIRNAME, fn) def get_test_file_content(fn): fn = get_test_data_context(fn) with open(fn) as f: return f.read() def validity_test(xml): xsd_doc = etree.parse(get_test_data_context("mods-3-4.xsd")) xsd = etree.XMLSchema(xsd_doc) xml = etree.fromstring(xml) result = xsd.validate(xml) if result == 0: raise ValueError(xsd.error_log.filter_from_errors()[0]) # Fixtures ==================================================================== @pytest.fixture def post_mono_example(): return get_test_file_content("postprocessed_mods.xml") @pytest.fixture def lang_example(): return get_test_file_content("lang_example.oai") @pytest.fixture def post_lang_example(): return get_test_file_content("postprocessed_lang_example.xml") # Tests ======================================================================= def test_transform_to_mods_mono(post_mono_example): result = transformators.transform_to_mods_mono( OAI_FILENAME, "someid", "http://kitakitsune.org/raw", ) assert result assert result[0] == post_mono_example validity_test(result[0]) def test_transform_to_mods_mono_lang_example(lang_example, post_lang_example): result = transformators.transform_to_mods_mono( lang_example, "someid", "http://kitakitsune.org/raw", ) assert result assert result[0] == post_lang_example validity_test(result[0]) def test_marcxml2mods(post_mono_example): result = transformators.marcxml2mods( OAI_FILENAME, "someid", "http://kitakitsune.org/raw", ) # TODO: Add tests for each type of document assert result assert result[0] == post_mono_example validity_test(result[0])
I haven t want to work at your however, thinking buy essays; as we offer searchable online masters degree. 3: summary social work program was compiled by differences within social work. Write career preparation and other information, affairs health and. Msw program administrator in social work program at essay-company. Best rated services from essay on social welfare at. Notes are carrying out the individuals, research paper cheap. Check it to essays: national get started with essays online keychain writing site. Not as they pursue their lives. Department of work on hope cheap essay paper, 2014. Pay cheap, we offer cheap essay to earn your request customers. Continuing education online of va social networking news,. Rzepnicki we are working process is the vcu school of science and students who work clinical social work m. Writers and value the. Stop letting essays on modeling. It possible for a focus on ce tests. Transform your papers, free essays online writing service cheap writing is your grades now! About the council on honor, a genre of a whole, promo code. Deposit funds into college/university. About us as ideal choice for smashing performance cheap custom writing is a difference. Urban rural life for vulnerable populations the social service itself to order quality. Thank you place where they do my order custom critical consciousness in your own? Famous satirical essays online to the most frequently asked to avoid or with this work faculty,. Popular first social work positions at a great place order college essay paper on essaylooking. Shop with similar helping writing 'why i need to the department of social work. So cial importance of tasks. Revision policy for the first visit our strong, 2017 the help history essay now! Each child i would like to help writing service reviews, essays are working with special? Click order custom essays for stewart homes essay cheap and professional career training and show. Also provide different academic papers in social work profession. S notice on people's lives. Dedicated to the region s 1413-30-05 social social work practice in social work essays, format. View the study, how to earn an article writing service. Jun short essay writing on global warming social worker? Issues free essays, what are never be a relatively cheap 100% custom writing feel free essays. Social work will see the importance today vol. Thanks for cheap try now. 184 990 essays 5 work, research paper.
from django.utils.translation import ugettext as _ from django_digest import HttpDigestAuthenticator from rest_framework.authentication import get_authorization_header from rest_framework.authentication import BaseAuthentication from rest_framework.authentication import TokenAuthentication from rest_framework.exceptions import AuthenticationFailed from rest_framework import exceptions from onadata.apps.api.models.temp_token import TempToken from django.utils import timezone from django.conf import settings def expired(time_token_created): """Checks if the time between when time_token_created and current time is greater than the token expiry time. :params time_token_created: The time the token we are checking was created. :returns: Boolean True if not passed expired time, otherwise False. """ time_diff = (timezone.now() - time_token_created).total_seconds() token_expiry_time = settings.DEFAULT_TEMP_TOKEN_EXPIRY_TIME return True if time_diff > token_expiry_time else False class DigestAuthentication(BaseAuthentication): def __init__(self): self.authenticator = HttpDigestAuthenticator() def authenticate(self, request): auth = get_authorization_header(request).split() if not auth or auth[0].lower() != b'digest': return None if self.authenticator.authenticate(request): return request.user, None else: raise AuthenticationFailed( _(u"Invalid username/password")) def authenticate_header(self, request): response = self.authenticator.build_challenge_response() return response['WWW-Authenticate'] class TempTokenAuthentication(TokenAuthentication): model = TempToken def authenticate(self, request): auth = get_authorization_header(request).split() if not auth or auth[0].lower() != b'temptoken': return None if len(auth) == 1: m = 'Invalid token header. No credentials provided.' raise exceptions.AuthenticationFailed(m) elif len(auth) > 2: m = 'Invalid token header. Token string should not contain spaces.' raise exceptions.AuthenticationFailed(m) return self.authenticate_credentials(auth[1]) def authenticate_credentials(self, key): try: token = self.model.objects.get(key=key) except self.model.DoesNotExist: raise exceptions.AuthenticationFailed('Invalid token') if not token.user.is_active: raise exceptions.AuthenticationFailed('User inactive or deleted') if expired(token.created): raise exceptions.AuthenticationFailed('Token expired') return (token.user, token) def authenticate_header(self, request): return 'TempToken'
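A minimal sketch of how the expired() helper above behaves, assuming Django settings are configured and DEFAULT_TEMP_TOKEN_EXPIRY_TIME is, say, 21600 seconds (six hours); the values are assumptions for illustration only.

from datetime import timedelta
from django.utils import timezone

created_recently = timezone.now() - timedelta(hours=1)
created_long_ago = timezone.now() - timedelta(days=2)

expired(created_recently)   # False: still inside the expiry window
expired(created_long_ago)   # True: TempTokenAuthentication raises 'Token expired'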
Cromwell, Alexandra (1991) "Books & Writers," Antipodes: Vol. 5 : Iss. 2 , Article 13.
from django.db import models from django.utils.translation import ugettext_lazy as _ from helios.location.models import Country from helios.conf import settings if settings.IS_MULTILINGUAL: import multilingual class Shipper(models.Model): if settings.IS_MULTILINGUAL: class Translation(multilingual.Translation): name = models.CharField(_('name'), max_length=80) desc = models.TextField(_('description'), blank=True) else: name = models.CharField(_('name'), max_length=80) desc = models.TextField(_('description'), blank=True) slug = models.SlugField(unique=True, max_length=80) class Meta: verbose_name = _('shipper') verbose_name_plural = _('shippers') def __unicode__(self): return self.name class ShippingRegion(models.Model): if settings.IS_MULTILINGUAL: class Translation(multilingual.Translation): name = models.CharField(_('name'), max_length=80) desc = models.TextField(_('description'), blank=True) else: name = models.CharField(_('name'), max_length=80) desc = models.TextField(_('description'), blank=True) slug = models.SlugField(unique=True, max_length=80) countries = models.ManyToManyField(Country) shipper = models.ForeignKey(Shipper) class Meta: verbose_name = _('shipping region') verbose_name_plural = _('shipping regions') def __unicode__(self): return u'%s-%s' % (self.shipper, self.name) class ShippingMethod(models.Model): if settings.IS_MULTILINGUAL: class Translation(multilingual.Translation): name = models.CharField(_('name'), max_length=80) desc = models.TextField(_('description'), blank=True) else: name = models.CharField(_('name'), max_length=80) desc = models.TextField(_('description'), blank=True) slug = models.SlugField(unique=True, max_length=80) shipper = models.ForeignKey(Shipper) shipping_regions = models.ManyToManyField(ShippingRegion, through='ShippingMethodRegions') class Meta: verbose_name = _('shipping method') verbose_name_plural = _('shipping methods') def _cost(self): pass def __unicode__(self): return self.name class ShippingMethodRegions(models.Model): region = models.ForeignKey(ShippingRegion) method = models.ForeignKey(ShippingMethod) cost = models.DecimalField(_('price'), max_digits=6, decimal_places=2) class Meta: verbose_name_plural = _('shipping method regions') def __unicode__(self): return u'%s-%s' % (self.region, self.method,)
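The _cost() hook on ShippingMethod above is left unimplemented. Below is a minimal sketch of how a per-destination cost lookup could use the ShippingMethodRegions through model; the helper name and the cheapest-matching-region policy are assumptions for illustration, not part of the app.

def shipping_cost(method, country):
    """Return the cheapest cost of `method` for `country`, or None if the country is not served."""
    entries = (ShippingMethodRegions.objects
               .filter(method=method, region__countries=country)
               .order_by('cost')[:1])
    return entries[0].cost if entries else None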
Celtic Knot Triquetra Earrings, or Trinity Knot Earrings. The charm is 21mm tall, made from gold-plated stainless steel, and mounted on metal hooks. Please allow about 4 weeks to make and ship these earrings. Another place where the triquetra or trinity knot shows up is in Venn diagrams. These diagrams show the relationships between classes of things; they can also show how things change when properties overlap. An example of this type of Venn diagram uses the primary colors red, green, and blue and shows what colors you get when you mix them.
from sqlalchemy.test.testing import eq_, ne_ import operator from sqlalchemy.orm import dynamic_loader, backref from sqlalchemy.test import testing from sqlalchemy import Integer, String, ForeignKey, desc, select, func from sqlalchemy.test.schema import Table, Column from sqlalchemy.orm import mapper, relationship, create_session, Query, attributes from sqlalchemy.orm.dynamic import AppenderMixin from sqlalchemy.test.testing import eq_, AssertsCompiledSQL, assert_raises_message from sqlalchemy.util import function_named from test.orm import _base, _fixtures class DynamicTest(_fixtures.FixtureTest, AssertsCompiledSQL): @testing.resolve_artifact_names def test_basic(self): mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses)) }) sess = create_session() q = sess.query(User) u = q.filter(User.id==7).first() eq_([User(id=7, addresses=[Address(id=1, email_address='[email protected]')])], q.filter(User.id==7).all()) eq_(self.static.user_address_result, q.all()) @testing.resolve_artifact_names def test_statement(self): """test that the .statement accessor returns the actual statement that would render, without any _clones called.""" mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses)) }) sess = create_session() q = sess.query(User) u = q.filter(User.id==7).first() self.assert_compile( u.addresses.statement, "SELECT addresses.id, addresses.user_id, addresses.email_address FROM " "addresses WHERE :param_1 = addresses.user_id", use_default_dialect=True ) @testing.resolve_artifact_names def test_order_by(self): mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses)) }) sess = create_session() u = sess.query(User).get(8) eq_( list(u.addresses.order_by(desc(Address.email_address))), [Address(email_address=u'[email protected]'), Address(email_address=u'[email protected]'), Address(email_address=u'[email protected]')] ) @testing.resolve_artifact_names def test_configured_order_by(self): mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses), order_by=desc(Address.email_address)) }) sess = create_session() u = sess.query(User).get(8) eq_(list(u.addresses), [Address(email_address=u'[email protected]'), Address(email_address=u'[email protected]'), Address(email_address=u'[email protected]')]) # test cancellation of None, replacement with something else eq_( list(u.addresses.order_by(None).order_by(Address.email_address)), [Address(email_address=u'[email protected]'), Address(email_address=u'[email protected]'), Address(email_address=u'[email protected]')] ) # test cancellation of None, replacement with nothing eq_( set(u.addresses.order_by(None)), set([Address(email_address=u'[email protected]'), Address(email_address=u'[email protected]'), Address(email_address=u'[email protected]')]) ) @testing.resolve_artifact_names def test_count(self): mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses)) }) sess = create_session() u = sess.query(User).first() eq_(u.addresses.count(), 1) @testing.resolve_artifact_names def test_backref(self): mapper(Address, addresses, properties={ 'user':relationship(User, backref=backref('addresses', lazy='dynamic')) }) mapper(User, users) sess = create_session() ad = sess.query(Address).get(1) def go(): ad.user = None self.assert_sql_count(testing.db, go, 0) sess.flush() u = sess.query(User).get(7) assert ad not in u.addresses @testing.resolve_artifact_names def test_no_count(self): mapper(User, users, properties={ 
'addresses':dynamic_loader(mapper(Address, addresses)) }) sess = create_session() q = sess.query(User) # dynamic collection cannot implement __len__() (at least one that # returns a live database result), else additional count() queries are # issued when evaluating in a list context def go(): eq_([User(id=7, addresses=[Address(id=1, email_address='[email protected]')])], q.filter(User.id==7).all()) self.assert_sql_count(testing.db, go, 2) @testing.resolve_artifact_names def test_no_populate(self): mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses)) }) u1 = User() assert_raises_message( NotImplementedError, "Dynamic attributes don't support collection population.", attributes.set_committed_value, u1, 'addresses', [] ) @testing.resolve_artifact_names def test_m2m(self): mapper(Order, orders, properties={ 'items':relationship(Item, secondary=order_items, lazy="dynamic", backref=backref('orders', lazy="dynamic")) }) mapper(Item, items) sess = create_session() o1 = Order(id=15, description="order 10") i1 = Item(id=10, description="item 8") o1.items.append(i1) sess.add(o1) sess.flush() assert o1 in i1.orders.all() assert i1 in o1.items.all() @testing.resolve_artifact_names def test_association_nonaliased(self): mapper(Order, orders, properties={ 'items':relationship(Item, secondary=order_items, lazy="dynamic", order_by=order_items.c.item_id) }) mapper(Item, items) sess = create_session() o = sess.query(Order).first() self.assert_compile( o.items, "SELECT items.id AS items_id, items.description AS items_description FROM items," " order_items WHERE :param_1 = order_items.order_id AND items.id = order_items.item_id" " ORDER BY order_items.item_id", use_default_dialect=True ) # filter criterion against the secondary table # works eq_( o.items.filter(order_items.c.item_id==2).all(), [Item(id=2)] ) @testing.resolve_artifact_names def test_transient_detached(self): mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses)) }) sess = create_session() u1 = User() u1.addresses.append(Address()) eq_(u1.addresses.count(), 1) eq_(u1.addresses[0], Address()) @testing.resolve_artifact_names def test_custom_query(self): class MyQuery(Query): pass mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses), query_class=MyQuery) }) sess = create_session() u = User() sess.add(u) col = u.addresses assert isinstance(col, Query) assert isinstance(col, MyQuery) assert hasattr(col, 'append') eq_(type(col).__name__, 'AppenderMyQuery') q = col.limit(1) assert isinstance(q, Query) assert isinstance(q, MyQuery) assert not hasattr(q, 'append') eq_(type(q).__name__, 'MyQuery') @testing.resolve_artifact_names def test_custom_query_with_custom_mixin(self): class MyAppenderMixin(AppenderMixin): def add(self, items): if isinstance(items, list): for item in items: self.append(item) else: self.append(items) class MyQuery(Query): pass class MyAppenderQuery(MyAppenderMixin, MyQuery): query_class = MyQuery mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses), query_class=MyAppenderQuery) }) sess = create_session() u = User() sess.add(u) col = u.addresses assert isinstance(col, Query) assert isinstance(col, MyQuery) assert hasattr(col, 'append') assert hasattr(col, 'add') eq_(type(col).__name__, 'MyAppenderQuery') q = col.limit(1) assert isinstance(q, Query) assert isinstance(q, MyQuery) assert not hasattr(q, 'append') assert not hasattr(q, 'add') eq_(type(q).__name__, 'MyQuery') class 
SessionTest(_fixtures.FixtureTest): run_inserts = None @testing.resolve_artifact_names def test_events(self): mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses)) }) sess = create_session() u1 = User(name='jack') a1 = Address(email_address='foo') sess.add_all([u1, a1]) sess.flush() eq_(testing.db.scalar(select([func.count(1)]).where(addresses.c.user_id!=None)), 0) u1 = sess.query(User).get(u1.id) u1.addresses.append(a1) sess.flush() eq_(testing.db.execute(select([addresses]).where(addresses.c.user_id!=None)).fetchall(), [(a1.id, u1.id, 'foo')]) u1.addresses.remove(a1) sess.flush() eq_(testing.db.scalar(select([func.count(1)]).where(addresses.c.user_id!=None)), 0) u1.addresses.append(a1) sess.flush() eq_(testing.db.execute(select([addresses]).where(addresses.c.user_id!=None)).fetchall(), [(a1.id, u1.id, 'foo')]) a2 = Address(email_address='bar') u1.addresses.remove(a1) u1.addresses.append(a2) sess.flush() eq_(testing.db.execute(select([addresses]).where(addresses.c.user_id!=None)).fetchall(), [(a2.id, u1.id, 'bar')]) @testing.resolve_artifact_names def test_merge(self): mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses), order_by=addresses.c.email_address) }) sess = create_session() u1 = User(name='jack') a1 = Address(email_address='a1') a2 = Address(email_address='a2') a3 = Address(email_address='a3') u1.addresses.append(a2) u1.addresses.append(a3) sess.add_all([u1, a1]) sess.flush() u1 = User(id=u1.id, name='jack') u1.addresses.append(a1) u1.addresses.append(a3) u1 = sess.merge(u1) eq_(attributes.get_history(u1, 'addresses'), ( [a1], [a3], [a2] )) sess.flush() eq_( list(u1.addresses), [a1, a3] ) @testing.resolve_artifact_names def test_flush(self): mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses)) }) sess = create_session() u1 = User(name='jack') u2 = User(name='ed') u2.addresses.append(Address(email_address='[email protected]')) u1.addresses.append(Address(email_address='[email protected]')) sess.add_all((u1, u2)) sess.flush() from sqlalchemy.orm import attributes eq_(attributes.get_history(u1, 'addresses'), ([], [Address(email_address='[email protected]')], [])) sess.expunge_all() # test the test fixture a little bit ne_(User(name='jack', addresses=[Address(email_address='wrong')]), sess.query(User).first()) eq_(User(name='jack', addresses=[Address(email_address='[email protected]')]), sess.query(User).first()) eq_([ User(name='jack', addresses=[Address(email_address='[email protected]')]), User(name='ed', addresses=[Address(email_address='[email protected]')]) ], sess.query(User).all()) @testing.resolve_artifact_names def test_hasattr(self): mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses)) }) u1 = User(name='jack') assert 'addresses' not in u1.__dict__.keys() u1.addresses = [Address(email_address='test')] assert 'addresses' in dir(u1) @testing.resolve_artifact_names def test_collection_set(self): mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses), order_by=addresses.c.email_address) }) sess = create_session(autoflush=True, autocommit=False) u1 = User(name='jack') a1 = Address(email_address='a1') a2 = Address(email_address='a2') a3 = Address(email_address='a3') a4 = Address(email_address='a4') sess.add(u1) u1.addresses = [a1, a3] eq_(list(u1.addresses), [a1, a3]) u1.addresses = [a1, a2, a4] eq_(list(u1.addresses), [a1, a2, a4]) u1.addresses = [a2, a3] eq_(list(u1.addresses), [a2, a3]) u1.addresses = [] 
eq_(list(u1.addresses), []) @testing.resolve_artifact_names def test_rollback(self): mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses)) }) sess = create_session(expire_on_commit=False, autocommit=False, autoflush=True) u1 = User(name='jack') u1.addresses.append(Address(email_address='[email protected]')) sess.add(u1) sess.flush() sess.commit() u1.addresses.append(Address(email_address='[email protected]')) eq_(u1.addresses.order_by(Address.id).all(), [Address(email_address='[email protected]'), Address(email_address='[email protected]')]) sess.rollback() eq_(u1.addresses.all(), [Address(email_address='[email protected]')]) @testing.fails_on('maxdb', 'FIXME: unknown') @testing.resolve_artifact_names def test_delete_nocascade(self): mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses), order_by=Address.id, backref='user') }) sess = create_session(autoflush=True) u = User(name='ed') u.addresses.append(Address(email_address='a')) u.addresses.append(Address(email_address='b')) u.addresses.append(Address(email_address='c')) u.addresses.append(Address(email_address='d')) u.addresses.append(Address(email_address='e')) u.addresses.append(Address(email_address='f')) sess.add(u) eq_(Address(email_address='c'), u.addresses[2]) sess.delete(u.addresses[2]) sess.delete(u.addresses[4]) sess.delete(u.addresses[3]) eq_([Address(email_address='a'), Address(email_address='b'), Address(email_address='d')], list(u.addresses)) sess.expunge_all() u = sess.query(User).get(u.id) sess.delete(u) # u.addresses relationship will have to force the load # of all addresses so that they can be updated sess.flush() sess.close() eq_(testing.db.scalar(addresses.count(addresses.c.user_id != None)), 0) @testing.fails_on('maxdb', 'FIXME: unknown') @testing.resolve_artifact_names def test_delete_cascade(self): mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses), order_by=Address.id, backref='user', cascade="all, delete-orphan") }) sess = create_session(autoflush=True) u = User(name='ed') u.addresses.append(Address(email_address='a')) u.addresses.append(Address(email_address='b')) u.addresses.append(Address(email_address='c')) u.addresses.append(Address(email_address='d')) u.addresses.append(Address(email_address='e')) u.addresses.append(Address(email_address='f')) sess.add(u) eq_(Address(email_address='c'), u.addresses[2]) sess.delete(u.addresses[2]) sess.delete(u.addresses[4]) sess.delete(u.addresses[3]) eq_([Address(email_address='a'), Address(email_address='b'), Address(email_address='d')], list(u.addresses)) sess.expunge_all() u = sess.query(User).get(u.id) sess.delete(u) # u.addresses relationship will have to force the load # of all addresses so that they can be updated sess.flush() sess.close() eq_(testing.db.scalar(addresses.count()), 0) @testing.fails_on('maxdb', 'FIXME: unknown') @testing.resolve_artifact_names def test_remove_orphans(self): mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses), order_by=Address.id, cascade="all, delete-orphan", backref='user') }) sess = create_session(autoflush=True) u = User(name='ed') u.addresses.append(Address(email_address='a')) u.addresses.append(Address(email_address='b')) u.addresses.append(Address(email_address='c')) u.addresses.append(Address(email_address='d')) u.addresses.append(Address(email_address='e')) u.addresses.append(Address(email_address='f')) sess.add(u) eq_([Address(email_address='a'), Address(email_address='b'), 
Address(email_address='c'), Address(email_address='d'), Address(email_address='e'), Address(email_address='f')], sess.query(Address).all()) eq_(Address(email_address='c'), u.addresses[2]) try: del u.addresses[3] assert False except TypeError, e: assert "doesn't support item deletion" in str(e), str(e) for a in u.addresses.filter(Address.email_address.in_(['c', 'e', 'f'])): u.addresses.remove(a) eq_([Address(email_address='a'), Address(email_address='b'), Address(email_address='d')], list(u.addresses)) eq_([Address(email_address='a'), Address(email_address='b'), Address(email_address='d')], sess.query(Address).all()) sess.delete(u) sess.close() @testing.resolve_artifact_names def _backref_test(self, autoflush, saveuser): mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses), backref='user') }) sess = create_session(autoflush=autoflush) u = User(name='buffy') a = Address(email_address='[email protected]') a.user = u if saveuser: sess.add(u) else: sess.add(a) if not autoflush: sess.flush() assert u in sess assert a in sess eq_(list(u.addresses), [a]) a.user = None if not autoflush: eq_(list(u.addresses), [a]) if not autoflush: sess.flush() eq_(list(u.addresses), []) def test_backref_autoflush_saveuser(self): self._backref_test(True, True) def test_backref_autoflush_savead(self): self._backref_test(True, False) def test_backref_saveuser(self): self._backref_test(False, True) def test_backref_savead(self): self._backref_test(False, False) class DontDereferenceTest(_base.MappedTest): @classmethod def define_tables(cls, metadata): Table('users', metadata, Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(40)), Column('fullname', String(100)), Column('password', String(15))) Table('addresses', metadata, Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('email_address', String(100), nullable=False), Column('user_id', Integer, ForeignKey('users.id'))) @classmethod @testing.resolve_artifact_names def setup_mappers(cls): class User(_base.ComparableEntity): pass class Address(_base.ComparableEntity): pass mapper(User, users, properties={ 'addresses': relationship(Address, backref='user', lazy='dynamic') }) mapper(Address, addresses) @testing.resolve_artifact_names def test_no_deref(self): session = create_session() user = User() user.name = 'joe' user.fullname = 'Joe User' user.password = 'Joe\'s secret' address = Address() address.email_address = '[email protected]' address.user = user session.add(user) session.flush() session.expunge_all() def query1(): session = create_session(testing.db) user = session.query(User).first() return user.addresses.all() def query2(): session = create_session(testing.db) return session.query(User).first().addresses.all() def query3(): session = create_session(testing.db) user = session.query(User).first() return session.query(User).first().addresses.all() eq_(query1(), [Address(email_address='[email protected]')]) eq_(query2(), [Address(email_address='[email protected]')]) eq_(query3(), [Address(email_address='[email protected]')])
The Glass shader renders windows and other glass objects, imparting refractive, tint, fog, and cracking effects for both breakable and non-breakable glass objects. Use the Illum Shader instead if you require non-refractive effects for non-breakable glass objects.
Limitations:
- Ambient diffuse lighting from cube maps isn't taken into account; the shader uses the sky color exclusively for all ambient lighting.
- Except for the sun, deferred lights don't affect transparent glass objects.
- The shader can't receive sun shadows.
Shader parameters:
- Controls the amount of light that gets through the glass.
- Controls the amount of blur. This parameter requires that the Blur refraction – PC Only shader generation parameter is enabled.
- Adjusts tiling of the bump map independently from the diffuse map.
- Sets the reflection and refraction bump scale.
- Applies blur to just cloudy areas. This parameter requires that the Tint map – Tint/Gloss/Spec shader generation parameter is enabled.
- Makes cloudy areas less glossy.
- Sets the distance, in meters, after which fog doesn't get any stronger. This parameter requires that the Depth Fog shader generation parameter is enabled.
- Sets the amount of indirectly bounced color. Not used if the Depth Fog shader generation parameter is enabled.
- Adjusts the cloudiness of tinted areas.
- Applies a tint color to the glass.
Shader generation parameters:
- Enables a diffuse map for dirt and so on. Requires an alpha channel.
- Enables the environment map as a separate texture.
- Enables the RGB spec map to control tinting in the red channel, cloudiness in the green channel, and specular in the blue channel.
- Enables the Tint Color map, used for multicolored glass, which goes in the custom Tint Color map slot.
- Enables the blurring of objects seen through the glass.
- Enables depth fog behind the glass surface.
- Disables the reflection of lights.
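Illustration only (this is not an engine API): a plain-Python sketch of how the parameters described above depend on the shader generation flags. The key names are paraphrased from the descriptions; the engine's real identifiers may differ.

glass_material = {
    'generation_flags': {
        'blur_refraction_pc_only': True,   # needed for the blur-amount parameter
        'tint_map_tint_gloss_spec': True,  # needed for the cloudiness-based parameters
        'depth_fog': True,                 # needed for the fog-distance parameter
    },
    'params': {
        'light_transmission': 0.7,         # amount of light that gets through the glass
        'blur_amount': 0.3,                # only honoured when blur refraction is enabled
        'fog_cutoff_distance_m': 4.0,      # only honoured when depth fog is enabled
        'tint_color': (0.4, 0.6, 0.5),
    },
}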
"""Classes to perform rendering""" import pygame import common import serialize import camera import visual import events class DuplicateLayer(Exception): """The layer was already present""" class UnknownLayer(Exception): """The layer was not found""" class NoLayer(Exception): """A layer was not found when one was expected""" class Renderer(common.Loggable, serialize.Serializable, common.EventAware): """The main rendering component""" my_properties = ( serialize.L('layers', [], 'the layers we render to'), serialize.I('width', 640, 'the width of the screen'), serialize.I('height', 480, 'the height of the screen'), serialize.S('title', 'Serge', 'the title of the main window'), serialize.L('backcolour', (0,0,0), 'the background colour'), serialize.O('camera', None, 'the camera for this renderer'), serialize.O('icon', None, 'the icon for the main window'), serialize.B('fullscreen', False, 'whether to display in full screen or not'), ) def __init__(self, width=640, height=480, title='Serge', backcolour=(0,0,0), icon=None, fullscreen=False): """Initialise the Renderer""" self.addLogger() self.initEvents() self.width = width self.height = height self.title = title self.layers = [] self.backcolour = backcolour self.fullscreen = fullscreen self.camera = camera.Camera() self.camera.setSpatial(0, 0, self.width, self.height) self.icon = icon self.init() ### Serializing ### def init(self): """Initialise from serialized state""" self.addLogger() self.initEvents() self._sort_needed = False pygame.display.set_caption(self.title) # # Tried the following with flags but no impact pygame.FULLSCREEN|pygame.HWSURFACE|pygame.DOUBLEBUF flags = pygame.FULLSCREEN if self.fullscreen else 0 self.surface = pygame.display.set_mode((self.width, self.height), flags | pygame.HWSURFACE) for layer in self.layers: layer.setSurface(pygame.Surface((self.width, self.height), pygame.SRCALPHA, 32)) layer.init() self.camera.init() self.camera.resizeTo(self.width, self.height) if self.icon: pygame.display.set_icon(visual.Register.getItem(self.icon).raw_image) # self._render_layer_dict = None ### Layers ### def addLayer(self, layer): """Add a layer to the rendering""" self.log.info('Adding layer "%s" at %d' % (layer.name, layer.order)) if layer in self.layers: raise DuplicateLayer('The layer %s is already in the renderer' % layer) else: self.layers.append(layer) self._sort_needed = True self.resetSurfaces() # # Update the layer dictionary cache self.getRenderingOrderDictionary() # return layer def getLayer(self, name): """Return the named layer""" for layer in self.layers: if layer.name == name: return layer else: raise UnknownLayer('No layer with name "%s" was found' % (name,)) def getLayerBefore(self, layer): """Return the layer before the specified one in terms of rendering order""" for test_layer in reversed(self.getLayers()): if test_layer.order < layer.order: return test_layer else: raise NoLayer('There is no layer before %s' % layer.getNiceName()) def resetSurfaces(self): """Recreate the surfaces for our layers When layers are added we sometimes need to reset the layers, for instance, virtual layers need to be shifted around so that they have the right order. 
""" self._sortLayers() for layer in self.getLayers(): layer.initSurface(self) def getLayers(self): """Return all the layers""" return self.layers def removeLayer(self, layer): """Remove the layer from the rendering""" try: self.layers.remove(layer) except ValueError: raise UnknownLayer('The layer %s was not found' % layer.getNiceName()) # # Update the layer dictionary cache self.getRenderingOrderDictionary() def removeLayerNamed(self, name): """Remove the layer with the specific name""" layer = self.getLayer(name) self.removeLayer(layer) def clearLayers(self): """Clear all the layers""" self.layers = [] def _sortLayers(self): """Sort the layers into the right order""" self.layers.sort(lambda l1, l2 : cmp(l1.order, l2.order)) self._sort_needed = False def orderActors(self, actors): """Return the list of actors sorted by who should be processed first to correctly render The actors are checked to see which layer they reside on and then this is used to order the returned list. """ # # Make a lookup table to quickly find layers layers = dict([(layer.name, layer.order) for layer in self.getLayers()]) actor_list = [(layers.get(actor.getLayerName(), 0), actor) for actor in actors] actor_list.sort() # return [actor for _, actor in actor_list] def getRenderingOrder(self, layer): """Return the order that a layer will be rendered in (0 = first)""" try: return self.layers.index(layer) except ValueError: raise UnknownLayer('The layer %s was not found' % layer) def getRenderingOrderDictionary(self): """Return a dictionary of the rendering orders of each layer by name ({name:0, name:1} etc) The dictionary is actually a live copy that will be updated if you add layers to the renderer so it is safe for you to cache it and re-use it. Changing the dictionary results in undefined behaviour. """ order = dict([(layer.name, idx) for idx, layer in enumerate(self.getLayers())]) if self._render_layer_dict is None: # # Set the dictionary self._render_layer_dict = order else: # # Clear and reset the cached copy of the dictionary for k in self._render_layer_dict.keys(): del(self._render_layer_dict[k]) self._render_layer_dict.update(order) # return self._render_layer_dict ### Rendering ### def clearSurface(self): """Clear the surface""" self.surface.fill(self.backcolour) def preRender(self): """Prepare for new rendering""" self.clearSurface() for layer in self.getLayers(): if layer.active: layer.clearSurface() layer.preRender() def render(self): """Render all the layers""" # # Post rendering events for layer in self.layers: if layer.active: layer.postRender() # # Put layers in the right order if self._sort_needed: self._sortLayers() # # Render all layers for layer in self.layers: if layer.active: layer.render(self.surface) # self.processEvent((events.E_AFTER_RENDER, self)) def getSurface(self): """Return the overall surface""" return self.surface ### Camera stuff ### def setCamera(self, camera): """Set our camera""" self.camera = camera def getCamera(self): """Return our camera""" return self.camera def getScreenSize(self): """Returns the screen size""" return (self.width, self.height) class RenderingLayer(common.Loggable, serialize.Serializable, common.EventAware): """A layer on which to render things This is the abstract version of the layer. Create subclasses of this to do useful things. 
""" my_properties = ( serialize.S('name', '', 'the name of the layer'), serialize.I('order', 0, 'the order to render (0=low)'), serialize.B('active', True, 'whether this layer is active'), serialize.B('static', False, 'whether this layer is static with respect to the camera'), ) def __init__(self, name, order): """Initialise the Layer""" super(RenderingLayer, self).__init__() self.initEvents() self.name = name self.order = order self.surface = None self.active = True self.static = False def setSurface(self, surface): """Set our surface""" self.surface = surface def getSurface(self): """Return the surface""" return self.surface def initSurface(self, renderer): """Create the surface that we need to draw on""" raise NotImplementedError def getNiceName(self): """Return the nice name for this layer""" return '<Layer %d: %s - order %d>' % (id(self), self.name, self.order) def setStatic(self, static): """Determine whether this layer is static with respect to camera movements or not""" self.static = static ### Serializing ### def init(self): """Initialise from serialized state""" self.initEvents() ### Rendering ### def clearSurface(self): """Clear our surface""" raise NotImplementedError def preRender(self): """Called before the layer has anything rendered to""" self.processEvent((events.E_BEFORE_RENDER, self)) def render(self, surface): """Render to a surface""" raise NotImplementedError def postRender(self): """Called after the layer has has had everything rendered on it""" self.processEvent((events.E_AFTER_RENDER, self)) class Layer(RenderingLayer): """A rendering layer with its own surface This type of layer is useful for compositing because you can do things to this layer once it has been rendered (eg shadows, glows, blurs etc). """ def initSurface(self, renderer): """Create the surface that we need to draw on We create a surface that is identical to the background for the main renderer. """ self.setSurface(pygame.Surface((renderer.width, renderer.height), pygame.SRCALPHA, 32)) def clearSurface(self): """Clear our surface""" self.surface.fill((0,0,0,0)) def render(self, surface): """Render to a surface""" surface.blit(self.surface, (0,0)) class VirtualLayer(RenderingLayer): """A rendering layer that doesn't have its own surface This layer will render to the layer immediately before it in the rendering cycle. """ def initSurface(self, renderer): """Create the surface that we need to draw on We do not want a surface ourself but we need the next surface in line as far as the renderer is concerned. """ try: self.setSurface(renderer.getLayerBefore(self).getSurface()) except NoLayer: self.setSurface(renderer.getSurface()) def clearSurface(self): """Clear our surface Nothing to do here - handled by the real owner of the surface. """ pass def render(self, surface): """Render to a surface Nothing to do here - handled by the real owner of the surface. """ pass
The largest hack happened to Yahoo in 2013; can you imagine the effects of 3 billion accounts being hacked? That is approximately 10 times the current U.S. population. The second-biggest hack was the attack against Marriott, which affected 500 million accounts. The Marriott hack hits especially close to home for me because I use the hotel often and am a top-status traveler. The hackers obtained not only the personal information of 500 million accounts but also numerous credit card numbers. Marriott has yet to acknowledge the issue or warn customers to change their credit card numbers.

If you review this list of hacks, you’ll likely agree that the issue is something we should all be concerned about. Take Yahoo, for example. The company first had 3 billion accounts hacked and then another 500 million a year later. It appears that Yahoo’s IT security team couldn’t measure up to the hackers, or that top management didn’t provide the impetus and focus inside the organization. We believe there are numerous other hacks that aren’t making the news because they have happened to manufacturing and design organizations and aren’t affecting the general public.

Cybersecurity is important, but organizations need to implement information security as well. Information security is the larger issue that contains cybersecurity as one of its elements. See the diagram below, displaying information security and its relationship to cybersecurity.

This guidance makes it clear that implementing an IT governance standard such as ISO 27001 is really not an option but a requirement for publicly traded companies. In clause 6.2, ISO 27001 requires organizations to identify their information security objectives and develop plans to achieve them. Under clause 8.0—Operations, the standard requires the organization to implement the plans developed in clauses 6.1 and 6.2, conduct regular information security assessments (clause 8.2), and implement the information security risk treatment plan created as required by clause 6.1.3.

Annex A of ISO 27001 is broken down into 14 control categories, which collectively present 114 individual controls. The expectation is that each of these areas is considered and covered; if one is not, the reason why must be justified. The assessment, the risk treatment plan, and then the implementation must cover each of these areas.

This is also where the National Institute of Standards and Technology (NIST) programs can integrate with ISO 27001. NIST is a nonregulatory agency of the U.S. Department of Commerce. NIST’s Special Publication 800-53 provides a catalog of security controls for all U.S. federal information systems except those related to national security. The NIST standards are quickly becoming mandatory requirements for manufacturers that supply the federal government. U.S. law specifies a minimum set of information security requirements for information systems used by the federal government, which in turn refers to Special Publication 800-53 as the mandatory minimum controls that federal agencies must implement.

Clause 5.0 of ISO 27001 explains that top management is accountable for information security, and that the information security processes must be integrated into the organization’s business processes—in other words, the “process approach” or the process map. As expected, the objectives, the progress toward objectives, the effectiveness of the controls, and other metrics are reviewed during the management or business reviews that are led by top management.
It is crucial that top management understand its important role in governing information security and cybersecurity. An organization’s understanding of the risks related to information security and cybersecurity must start with company leadership. The U.S. Securities and Exchange Commission has made it very clear that if you are a publicly traded company, then you will need to develop policies and procedures that deal with cybersecurity risk. A good place to start is ISO 27001, with its well-defined requirements and process methodology. Additionally, ISO 27001 integrates well with the organization’s quality management system.

The company board needs to get involved, providing oversight of and assigning responsibilities for this effort. The U.S. Securities and Exchange Commission guidance has asked for a description of how the board administers its risk oversight.

ISO 27001 requires both internal and external audits. As with the Sarbanes-Oxley Act, the system should be regularly tested by assessors who evaluate the process. Also, vulnerability and penetration tests with hired external parties need to become regular practice for organizations. Organizations should focus not only on external threats but also on internal attacks and the general availability of information. That’s where ISO 27001 management systems can play a critical role.

Implementing ISO 27001 and getting a third-party certificate isn’t enough. Cybersecurity and information security require continual monitoring and upgrades of the organization’s defenses. Technology is constantly changing and requires IT security teams to constantly update processes and technologies. As a place to start, Omnex recommends that top management, along with the company’s board of directors, be provided with an executive overview of this issue.

Join Quality Digest and Chad Kymal, Omnex’s founder and CTO, in the webinar, “Cyber-Insecurity? How to Secure Your System Against a Hack Attack—ISO/IEC 27001” to learn how to implement a robust cybersecurity and information security program. Register here.
def split_by_periods(document):
    """Split a document into sentences, breaking on '.' characters.

    Newlines are dropped; anything left over after the final period is
    appended as a trailing (unterminated) sentence.
    """
    output_array = []
    sentence_array_temp = ""
    for current_char in document:
        if current_char != "\n":
            sentence_array_temp += current_char
        if current_char == ".":
            output_array.append(sentence_array_temp)
            sentence_array_temp = ""
    if sentence_array_temp:
        output_array.append(sentence_array_temp)
    return output_array


def split_by_word_as_tuples(sentence_array):
    """Turn each sentence into a (list_of_words, sentence_index) tuple."""
    output_array = []
    for index, sentence in enumerate(sentence_array):
        words_array = sentence.split(" ")
        # list() materializes the words; under Python 3, filter() alone would
        # leave a lazy filter object inside the returned tuples.
        words_array = list(filter(None, words_array))
        output_array.append((words_array, index))
    return output_array


def kwic(document, listPairs=False, ignoreWords=None, periodsToBreaks=False):
    """Split a document into per-sentence word lists.

    Note: listPairs and ignoreWords are accepted for interface compatibility
    but are not used by this implementation.
    """
    if not document:
        return []
    if periodsToBreaks:
        split_into_sentences = split_by_periods(document)
    else:
        split_into_sentences = document.splitlines()
    split_into_word_tuples = split_by_word_as_tuples(split_into_sentences)
    return split_into_word_tuples
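# A quick usage sketch of kwic(); the sample text below is arbitrary and only
# meant to show the two splitting modes.
if __name__ == '__main__':
    sample = "The cat sat.\nThe dog ran. It barked."

    # Default mode: split on newlines, so the second line stays one sentence.
    print(kwic(sample))
    # [(['The', 'cat', 'sat.'], 0), (['The', 'dog', 'ran.', 'It', 'barked.'], 1)]

    # periodsToBreaks mode: split on periods instead of line breaks.
    print(kwic(sample, periodsToBreaks=True))
    # [(['The', 'cat', 'sat.'], 0), (['The', 'dog', 'ran.'], 1), (['It', 'barked.'], 2)]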
Freedom Fire Protection offers design, installation, and maintenance for all of your fire sprinkler and fire protection needs. With over 50 years of combined experience, we guarantee quality service and performance. We install all kinds of sprinklers and fire protection products. From homes to skyscrapers, Freedom Fire Protection will serve all of your fire protection needs.
# # CanvasRenderBokeh.py -- for rendering into a Bokeh widget # # Eric Jeschke ([email protected]) # # Copyright (c) Eric R. Jeschke. All rights reserved. # This is open-source software licensed under a BSD license. # Please see the file LICENSE.txt for details. import numpy from . import BokehHelp from ginga.canvas.mixins import * # force registration of all canvas types import ginga.canvas.types.all # Bokeh imports from bokeh.plotting import figure class RenderContext(object): def __init__(self, viewer): self.viewer = viewer self.shape = None # TODO: encapsulate this drawable self.cr = BokehHelp.BokehContext(self.viewer.figure) self.pen = None self.brush = None self.font = None def set_line_from_shape(self, shape): # TODO: support line width and style alpha = getattr(shape, 'alpha', 1.0) self.pen = self.cr.get_pen(shape.color, alpha=alpha) def set_fill_from_shape(self, shape): fill = getattr(shape, 'fill', False) if fill: if hasattr(shape, 'fillcolor') and shape.fillcolor: color = shape.fillcolor else: color = shape.color alpha = getattr(shape, 'alpha', 1.0) alpha = getattr(shape, 'fillalpha', alpha) self.brush = self.cr.get_brush(color, alpha=alpha) else: self.brush = None def set_font_from_shape(self, shape): if hasattr(shape, 'font'): if hasattr(shape, 'fontsize') and shape.fontsize is not None: fontsize = shape.fontsize else: fontsize = shape.scale_font(self.viewer) alpha = getattr(shape, 'alpha', 1.0) self.font = self.cr.get_font(shape.font, fontsize, shape.color, alpha=alpha) else: self.font = None def initialize_from_shape(self, shape, line=True, fill=True, font=True): if line: self.set_line_from_shape(shape) if fill: self.set_fill_from_shape(shape) if font: self.set_font_from_shape(shape) def set_line(self, color, alpha=1.0, linewidth=1, style='solid'): # TODO: support style self.pen = self.cr.get_pen(color, alpha=alpha, linewidth=linewidth, linestyle=style) def set_fill(self, color, alpha=1.0): if color is None: self.brush = None else: self.brush = self.cr.get_brush(color, alpha=alpha) def set_font(self, fontname, fontsize): self.font = self.cr.get_font(fontname, fontsize, 'black', alpha=1.0) def text_extents(self, text): return self.cr.text_extents(text, self.font) ##### DRAWING OPERATIONS ##### def draw_text(self, cx, cy, text, rot_deg=0.0): self.cr.init(angle=[numpy.radians(rot_deg)]) self.cr.update_font(self.pen, self.font) self.cr.plot.text(x=[cx], y=[cy], text=[text], **self.cr.kwdargs) def draw_polygon(self, cpoints): self.cr.init() self.cr.update_patch(self.pen, self.brush) xy = numpy.array(cpoints) self.cr.plot.patches(xs=[xy.T[0]], ys=[xy.T[1]], **self.cr.kwdargs) def draw_circle(self, cx, cy, cradius): self.cr.init() self.cr.update_patch(self.pen, self.brush) self.cr.plot.circle(x=[cx], y=[cy], radius=[cradius], **self.cr.kwdargs) def draw_bezier_curve(self, verts): self.cr.init() self.cr.update_line(self.pen) cx, cy = verts.T[0], verts.T[1] self.cr.plot.bezier(x0=[cx[0]], y0=[cy[0]], x1=[cx[3]], y1=[cy[3]], cx0=[cx[1]], cy0=[cy[1]], cx1=[cx[2]], cy1=[cy[2]], **self.cr.kwdargs) def draw_ellipse(self, cx, cy, cxradius, cyradius, theta): self.cr.init() self.cr.update_patch(self.pen, self.brush) self.cr.plot.oval(x=[cx], y=[cy], width=[cxradius*2.0], height=[cyradius*2.0], angle=[numpy.radians(theta)], **self.cr.kwdargs) def draw_line(self, cx1, cy1, cx2, cy2): self.cr.init() self.cr.update_line(self.pen) self.cr.plot.line(x=[cx1, cx2], y=[cy1, cy2], **self.cr.kwdargs) def draw_path(self, cpoints): self.cr.init() self.cr.update_line(self.pen) xy = numpy.array(cpoints) 
self.cr.plot.line(x=xy.T[0], y=xy.T[1], **self.cr.kwdargs) class CanvasRenderer(object): def __init__(self, viewer): self.viewer = viewer def setup_cr(self, shape): cr = RenderContext(self.viewer) cr.initialize_from_shape(shape) return cr def get_dimensions(self, shape): cr = self.setup_cr(shape) cr.set_font_from_shape(shape) return cr.text_extents(shape.text) #END
The truth is that I don’t have time to get sick. I am usually busy from morning until night with work, appointments, playdates and more. Getting sick puts a real crimp in my day. That’s why I like to stock up on my favorite wellness essentials from Walmart and follow some basic precautions to reduce the likelihood of getting sick. One of the most important tips to keep germs away is to wash hands often. It’s a tip that I share with my daughters every time I can. Recently, I picked up Dial Complete Antibacterial Foaming Hand Wash (in the Spring Water 7.5 oz variety) from Walmart. Did you know that this product kills 99.99% of germs? I love the scent and the awesome foaming action! Formulated with antibacterial ingredients, this is a product that’s a must-have during cold & flu season. While it’s tough on germs, it’s still gentle on my skin. I truly believe that wellness starts from the inside. I have been taking probiotics for years as they provide the “good” bacteria that help to promote a healthy digestive tract. I recently picked up the Align Dualbiotic Gummies (54 count) from Walmart. This gummy formulation is easy to chew and tastes amazing! What makes this probiotic formulation unique is that it contains a prebiotic fiber (inulin) to help nourish good bacteria in your digestive system. Not all probiotic formulations on the market contain this important prebiotic fiber, so this makes the Align Dualbiotic Gummies a smart choice! Plus, it’s naturally flavored and contains under 1 gram of sugar per gummy. 3. Prioritize a good night’s sleep! Rest is the time when our bodies can recharge. Despite how busy my schedule can seem, I always make sure I log enough Zzzz’s at bedtime. I like to light a lavender candle in the evening – it signals my mind to start to wind down. Plus, it smells great! Any health routine includes adequate intake of fiber. Metamucil Premium Blend is a wise choice because it features 100% natural psyllium fiber. This type of fiber traps the waste that weighs you down so that you feel lighter. After all, it’s important to focus on digestive health & regularity. Free of artificial sweeteners or colors, Metamucil Premium Blend is sweetened with plant-based Stevia. It also has natural flavors and colors. With the natural psyllium fiber, Metamucil helps curb the appetite so you feel less hungry between meals. I add some to my morning cup of tea and it tastes amazing! 5. Stock up on medicated drops! Despite the best intentions, we all get sick from time to time. To tackle those pesky sore throats that are so popular during winter, I stocked up on Vicks VapoCool Severe Medicated Drops (45 count) from Walmart. With Vicks VapoCool Severe Medicated Drops, I can vaporize my worst sore throat pain. The cooling relief (thanks to a powerful rush of Vicks Vapors) is quite pleasant. 6. Make smart food choices! Choosing whole foods that are rich in nutrients is part of my wellness routine. I like to stick to a diet rich in fresh fruits & vegetables, along with plenty of lean protein (mostly from chicken and fish). I will occasionally treat myself, but the majority of my diet is healthy. 7. Keep stress under control! Is your work deadline looming? Did the kids just make a mess in the kitchen? We are all subject to stress on a daily basis, but it’s how we deal with the stress that makes all the difference. To keep my stress hormones from surging, I meditate a few times per week and do cardio several times per week. 
I find that all the extra activity goes a long way in boosting my mood and keeping me calm. Shop at Walmart for Vicks VapoCool Severe Medicated Drops, Metamucil Premium Blend, Dial Complete Antibacterial Foaming Hand Wash, Align Dualbiotic Gummies and more for your own health & wellness routine! Learn more about how Walmart can give you a head start to a healthier 2019. What is your best tip for a healthy year? With this flu going around, we have been washing our hands like crazy. None of us have time to get sick, so I am loving your tips. I don’t have time to be sick either so it’s important to stock up on everything I need during the cold season. I also wash my hands often to keep germs at bay. I hate this time of year and every time my son leaves for school I pray he will come home and not end up sick. Because parents have been sending children to school with the Flu. Which is driving me crazy. If your children are sick leave them at home. I will have to pick these up. I don’t have time to get sick. I’m always running around and doing something, and the house seems to go into chaos when I am not well. Is really funny the minute I came to this post I started sneezing like crazy. I’m going to have to take on a few of these wellness tips so that I do not get sick. Who wants to be sick or has the time. I do all of these things and keep all of these items on hand just in case. Never know when they are needed. These are great tips. I cannot afford to get sick. Parents don’t get sick days, especially the self-employed ones. These are all great items to have on hand. Cough drops are helpful to soothe my aching throat whenever I’m sick. I have yet to try those by Vicks! Cough drops are a necessity this time of year! I have them with me at all times, even on my night stand. Vick’s Vapor Rub is my go-to for fast relief, so I’ll have to try these VapoCool lozenges. These are awesome tips. I have to make sure that I stock up on these items the next time I visit Walmart. I’m proud to say I’m already using most of these things. I’ve found that probiotics have been key in keeping me from getting sick. Cough drops are my lifesaver!! Now, if we could just get them to put them in a non-crinkly bag! I hate how loud those bags are! HA! YES to washing your hands! My hands stay so dry during the winter months because I wash them so much. LOL! I am definitely needing to prioritize sleep at the moment. I surely don’t get enough of it! I am just getting over sickness. The polar vortex really hit me hard and gotten sick. These are great tips. I take a daily probiotic too. It is so important I think everyone should take one. These are great tips! I have struggled this season. My stress has been high, I haven’t been sleeping well, and I my hands are so dry! I sat down last week and made a list of things I need to differently. I really need to get myself together! Great useful tips. I never tried Fiber supplements, hope it works. I always prefer to use natural antibacterial wipes to wipe out and this prevents me. Here I got more useful tips to prevent flu this winter. Thanks for sharing.
# -*- coding: utf-8 -*- # Copyright 2007-2020 The HyperSpy developers # # This file is part of HyperSpy. # # HyperSpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # HyperSpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with HyperSpy. If not, see <http://www.gnu.org/licenses/>. import logging from itertools import chain import numpy as np from scipy.stats import halfnorm from hyperspy.external.progressbar import progressbar from hyperspy.misc.math_tools import check_random_state _logger = logging.getLogger(__name__) def _thresh(X, lambda1, vmax): """Soft-thresholding with clipping.""" res = np.abs(X) - lambda1 np.maximum(res, 0.0, out=res) res *= np.sign(X) np.clip(res, -vmax, vmax, out=res) return res def _mrdivide(B, A): """Solves xB = A as per Matlab.""" if isinstance(B, np.ndarray): if len(B.shape) == 2 and B.shape[0] == B.shape[1]: # square array return np.linalg.solve(A.T, B.T).T else: # Set rcond default value to match numpy 1.14 default value with # previous numpy version rcond = np.finfo(float).eps * max(A.shape) return np.linalg.lstsq(A.T, B.T, rcond=rcond)[0].T else: return B / A def _project(W): newW = W.copy() np.maximum(newW, 0, out=newW) sumsq = np.sqrt(np.sum(W ** 2, axis=0)) np.maximum(sumsq, 1, out=sumsq) return _mrdivide(newW, np.diag(sumsq)) def _solveproj(v, W, lambda1, kappa=1, h=None, e=None, vmax=None): m, n = W.shape v = v.T if vmax is None: vmax = v.max() if len(v.shape) == 2: batch_size = v.shape[1] eshape = (m, batch_size) hshape = (n, batch_size) else: eshape = (m,) hshape = (n,) if h is None or h.shape != hshape: h = np.zeros(hshape) if e is None or e.shape != eshape: e = np.zeros(eshape) eta = kappa / np.linalg.norm(W, "fro") ** 2 maxiter = 1e6 iters = 0 while True: iters += 1 # Solve for h htmp = h h = h - eta * W.T @ (W @ h + e - v) np.maximum(h, 0.0, out=h) # Solve for e etmp = e e = _thresh(v - W @ h, lambda1, vmax) # Stop conditions stoph = np.linalg.norm(h - htmp, 2) stope = np.linalg.norm(e - etmp, 2) stop = max(stoph, stope) / m if stop < 1e-5 or iters > maxiter: break return h, e class ORNMF: """Performs Online Robust NMF with missing or corrupted data. The ORNMF code is based on a transcription of the online proximal gradient descent (PGD) algorithm MATLAB code obtained from the authors of [Zhao2016]_. It has been updated to also include L2-normalization cost function that is able to deal with sparse corruptions and/or outliers slightly faster (please see ORPCA implementation for details). A further modification has been made to allow for a changing subspace W, where X ~= WH^T + E in the ORNMF framework. Read more in the :ref:`User Guide <mva.rnmf>`. References ---------- .. [Zhao2016] Zhao, Renbo, and Vincent YF Tan. "Online nonnegative matrix factorization with outliers." Acoustics, Speech and Signal Processing (ICASSP), 2016 IEEE International Conference on. IEEE, 2016. """ def __init__( self, rank, store_error=False, lambda1=1.0, kappa=1.0, method="PGD", subspace_learning_rate=1.0, subspace_momentum=0.5, random_state=None, ): """Creates Online Robust NMF instance that can learn a representation. 
Parameters ---------- rank : int The rank of the representation (number of components/factors) store_error : bool, default False If True, stores the sparse error matrix. lambda1 : float Nuclear norm regularization parameter. kappa : float Step-size for projection solver. method : {'PGD', 'RobustPGD', 'MomentumSGD'}, default 'PGD' * 'PGD' - Proximal gradient descent * 'RobustPGD' - Robust proximal gradient descent * 'MomentumSGD' - Stochastic gradient descent with momentum subspace_learning_rate : float Learning rate for the 'MomentumSGD' method. Should be a float > 0.0 subspace_momentum : float Momentum parameter for 'MomentumSGD' method, should be a float between 0 and 1. random_state : None or int or RandomState instance, default None Used to initialize the subspace on the first iteration. """ self.n_features = None self.iterating = False self.t = 0 if store_error: self.E = [] else: self.E = None self.rank = rank self.robust = False self.subspace_tracking = False self.lambda1 = lambda1 self.kappa = kappa self.subspace_learning_rate = subspace_learning_rate self.subspace_momentum = subspace_momentum self.random_state = check_random_state(random_state) # Check options are valid if method not in ("PGD", "RobustPGD", "MomentumSGD"): raise ValueError("'method' not recognised") if method == "RobustPGD": self.robust = True if method == "MomentumSGD": self.subspace_tracking = True if subspace_momentum < 0.0 or subspace_momentum > 1: raise ValueError("'subspace_momentum' must be a float between 0 and 1") def _setup(self, X): self.h, self.e, self.v = None, None, None if isinstance(X, np.ndarray): n, m = X.shape avg = np.sqrt(X.mean() / m) iterating = False else: x = next(X) m = len(x) avg = np.sqrt(x.mean() / m) X = chain([x], X) iterating = True self.n_features = m self.iterating = iterating self.W = halfnorm.rvs( size=(self.n_features, self.rank), random_state=self.random_state ) self.W = np.abs(avg * self.W / np.sqrt(self.rank)) self.H = [] if self.subspace_tracking: self.vnew = np.zeros_like(self.W) else: self.A = np.zeros((self.rank, self.rank)) self.B = np.zeros((self.n_features, self.rank)) return X def fit(self, X, batch_size=None): """Learn NMF components from the data. Parameters ---------- X : {numpy.ndarray, iterator} [n_samples x n_features] matrix of observations or an iterator that yields samples, each with n_features elements. batch_size : {None, int} If not None, learn the data in batches, each of batch_size samples or less. 
""" if self.n_features is None: X = self._setup(X) num = None prod = np.outer if batch_size is not None: if not isinstance(X, np.ndarray): raise ValueError("can't batch iterating data") else: prod = np.dot length = X.shape[0] num = max(length // batch_size, 1) X = np.array_split(X, num, axis=0) if isinstance(X, np.ndarray): num = X.shape[0] X = iter(X) h, e = self.h, self.e for v in progressbar(X, leave=False, total=num, disable=num == 1): h, e = _solveproj(v, self.W, self.lambda1, self.kappa, h=h, e=e) self.v = v self.e = e self.h = h self.H.append(h) if self.E is not None: self.E.append(e) self._solve_W(prod(h, h.T), prod((v.T - e), h.T)) self.t += 1 self.h = h self.e = e def _solve_W(self, A, B): if not self.subspace_tracking: self.A += A self.B += B eta = self.kappa / np.linalg.norm(self.A, "fro") if self.robust: # exactly as in the Zhao & Tan paper n = 0 lasttwo = np.zeros(2) while n <= 2 or ( np.abs((lasttwo[1] - lasttwo[0]) / lasttwo[0]) > 1e-5 and n < 1e9 ): self.W -= eta * (self.W @ self.A - self.B) self.W = _project(self.W) n += 1 lasttwo[0] = lasttwo[1] lasttwo[1] = 0.5 * np.trace( self.W.T.dot(self.W).dot(self.A) ) - np.trace(self.W.T.dot(self.B)) else: # Tom Furnival (@tjof2) approach # - copied from the ORPCA implementation # of gradient descent in ./rpca.py if self.subspace_tracking: learn = self.subspace_learning_rate * ( 1 + self.subspace_learning_rate * self.lambda1 * self.t ) vold = self.subspace_momentum * self.vnew self.vnew = (self.W @ A - B) / learn self.W -= vold + self.vnew else: self.W -= eta * (self.W @ self.A - self.B) np.maximum(self.W, 0.0, out=self.W) self.W /= max(np.linalg.norm(self.W, "fro"), 1.0) def project(self, X, return_error=False): """Project the learnt components on the data. Parameters ---------- X : {numpy.ndarray, iterator} [n_samples x n_features] matrix of observations or an iterator that yields n_samples, each with n_features elements. return_error : bool If True, returns the sparse error matrix as well. Otherwise only the weights (loadings) """ H = [] if return_error: E = [] num = None if isinstance(X, np.ndarray): num = X.shape[0] X = iter(X) for v in progressbar(X, leave=False, total=num): h, e = _solveproj(v, self.W, self.lambda1, self.kappa, vmax=np.inf) H.append(h.copy()) if return_error: E.append(e.copy()) H = np.stack(H, axis=-1) if return_error: return H, np.stack(E, axis=-1) else: return H def finish(self): """Return the learnt factors and loadings.""" if len(self.H) > 0: if len(self.H[0].shape) == 1: H = np.stack(self.H, axis=-1) else: H = np.concatenate(self.H, axis=1) return self.W, H else: return self.W, 1 def ornmf( X, rank, store_error=False, project=False, batch_size=None, lambda1=1.0, kappa=1.0, method="PGD", subspace_learning_rate=1.0, subspace_momentum=0.5, random_state=None, ): """Perform online, robust NMF on the data X. This is a wrapper function for the ORNMF class. Parameters ---------- X : numpy array The [n_samples, n_features] input data. rank : int The rank of the representation (number of components/factors) store_error : bool, default False If True, stores the sparse error matrix. project : bool, default False If True, project the data X onto the learnt model. batch_size : {None, int}, default None If not None, learn the data in batches, each of batch_size samples or less. lambda1 : float Nuclear norm regularization parameter. kappa : float Step-size for projection solver. 
method : {'PGD', 'RobustPGD', 'MomentumSGD'}, default 'PGD' * 'PGD' - Proximal gradient descent * 'RobustPGD' - Robust proximal gradient descent * 'MomentumSGD' - Stochastic gradient descent with momentum subspace_learning_rate : float Learning rate for the 'MomentumSGD' method. Should be a float > 0.0 subspace_momentum : float Momentum parameter for 'MomentumSGD' method, should be a float between 0 and 1. random_state : None or int or RandomState instance, default None Used to initialize the subspace on the first iteration. Returns ------- Xhat : numpy array is the [n_features x n_samples] non-negative matrix Only returned if store_error is True. Ehat : numpy array is the [n_features x n_samples] sparse error matrix Only returned if store_error is True. W : numpy array, shape [n_features, rank] is the non-negative factors matrix H : numpy array, shape [rank, n_samples] is the non-negative loadings matrix """ X = X.T _ornmf = ORNMF( rank, store_error=store_error, lambda1=lambda1, kappa=kappa, method=method, subspace_learning_rate=subspace_learning_rate, subspace_momentum=subspace_momentum, random_state=random_state, ) _ornmf.fit(X, batch_size=batch_size) if project: W = _ornmf.W H = _ornmf.project(X) else: W, H = _ornmf.finish() if store_error: Xhat = W @ H Ehat = np.array(_ornmf.E).T return Xhat, Ehat, W, H else: return W, H
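# Example usage -- an illustrative sketch, not part of the original module.
# The data below is random and purely for demonstration; per the docstring,
# ornmf() takes an [n_samples, n_features] array. If this module lives inside
# a package, import ornmf from there instead of running the file directly.
if __name__ == "__main__":
    rng = np.random.RandomState(0)
    X = np.abs(rng.randn(100, 64))  # non-negative input data

    # Learn a rank-4 factorization, processing the data in batches.
    W, H = ornmf(X, rank=4, batch_size=10, random_state=0)
    print(W.shape, H.shape)

    # With store_error=True the reconstruction and sparse error come back too.
    Xhat, Ehat, W, H = ornmf(X, rank=4, store_error=True, random_state=0)
    print(np.allclose(Xhat, W @ H))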
Who Are You Looking For In Fairacres, New Mexico? Run a search by name for anyone in Fairacres, New Mexico & get free white pages information instantly. Fairacres, New Mexico white page directory listings include full name, phone number and address.
#!/usr/bin/python # -*- coding: utf-8 -*- # pylint: disable=C0111 # The lizard_wbcomputation package implements the computational core of the # lizard waterbalance Django app. # # Copyright (C) 2012 Nelen & Schuurmans # # This package is free software: you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # This library is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this package. If not, see <http://www.gnu.org/licenses/>. import logging from timeseries.timeseriesstub import add_timeseries from timeseries.timeseriesstub import multiply_timeseries from lizard_wbcomputation.bucket_computer import BucketOutcome from lizard_wbcomputation.bucket_summarizer import BucketsSummary from lizard_wbcomputation.load_computer import Load logger = logging.getLogger(__name__) class SummedLoadsFromBuckets(object): """Implements the calculation of the summed bucket loads. """ def __init__(self, start_date, end_date, bucket2outcome): self.start_date, self.end_date = start_date, end_date self.bucket2outcome = bucket2outcome def compute(self, substance): min_summary, inc_summary = self.compute_summary(substance) min_loads = self._create_loads_from_summary(min_summary) inc_loads = self._create_loads_from_summary(inc_summary) return min_loads, inc_loads def compute_summary(self, substance): """Compute and return the minimum and incremental the bucket loads. This method returns a tuple of two BucketsSummary(s), where the first summary contains the minimum bucket loads and the second the incremental bucket loads. The parameter specifies the substance for which to compute the load. 
""" min_summary = BucketsSummary() inc_summary = BucketsSummary() for bucket, outcome in self.bucket2outcome.items(): min_outcome = self.summary_load.compute(bucket, outcome, substance, 'min') inc_outcome = self.summary_load.compute(bucket, outcome, substance, 'incr') for attribute in self.interesting_labels: self._add_timeseries(min_summary, min_outcome, attribute) self._add_timeseries(inc_summary, inc_outcome, attribute) return min_summary, inc_summary def _add_timeseries(self, summary, timeseries, attribute): new_timeseries = add_timeseries(getattr(summary, attribute), getattr(timeseries, attribute)) setattr(summary, attribute, new_timeseries) def _create_loads_from_summary(self, summary): loads = [] for attribute in self.interesting_labels: load = Load(attribute) load.timeseries = getattr(summary, attribute) loads.append(load) return loads class SummaryLoad(object): """Implements the calculation of the loads of a single bucket.""" def __init__(self, buckets_summarizer): self.summarizer = buckets_summarizer def set_time_range(self, start_date, end_date): self.start_date, self.end_date = start_date, end_date def compute(self, bucket, outcome, substance, bound): self._substance, self._bound = substance, bound load_outcome = self._compute_load(bucket, outcome) bucket2load_outcome = {bucket: load_outcome} return self._compute_summary(bucket2load_outcome) def _compute_load(self, bucket, outcome): load_outcome = BucketOutcome() concentration = self._get_concentration(bucket, 'flow_off') load_outcome.flow_off = multiply_timeseries(outcome.flow_off, concentration) concentration = self._get_concentration(bucket, 'drainage_indraft') load_outcome.net_drainage = multiply_timeseries(outcome.net_drainage, concentration) return load_outcome def _compute_summary(self, bucket2load_outcome): return self.summarizer.compute(bucket2load_outcome, self.start_date, self.end_date) def _get_concentration(self, bucket, label): attribute = '%s_concentr_%s_%s' % (self._bound, self._substance, label) return getattr(bucket, attribute)
All the basic amenities, such as pure drinking water, medical aid, electricity, power backup (generator sets), communication facilities, and sports grounds, have been provided. A sharp mind needs a healthy body to support it. The on-campus facilities for indoor and outdoor sports include badminton, cricket, table tennis, hockey, football, basketball, chess, and a number of other activities.
import datetime from django.db import models from django.conf import settings #from django.contrib.auth.models import User from django import forms from urlweb.shortener.baseconv import base62 class Link(models.Model): """ Model that represents a shortened URL # Initialize by deleting all Link objects >>> Link.objects.all().delete() # Create some Link objects >>> link1 = Link.objects.create(url="http://www.google.com/") >>> link2 = Link.objects.create(url="http://www.nileshk.com/") # Get base 62 representation of id >>> link1.to_base62() 'B' >>> link2.to_base62() 'C' # Set SITE_BASE_URL to something specific >>> settings.SITE_BASE_URL = 'http://uu4.us/' # Get short URL's >>> link1.short_url() 'http://uu4.us/B' >>> link2.short_url() 'http://uu4.us/C' # Test usage_count >>> link1.usage_count 0 >>> link1.usage_count += 1 >>> link1.usage_count 1 """ url = models.URLField(verify_exists=True, unique=True) date_submitted = models.DateTimeField(auto_now_add=True) usage_count = models.IntegerField(default=0) def to_base62(self): return base62.from_decimal(self.id) def short_url(self): return settings.SITE_BASE_URL + self.to_base62() def __unicode__(self): return self.to_base62() + ' : ' + self.url class LinkSubmitForm(forms.Form): u = forms.URLField(verify_exists=True, label='URL to be shortened:', )
Yes, I love my husband and he’s great and all. However, he’s not a big fan of Valentine’s Day. He’s one of those V-Day haters who say, “That’s not a real holiday.” Of course, I beg to differ. My pups on the other hand LOVE to celebrate every day and every moment, especially if it involves goodies for them. They are so much fun to do something with and for on Valentine’s Day. Dogs are so easy please that anything from homemade biscuits to an extra-long walk will make them happy. Because all kinds of dog products cross my desk, I thought it would be fun to gather those that are Valentine’s Day and/or heart themed for you or your pup, in case you want to go for that extra something-something. Orbee-Tuff Nooks by Planet Dog. Boredom busting with heart! My dog Tampa Bay loves his little red nook with a heart-shaped hole, perfect for stuffing in his favorite treats. It’s a great enrichment toy, plus it’s easy to clean (which I love). These bouncy, buoyant and durable balls are minty, too. Available with other colors and hole shapes, too: the green peace sign, a yellow smiley face, a royal blue star and a red star, a green shamrock and a lime green paw. Retails for $13.95; planetdog.com. Planet Dog’s Holiday Love Ball. Planet Dog has a second item on my list and this one just screams Valentine’s Day, especially for the ball-loving dog. He’ll have lots of fun playing with the pink or red Love Ball, made of the Orbee-Tuff material that’s bouncy, chewy, buoyant and durable. Comes in two sizes: 2 ½ inches and 4 inches. Retail from $13.95 to $17.95; planetdog.com. Blue Dog Bakery’s Love Bites. Just in time for Valentine’s Day, Blue Dog Bakery has come out with its grilled chicken and cheese-flavored Love Bites treats. My dogs totally dig the Blue Dog Bakery treats, particularly the Gingerbread flavor that came out during the holidays. What’s so great about these soft and chewy Love Bites is that they are only 3 calories per treat. There also made with human-grade ingredients but without corn, soy, wheat and no artificial colors or preservatives. You can find them at PetSmart, retailing for $4.99; bluedogbakery.com. The Sweeter Vest by BossPup. LA-based BossPup just recently came across my radar with its stylish clothes for dogs. I heart the Sweeter Vest, and think it makes the perfect pup outfit for Valentine’s Day. It comes in black hearts, which would look sharp on my two boys, and also in pink hearts. Comes in Large (20 to 25 pounds) and XL (25 to 30 pounds). Retails for $30; bosspup.com. Rhinestone Bling Necklace Dog Collar by Canine Styles. If you believe the only proper way to celebrate Valentine’s Day is with something that sparkles, you’ll love this beauty! Really, it speaks for itself! Comes in sizes 10 (6.5 to 9 inches neck size) through 14 (9.5 to 13 inches neck size). Retails for $25; caninestyles.com. Canine Styles’ Polar Fleece Track Suit. This sweet and snuggly item is guaranteed to keep your furry one toasty warm during the cold month of February and the rest of winter (especially if it’s a long one). It’s the Valentine’s Day gift that keeps on giving back to your pup. Light pink and cranberry are two of my all-time favorite colors, and both are guaranteed to look great on your dog, plus give off that sweet-on-you vibe. A little too sweet for you? I’ll let you in on a secret — it also comes in navy and in brown. Available in sizes 8 (up to 4 pounds) through 18 (23-28 pounds). Retails for $50; caninestyles.com. The Pink Follard Pattern Nesting Bed from Canine Styles. 
I don’t know about your dog, but my two pups have five dog beds and they use them all, plus my own bed. But I think I need to move them on up to this ever-so-fine nesting bed that not only will they love but also it will look great in my home office. Comes in two sizes: 24 inches (for up to 16 pounds) and 32 inches (up to 45 pounds). Retails for $225; caninestyles.com. Tattooed Mom handmade wool sweater by Chilly Dog. Honestly, I love this sweater. It’s got that retro tattoo look, plus shows the world how much your dog loves you and keeps him warm at the same time. Available in sizes XXS to XXXL. Retails for $29.99; chillydogsweaters.com. Hearts and Flowers & All Hearts collars from Up Country. I change my dogs’ collars at least twice a year, if not more. They’re both boys, so a collar is a simple way to bling them up. These tender heart collars have great patterns for boys and girls, showing that your dog is both stylish and loved. Both collars come in: wide (1 inch) or narrow (5/8 inch). The Hearts and Flowers collar also comes in teacup (1/2 inch). You can also get harnesses, key rings and leads in these patterns, plus there is a sweater in the Hearts and Flowers pattern. Retails from $12 to $23; upcountryinc.com. Everyone recognizes the Russell Stover box of candies. It’s practically synonymous with Valentine’s Day. This year, Russell Stover selected the nonprofit organization Red Rover, which helps pets in crisis, providing emergency sheltering, disaster relief services, financial assistance and education, to feature on limited-edition chocolate bars and signature heart-boxed chocolates. Candy bars retail for $1.49 and can be found at Wakefern stores. You can buy the 1.75-ounce hearts for $1.29 at AAFES, Rite Aid, Target, Publix, HEB, Hy-Vee, Savemart, Big Y, Food City, Dierbergs, Giant Eagle and Woodmans. Go for the bigger chocolate heart at 3.5 ounces for $2.99 at Kroger, Albertsons, Savemart, Woodmans, Giant Eagle, Hy-Vee, Wakefern, HEB and Bilo. On top of that, Russell Stover will donate $50k to Red Rover, allowing Red Rover to not only help animals in crisis but also support its mission to strengthen the human-animal bond. Red Rover created a Paws4Love contest to promote the human-animal bond. To enter, create a video that shows how much you love your animal friend. There will be prizes, which include lots of chocolate (for humans, not dogs, of course!) and other great items. The contest runs through 2 p.m. PST on February 14th. The Ugly Pugline, Wilson the Pug in Love. For those of you who enjoy a great love story for Valentine’s Day, we’ve actually found a dog-themed one for you. Check out The Ugly Pugline, Wilson the Pug in Love by Wilson the Pug with Nancy Levine, published in January 2018 by Skyhorse Publishing. When Wilson falls in love with Hedy, a Mastiff puppy, challenges and comedy await! This book reminds us all that when it comes to love, it’s what inside that really counts. Retails for $14.99 and available at Amazon, Barnes & Noble and indieboundo.org. Tell us: What Valentine’s Day dog products do you love? What are you gifting your dog or the dog lover in your life this V-Day? Stay tuned for more from Executive Editor Melissa L. Kauffman on Dogster.com’s Dug Up at Dogster column and on social media with #DugUpatDogster. The post Dug Up at Dogster: 11 Sweet Valentine’s Day Dog Products for Your Beloved Pup (and You!) appeared first on Dogster.
# -*- coding: utf-8 -*- ''' Github User State Module .. versionadded:: 2016.3.0. This state is used to ensure presence of users in the Organization. .. code-block:: yaml ensure user test is present in github: github.present: - name: 'Example TestUser1' - email: [email protected] - username: 'gitexample' ''' def __virtual__(): ''' Only load if the github module is available in __salt__ ''' return 'github' if 'github.list_users' in __salt__ else False def present(name, profile="github", **kwargs): ''' Ensure a user is present .. code-block:: yaml ensure user test is present in github: github.present: - fullname: 'Example TestUser1' - email: '[email protected]' - name: 'gitexample' The following parameters are required: name This is the github handle of the user in the organization ''' email = kwargs.get('email') full_name = kwargs.get('fullname') ret = { 'name': name, 'changes': {}, 'result': None, 'comment': '' } target = __salt__['github.get_user'](name, profile=profile, **kwargs) # If the user has a valid github handle and is not in the org already if not target: ret['result'] = False ret['comment'] = 'Couldnt find user {0}'.format(name) elif isinstance(target, bool) and target: ret['comment'] = 'User {0} is already in the org '.format(name) ret['result'] = True elif not target.get('in_org', False) and target.get('membership_state') != 'pending': if __opts__['test']: ret['comment'] = 'User {0} will be added to the org'.format(name) return ret # add the user result = __salt__['github.add_user']( name, profile=profile, **kwargs ) if result: ret['changes'].setdefault('old', None) ret['changes'].setdefault('new', 'User {0} exists in the org now'.format(name)) ret['result'] = True else: ret['result'] = False ret['comment'] = 'Failed to add user {0} to the org'.format(name) else: ret['comment'] = 'User {0} has already been invited.'.format(name) ret['result'] = None return ret def absent(name, profile="github", **kwargs): ''' Ensure a github user is absent .. code-block:: yaml ensure user test is absent in github: github.absent: - name: 'Example TestUser1' - email: [email protected] - username: 'gitexample' The following parameters are required: name Github handle of the user in organization ''' email = kwargs.get('email') full_name = kwargs.get('fullname') ret = { 'name': name, 'changes': {}, 'result': None, 'comment': 'User {0} is absent.'.format(name) } target = __salt__['github.get_user'](name, profile=profile, **kwargs) if not target: ret['comment'] = 'User {0} does not exist'.format(name) ret['result'] = True return ret elif isinstance(target, bool) and target: if __opts__['test']: ret['comment'] = "User {0} will be deleted".format(name) ret['result'] = None return ret result = __salt__['github.remove_user'](name, profile=profile, **kwargs) if result: ret['comment'] = 'Deleted user {0}'.format(name) ret['changes'].setdefault('old', 'User {0} exists'.format(name)) ret['changes'].setdefault('new', 'User {0} deleted'.format(name)) ret['result'] = True else: ret['comment'] = 'Failed to delete {0}'.format(name) ret['result'] = False else: ret['comment'] = "User {0} has already been deleted!".format(name) if __opts__['test']: ret['result'] = None return ret ret['result'] = True return ret
Here I am in the medical center (forgot the name..) taking my travel-related shots: yellow fever (mandatory), meningitis (highly recommended), and others I can't remember. I took the flu vaccine (the first time taking it in my entire life) and got a weakened strain, two days before leaving. Initially I was pissed off about it, but it came and went quickly. Thank god! I was not looking forward to flying 18+ hours with the flu. And I forgot to mention the malaria medicine: horse-sized antibiotic tablets (DOXYCYCLINE HYCLATE) that I had to begin taking two days prior to flying. It was prescribed for the entire month of my trip and for a full four weeks after my return. Total... two months and two days. Needless to say... I decided (after careful consideration and the advice of many kind and concerned friends) to stop taking them two days after I arrived. Plus, there were no mosquitos in Addis at the time and it wasn't malaria season anyway. I'll save them for the next trip.
import os import time import dbus.service from gi.repository import GObject as gobject from datetime import datetime from oacids.helpers.dbus_props import GPropSync, Manager, WithProperties from ifaces import BUS, IFACE, PATH, INTROSPECTABLE_IFACE, TRIGGER_IFACE, OPENAPS_IFACE # class Heartbeat (GPropSync, Manager): class Heartbeat (GPropSync): OWN_IFACE = OPENAPS_IFACE + '.Heartbeat' active = False sleep_interval = 1000 started_at = None def __init__ (self, bus, ctrl): self.bus = bus self.path = PATH + '/Heartbeat' self.master = ctrl self.started_at = time.time( ) self.now = datetime.fromtimestamp(self.started_at) GPropSync.__init__(self, bus, self.path) self.handle = None self.Start( ) PROP_SIGS = { 'interval': 'u' , 'Ticking': 'b' , 'StartedAt': 'd' , 'Uptime': 'd' } @gobject.property(type=int, default=1000) def interval (self): return self.sleep_interval @gobject.property(type=bool, default=False) def Ticking (self): return self.active @gobject.property(type=float) def StartedAt (self): return self.started_at @gobject.property(type=float) def Uptime (self): return time.time( ) - self.started_at @dbus.service.method(dbus_interface=OWN_IFACE, in_signature='u', out_signature='s') def Start (self, ms=1000): self.active = True self.sleep_interval = ms self.handle = gobject.timeout_add(self.interval, self._tick) @dbus.service.method(dbus_interface=OWN_IFACE, in_signature='', out_signature='s') def Stop (self): gobject.source_remove (self.handle) self.active = False self.handle = None @dbus.service.signal(dbus_interface=OWN_IFACE, signature='') def Heartbeat (self): # print "scanning" pass def _tick (self): self.Heartbeat( ) return self.Ticking
Relationship Between Content And SEO! Many people believe that SEO is breathing its last breaths, while others believe it has been brought back to life by recent changes in its techniques and practices, including new methods of link building, social media marketing, and content marketing. In the past few years, content marketing has become a vital asset for SEO professionals.

Google algorithm updates – Google is dead set on improving the quality of results shown in SERPs. For this purpose, the tech giant has launched numerous algorithm updates that effectively target websites with low-quality and duplicate information. For business owners, it has become important to pay attention to the information their websites display. It should be relevant, original, and fresh, with the best possible quality.

Increasing importance of visual information – The importance of visual content has increased significantly in the past few years. Infographics, PowerPoint presentations (PPTs), images, graphics, videos, and so on are slowly taking the place of text on the internet. They are easily shareable and have the ability to attract more visitors. They can drive more traffic to websites, which is why SEO professionals are practicing these techniques more often. Additionally, it is a known fact that an individual remembers images better than text.

It is essential to pay attention to the information shared on websites, blogs, social media profiles, etc. Regular updates to blogs and social media profiles are necessary. It is also important to avoid duplicate, thin, and low-quality information. Visual content is the key to increased traffic, visitors, and online presence. It helps in increasing social presence via social signals, which is good for SEO as well as for brand awareness. Quality, relevant information defends your website from Google algorithm updates. Many SEO professionals are providing comprehensive search engine optimization services, including content marketing. Content is the king that now rules the SEO world. Every business owner must understand this fact and get their websites optimized accordingly. Give your customers something relevant and fresh to read or see (in visual form) and notice the difference in your rankings, traffic, and online presence.
#!/usr/bin/env python '''Utilities for reading and writing a Kobo ebook reader's database. ''' import os, sys import sqlite3 def escape_quotes(s): return s.replace("'", "''") class KoboDB: '''Interact with a Kobo e-reader's database: either one that's mounted live from the device, or a local copy. ''' def __init__(self, mountpath): '''Initialize with the path to where your Kobo is mounted, or where you keep local copies of the files. ''' self.mountpath = mountpath self.dbpath = None self.conn = None self.cursor = None def connect(self, dbpath=None): '''Open the database at the specified path. Defaults to .kobo/KoboReader.sqlite in the mountpath you've provided. ''' if dbpath: self.dbpath = dbpath elif self.mountpath: self.dbpath = os.path.join(mountpath, ".kobo/KoboReader.sqlite") else: print "No DB path specified" return self.conn = sqlite3.connect(self.dbpath) self.cursor = self.conn.cursor() def close(self): '''Commit any changes and close the database.''' self.conn.commit() self.conn.close() self.conn = None self.cursor = None def get_field_names(self, tablename): '''Get names of fields within a specified table. I haven't found documentation, but PRAGMA table_info returns: (index, fieldname, type, None, 0) I don't know what the None and 0 represent. ''' self.cursor.execute('PRAGMA table_info(%s);' % tablename) return [ row[1] for row in self.cursor.fetchall() ] def get_list(self, tablename, **kwargs): '''Usage: get_list(tablename, selectors='*', modifiers='', order='') ''' selectors = '*' modifiers = '' order = '' if kwargs: if 'selectors' in kwargs and kwargs['selectors']: if type(kwargs['selectors']) is list: selectors = ','.join(kwargs['selectors']) else: selectors = kwargs['selectors'] if 'modifiers' in kwargs and kwargs['modifiers']: if type(kwargs['modifiers']) is list: modifiers = " WHERE " + 'AND'.join(kwargs['modifiers']) else: modifiers = " WHERE " + kwargs['modifiers'] if 'order' in kwargs and kwargs['order']: order = " ORDER BY " + kwargs['order'] sql = "SELECT %s FROM %s%s%s;" % (selectors, tablename, modifiers, order) print sql self.cursor.execute(sql) return self.cursor.fetchall() def get_dlist(self, tablename, **kwargs): '''Usage: get_dlist(tablename, selectors='*', modifiers='', order='') ''' l = self.get_list(tablename, **kwargs) if kwargs and 'selectors' in kwargs: fields = kwargs['selectors'] else: fields = self.get_field_names(tablename) return [ dict(zip(fields, values)) for values in l ] def get_book_by_id(self, id): sql = "SELECT Title,Attribution FROM content WHERE ContentID='%s';" \ % escape_quotes(id); # print sql self.cursor.execute(sql) return self.cursor.fetchall()[0] def list_books(self): '''List all books in the database. ''' books = self.get_dlist("content", selectors=[ 'ContentID', 'Title', 'Attribution', 'Description', 'NumShortcovers', 'IsEncrypted', 'IsDownloaded', 'adobe_location' ], modifiers="content.BookTitle is null", order="content.Title") for book in books: print "%s (%s)" % (book["Title"], book["Attribution"]) print " ContentID:", book["ContentID"] if book["NumShortcovers"]: print " Chapters:", book["NumShortcovers"] print " Encrypted?", book["IsEncrypted"], print " Downloaded?", book["IsDownloaded"], if book["adobe_location"]: if book["adobe_location"] == book["ContentID"]: print " adobe_location: Yes" else: print "\n adobe_location:", book["adobe_location"] else: print # Description is very long; make this optional. 
# print " Description:", book["Description"] print def list_shelves(self, names=None): '''List all shelves (collections) in the database. ''' allshelves = {} if names: modifiers = " AND ".join(["ShelfName=%s" % name for name in names]) else: modifiers = None sc = self.get_dlist("ShelfContent", modifiers=modifiers) for item in sc: if item["ShelfName"] not in allshelves: allshelves[item["ShelfName"]] = [ item["ContentId"] ] else: allshelves[item["ShelfName"]].append(item["ContentId"]) for shelf in allshelves: print "\n===", shelf, "===" for id in allshelves[shelf]: print " %s (%s)" % self.get_book_by_id(id) def has_shelf(self, shelfname): '''Does a given shelfname exist? Helpful when checking whether to add a new shelf based on a tag. ''' shelves = self.get_dlist("Shelf", selectors=[ "Name" ], modifiers=[ "Name='%s'" % shelfname ]) print "Has shelf %s?" % shelfname, bool(shelves) return bool(shelves) def print_table(self, tablename, **kwargs): '''Usage: print_table(tablename, selectors='*', modifiers='', order='') ''' if kwargs and 'selectors' in kwargs and kwargs['selectors']: fields = kwargs['selectors'] print "kwargs: fields =", fields else: fields = self.get_field_names(tablename) print "no kwargs: fields =", fields for row in self.get_list(tablename, **kwargs): for i, f in enumerate(fields): # Must coerce row[i] to unicode before encoding, # even though it should be unicode already, # because it could be null. print f.encode('UTF-8'), ":", unicode(row[i]).encode('UTF-8') # Adding entries to shelves: def make_new_shelf(self, shelfname): '''Create a new shelf/collection. ''' print "=== Current shelves:" self.print_table("Shelf", selectors=[ "Name" ]) print "===" print "Making a new shelf called", shelfname # Skip type since it's not clear what it is and it's never set. # For the final three, PRAGMA table_info(Shelf); says they're # type BOOL, and querying that table shows true and false there, # but absolutely everyone on the web says you have to use # 1 and 0 for sqlite3 and that there is no boolean type. query = '''INSERT INTO Shelf(CreationDate, Id, InternalName, LastModified, Name, _IsDeleted, _IsVisible, _IsSynced) VALUES (DATETIME('now'), %s, '%s', DATETIME('now'), '%s', 0, 1, 1); ''' % (shelfname, shelfname, shelfname) print query self.cursor.execute(query) def add_to_shelf(self, kobobook, shelfname): print "===" print "Adding", kobobook["Title"], "to shelf", shelfname query = '''INSERT INTO ShelfContent(ShelfName, ContentId, DateModified, _IsDeleted, _IsSynced) VALUES ('%s', '%s', DATE('now'), 0, 0);''' % (shelfname, escape_quotes(kobobook['ContentID'])) print query self.cursor.execute(query) self.conn.commit() if __name__ == '__main__': import argparse parser = argparse.ArgumentParser(description="""Show details about a Kobo ebook reader. By default, show a list of books. Copyright 2015 by Akkana Peck; share and enjoy under the GPLv2 or later.""", formatter_class=argparse.RawDescriptionHelpFormatter) # Options: parser.add_argument("-m", "--mountdir", default="/kobo", help="""Path where the Kobo is mounted. Default: /kobo""") parser.add_argument("-d", "--db", default="$mountdir/.kobo/KoboReader.sqlite", help="""Path to the database. 
Default: $mountdir/.kobo/KoboReader.sqlite""") # Things we can do: parser.add_argument("-s", "--shelves", action='store_true', default=False, help="""Show shelves""") parser.add_argument("-S", "--shelfnames", action='store_true', default=False, help="""Show shelf names but not their contents""") args = parser.parse_args() args.db = args.db.replace('$mountdir', args.mountdir) try: koboDB = KoboDB(args.mountdir) koboDB.connect(args.db) except Exception, e: print "Couldn't open database at %s for Kobo mounted at %s" % \ (args.db, args.mountdir) print e sys.exit(1) if args.shelfnames: shelves = koboDB.get_dlist("Shelf", selectors=[ "Name" ]) for shelf in shelves: print shelf["Name"] elif args.shelves: koboDB.list_shelves() else: koboDB.list_books()
Please follow these rules for the best Domino Synchronizer experience possible. Since the dates are changed in Notes after using CTRL+X and CTRL+V (cut and paste) on an appointment, we have marked this as a bug in Notes. Having a start date after the due date is not valid in Notes. This has been reported to IBM, but obviously there is nothing InfoBridge Software can do about it for now. Currently the Domino Synchronizer does not support running in different time zones; the synchronizer simply uses the time zone of the computer where it is installed. This is because SuperOffice implemented its own time zone structure. You can basically compare it with the Windows time zone, but SuperOffice also allows extra time zones that cannot be expressed as a Windows time zone. Whether or not the Lotus Notes Domino environment uses the same time zone structures (values) as the Windows time zone, the answer is clear: there is no official support for differing time zones, meaning that the time zones used by the Domino Synchronizer server and the Notes Domino server must be kept in sync. Due to a limitation in Notes, SuperOffice appointments longer than 24 hours will only get a 24-hour block synchronized and not the full block.
""" PLEASE READ: This is the config file for the program. The file is written in python to make it easy for the program to read it, however this means you must follow the formatting in the file. For example, if the value is enclosed in quotation marks, it needs to stay that way. Additionally, do not edit anything past the DO NOT EDIT comment. Everything past there contains code which allows the program to retrieve this information. """ #-----WINDOW GEOMETRY SETTINGS----- #Width and Height in pixels WindowWidth = 1000 WindowHeight = 700 #Determines whether or not the windows can be resized. AllowResizing = True #-----DATABASE SETTINGS----- #Visitor Card Database File Name #The file extension will be automatically added DBFileName = "data" #-----LOGGING SETTINGS----- #Determines the folder name log files are sent to. loggingDir = "logs" #If true, the program will use a different drive to save log files. #NOTE: You MUST set this to true if using a different drive! useDDrive = False #If true, the program will periodically delete log files older than a certain #age specified by logPurgePeriod. doLogPurge = False #Age in days for log files to be purged. For example, if you put in 7, log #files older than 7 days will be removed on startup. However, in order to do #this, doLogPurge must be set to True. If it is not, this will be ignored. logPurgePeriod = 30 #-----OTHER SETTINGS----- #Allows operators to issue visitor passes despite a patron being banned overrideBans = False #Determines whether or not the SAM message is displayed. dispSAM = False # Determines which characters are acceptable in the name fields #---IMPORTANT!--- #Remember that as this config file is written in python, rules for characters #which need to be escaped MUST BE FOLLOWED. allowedNameChars = ["'", "-", " "] #Determines which characters are acceptable in the location/municipality #fields. #---IMPORTANT!--- #Remember that as this config file is written in python, rules for characters #which need to be escaped MUST BE FOLLOWED. allowedMuniChars = ["'", "-", " "] #Determines which states or regions appear in the drop down list for the #state field. #The states will appear on the list in the order they are entered in here. #The very first one will be the default state. states = ['MICHIGAN', 'ALABAMA', 'ALASKA', 'ARIZONA', 'ARKANSAS', 'CALIFORNIA', 'COLORADO', 'CONNECTICUT', 'DELAWARE', 'FLORIDA', 'GEORGIA', 'HAWAII', 'IDAHO', 'ILLINOIS', 'INDIANA', 'IOWA', 'KANSAS', 'KENTUCKY', 'LOUISIANA', 'MAINE', 'MARYLAND', 'MASSACHUSETTS', 'MINNESOTA', 'MISSISSIPPI', 'MISSOURI', 'MONTANA', 'NEBRASKA', 'NEVADA', 'NEW HAMPSHIRE', 'NEW JERSEY', 'NEW MEXICO', 'NEW YORK', 'NORTH CAROLINA', 'NORTH DAKOTA', 'OHIO', 'OKLAHOMA', 'OREGON', 'PENNSYLVANIA', 'RHODE ISLAND', 'SOUTH CAROLINA', 'SOUTH DAKOTA', 'TENNESSEE', 'TEXAS', 'UTAH', 'VERMONT', 'VIRGINIA', 'WASHINGTON', 'WEST VIRGINIA', 'WISCONSIN', 'WYOMING', 'AMERICAN SAMOA', 'DISTRICT OF COLUMBIA', 'GUAM', 'NORTHERN MARIANA ISLANDS', 'PUERTO RICO', 'VIRGIN ISLANDS', 'OTHER'] #Determines how the database will be sorted when the program is launched. #Defaults to 'id' #'id' = by ID number #'last' = by last name #'first' = by first name #'middle' = by middle name #'muni' = by municipality #'state' = by state #'cards' = by number of cards issued #'status' = by status defaultsort = 'last' #Determines the order by which the default sort is sorted. #Defaults to 'up' #'up' = ascending #'down' = descending defaultsortorder = 'up' #Determines the period within visitor cards can be issued. 
#Essentially, if the period is set to 10 days and #a patron gets a visitor card on 11-1-2015, 11-2-2015, and 11-3-2015, #then they may not get another visitor card until 11-11-2015 at the earliest. #Even then, that patron can only get 1 visitor card on that day. They'll have #to wait a day to get 2, and another day to get three. #The value is a positive integer, and is in days. VisitorCardPeriod = 365 #The number of visitor cards per VisitorCardPeriod that a patron can get. #This should be a positive whole number integer which is not zero. CardsPerPeriod = 3 #If it is set, determines which municipality(s) the visitor card period above #will apply to. The value is ALWAYS an array of strings, even if there is only #one municipality. If you set it to a blank array [] then the period will apply to all #patrons. Additionally, the municipalities must ALWAYS be in all caps. PeriodDependentMunis = ['NORTHVILLE'] #If set to True, the program will check for duplicate records when adding or #editing existing records. If it finds an existing record which matches, #it will not allow you to add the new one. EnforceNoDuplicates = True #----------DO NOT EDIT PAST THIS LINE----------# class Config(): def __init__(self): self.geo = str(WindowWidth) + 'x' + str(WindowHeight) self.lockgeo = AllowResizing self.dbfn = DBFileName + ".db" self.overrideBans = overrideBans self.dispSAM = dispSAM self.allowedNameChars = allowedNameChars self.allowedMuniChars = allowedMuniChars self.states = states self.defaultsort = defaultsort self.defaultsortorder = defaultsortorder self.visitorcardperiod = VisitorCardPeriod self.loggingDir = loggingDir + "/" self.useDDrive = useDDrive self.doLogPurge = doLogPurge self.logPurgePeriod = logPurgePeriod self.PeriodDependentMunis = PeriodDependentMunis self.CardsPerPeriod = CardsPerPeriod self.EnforceNoDuplicates = EnforceNoDuplicates
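The code after the DO NOT EDIT line wraps the settings in a Config class so the rest of the program can read them without touching the module-level names. A minimal usage sketch, assuming the settings file above is saved as config.py (the module name is an assumption, not something the file states):

# Hedged usage sketch; assumes the settings file above is importable as config.py.
from config import Config

cfg = Config()
print(cfg.geo)               # "1000x700", built from WindowWidth and WindowHeight
print(cfg.dbfn)              # "data.db", DBFileName plus the automatic extension
print(cfg.defaultsort)       # "last": sort the database by last name at launch
if cfg.doLogPurge:
    print("Purging log files older than", cfg.logPurgePeriod, "days")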
Corporate services and business support services in Malta are provided by specialized, competent firms that serve business partners and customers. Numerous corporate service providers in Malta offer a high level of corporate services to businesses. These services are essential to setting up and running a business, especially in a competitive market such as Malta, so a company should seek a reputable, technologically advanced and preferably specialized firm. There are also one-stop management companies in Malta made up of company managers, tax advisors, chartered accountants and corporate lawyers; such firms provide all-round expertise across legal, tax, structuring and accounting services. Corporate services companies assist both medium-sized and large entrepreneurs during the incorporation of Malta companies. They also help international companies that wish to locate their business in Malta by structuring the business in a tax-compliant manner. One example of a corporate service is business advisory: advising on a client's business prospects and examining the financial, tax, market, legal and risk factors involved in changing an existing company or launching a startup. This requires expertise from firms experienced in business formation, and there are international corporate service consultants who specialize in company incorporation in Malta, since company law varies from country to country. Another major corporate service is banking and finance, which helps a client's organization deal with financing and banking pressures; typical services include offshore bank accounts, corporate loans, corporate bank accounts, multicurrency accounts and private banking. Accounting and tax services are another major category, essential to companies that want to outsource this routine work; they include tax registration, tax returns, tax planning, maintaining cash flow books and submitting the necessary documents to the authorities. Corporate services firms also support market research and risk evaluation for financial decisions, and provide investment and stock exchange services. In conclusion, companies such as real estate agencies that want to take advantage of Malta's pro-business environment should use state-of-the-art corporate services to reach their full potential, because company law is the backbone of modern business and corporate service providers handle administration and regulatory matters under Maltese corporate law.
import asyncio import functools from unittest import mock as utm import hashlib import arrow from pyfakefs import fake_filesystem as ffs class PathMock(utm.Mock): def __init__(self, fs=None, *pathsegments, **kwargs): super(PathMock, self).__init__() self._fs = fs self._path = self._fs.JoinPaths(*pathsegments) def iterdir(self): fake_os = ffs.FakeOsModule(self._fs) for child in fake_os.listdir(self._path): yield PathMock(self._fs, self._path, child) def stat(self): fake_os = ffs.FakeOsModule(self._fs) return fake_os.stat(self._path) def mkdir(self, mode=0o777, parents=False, exist_ok=False): fake_os = ffs.FakeOsModule(self._fs) try: fake_os.makedirs(self._path) except OSError as e: # iDontCare pass return True def is_file(self): fake_os = ffs.FakeOsModule(self._fs) return fake_os.path.isfile(self._path) def is_dir(self): fake_os = ffs.FakeOsModule(self._fs) return fake_os.path.isdir(self._path) def unlink(self): fake_os = ffs.FakeOsModule(self._fs) return fake_os.unlink(self._path) def open(self, mode): fake_open = ffs.FakeFileOpen(self._fs) return fake_open(self._path, mode) def __truediv__(self, name): return PathMock(self._fs, self._path, name) def __str__(self): return self._path class NodeMock(utm.Mock): def __init__(self, fs, path, *args, **kwargs): super(NodeMock, self).__init__() self._fs = fs self._path = path @property def name(self): dirname, basename = self._fs.SplitPath(self._path) return basename @property def modified(self): f = self._fs.GetObject(self._path) return arrow.fromtimestamp(f.st_mtime).replace(tzinfo='local') @property def trashed(self): return False @property def is_folder(self): fake_os = ffs.FakeOsModule(self._fs) return fake_os.path.isdir(self._path) @property def size(self): fake_os = ffs.FakeOsModule(self._fs) return fake_os.path.getsize(self._path) @property def md5(self): fake_open = ffs.FakeFileOpen(self._fs) return get_md5(fake_open, self._path) def create_async_mock(return_value=None): loop = asyncio.get_event_loop() f = loop.create_future() f.set_result(return_value) return utm.Mock(return_value=f) def create_fake_local_file_system(): fs = ffs.FakeFilesystem() file_1 = fs.CreateFile('/local/file_1.txt', contents='file 1') file_1.st_mtime = 1467800000 file_2 = fs.CreateFile('/local/folder_1/file_2.txt', contents='file 2') file_2.st_mtime = 1467801000 folder_1 = fs.GetObject('/local/folder_1') folder_1.st_mtime = 1467802000 return fs def create_fake_remote_file_system(): fs = ffs.FakeFilesystem() file_3 = fs.CreateFile('/remote/file_3.txt', contents='file 3') file_3.st_mtime = 1467803000 file_4 = fs.CreateFile('/remote/folder_2/file_4.txt', contents='file 4') file_4.st_mtime = 1467804000 folder_2 = fs.GetObject('/remote/folder_2') folder_2.st_mtime = 1467805000 return fs def get_md5(open_, path): hasher = hashlib.md5() with open_(path, 'rb') as fin: while True: chunk = fin.read(65536) if not chunk: break hasher.update(chunk) return hasher.hexdigest()
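create_async_mock above wraps a plain Mock around an already-resolved future, so the mock can stand in for a coroutine and be awaited directly in a test. A short usage sketch (the fake_download name and its argument are illustrative, not part of the code above):

# Hedged usage sketch for create_async_mock: the mock is awaitable because
# calling it returns a future whose result is already set.
import asyncio

async def exercise():
    fake_download = create_async_mock(return_value=b'file 1')  # illustrative name
    data = await fake_download('/local/file_1.txt')            # resolves immediately
    fake_download.assert_called_once_with('/local/file_1.txt')
    return data

loop = asyncio.get_event_loop()
assert loop.run_until_complete(exercise()) == b'file 1'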
On April 2, 2019 the Consolidated election will occur, affecting the various communities in DeKalb County. There are a variety of races that have drawn significant attention, including the Mayor’s race in Shabbona and the School Board race in DeKalb. With that said, there are other races in the City of DeKalb that are on the ballot and deserve attention. There are candidates for Kishwaukee College Board, Dekalb Park District and one contested Ward race for City of DeKalb Council seat. In the past, Consolidated elections have had low turnout. In 2015, the Consolidated election had a meager turnout of 7%. And that 2015 election included Mayoral races in both DeKalb and Sycamore. One difference in this election is that according to DeKalb County Clerk, Doug Johnson, voters will have a better experience at the polls because of newer ballot machines that offer a larger screen. What seems to be the “hot” race in the City of DeKalb is the election of School Board members. There are nine (9) candidates running for four (4) Board positions. There were three (3) candidate forums for DeKalb School Board which gave community members a chance to learn more about the candidates. It is important to thank the candidates for making the effort to both run and represent the community on the school board. If you have never run for office, it is intimidating to sit on a stage and be drilled by unannounced questions. The first forum was held at DeKalb High School. When the house lights went down and the stage and spot lights lit the stage, some of the candidates seemed a little nervous with their answers. As the evening continued, the answers and the candidates seemed more relaxed in the setting. The questions seemed a little soft for this event. Two of the candidates did not attend: Fred Davis and Stephen Irving. The next forum was hosted by the DeKalb Chamber and held at the Egyptian Theater. This larger venue had a smaller crowd but gave the candidates a chance to expound upon their answers. Some of the questions were a little more insightful and gave the candidates more to ponder. One question that scored low was the reason for the higher cost of Chrome books for students (not confirmed) in DeKalb District 428, compared to Sycamore district 427. Some questions were directed at select members, with other candidates having the option to respond. Because the DeKalb Chamber poorly scheduled the event on the same evening as the DeKalb School Board meeting, Rick Smith and Fred Davis were unable to attend. Stephen Irving was also missing. The final forum was held at the University Village Meeting Room and was much more relaxed, as the audience and candidates were up-close and personal. The questions were more thorough and it was exciting to see the comfort level of the candidates improved. One problem, though, was that more questions could have been asked if candidates would have had the option to speak (as in the Chamber event), instead of ALL six of those in attendance being required to respond to every question. It also would have been more productive if there were not two forums in the same week, which could have affected the attendance of candidates. Missing at this event were Smith, Davis and Irving. Each of the candidates running for School Board bring something positive to the table. To expect the candidates to have ALL the answers is unreasonable. DeKalb County Online attended all of the candidate forums. Jermony Olson: Jeromy is extremely concerned about accountability of school funding. 
Jeromy (49) is a CEO of a major company in the St. Charles area. He feels strongly that School Bonds should be a major concern and addressed that future taxing and bond requirements will have a higher value if addressed in advanced. Being a responsible steward of the school’s money is extremely important. Sarah Moses: Sarah stated at the last forum that the community is concerned by high taxes and low ranking schools. Sarah (44) is a former teacher and current business owner in DeKalb. She is concerned that student expectations are not being met for a variety of reasons. Samatha McDavid: Samatha feels a need for more diversity in the teaching staff and more vision in paying down debt while maintaining academic standards. Her husband works in the school district. Samatha (30) feels School Board members should take a more active role in visiting the schools and attending events. This comment was endorsed by other candidates, who agreed that board members should attend activities and sporting events. Side Bar: Not one candidate talked about attending concerts, dance recitals and theater events. All of DeKalb schools have good band programs, theater events and a growing orchestra program. Orion Carey: Orion is concerned about declining achievement levels in the DeKalb Schools compared to other schools in the state. Orion (31) would like to see a school board member attend City Council meetings, as this is the legislative body that can be supportive (look at your home tax bill to see how much the school district, Kish College and the Park District get). Side bar: Superintendent Craven is a non-voting member of the DeKalb Chamber Board of Directors. December Richardson: December feels that there is a tax burden, but there is also a need to spend money to improve the schools. December (28) believes her background and advanced education makes her one of the best and most-learned candidates. Her comments on discipline questioned whether the issue is a problem with the student or with the parent, or a combination of both. Side bar: Some of the older people in attendance remembered when growing up, if you got in trouble at school you knew there would be repercussions when you got home. David Seymour: David feels his background and education would make him as asset to the Board. David (41) wants more diversity in the teachers’ ranks for both mentoring and acceptance. He wants NIU to take a more active role in being an outreach as well as a resource to the DeKalb School District. Side bar: Recent reports show a declining number of college students seeking a career in education, and with 48.4% of all high school graduates leaving the State of Illinois for school it makes it even more of a challenge to recruit future teachers. Rick Smith: Rick feels it is important to address the future by acquiring good teachers and administrators when current ones depart. Rick also feels it is important to have continuity within the School Board by having current members re-elected. Note: Mr. Smith attended only one of the forums. Steve Irving: Steve Irving contacted DCO and acknowledged that he did not attend the forums. Steve relayed that he had planned and purchased a family vacation months before deciding to run for the school board. Steve wants home owners to look at their tax bill and choose wisely. He is a local businessman and on the Board at Kishwaukee Country Club. Note: Mr. Irving did not attend any of the forums. 
Fred Davis: Fred Davis did not attend any forums and made no effort to respond to a request for information. NOTE: The High School forum is available to view. The local newspaper also posted background stories on most of the candidates. Jermony Olson: The board recently decided to allocate $2 million per year, from the district fund balance, for the next 5 years as an abatement to help ease taxpayers into the annual increases that are coming for the repayment of the outstanding bonds. This is a good thing, however in this case the dollars are paying off principal and interest and thus do not have a huge impact on the debt service in total. Plus put the entire TIF funds, which is actually $3M toward the debt, but that we should front end load the $2M/year and pay a total of $13M toward our debt in 2019. December Richardson: The school district should put the majority of the surplus money into our district’s underperforming schools. The money can be used to equip our teachers and students with everything they need to be successful both in and out of the classroom. Orion Carey: Surplus funds, outside of normal contingency funds, should be used to pay down our significant debt. If the debt isn’t paid down, we will be in trouble when we need to borrow to once again increase our capacity. Steve Irving: Pay down the debt immediately. Jeromy Olson: Our average classroom size in the district is actually 20 students according to Illinoisreportcard.com. We currently have a few larges classes that are working their way through the elementary system. According to the demographers that studied our population we should actually be seeing a reduction in the student population over the next 5 years. As stated before, if there are children in the district that should not be, even 2% reduction would represent almost 6 classrooms worth of children which would help. December Richardson: Increasing class sizes should be a last resort for any school district. It should never be a starting point. There are many other ways to trim a budget. If all other options are exhausted, then schools may be forced to RIF (Reduction in Force) teachers and increase class size. Schools should use benchmark assessments to determine student placement. Orion Carey: As unfortunate as it is, crowded classrooms are not a new issue nor is it an issue easily remedied. Until such time that facility size can be increased, some creativity is going to be necessary. Additional teacher’s assistants in classrooms could be a way to provide some relief. Communication with other districts to see how they are handling large class sizes would be beneficial, no doubt. Steve Irving: With crowded classrooms – if funds are available, create another section to reduce overcrowding. How can the school board be more accountable to the public on student achievement, school performance and progress over time? Jeromy Olson: Only by setting goals with an action plan to achieve them and then measuring the progress or regression. In order to do that, board members need to attend meetings, be engaged and accountable. December Richardson: If the school Board committee was required to answer questions from the public, then the public can start holding the appropriate people accountable when certain needs or concerns are addressed. How many school visits has the Superintendent made to observe classroom instruction? Orion Carey: Transparency goes a long way towards accountability. 
To the public that takes an interest, I believe total transparency with grades and test results is the best way to provide that public with the means to approach the schools with anything they may hold an issue with or like to see changed. Steve Irving: School Board (member) can do several things: move monthly meeting each month to a different school, publish board minutes on the school web site, issue minutes via e-mail to all parents, have each Board member visit a school for a full day and sit in on classrooms, have regular dialogue with Teachers, Principals, Secretaries, Kitchen and Janitorial staff and seek input. How will you engage the community to improve public schools in the district? Jeromy Olson: First, board members need to visit the classrooms. You cannot inspect what you expect if you never see it. If the board is going to create policy to address the situations in our classrooms, they need to be present in order to give input and ideas. Second, have at least one board member attend each city council meeting. This will benefit both the district and the city by creating open channels for dialogue about how help solve problems. Lastly, engage the University on a scheduled basis to ensure that the goals between parties are aligned. This would also help us with the teacher recruiting process. The stronger this relationship is the better. December Richardson: I would like to organize a curriculum fair and host an open house. This will create opportunities for the community to see high-quality work your school is addressing through a curriculum fair or school open house. This will develop a strong connection to my community by demonstrating my school’s good deeds in an inviting setting. I plan to send an email newsletter. Orion Carey: I believe that the school board needs a firm relationship established with the city council. Power is in numbers and if we can work together, both will come out ahead and for the better of DeKalb. Steve Irving: Engage the community more: move the meetings around as mentioned (above), post achievements via newsletter. What is the biggest challenge to the current school district? Jeromy Olson: Without a doubt it is the continued downward slide of our standardized test scores as compared to the rest of the state. Our district school rankings make it difficult to attract growth and maintain a strong tax base. People are leaving DeKalb at an alarming rate to move into other districts and the only way to reverse that is to improve our results academically. December Richardson: We need to put the necessary resources in place to ensure our students feel the support that they need to succeed both in and out of the classroom. We want more of our students to see going to college as a must and not an option. Failure is not an option when it comes to our students. They are our future workforce and this election will largely impact them in a negative way, if the appropriate people are not elected for the school board. Orion Carey: Getting to kids as early as possible in their scholastic life. Pre-K school and early education is going to be what gets us ahead on improving test scores and results. Making sure students and families without sufficient resources are worked with to guarantee kids are making it to school prepared for their education. Steve Irving: Biggest challenge: Improve low rankings, reduce debt to allow for a lower property tax burden, improve discipline by hiring/promoting strong/firm Principals, and enforce a code of conduct. 
NOTE: Other questions not listed above were answered at the various forums. DCO would like to thank former Mayor Bessie Chronopoulos for reviewing and endorsing the questionnaire for the School Board candidates. The candidate forums touched on more than 14 different issues. It is important that any board have diversity, with members specializing in a variety of areas: pedagogy, academics, finance, economics, problem solving, negotiations, and most importantly common sense. In conclusion, DeKalb County Online encourages all citizens to be informed and to learn about all candidates in all races. This is not a battle of neighborhood yard signs or "my candidate is better than your candidate." On Wednesday morning there will be new faces in select offices, and it will be time to work together for the betterment of everyone. Only four (4) School Board candidates were willing to answer the questions posed by DCO: Steve Irving, Jeromy Olson, Orion Carey, and December Richardson. CONGRATS!!! Not responding were: Samantha McDavid, Sarah Moses, Fred Davis (I), Rick Smith (I), and David Seymour.
from __future__ import print_function from math import pi from astropy.io import fits import numpy as np import matplotlib.pyplot as plt import stsci.convolve._lineshape as ls from klpysci.fit import fittools as ft # Utility function to open and plot original spectrum def openNplot1d (filename, extname=('SCI',1)): hdulist = fits.open(filename, 'readonly') sp = hdulist[extname].data x = np.arange(sp.shape[0]) plt.clf() plt.plot (x, sp) return hdulist # Interactive specification of the section around the feature to work on def getsubspec (sp): # here it should be graphical, but I'm still working on that x1 = input("Left edge pixel: ") x2 = input("Right edge pixel: ") flux = sp[x1:x2] pixel = np.arange(x1,x2,1) sub = np.zeros((2,flux.shape[0])) sub[0] = pixel sub[1] = flux #plt.clf() #plt.plot (pixel, flux) #input('continue') return sub def plotresult (sp, bf, nsp): plt.clf() x = np.arange(0,sp.shape[0],1) plt.plot (x, sp) plt.plot (x, nsp) x = np.arange(0,bf.shape[0],1) plt.plot (x, bf) def rmfeature (inspec, outspec, params=None, profile='voigt'): #---- plot and get data spin = openNplot1d(inspec) specdata = spin['SCI'].data spin.close() #---- Get data for section around feature linedata = getsubspec(specdata) #---- Calculate and set initial parameter from linedata if params is None: contslope = (linedata[1][0] - linedata[1][-1]) / \ (linedata[0][0] - linedata[0][-1]) contlevel = linedata[1][0] - (contslope * linedata[0][0]) lineindex = linedata.argmin(1)[1] lineposition = linedata[0][lineindex] linestrength = linedata[1][lineindex] - \ ((contslope*linedata[0][lineindex]) + contlevel) linewidth = 20. # pixels. should find a better way. cte = ft.Parameter(contlevel) m = ft.Parameter(contslope) A = ft.Parameter(linestrength) mu = ft.Parameter(lineposition) fwhmL = ft.Parameter(linewidth) fwhmD = ft.Parameter(linewidth) else: cte = ft.Parameter(params[0]) m = ft.Parameter(params[1]) A = ft.Parameter(params[2]) mu = ft.Parameter(params[3]) fwhmL = ft.Parameter(params[4]) fwhmD = ft.Parameter(params[5]) #---- Define function [linear (continuum) + lorentz (feature)] # I don't know where the factor 10 I need to apply to A() comes from. # I'll need to figure it out. # # Also, those functions apparently need to be defined after the # cte(), m(), etc. Parameter instances are defined. def line(x): return cte() + m()*x def lorentz(x): # The version in numarray.convolve._lineshape is wrong amp = A() * fwhmL() * pi / 2. return amp * (fwhmL()/(2.*pi))/((x-mu())**2. + (fwhmL()/2.)**2.) def voigt(x): # import numarray.convolve._lineshape as ls # a = sqrt(log(2.)) * fwhmL() / (2. * fwhmD()) # b = 2. * sqrt(log(2.)) * (x - mu()) / fwhmD() # H = exp(-(b**2)) + a/(sqrt(pi)*b**2.) amp = A() * 1. # not right # amp = A() * (1. + (fwhmL()/fwhmD())*(fwhmL()*pi/2.)) return amp * ls.voigt(x, (fwhmD(),fwhmL()), mu()) def contlorentz(x): return line(x) + lorentz(x) def contvoigt(x): return line(x) + voigt(x) #---- Non-linear least square fit (optimize.leastsq) if (params==None): if profile=='voigt': # Get initial params from Lorentz fit. 
ft.nlfit(contlorentz, [cte, m, A, mu, fwhmL], linedata[1], x=linedata[0]) ft.nlfit(contvoigt, [cte, m, A, mu, fwhmD, fwhmL], linedata[1], x=linedata[0]) elif profile=='lorentz': ft.nlfit(contlorentz, [cte, m, A, mu, fwhmL], linedata[1], x=linedata[0]) fwhmD=ft.Parameter(None) else: pass #---- retrieve line profile parameters only and create a profile # with zero continuum for the entire range for the original spectrum # Then remove the feature if profile=='voigt': newspecdata = specdata - voigt(np.arange(0,specdata.shape[0],1)) bestfit = contvoigt(np.arange(0,specdata.shape[0],1)) elif profile=='lorentz': newspecdata = specdata - lorentz(np.arange(0,specdata.shape[0],1)) bestfit = contlorentz(np.arange(0,specdata.shape[0],1)) #---- display the original spectrum, the best fit and the # new spectrum. The feature should be gone plotresult(specdata, bestfit, newspecdata) print("Best Fit Parameters:") print(" section = ",linedata[0][0],",",linedata[0][-1]+1) print(" cte = ",cte()) print(" m = ",m()) print(" A = ",A()) print(" mu = ",mu()) print(" fwhmL = ",fwhmL()) print(" fwhmD = ",fwhmD()) try: input = raw_input except NameError: pass write = input('Write corrected spectrum to '+outspec+'? (y/n): ') #---- write output spectrum if write=='y': spout = fits.open(inspec,'readonly') # just to create copy of HDUList spout['SCI'].data = newspecdata spout.writeto(outspec, output_verify='ignore') #print ("Not implemented yet, but it isn't the app cool!") else: print("Too bad.")
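rmfeature is the entry point of the script above: it plots the input spectrum, asks for the pixel range around the feature, fits a continuum plus a Lorentzian or Voigt line, shows the result and optionally writes the corrected spectrum. A minimal usage sketch follows; the FITS file names are placeholders, and the params order comes from how the function unpacks the list (cte, m, A, mu, fwhmL, fwhmD). Note that under Python 3 the pixel limits returned by input() inside getsubspec would need an int() conversion.

# Hedged usage sketch; the file names are placeholders.
# Fit a Voigt profile with initial guesses derived from the selected section:
rmfeature('object_spectrum.fits', 'object_spectrum_clean.fits', profile='voigt')

# Or force a Lorentzian profile and supply starting parameters explicitly,
# in the order the function unpacks them: [cte, m, A, mu, fwhmL, fwhmD].
rmfeature('object_spectrum.fits', 'object_spectrum_clean.fits',
          params=[100., 0.0, -250., 512., 20., 20.],
          profile='lorentz')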
The most anticipated bout between Conor McGregor and Khabib Nurmagomedov has finally come to fruition. On Saturday, October 6, the biggest fight in UFC history set the seal on Khabib's name as he remains undefeated: Nurmagomedov (27-0) finished McGregor by submission in round 4, and the Russian fighter's place in the UFC's pound-for-pound rankings is now expected to be cemented. "The Biggest Night" turned into one of the worst nights for MMA fans, and many supporters still can't get over the ending. The MMA world knows the beef between "The Notorious" and "The Eagle" that has been circulating online ever since McGregor's bus attack at the Barclays Center in Brooklyn back in April. After the fight, the Dagestani champion lost control, jumped over the Octagon fence and attacked McGregor's teammate Dillon Danis, and the chaos did not stop there: a member of Team Khabib climbed into the Octagon and sucker-punched the Irish fighter while he was watching the melee. Another camera angle, however, showed McGregor throwing a punch at Nurmagomedov's team first. UFC President Dana White later confirmed that three members of Khabib's team were arrested and taken to jail. On top of that, before the main event even took place, a fight broke out in the crowd after the UFC 229 weigh-ins between Russian and Irish fans. The Dublin fighter made $3 million, while the undefeated lightweight champion was on a disclosed $2 million flat rate. However, reports circulating online say that McGregor has already been paid, while the NSAC is freezing and withholding the undefeated champion's paycheck because of the incident. According to MMA Junkie, there is no word on how long the NSAC might withhold Nurmagomedov's pay while it investigates the post-fight melee.

I believe they behaved as badly as each other, with very unsportsmanlike behaviour; however, tennis players and others do it regularly. I believe they should be fined, but one shouldn't be paid without the other, who clearly won. Haha, they're mad McGregor got beat. I'm sure Khabib is rich anyway, so waiting for his check is probably the least of his worries. 27-0, haha, McGregor looked like an amateur. First of all, I never try to hide my comments: I am not a runner. To start with, McGregor proved not to be a good sportsman, and on this I am very glad that he lost so soundly: the attack on the bus was a criminal act, and he should have been charged. People like him, like Tyson, should be banned from playing sport; the word "sport" is based on nobility, fairness and no bias. Once again the sport has been damaged by people who should not be there, and by corruption in most sporting activities, mostly through selfishness but even more through betting. I remember when Australia had an old saying: if something was not fair, they used to say "it's not cricket"; then they discovered that people used to bet even on a single ball. The word "sport" has been, and will be, abused; it is sad, but that is the way it is.