# Copyright (c) 2020 Ultimaker B.V.
# Uranium is released under the terms of the LGPLv3 or higher.

import json
import time
import uuid
from collections import deque
from threading import RLock
from typing import Callable, cast, Dict, Set, Union, Optional, Any

from PyQt5.QtCore import QObject, QUrl, Qt, pyqtSignal, pyqtProperty
from PyQt5.QtNetwork import QNetworkAccessManager, QNetworkRequest, QNetworkReply

from UM.Logger import Logger
from UM.TaskManagement.HttpRequestData import HttpRequestData
from UM.TaskManagement.HttpRequestScope import HttpRequestScope
from UM.TaskManagement.TaskManager import TaskManager


#
# Summary:
#
# HttpRequestManager is a wrapper around Qt's QNetworkAccessManager that makes it more convenient to do the following:
#  (1) Keep track of the HTTP requests one has issued. This is done via the HttpRequestData object. Each
#      HttpRequestData object represents an issued HTTP request.
#  (2) A request can be aborted if it hasn't been issued to QNetworkAccessManager or if it's still running by
#      QNetworkAccessManager.
#  (3) Updates on each request are done via user-specified callback functions. So, for each request, you can give
#      optional callbacks:
#      - A success callback, invoked when the request has finished successfully.
#      - An error callback, invoked when an error has occurred, including when a request was aborted by the user or
#        timed out.
#      - A download progress callback, invoked when there's an update on the download progress.
#      - An upload progress callback, invoked when there's an update on the upload progress.
#  (4) An optional timeout can be specified for an HTTP request. Note that this timeout is the max wait time between
#      each time the request gets a response from the server. This is handled via the download and upload progress
#      callbacks. A QTimer is used for each request to track its timeout if set. If the timer gets triggered and there
#      is indeed a timeout, the request will be aborted. All requests that are aborted due to a timeout will result in
#      invoking its error callback with the error code QNetworkReply::OperationCanceledError, but the HttpRequestData
#      will have its "is_aborted_due_to_timeout" property set to True.
#
# All requests are handled by QNetworkAccessManager. We consider that all the requests that are being handled by
# QNetworkAccessManager at a certain point are running concurrently.
#


#
# A dedicated manager that processes and schedules HTTP requests. It provides public APIs for issuing HTTP requests,
# and the results, successful or not, will be communicated back via callback functions. For each request, the
# following callback functions can be optionally specified:
#
#  - callback: This function will be invoked when a request finishes. (bound to QNetworkReply.finished signal)
#       Its signature should be "def callback(QNetworkReply) -> None" or other compatible form.
#
#  - error_callback: This function will be invoked when a request fails. (bound to QNetworkReply.error signal)
#       Its signature should be "def callback(QNetworkReply, QNetworkReply.NetworkError) -> None" or other compatible
#       form.
#
#  - download_progress_callback: This function will be invoked whenever the download progress changes. (bound to
#       QNetworkReply.downloadProgress signal)
#       Its signature should be "def callback(bytesReceived: int, bytesTotal: int) -> None" or other compatible form.
#
#  - upload_progress_callback: This function will be invoked whenever the upload progress changes. (bound to
#       QNetworkReply.uploadProgress signal)
#       Its signature should be "def callback(bytesSent: int, bytesTotal: int) -> None" or other compatible form.
#
#  - timeout (EXPERIMENTAL): The timeout in seconds for a request. This is the timeout since the request was first
#       issued to the QNetworkManager. NOTE that this timeout is NOT the timeout between each response from the other
#       party, but the timeout for the complete request. So, if you have a very slow network which takes 2 hours to
#       download a 1MB file, and for this request you set a timeout of 10 minutes, the request will be aborted after
#       10 minutes if it's not finished.
#
class HttpRequestManager(TaskManager):

    __instance = None  # type: Optional[HttpRequestManager]

    internetReachableChanged = pyqtSignal(bool)

    @classmethod
    def getInstance(cls, *args, **kwargs) -> "HttpRequestManager":
        if cls.__instance is None:
            cls.__instance = cls(*args, **kwargs)
        return cls.__instance

    def __init__(self, max_concurrent_requests: int = 4, parent: Optional["QObject"] = None,
                 enable_request_benchmarking: bool = False) -> None:
        if HttpRequestManager.__instance is not None:
            raise RuntimeError("Try to create singleton '%s' more than once" % self.__class__.__name__)
        HttpRequestManager.__instance = self

        super().__init__(parent)

        self._network_manager = QNetworkAccessManager(self)
        self._account_manager = None

        self._is_internet_reachable = True

        # All the requests that have been issued to the QNetworkManager are considered as running concurrently. This
        # number defines the max number of requests that will be issued to the QNetworkManager.
        self._max_concurrent_requests = max_concurrent_requests

        # A FIFO queue for the pending requests.
        self._request_queue = deque()  # type: deque

        # A set of all currently in progress requests
        self._requests_in_progress = set()  # type: Set[HttpRequestData]
        self._request_lock = RLock()
        self._process_requests_scheduled = False

        # Debug options
        #
        # Enabling benchmarking makes the manager time how long each request takes from start to finish and log it.
        self._enable_request_benchmarking = enable_request_benchmarking

    @pyqtProperty(bool, notify = internetReachableChanged)
    def isInternetReachable(self) -> bool:
        return self._is_internet_reachable

    # Public API for creating an HTTP GET request.
    # Returns an HttpRequestData instance that represents this request.
    def get(self, url: str,
            headers_dict: Optional[Dict[str, str]] = None,
            callback: Optional[Callable[["QNetworkReply"], None]] = None,
            error_callback: Optional[Callable[["QNetworkReply", "QNetworkReply.NetworkError"], None]] = None,
            download_progress_callback: Optional[Callable[[int, int], None]] = None,
            upload_progress_callback: Optional[Callable[[int, int], None]] = None,
            timeout: Optional[float] = None,
            scope: Optional[HttpRequestScope] = None) -> "HttpRequestData":
        return self._createRequest("get", url, headers_dict = headers_dict,
                                   callback = callback, error_callback = error_callback,
                                   download_progress_callback = download_progress_callback,
                                   upload_progress_callback = upload_progress_callback,
                                   timeout = timeout,
                                   scope = scope)

    # Public API for creating an HTTP PUT request.
    # Returns an HttpRequestData instance that represents this request.
    def put(self, url: str,
            headers_dict: Optional[Dict[str, str]] = None,
            data: Optional[Union[bytes, bytearray]] = None,
            callback: Optional[Callable[["QNetworkReply"], None]] = None,
            error_callback: Optional[Callable[["QNetworkReply", "QNetworkReply.NetworkError"], None]] = None,
            download_progress_callback: Optional[Callable[[int, int], None]] = None,
            upload_progress_callback: Optional[Callable[[int, int], None]] = None,
            timeout: Optional[float] = None,
            scope: Optional[HttpRequestScope] = None) -> "HttpRequestData":
        return self._createRequest("put", url, headers_dict = headers_dict, data = data,
                                   callback = callback, error_callback = error_callback,
                                   download_progress_callback = download_progress_callback,
                                   upload_progress_callback = upload_progress_callback,
                                   timeout = timeout,
                                   scope = scope)

    # Public API for creating an HTTP POST request.
    # Returns an HttpRequestData instance that represents this request.
    def post(self, url: str,
             headers_dict: Optional[Dict[str, str]] = None,
             data: Optional[Union[bytes, bytearray]] = None,
             callback: Optional[Callable[["QNetworkReply"], None]] = None,
             error_callback: Optional[Callable[["QNetworkReply", "QNetworkReply.NetworkError"], None]] = None,
             download_progress_callback: Optional[Callable[[int, int], None]] = None,
             upload_progress_callback: Optional[Callable[[int, int], None]] = None,
             timeout: Optional[float] = None,
             scope: Optional[HttpRequestScope] = None) -> "HttpRequestData":
        return self._createRequest("post", url, headers_dict = headers_dict, data = data,
                                   callback = callback, error_callback = error_callback,
                                   download_progress_callback = download_progress_callback,
                                   upload_progress_callback = upload_progress_callback,
                                   timeout = timeout,
                                   scope = scope)

    # Public API for creating an HTTP DELETE request.
    # Returns an HttpRequestData instance that represents this request.
    def delete(self, url: str,
               headers_dict: Optional[Dict[str, str]] = None,
               callback: Optional[Callable[["QNetworkReply"], None]] = None,
               error_callback: Optional[Callable[["QNetworkReply", "QNetworkReply.NetworkError"], None]] = None,
               download_progress_callback: Optional[Callable[[int, int], None]] = None,
               upload_progress_callback: Optional[Callable[[int, int], None]] = None,
               timeout: Optional[float] = None,
               scope: Optional[HttpRequestScope] = None) -> "HttpRequestData":
        return self._createRequest("deleteResource", url, headers_dict = headers_dict,
                                   callback = callback, error_callback = error_callback,
                                   download_progress_callback = download_progress_callback,
                                   upload_progress_callback = upload_progress_callback,
                                   timeout = timeout,
                                   scope = scope)

    # Public API for aborting a given HttpRequestData. If the request is not pending or in progress, nothing
    # will be done.
    def abortRequest(self, request: "HttpRequestData") -> None:
        with self._request_lock:
            # If the request is currently pending, just remove it from the pending queue.
            if request in self._request_queue:
                self._request_queue.remove(request)

            # If the request is currently in progress, abort it.
            if request in self._requests_in_progress:
                if request.reply is not None and request.reply.isRunning():
                    request.reply.abort()
                    Logger.log("d", "%s aborted", request)

    @staticmethod
    def readJSON(reply: QNetworkReply) -> Any:
        """Read a JSON response into a Python object (list, dict, or str, depending on the JSON type).

        :return: Python object representing the JSON, or None in case of error
        """
        try:
            return json.loads(HttpRequestManager.readText(reply))
        except json.decoder.JSONDecodeError:
            Logger.log("w", "Received invalid JSON: " + str(reply.url()))
            return None

    @staticmethod
    def readText(reply: QNetworkReply) -> str:
        """Decode raw reply bytes as utf-8"""
        return bytes(reply.readAll()).decode("utf-8")

    @staticmethod
    def replyIndicatesSuccess(reply: QNetworkReply, error: Optional["QNetworkReply.NetworkError"] = None) -> bool:
        """Returns whether the reply status code indicates success and error is None"""
        return error is None and 200 <= reply.attribute(QNetworkRequest.HttpStatusCodeAttribute) < 300

    @staticmethod
    def safeHttpStatus(reply: Optional[QNetworkReply]):
        """Returns the status code, or -1 if there isn't any"""
        if reply is None:
            return -1
        return reply.attribute(QNetworkRequest.HttpStatusCodeAttribute) or -1

    @staticmethod
    def qt_network_error_name(error: QNetworkReply.NetworkError):
        """String representation of a NetworkError, e.g. 'ProtocolInvalidOperationError'"""
        for k, v in QNetworkReply.__dict__.items():
            if v == error:
                return k
        return "Unknown Qt Network error"

    # This function creates an HttpRequestData with the given data and puts it into the pending request queue.
    # If no request processing call has been scheduled, it will schedule one too.
    # Returns an HttpRequestData instance that represents this request.
    def _createRequest(self, http_method: str, url: str,
                       headers_dict: Optional[Dict[str, str]] = None,
                       data: Optional[Union[bytes, bytearray]] = None,
                       callback: Optional[Callable[["QNetworkReply"], None]] = None,
                       error_callback: Optional[Callable[["QNetworkReply", "QNetworkReply.NetworkError"], None]] = None,
                       download_progress_callback: Optional[Callable[[int, int], None]] = None,
                       upload_progress_callback: Optional[Callable[[int, int], None]] = None,
                       timeout: Optional[float] = None,
                       scope: Optional[HttpRequestScope] = None) -> "HttpRequestData":
        # Sanity checks
        if timeout is not None and timeout <= 0:
            raise ValueError("Timeout must be a positive number if provided, but [%s] was given" % timeout)

        request = QNetworkRequest(QUrl(url))

        # Make sure that Qt handles redirects
        if hasattr(QNetworkRequest, "FollowRedirectsAttribute"):
            # Patch for Qt 5.6-5.8
            request.setAttribute(QNetworkRequest.FollowRedirectsAttribute, True)
        if hasattr(QNetworkRequest, "RedirectPolicyAttribute"):
            # Patch for Qt 5.9+
            request.setAttribute(QNetworkRequest.RedirectPolicyAttribute, True)

        # Set headers
        if headers_dict is not None:
            for key, value in headers_dict.items():
                request.setRawHeader(key.encode("utf-8"), value.encode("utf-8"))

        if scope is not None:
            scope.requestHook(request)

        # Generate a unique request ID
        request_id = uuid.uuid4().hex

        # Create the request data
        request_data = HttpRequestData(request_id,
                                       http_method = http_method,
                                       request = request,
                                       data = data,
                                       manager_timeout_callback = self._onRequestTimeout,
                                       callback = callback,
                                       error_callback = error_callback,
                                       download_progress_callback = download_progress_callback,
                                       upload_progress_callback = upload_progress_callback,
                                       timeout = timeout)

        with self._request_lock:
            self._request_queue.append(request_data)

            # Schedule a call to process pending requests in the queue
            if not self._process_requests_scheduled:
                self.callLater(0, self._processNextRequestsInQueue)
                self._process_requests_scheduled = True

        return request_data

    # For easier debugging, so you know when the call is triggered by the timeout timer.
    def _onRequestTimeout(self, request_data: "HttpRequestData") -> None:
        Logger.log("d", "Request [%s] timeout.", request_data)

        # Make typing happy
        if request_data.reply is None:
            return

        with self._request_lock:
            if request_data not in self._requests_in_progress:
                return

            request_data.reply.abort()
            request_data.is_aborted_due_to_timeout = True

    # Processes the next requests in the pending queue. This function issues as many requests to the QNetworkManager
    # as possible, limited by the value "_max_concurrent_requests". It stops when there are no more pending requests.
    def _processNextRequestsInQueue(self) -> None:
        # Process all requests until the max concurrent number is hit or there are no more requests to process.
        while True:
            with self._request_lock:
                # Do nothing if there are no more requests to process
                if not self._request_queue:
                    self._process_requests_scheduled = False
                    return

                # Do not exceed the max request limit
                if len(self._requests_in_progress) >= self._max_concurrent_requests:
                    self._process_requests_scheduled = False
                    return

                # Fetch the next request and process it
                next_request_data = self._request_queue.popleft()
            self._processRequest(cast(HttpRequestData, next_request_data))

    # Processes the given HttpRequestData by issuing the request using QNetworkAccessManager and moves the
    # request into the currently in-progress list.
    def _processRequest(self, request_data: "HttpRequestData") -> None:
        now = time.time()

        # Get the right http_method function and prepare arguments.
        method = getattr(self._network_manager, request_data.http_method)
        args = [request_data.request]
        if request_data.data is not None:
            args.append(request_data.data)

        # Issue the request and add the reply into the currently in-progress requests set
        reply = method(*args)
        request_data.reply = reply

        # Connect callback signals
        reply.error.connect(lambda err, rd = request_data: self._onRequestError(rd, err), type = Qt.QueuedConnection)
        reply.finished.connect(lambda rd = request_data: self._onRequestFinished(rd), type = Qt.QueuedConnection)

        # Only connect download/upload progress callbacks when necessary to reduce CPU usage.
        if request_data.download_progress_callback is not None or request_data.timeout is not None:
            reply.downloadProgress.connect(request_data.onDownloadProgressCallback, type = Qt.QueuedConnection)
        if request_data.upload_progress_callback is not None or request_data.timeout is not None:
            reply.uploadProgress.connect(request_data.onUploadProgressCallback, type = Qt.QueuedConnection)

        with self._request_lock:
            self._requests_in_progress.add(request_data)
            request_data.setStartTime(now)

    def _onRequestError(self, request_data: "HttpRequestData", error: "QNetworkReply.NetworkError") -> None:
        error_string = None
        reply_body = None
        if request_data.reply is not None:
            error_string = request_data.reply.errorString()
            # Use peek() to retrieve the reply's body instead of readAll(): unlike readAll(), peek() doesn't
            # consume the content.
            reply_body = request_data.reply.peek(request_data.reply.bytesAvailable())

        if error in (QNetworkReply.UnknownNetworkError, QNetworkReply.HostNotFoundError):
            self._setInternetReachable(False)
            # The manager seems not always able to recover from a total loss of network access, so re-create it.
            self._network_manager = QNetworkAccessManager(self)

        Logger.log("d", "%s got a QNetworkReplyError %s. The server returned: %s",
                   request_data, error_string, reply_body)

        with self._request_lock:
            # Safeguard: make sure that we have the reply in the currently in-progress requests set
            if request_data not in self._requests_in_progress:
                # TODO: ERROR, should not happen
                Logger.log("e", "%s not found in the in-progress set", request_data)
            else:
                # Disconnect callback signals
                if request_data.reply is not None:
                    if request_data.download_progress_callback is not None:
                        request_data.reply.downloadProgress.disconnect(request_data.onDownloadProgressCallback)
                    if request_data.upload_progress_callback is not None:
                        request_data.reply.uploadProgress.disconnect(request_data.onUploadProgressCallback)

                request_data.setDone()
                self._requests_in_progress.remove(request_data)

        # Schedule the error callback if there is one
        if request_data.error_callback is not None:
            self.callLater(0, request_data.error_callback, request_data.reply, error)

        # Continue to process the next request
        self._processNextRequestsInQueue()

    def _onRequestFinished(self, request_data: "HttpRequestData") -> None:
        # See https://doc.qt.io/archives/qt-5.10/qnetworkreply.html#abort
        # Calling QNetworkReply.abort() will also trigger finished(), so we need to know if a request was finished or
        # aborted. This can be done by checking if the error is QNetworkReply.OperationCanceledError. If a request was
        # aborted due to timeout, the request's HttpRequestData.is_aborted_due_to_timeout will be set to True.
        #
        # We do nothing if the request was aborted or an error was detected, because an error callback will also
        # be triggered by Qt.
        reply = request_data.reply
        if reply is not None:
            reply_error = reply.error()  # error() must only be called once
            if reply_error != QNetworkReply.NoError:
                if reply_error == QNetworkReply.OperationCanceledError:
                    Logger.log("d", "%s was aborted, do nothing", request_data)

                # stop processing for any kind of error
                return

        # No error: the internet is reachable
        self._setInternetReachable(True)

        if self._enable_request_benchmarking:
            time_spent = None  # type: Optional[float]
            if request_data.start_time is not None:
                time_spent = time.time() - request_data.start_time
            Logger.log("d", "Request [%s] finished, took %s seconds, pending for %s seconds",
                       request_data, time_spent, request_data.pending_time)

        with self._request_lock:
            # Safeguard: make sure that we have the reply in the currently in-progress requests set.
            if request_data not in self._requests_in_progress:
                # This can happen if a request has been aborted. The finished() signal will still be triggered at the
                # end. In this case, do nothing with this request.
                Logger.log("e", "%s not found in the in-progress set", request_data)
            else:
                # Disconnect callback signals
                if reply is not None:
                    # Even after the request was successfully finished, an error may still be emitted if
                    # the network connection is lost seconds later. Bug in Qt? Fixes CURA-7349
                    reply.error.disconnect()

                    if request_data.download_progress_callback is not None:
                        reply.downloadProgress.disconnect(request_data.onDownloadProgressCallback)
                    if request_data.upload_progress_callback is not None:
                        reply.uploadProgress.disconnect(request_data.onUploadProgressCallback)

                request_data.setDone()
                self._requests_in_progress.remove(request_data)

        # Schedule the callback if there is one
        if request_data.callback is not None:
            self.callLater(0, request_data.callback, reply)

        # Continue to process the next request
        self._processNextRequestsInQueue()

    def _setInternetReachable(self, reachable: bool):
        if reachable != self._is_internet_reachable:
            self._is_internet_reachable = reachable
            self.internetReachableChanged.emit(reachable)
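
# Example usage (a minimal sketch for illustration: it assumes a running Qt
# event loop; the URL and the callback bodies are illustrative only):
#
#   def on_finished(reply):
#       print("status:", HttpRequestManager.safeHttpStatus(reply))
#       print("body:", HttpRequestManager.readJSON(reply))
#
#   def on_error(reply, error):
#       print("failed:", HttpRequestManager.qt_network_error_name(error))
#
#   http = HttpRequestManager.getInstance()
#   request_data = http.get("https://example.com/api/status",
#                           callback = on_finished,
#                           error_callback = on_error,
#                           timeout = 10)
#   # The returned HttpRequestData can be used to abort the request later:
#   # http.abortRequest(request_data)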
We are always learning, growing, and challenging ourselves to be better at everything we do. Isaac Sloan joins us as a guest speaker on the Crystal language. Our third quarterly app day features three different apps. Come join us for Goblet of Fire round 2: our team of 6 is building an app in one day.
from flask import Flask, request, render_template, redirect
from flask import send_from_directory
from mongoengine import Document, StringField, DateTimeField, connect
import os
import datetime

import tasks

app = Flask(__name__)
app.debug = True


class Entry(Document):
    name = StringField()
    zip = StringField()
    timestamp = DateTimeField()


@app.route("/", methods=["GET", ])
def index():
    """List all the entries."""
    all_entries = Entry.objects
    return render_template("index.html", entries=all_entries)


@app.route("/new", methods=["GET", "POST", ])
def sign():
    """Allow users to create new entries."""
    if request.method == "GET":
        return render_template("new.html")
    else:
        the_zip = request.form["the_zip"]
        current_time = datetime.datetime.now()
        entry = Entry(
            zip=the_zip,
            timestamp=current_time
        )
        entry.save()
        tasks.lookup.delay(entry.id)
        return redirect("/")  # Redirect after POST is good behavior!


@app.route("/styles/<path:filename>")
def styles(filename):
    """Allow Flask to serve our CSS files."""
    return send_from_directory("styles", filename)


if __name__ == "__main__":
    host = "localhost"
    port = int(os.getenv("PORT", 5000))
    if port != 5000:
        host = "0.0.0.0"
    else:
        connect("mempyflaskcelery")  # A MongoDB connection
    app.run(port=port, host=host)
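
# The view above hands work off to tasks.lookup.delay(entry.id). That module
# isn't shown here; a minimal sketch of what tasks.py might look like follows.
# The Redis broker URL, the module name "app", and the body of lookup() are
# all assumptions for illustration.
#
#   # tasks.py
#   from celery import Celery
#   from mongoengine import connect
#
#   celery = Celery("tasks", broker="redis://localhost:6379/0")  # assumed broker
#
#   @celery.task
#   def lookup(entry_id):
#       """Assumed job: resolve the Entry's ZIP code to a place name."""
#       connect("mempyflaskcelery")
#       from app import Entry  # the Flask module above, name assumed
#       entry = Entry.objects.get(id=entry_id)
#       entry.name = "place for %s" % entry.zip  # stand-in for a real geo lookup
#       entry.save()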
Hello! I am on the Simple Stories blog today! 2016 has been awesome, and in no time it will be 2017! Time sure passes way too fast! For nearly ten years, since we started dating, it has been our tradition to spend the new year counting down in the comfort of his house and ordering pizza for dinner. This year, our first as a married couple, is going to be slightly different, as I promised my aunt I would go on a short trip with her. Feeling a little guilty, I decided to make this layout for my husband! Of course, the 'Happy New Year' collection is the best collection for this layout. I love the fun and vibrant colors! I first started with some stitching to complement the colors, using simple back stitches to create the title, 2017. The sticker sheet has many great phrases to accompany the title! Next, I fussy cut the elements from the pattern papers to decorate the layout. The party hats make the elements super fun, and I totally love layering them! Another set of elements: here I used the star from the sticker sheet and another die-cut vellum star alongside it. The second layout today is inspired by the fact that it is also the 10th year since I started dating my husband! It is inspired by the pattern paper in the collection, where the countdown of 1-10 corresponds to our ten years of love! I was so inspired by the countdown that I used two sheets of the same pattern paper for this layout. I fussy cut the numbers and layered them on the same pattern paper for impact. In another part of the layout, I cut strips and sewed them to the background. This helps direct attention to the photo as well. For a fun element, I curved the sides of the strips a little by folding them upwards slightly. More party hats and phrases! Last but not least, I added some more stitched elements to the title! I wanted to highlight the number 10, so this time I did random stitching inside the number to make it bolder. Another important basic element that everyone should have is the Sn@p Basics 4x6 Basics Stickers! They are super useful as titles or sub-titles! Here I used the white ones for the word 'Amazing', and I love how it pops out! 2016 has been an exciting year, and I look forward to 2017! Here is wishing everyone a great crafty year ahead!
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import re
import configman
import collections
import inspect


#------------------------------------------------------------------------------
# support methods

# a regular expression that will parse out all pairs in the form:
#   a=b, c=d, e=f
kw_list_re = re.compile('([^ =]+) *= *("[^"]*"|[^ ]*)')


def kw_str_parse(a_string):
    """convert a string in the form 'a=b, c=d, e=f' to a dict"""
    try:
        return dict((k, eval(v.rstrip(',')))
                    for k, v in kw_list_re.findall(a_string))
    except (AttributeError, TypeError):
        if isinstance(a_string, collections.Mapping):
            return a_string
        return {}


#==============================================================================
class TransformRule(object):
    """a pairing of two functions with default parameters to be used as a
    transformation rule."""

    #--------------------------------------------------------------------------
    def __init__(self, predicate, predicate_args, predicate_kwargs,
                 action, action_args, action_kwargs):
        """construct a new predicate/action rule pair.

        input parameters:
            predicate - the name of a function to serve as a predicate.  The
                        function must accept two dicts followed by any number
                        of constant args or kwargs.  Alternatively, this could
                        be a classname for a class that has a method called
                        'predicate' with the aforementioned characteristics
            predicate_args - arguments to be passed on to the predicate
                             function in addition to the two required dicts.
            predicate_kwargs - kwargs to be passed on to the predicate
                               function in addition to the two required dicts.
            action - the name of a function to be run if the predicate returns
                     True.  The method must accept two dicts followed by any
                     number of args or kwargs.  Alternatively, this could be a
                     classname for a class that has a method called 'action'
                     with the aforementioned characteristics
            action_args - arguments to be passed on to the action function in
                          addition to the two required dicts
            action_kwargs - kwargs to be passed on to the action function in
                            addition to the two required dicts
        """
        try:
            self.predicate = configman.converters.class_converter(predicate)
        except TypeError:
            # conversion failed, let's assume it was already a function or a
            # callable object
            self.predicate = predicate
        if inspect.isclass(self.predicate):
            # the predicate is a class, instantiate it and set the predicate
            # function to the object's 'predicate' method
            self._predicitate_implementation = self.predicate()
            self.predicate = self._predicitate_implementation.predicate
        else:
            self._predicitate_implementation = type(self.predicate)
        try:
            if predicate_args in ('', None):
                self.predicate_args = ()
            elif isinstance(predicate_args, tuple):
                self.predicate_args = predicate_args
            else:
                self.predicate_args = tuple([eval(x.strip())
                                             for x in predicate_args.split(',')])
        except AttributeError:
            self.predicate_args = ()
        self.predicate_kwargs = kw_str_parse(predicate_kwargs)
        try:
            self.action = configman.class_converter(action)
        except TypeError:
            # the conversion failed, let's assume that the action was passed
            # in as something callable.
            self.action = action
        if inspect.isclass(self.action):
            # the action is actually a class, go on and instantiate it, then
            # assign the 'action' to be the object's 'action' method
            if self._predicitate_implementation.__class__ is self.action:
                # if the predicate and the action are implemented in the same
                # class, only instantiate one copy.
                self._action_implementation = self._predicitate_implementation
            else:
                self._action_implementation = self.action()
            self.action = self._action_implementation.action
        try:
            if action_args in ('', None):
                self.action_args = ()
            elif isinstance(action_args, tuple):
                self.action_args = action_args
            else:
                self.action_args = tuple([eval(x.strip())
                                          for x in action_args.split(',')])
        except AttributeError:
            self.action_args = ()
        self.action_kwargs = kw_str_parse(action_kwargs)

    #--------------------------------------------------------------------------
    @staticmethod
    def function_invocation_proxy(fn, proxy_args, proxy_kwargs):
        """execute the function if it is one, else evaluate the fn as a
        boolean and return that value.

        Sometimes rather than providing a predicate, we just give the value of
        True.  This is shorthand for writing a predicate that always returns
        true."""
        try:
            return fn(*proxy_args, **proxy_kwargs)
        except TypeError:
            return bool(fn)

    #--------------------------------------------------------------------------
    def act(self, *args, **kwargs):
        """gather a rule's parameters together and run the predicate.  If that
        returns True, then go on and run the action function.

        returns:
            a tuple indicating the results of applying the predicate and the
            action function:
                (False, None) - the predicate failed, action function not run
                (True, True) - the predicate and action functions succeeded
                (True, False) - the predicate succeeded, but the action
                                function failed"""
        pred_args = tuple(args) + tuple(self.predicate_args)
        pred_kwargs = kwargs.copy()
        pred_kwargs.update(self.predicate_kwargs)
        if self.function_invocation_proxy(self.predicate, pred_args,
                                          pred_kwargs):
            act_args = tuple(args) + tuple(self.action_args)
            act_kwargs = kwargs.copy()
            act_kwargs.update(self.action_kwargs)
            bool_result = self.function_invocation_proxy(self.action,
                                                         act_args, act_kwargs)
            return (True, bool_result)
        else:
            return (False, None)

    #--------------------------------------------------------------------------
    def __eq__(self, another):
        if isinstance(another, TransformRule):
            return self.__dict__ == another.__dict__
        else:
            return False


#==============================================================================
class TransformRuleSystem(object):
    """A collection of TransformRules that can be applied together"""

    #--------------------------------------------------------------------------
    def __init__(self):
        self.rules = list()

    #--------------------------------------------------------------------------
    def load_rules(self, an_iterable):
        """cycle through a collection of Transform rule tuples loading them
        into the TransformRuleSystem"""
        self.rules = [TransformRule(*x) for x in an_iterable]

    #--------------------------------------------------------------------------
    def append_rules(self, an_iterable):
        """add rules to the TransformRuleSystem"""
        self.rules.extend(TransformRule(*x) for x in an_iterable)

    #--------------------------------------------------------------------------
    def apply_all_rules(self, *args, **kwargs):
        """cycle through all rules and apply them all without regard to
        success or failure

        returns:
            True - since success or failure is ignored"""
        # print 'args:', args, 'kwargs:', kwargs
        for x in self.rules:
            x.act(*args, **kwargs)
        return True

    #--------------------------------------------------------------------------
    def apply_until_action_succeeds(self, *args, **kwargs):
        """cycle through all rules until an action is run and succeeds

        returns:
            True - if an action is run and succeeds
            False - if no action succeeds"""
        for x in self.rules:
            predicate_result, action_result = x.act(*args, **kwargs)
            if action_result:
                return True
        return False

    #--------------------------------------------------------------------------
    def apply_until_action_fails(self, *args, **kwargs):
        """cycle through all rules until an action is run and fails

        returns:
            True - an action ran and it failed
            False - no action ever failed"""
        for x in self.rules:
            predicate_result, action_result = x.act(*args, **kwargs)
            if not action_result:
                return True
        return False

    #--------------------------------------------------------------------------
    def apply_until_predicate_succeeds(self, *args, **kwargs):
        """cycle through all rules until a predicate returns True

        returns:
            True - an action ran and it succeeded
            False - an action ran and it failed
            None - no predicate ever succeeded"""
        for x in self.rules:
            predicate_result, action_result = x.act(*args, **kwargs)
            if predicate_result:
                return action_result
        return None

    #--------------------------------------------------------------------------
    def apply_until_predicate_fails(self, *args, **kwargs):
        """cycle through all rules until a predicate returns False

        returns:
            False - a predicate ran and it failed
            None - no predicate ever failed"""
        for x in self.rules:
            predicate_result, action_result = x.act(*args, **kwargs)
            if not predicate_result:
                return False
        return None


#------------------------------------------------------------------------------
# Useful rule predicates and actions
#------------------------------------------------------------------------------

# (True, '', '', copy_value_action, '', 'source_key="sally", destination_key="fred"')
def copy_value_action(source, destination,
                      source_key=None, destination_key=None):
    """copy a key from a mapping source to a mapping destination"""
    destination[destination_key] = source[source_key]


#------------------------------------------------------------------------------
# (True, '', '',
#  format_new_value_action, '', "destination_key='Version', format_str='%(Version)sesr'"
# )
def format_new_value_action(source, destination, destination_key='',
                            format_str=''):
    """replace a mapping destination with a string formatted from the mapping
    source.

    parameters:
        source - a mapping to use as a source
        destination - a mapping to use as the destination
        destination_key - the key in the destination to insert/replace
        format_str - a string in standard python format form"""
    destination[destination_key] = format_str % source


#------------------------------------------------------------------------------
# (eq_constant_predicate, '', 'source_key="fred", value="wilma"', ...)
def eq_constant_predicate(source, destination, source_key='', value=''):
    """a predicate to test equality between a source key and a constant

    parameters:
        source - the source of the value to test
        destination - not used
        source_key - the key into the source to use for the test
        value - the constant to check for equality"""
    return source[source_key] == value


#------------------------------------------------------------------------------
# (eq_key_predicate, '', 'left_mapping_key="fred", right_mapping_key="wilma"',
#  ...)
def eq_key_predicate(left_mapping, right_mapping, left_mapping_key='',
                     right_mapping_key=''):
    """a predicate to test equality between a left mapping key and a right
    mapping key

    parameters:
        left_mapping - the mapping containing the first value to test
        right_mapping - the mapping containing the second value
        left_mapping_key - the key into the source for the first value
        right_mapping_key - the key into the second data source"""
    return left_mapping[left_mapping_key] == right_mapping[right_mapping_key]


#------------------------------------------------------------------------------
# (is_not_null_predicate, '', 'key="fred"',
#  ...)
def is_not_null_predicate(source, other, key=''):
    """a predicate that converts the key'd source to boolean.

    parameters:
        source - the mapping containing the value to test
        other - unused
        key - the key into the source for the first value"""
    try:
        return bool(source[key])
    except KeyError:
        return False
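
#------------------------------------------------------------------------------
# A minimal usage sketch for illustration.  The rule tuple layout is
# (predicate, predicate_args, predicate_kwargs, action, action_args,
# action_kwargs), as in the commented examples above.
if __name__ == '__main__':
    rules = TransformRuleSystem()
    rules.load_rules([
        # if source['status'] == 'new', copy source['id'] to destination['id']
        (eq_constant_predicate, '', 'source_key="status", value="new"',
         copy_value_action, '', 'source_key="id", destination_key="id"'),
    ])
    source = {'status': 'new', 'id': 42}
    destination = {}
    rules.apply_all_rules(source, destination)
    print(destination)  # -> {'id': 42}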
SMU notched a monster conference win Wednesday night. Going into Wednesday night, SMU was playing its worst basketball of the Tim Jankovich era. The Mustangs had lost three straight games, and a fourth seemed inevitable as No. 7 Wichita State loomed on their schedule. But no one told that to Shake Milton, who poured in 33 points as the Mustangs stunned the Shockers 83-78 Wednesday night. While the win is huge for SMU, it may turn out to be a pyrrhic victory. Jarrey Foster went down clutching his knee early in the first half and did not return. Foster is SMU’s most versatile player, providing scoring, defense and rebounds for the Mustangs. He also has a history of knee injuries, as he tore his ACL in high school. The win was not easy to pull off for SMU, as Wichita State made a pair of second half surges to threaten SMU’s lead. With just more than 13 minutes remaining in the second half, a Conner Frankamp 3-pointer brought Wichita State within one point of SMU, but the Mustangs responded with an 8-0 run. Six of those points came from Milton. Then with 2:06 left, Milton launched a 3-pointer to extend SMU’s lead to 11, but the Shockers quickly battled back, embarking on an 11-3 run in the next minute-and-a-half to cut the lead to three. SMU was able to seal the game with two clutch free throws from Milton, one free throw from Jimmy Whitt and a spree of missed 3s by the Shockers. Milton seemed to come up huge whenever SMU needed a basket. He dominated Wichita State in the pick and roll, draining jumpers when defenders gave him space and driving into the lane when they pressed him. “When you see a guy do that, you see a guy’s heart, not going to let the team lose,” Jankovich said. SMU had to overcome foul trouble Wednesday night. Ethan Chargois and Ben Emelogu both missed key minutes in the second half with four fouls. A shorter bench forced Jankovich to lean on unexpected players for key minutes down the stretch. Jahmal McMurray is one player who stepped up, bouncing back from a rough trio of games with 16 key points off the bench. It was clear that SMU missed Foster at times. Without their rebounding leader, the Mustangs were bullied on the glass, allowing 11 offensive boards to the Shockers. The Mustangs shot a scorching 63.8 percent from the field, largely due to Milton, who nailed 11 of his 14 shots. Many of those were contested and some of them were far behind the 3-point arc. Across from Milton was his top competitor for AAC Player of the Year, Wichita State guard Landry Shamet. Shamet had an outstanding game in his own right, recording a double-double with 20 points and 10 assists. Shaquille Morris added 17 points for the Shockers. The win is SMU’s first over an AP top-10 opponent since it beat No. 10 Houston in 1982. The Mustangs now have three top-15 wins, adding this one to victories over No. 2 Arizona and No. 14 USC. The loss was Wichita State’s first conference loss of the season and thus its first since joining the AAC. Next, SMU will return to Moody Coliseum to face Tulane on Saturday. Tip-off for that game is at 5 p.m.
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 28 13:43:55 2015

@author: Janez
"""
import sys
sys.path.append('../keras')

import load_data
import models
import misc
import paraphrase

import numpy as np
import itertools
import os


if __name__ == "__main__":
    train, dev, test = load_data.load_all_snli_datasets('data/snli_1.0/')
    glove = load_data.import_glove('data/snli_vectors.txt')

    for ex in train + dev:
        load_data.load_word_vecs(ex[0] + ex[1], glove)
    load_data.load_word_vec('EOS', glove)
    wi = load_data.WordIndex(glove)


def grid_experiments(train, dev, glove, embed_size=300, hidden_size=100):
    lr_vec = [0.001, 0.0003, 0.0001]
    dropout_vec = [0.0, 0.1, 0.2]
    reg_vec = [0.0, 0.001, 0.0003, 0.0001]
    for params in itertools.product(lr_vec, dropout_vec, reg_vec):
        filename = ('lr' + str(params[0]).replace('.', '') +
                    '_drop' + str(params[1]).replace('.', '') +
                    '_reg' + str(params[2]).replace('.', ''))
        print 'Model', filename
        model = models.init_model(embed_size, hidden_size,
                                  params[0], params[1], params[2])
        models.train_model(train, dev, glove, model, 'models/' + filename)


def test_model2(model, dev, glove):
    from misc import predict_example
    tp = 0
    for ex in dev:
        probs = predict_example(" ".join(ex[0]), " ".join(ex[1]), model, glove)
        label = load_data.LABEL_LIST[np.argmax(probs)]
        if label == ex[2]:
            tp += 1
    return tp / float(len(dev))


def test_all_models(dev, test, glove, folder='models/'):
    files = os.listdir(folder)
    extless = set([file.split('.')[0] for file in files
                   if os.path.isfile(os.path.join(folder, file))]) - set([''])
    epoch_less = set([file.split('~')[0] for file in extless])
    for model_short in epoch_less:
        if model_short in extless:
            modelname = model_short
        else:
            same_exper = [m for m in extless if m.startswith(model_short)]
            epoch_max = max([int(file.split('~')[1]) for file in same_exper])
            modelname = model_short + '~' + str(epoch_max)
        print modelname
        model = models.load_model(folder + modelname)
        dev_acc = models.test_model(model, dev, glove)
        test_acc = models.test_model(model, test, glove)
        print "Dev:", '{0:.2f}'.format(dev_acc * 100), "Test_acc:", '{0:.2f}'.format(test_acc * 100)
        print


def accuracy_for_subset(y_pred, y_gold, subset):
    pred = y_pred[subset]
    gold = y_gold[subset]
    return np.sum(np.argmax(pred, axis=1) == np.argmax(gold, axis=1)) / float(len(gold))


def augmented_dataset(glove, dataset, ppdb):
    new_examples = []
    for ex in dataset:
        new_examples += augment_example(glove, ex, ppdb)
    return new_examples


def augment_example(glove, example, ppdb):
    new_examples = []
    for word in set(example[0] + example[1]):
        if word in ppdb:
            for rep in ppdb[word]:
                if word in glove and rep in glove:
                    new_examples.append(make_new_ex(example, word, rep))
    return new_examples


def make_new_ex(example, original, replacement):
    premise = [replacement if word == original else word for word in example[0]]
    hypo = [replacement if word == original else word for word in example[1]]
    return (premise, hypo, example[2])


def test_augmentation(glove, dev, ppdb_file):
    ppdb = paraphrase.load_parap(ppdb_file)
    aug = augmented_dataset(glove, dev, ppdb)
    return aug


def parapharse_models(glove, train, dev, ppdb_file):
    ppdb = paraphrase.load_parap(ppdb_file)
    aug = augmented_dataset(glove, train, ppdb)
    train_aug = train + aug
    models.train_model(train_aug, dev, glove, model_filename='models/train_aug')
    models.train_model(train, dev, glove, model_filename='models/train_noaug')


def tune_model(observed_example, train_example, model, glove):
    class_arg = load_data.LABEL_LIST.index(observed_example[2])
    prem = " ".join(observed_example[0])
    hypo = " ".join(observed_example[1])
    print prem, hypo, observed_example[2], class_arg
    for i in range(30):
        probs = misc.predict_example(prem, hypo, model, glove)[0]
        print i, probs
        if probs.argmax() == class_arg:
            break
        models.update_model_once(model, glove, [train_example])


def generate_tautologies(dataset):
    unique = set()
    result = []
    for ex in dataset:
        premise = " ".join(ex[0])
        if premise not in unique:
            result.append((ex[0], ex[0], 'entailment'))
            unique.add(premise)
    return result


def generate_contradictions(dataset):
    result = []
    for ex in dataset:
        if ex[2] == 'contradiction':
            result.append((ex[1], ex[0], ex[2]))
    return result


def generate_neutral(dataset):
    result = []
    for ex in dataset:
        if ex[2] == 'entailment':
            result.append((ex[1], ex[0], 'neutral'))
    return result


def generate_all(dataset):
    return (generate_tautologies(dataset) + generate_contradictions(dataset) +
            generate_neutral(dataset))


def unknown_words_analysis(train, dev):
    train_words = set.union(*[set(ex[0] + ex[1]) for ex in train])
    indices = [[], []]
    for i in range(len(dev)):
        diff = len(set(dev[i][0] + dev[i][1]) - train_words)
        if diff == 0:
            indices[0].append(i)
        else:
            indices[1].append(i)
    return indices


def color_analysis(dev):
    COLORS = set(['black', 'blue', 'orange', 'white', 'yellow', 'green', 'pink',
                  'purple', 'red', 'brown', 'gray', 'grey'])
    indices = [[], []]
    for i in range(len(dev)):
        diff = len(set(dev[i][0] + dev[i][1]) & COLORS)
        if diff == 0:
            indices[0].append(i)
        else:
            indices[1].append(i)
    return indices


def mixture_experiments(train, dev, glove, splits=5):
    for i in range(splits):
        model_name = 'mixture' + str(i)
        print 'Model', model_name
        model = models.init_model()
        div = len(train) / splits
        models.train_model(train[:i * div] + train[(i + 1) * div:splits * div],
                           dev, glove, model, 'models/' + model_name)


def extended_tautologies(train, dev, glove):
    augment_data = generate_all(train)
    from random import shuffle
    shuffle(augment_data)
    augment_weight = [0, 0.05, 0.15, 0.5]
    for w in augment_weight:
        new_train = train + augment_data[:int(len(train) * w)]
        w_str = str(w).replace('.', '')  # named w_str so the builtin str isn't shadowed
        model = models.init_model()
        models.train_model(new_train, dev, glove, model=model,
                           model_dir='models/aug' + w_str)


def test_tautologies(train, dev, glove, paths=['aug0', 'aug005', 'aug015', 'aug05']):
    testsets = [dev, generate_tautologies(dev), generate_contradictions(dev),
                generate_neutral(dev)]
    names = ['dev', 'ent', 'contr', 'neu']
    for path in paths:
        print path
        model_path = misc.best_model_path('models/' + path)
        model = models.load_model(model_path)
        accs = [models.test_model(model, dataset, glove) for dataset in testsets]
        for name, dataset, acc in zip(names, testsets, accs):
            print name, acc, len(dataset)
Foldable Phones: The New Norm in Smartphones | Hitech News Daily. Foldable phones have been creating a buzz for years. Samsung was the first to tease one, in 2014, and kept giving hints about it. Nonetheless, until recently, it seemed more like a concept phone that might become reality sometime in the future. This all changed when Royole Corporation, a global manufacturer of advanced flexible displays and more, launched its FlexPai device, the world’s first commercially available foldable phone. Following this launch, the South Korean multinational conglomerate Samsung revealed its flexible smartphone prototype during its developer conference. Likely to reach the market in April 2019, this foldable smartphone features a 7.3” QXGA+ Dynamic AMOLED display, a 4.6” HD+ Super AMOLED display, a cover camera, a dual camera, and a triple camera, a 4380mAh battery, fast charging on both wired and wireless, a 7nm 64-bit octa-core processor, 12GB of RAM with 512GB of internal storage, and the Android 9.0 (Pie) operating system. Huawei’s Mate X features a 6.6″ display, 8 GB of RAM, and a screen resolution of 1148 x 2480 pixels. It runs Android v9.0 (Pie) and is powered by an octa-core processor. It also has a 4500 mAh battery and a 40MP + 16MP + 8MP rear camera, along with sensors such as a light sensor, proximity sensor, accelerometer, barometer, and gyroscope. Oppo is another vendor bringing a foldable phone to the market. Brian Shen, Vice President at Oppo, showcased a prototype of the phone without revealing any specifications or other information. Judging from the images of the prototype, the phone resembles the Huawei Mate X and seems to have an outward-wrapped flexible display that provides two screens when folded and a single-screen experience when unfolded. The phone also appears to have a thick bar on one side that houses a camera module with what looks like a dual-LED flash, and to run a new ColorOS version backed by Android. Moto is likely to launch its latest product, the Motorola RAZR V4, this year. Some of the prime features of the foldable phone are its 41.0 DSLR pop-up camera, eye care technology, and its beautiful design. The smartphone also features a stainless-steel design with four-sided 3D glass. Its other features include dual SIM (Nano), Android 8.1 Oreo (upgradable to Android 9), a 64-bit octa-core Qualcomm Snapdragon 835 SoC paired with an Adreno GPU and 8GB of RAM, and a 6.15-inch full-HD+ (1080x2280 pixels) IPS display. Its dual rear camera setup has a 41-megapixel primary sensor and a 16-megapixel secondary sensor, along with an LED flash. The front has a 16-megapixel camera sensor with Face Wake support.
# Generated by Django 2.2.10 on 2020-02-13 22:29

from decimal import Decimal

import django.contrib.gis.db.models.fields
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone

import gwells.db_comments.model_mixins


class Migration(migrations.Migration):

    dependencies = [
        ('wells', '0108_auto_20200213_1741'),
        ('aquifers', '0029_aquifer_area_20200206_1651'),
    ]

    operations = [
        migrations.CreateModel(
            name='VerticalAquiferExtent',
            fields=[
                ('create_user', models.CharField(max_length=60)),
                ('create_date', models.DateTimeField(default=django.utils.timezone.now)),
                ('update_user', models.CharField(max_length=60)),
                ('update_date', models.DateTimeField(default=django.utils.timezone.now)),
                ('id', models.AutoField(db_column='vertical_aquifer_extent_id', primary_key=True, serialize=False,
                                        verbose_name='VerticalAquiferExtent Resource Identifier')),
                ('geom', django.contrib.gis.db.models.fields.PointField(
                    blank=False, null=False, srid=4326,
                    verbose_name="Geo-referenced location of aquifer's depth")),
                ('start', models.DecimalField(
                    blank=False, db_column='vertical_aquifer_extent_from', decimal_places=2, max_digits=7, null=False,
                    validators=[django.core.validators.MinValueValidator(Decimal('0.00'))], verbose_name='From')),
                ('end', models.DecimalField(
                    blank=True, db_column='vertical_aquifer_extent_to', decimal_places=2, max_digits=7, null=True,
                    validators=[django.core.validators.MinValueValidator(Decimal('0.01'))], verbose_name='To')),
                ('aquifer', models.ForeignKey(blank=False, db_column='aquifer_id', null=False,
                                              on_delete=django.db.models.deletion.PROTECT, to='aquifers.Aquifer')),
                ('well', models.ForeignKey(blank=True, db_column='well_tag_number', null=True,
                                           on_delete=django.db.models.deletion.PROTECT, to='wells.Well')),
            ],
            options={
                'verbose_name_plural': 'VerticalAquiferExtent',
                'db_table': 'vertical_aquifer_extents',
                'ordering': ['start'],
            },
            bases=(models.Model, gwells.db_comments.model_mixins.DBComments),
        ),
    ]
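
# For readability, the model this migration creates corresponds roughly to the
# following declaration (reconstructed from the CreateModel operation above;
# the real model in the gwells codebase may differ, e.g. in its audit mixins):
#
#   from decimal import Decimal
#   from django.contrib.gis.db import models
#   from django.core.validators import MinValueValidator
#   from django.utils import timezone
#
#   class VerticalAquiferExtent(models.Model):
#       create_user = models.CharField(max_length=60)
#       create_date = models.DateTimeField(default=timezone.now)
#       update_user = models.CharField(max_length=60)
#       update_date = models.DateTimeField(default=timezone.now)
#       id = models.AutoField(db_column='vertical_aquifer_extent_id',
#                             primary_key=True)
#       geom = models.PointField(srid=4326)
#       start = models.DecimalField(db_column='vertical_aquifer_extent_from',
#                                   max_digits=7, decimal_places=2,
#                                   validators=[MinValueValidator(Decimal('0.00'))])
#       end = models.DecimalField(db_column='vertical_aquifer_extent_to',
#                                 max_digits=7, decimal_places=2,
#                                 blank=True, null=True,
#                                 validators=[MinValueValidator(Decimal('0.01'))])
#       aquifer = models.ForeignKey('aquifers.Aquifer', db_column='aquifer_id',
#                                   on_delete=models.PROTECT)
#       well = models.ForeignKey('wells.Well', db_column='well_tag_number',
#                                blank=True, null=True,
#                                on_delete=models.PROTECT)
#
#       class Meta:
#           db_table = 'vertical_aquifer_extents'
#           ordering = ['start']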
Add every color to your look for your next big day of costume fun, or a Halloween costume party, with the help of the Rainbow Adult Cape – Costume Accessory! This poncho-like item is the perfect way to complete your look! This costume does not come with pants or shoes.
Experiment(description='PL2 empiricism',
           data_dir='../data/tsdlr-renamed/',
           max_depth=10,
           random_order=False,
           k=1,
           debug=False,
           local_computation=False,
           n_rand=9,
           sd=2,
           jitter_sd=0.1,
           max_jobs=400,
           verbose=False,
           make_predictions=False,
           skip_complete=True,
           results_dir='../results/2014-02-18-GPSS-add-pl2/',
           iters=250,
           base_kernels='SE,Per,Lin,Const,Noise',
           random_seed=2,
           period_heuristic=3,
           max_period_heuristic=5,
           period_heuristic_type='min',
           subset=True,
           subset_size=250,
           full_iters=10,
           bundle_size=5,
           additive_form=True,
           mean='ff.MeanZero()',            # Starting mean
           kernel='ff.NoiseKernel()',       # Starting kernel
           lik='ff.LikGauss(sf=-np.Inf)',   # Starting likelihood
           score='pl2',
           search_operators=[('A', ('+', 'A', 'B'), {'A': 'kernel', 'B': 'base'}),
                             ('A', ('*', 'A', 'B'), {'A': 'kernel', 'B': 'base-not-const'}),
                             ('A', ('*-const', 'A', 'B'), {'A': 'kernel', 'B': 'base-not-const'}),
                             ('A', 'B', {'A': 'kernel', 'B': 'base'}),
                             ('A', ('CP', 'd', 'A'), {'A': 'kernel', 'd': 'dimension'}),
                             ('A', ('CW', 'd', 'A'), {'A': 'kernel', 'd': 'dimension'}),
                             ('A', ('B', 'd', 'A'), {'A': 'kernel', 'd': 'dimension'}),
                             ('A', ('BL', 'd', 'A'), {'A': 'kernel', 'd': 'dimension'}),
                             ('A', ('None',), {'A': 'kernel'})])
A nurse, checking on a patient in the hospital with pneumonia, is concerned about the patient's progress and pulls out her smartphone to text a doctor and a pulmonologist. No, the nurse isn't breaking HIPAA laws or hospital rules by sharing patient information in an insecure text. She's actually using what could be the next big thing in enterprise social collaboration - secure enterprise texting. For several years now, companies have been turning to enterprise-grade social collaboration tools like Jive Software, Cisco's WebEx Social and Novell's Vibe collaboration platform. Now, companies are beginning to adopt enterprise texting tools that offer workers a quick way to connect with each other, just as they connect with people in their personal lives. That's especially true for younger workers, who text their friends more often than they call them. As they enter the workforce, they expect to be able to text their colleagues just as they do their friends. Workers who are texting on the job without enterprise tools available to them could leave a company open to security issues, said Brad Brooks, founder and CEO of TigerText Inc. "With SMS, it's completely unmanaged, insecure and not an enterprise-grade solution," said Brooks. TigerText offers an enterprise texting tool that is encrypted and lets users know when their messages have been read. "We think [texting] is conducive to improving enterprise workflow but it needs to be managed and controlled to prevent data leakage out of the enterprise," Brooks said. With an enterprise texting tool, companies can better enforce corporate messaging policies, make it easier for employees to find and connect with coworkers, and log text histories on the backend while periodically deleting them on workers' devices. "We had nurses, physicians, nursing homes, skilled nursing facilities all wanting to communicate using texting and we couldn't allow unsecure text messaging," said Gene Thomas, vice president and CIO of Memorial Hospital at Gulfport. "As a healthcare entity, we take patient privacy very seriously, and patient privacy is something we have to, by law, protect. We needed to find a way to allow people to text securely." Thomas, who adopted enterprise texting for the hospital in the spring of 2012, said he opted for TigerText because it met the hospital's criteria for being fast, reliable and secure. He noted that one benefit of enterprise-level texting is that anyone sending a text message is alerted when it's been delivered, as well as when it's been read. "If I send you a [regular] text message right now, I don't know if you've received it," he added. "In health care, that's important. If a nurse is texting a doctor, she needs to know that that message was received and that it was read.... If I'm with a patient, I can instantly text message transporters and the appropriate people in radiology instead of going to a computer and messaging these people. It's easier to pick up your phone and not leave the patient's bedside." "For wound care, when we're waiting for a piece of equipment or forms to be signed, we used to have to go search for a doctor to sign the forms," said Melhuish. "Now, I send the document to be signed through TigerText and they sign it and send it back to me. It might speed up the process by two days... It's fast. It's easy. It's the way of the future."
"Texting has become so second nature to people that it's the easiest tool for them to use when they're on the job," she added. "This is poised for growth. You'll see more physicians turning to this rather than the insecure methods they're normally using. When you hear stories of clinicians being fired for texting, that 'll put more pressure on organizations to use secure texting."
import numpy as np
import matplotlib.pyplot as plt
plt.switch_backend('Agg')


def correl_graph(my_path, calced, experimental, graph_name):
    """X axis -> experimental values, Y axis -> calculated values.
    "calced" is a dict containing values for residues (as keys),
    "experimental" is a list containing STR record objects."""
    min_calc = min(calced.values())
    max_calc = max(calced.values())

    exp_values = []
    for record in experimental:
        exp_values.append(record.value)
    min_exp = min(exp_values)
    max_exp = max(exp_values)

    miny = min(min_calc, min_exp)  # get minimum value
    maxy = max(max_calc, max_exp)  # get maximum value

    exp_line, calc_line = [], []
    for i, j in enumerate(calced.keys()):  # fetch data from arguments
        calc = calced[j]
        exp = experimental[i].value
        exp_line.append(exp)
        calc_line.append(calc)

    # Pad the axes; use finer margins for narrow value ranges. The checks go
    # from the narrowest range to the widest so that every branch is reachable.
    margin = int(abs(miny - maxy) * 0.05)
    if abs(miny - maxy) < 1:
        margin = 0
    elif abs(miny - maxy) < 2:
        margin = 0.01
    elif abs(miny - maxy) < 10:
        margin = 0.3
    maxy += margin
    miny -= margin

    diag = []
    for i in np.arange(miny, maxy * 1.42, 0.1):  # draw graph diagonal
        diag.append(i)

    plt.figure(figsize=(6, 5), dpi=80)
    plt.plot(diag, diag, linewidth=2.0, color='#FD6C6C', alpha=.7)
    plt.plot(exp_line, calc_line, color='#027A8B', marker='o', linestyle='')
    plt.axis([miny, maxy, miny, maxy])
    plt.xlabel('experimental')
    plt.ylabel('calculated')
    plt.tight_layout(pad=1.08)
    plt.savefig(my_path + "/" + graph_name, format="svg", transparent=True)
    plt.close()
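
# A minimal usage sketch with dummy data; STRecord here is a stand-in for the
# STR record objects mentioned in the docstring (anything exposing a .value).
if __name__ == "__main__":
    import collections
    STRecord = collections.namedtuple('STRecord', ['value'])

    calced = {1: 0.8, 2: 1.4, 3: 2.1}
    experimental = [STRecord(1.0), STRecord(1.5), STRecord(2.0)]
    correl_graph('.', calced, experimental, 'correlation.svg')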
This bust is unpainted and requires assembly. Total height: 170mm. The bust is made of high-quality resin. The plinth is not part of the set.
import shapely.ops
from shapely import geometry


def cut_polygon(poly, line):
    # slice the exterior separately from the interiors, then recombine
    ext_sliced = poly.exterior.union(line)
    ext_poly = geometry.Polygon(poly.exterior)
    int_sliced = [p.union(line) for p in poly.interiors]

    ext_parts, _dangles, _cuts, _invalids = shapely.ops.polygonize_full(ext_sliced)
    ext_parts = list(ext_parts)  # so we can update entries

    # It's possible to introduce some new area here - places where the cut line
    # goes outside the exterior but forms a loop with the exterior.
    ext_parts = [p_ext for p_ext in ext_parts
                 if p_ext.intersection(ext_poly).area / p_ext.area > 0.99]

    for p in int_sliced:
        int_parts, _dangles, _cuts, _invalids = shapely.ops.polygonize_full(p)
        # remove from an ext_part if there's overlap
        for p_int in int_parts:
            for i_ext, p_ext in enumerate(ext_parts):
                if p_ext.intersects(p_int):
                    ext_parts[i_ext] = p_ext.difference(p_int)

    return ext_parts
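
# A quick usage sketch: cut a 2x2 square with a vertical line. The cut line
# must cross the polygon completely for polygonize to yield separate parts.
if __name__ == "__main__":
    square = geometry.box(0, 0, 2, 2)
    cutter = geometry.LineString([(1, -1), (1, 3)])
    parts = cut_polygon(square, cutter)
    print([p.area for p in parts])  # expect two 1x2 halves: [2.0, 2.0]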
Born and raised in London of Franco-Vietnamese heritage, Louise Loubatieres received a BA in Textiles at Chelsea College of Art, followed by an MA in Menswear Fashion at the prestigious Royal College of Art. Louise worked in the fashion industry for several years before moving to Siem Reap, Cambodia to start her own business. Her boutique was born out of a love of beautiful things, a passion for travel and a desire to mix the old with the new, using traditional local craft methods and applying them to a modern design aesthetic.

We worked with Louise to curate a collection of hand-woven silk necklaces made in rural Cambodian villages. The beautiful colors represent the natural elements of the Sea to Sky corridor at different elevations. These necklaces are a sophisticated souvenir that will bring back memories of time spent in Whistler.

The grey, black and white collection represents the alpine in winter, when we are at the peak of Whistler searching for the perfect line. The black and grey represent the rugged mountain landscape, and the white represents the powder-like snow… the “goods”.

The dark green, moss green and brown/black collection shows the colors of the forest in summer, when we are hiking in Whistler. The dark green represents the pine trees, the moss green represents the forest floor, and the brown/black represents the bark of old-growth trees.

The blue hues and white collection is the color of the magnificent Pacific Ocean and Howe Sound that accompanies us along the Sea to Sky corridor. Whether it is a clear sunny day or a misty morning, the water presents us with every hue of blue. The white represents the light that shimmers on the surface.
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='SuccessNotification',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False,
                                        auto_created=True, primary_key=True)),
                # verbose_name: "Номер заказа" (order number)
                ('InvId', models.IntegerField(verbose_name='\u041d\u043e\u043c\u0435\u0440 \u0437\u0430\u043a\u0430\u0437\u0430', db_index=True)),
                # verbose_name: "Сумма" (amount)
                ('OutSum', models.CharField(max_length=15, verbose_name='\u0421\u0443\u043c\u043c\u0430')),
                # verbose_name: "Дата и время получения уведомления"
                # (date and time the notification was received)
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='\u0414\u0430\u0442\u0430 \u0438 \u0432\u0440\u0435\u043c\u044f \u043f\u043e\u043b\u0443\u0447\u0435\u043d\u0438\u044f \u0443\u0432\u0435\u0434\u043e\u043c\u043b\u0435\u043d\u0438\u044f')),
            ],
            options={
                # "Уведомление об успешном платеже" / plural:
                # "Уведомления об успешных платежах (ROBOKASSA)"
                # (notification(s) of a successful payment)
                'verbose_name': '\u0423\u0432\u0435\u0434\u043e\u043c\u043b\u0435\u043d\u0438\u0435 \u043e\u0431 \u0443\u0441\u043f\u0435\u0448\u043d\u043e\u043c \u043f\u043b\u0430\u0442\u0435\u0436\u0435',
                'verbose_name_plural': '\u0423\u0432\u0435\u0434\u043e\u043c\u043b\u0435\u043d\u0438\u044f \u043e\u0431 \u0443\u0441\u043f\u0435\u0448\u043d\u044b\u0445 \u043f\u043b\u0430\u0442\u0435\u0436\u0430\u0445 (ROBOKASSA)',
            },
            bases=(models.Model,),
        ),
    ]
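A hedged sketch of the Result URL view that would populate this model, assuming ROBOKASSA posts InvId and OutSum form fields (the two values the migration stores) and expects an "OK<InvId>" body as acknowledgement; both points, the import path, and the omitted signature check should be verified against the gateway documentation.

from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt

from robokassa.models import SuccessNotification  # hypothetical app path


@csrf_exempt
def robokassa_result(request):
    # record the notification using the fields defined in the migration above
    SuccessNotification.objects.create(
        InvId=int(request.POST['InvId']),
        OutSum=request.POST['OutSum'],
    )
    # signature (SignatureValue) verification deliberately omitted in this
    # sketch; real code must check it before trusting the notification
    return HttpResponse('OK%s' % request.POST['InvId'])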
In this third and final debate, the key issue was the economy, primarily the clarification of each candidate's tax plan. The key question was not how individual employees would be affected, but how businesses would be impacted, specifically small businesses. Obama, while saying he will only increase taxes on those earning more than $250,000 a year, puts American business in his crosshairs. And while $250K seems like a large sum, most small businesses operate well in excess of that amount and will be severely impacted. The distribution of the tax will be uneven and specifically unfair to those businesses that are more capital intensive and have more employees.

In response to our original article, many expressed concern because Obama did not state clearly in the debate whether he was speaking of taxing gross or net business income. Some believe Obama is trying to be intentionally misleading on the issue, so we decided to clarify. What we have found in our research is that Obama does say, buried deep on his website, that the amount to be taxed would be the business's net profit, not its gross receipts. If it had been gross receipts, that would have shut the doors on many small businesses in America overnight.

Still, there is a major concern with his plan, because what Obama qualifies as a small business is totally suspect. Small businesses, according to the Small Business Administration, have average incomes in the millions of dollars and can employ hundreds. This site provides a summary. So, if the Small Business Administration data is correct, how can Obama claim that 90-98% of American small businesses would be excluded from his tax increase?

The lie in Obama's plan is that he is including sole proprietorships, which aren't really businesses; they are just individuals filing taxes that are not on a W2. That includes the maid, a lone painter, etc. These are businesses by tax qualification only, not true businesses in any sense of the word. They do not employ anyone, and they do not provide the same benefits to the nation as real businesses, which employ people who also contribute to the tax base.

First, Obama attempts to delude people who make less than $250,000 into believing they will benefit under his tax plan. The plan, at first, does seem beneficial for the person in a normal job working for a US-based company. It is true that most of those people do not make more than $250K, so they would get a tax cut. Great plan, right? Think again. How many of these people work for small businesses? If you do, you definitely should consider how Obama's plan to increase your employer's taxes could cost you a raise or even your job.

Second, and most importantly, when Obama speaks of taxing only those that make more than $250,000, he groups in what he claims are small businesses that have a net profit of over $250,000. He has often referenced statistics stating how this applies to small businesses in America. These statistics are not only misleading, they are an outright lie and the cornerstone of Obama's campaign. Obama states that at least 90% of small businesses make less than $250K, but the huge flaw in that statistic is that it includes the sole proprietor. These people are not making more than $250K on average any more than the average person does in any other job in America. They are not considered a "business" for any reason other than that they do not get paid via a W2 (with automatic deductions). They do not operate as a true business, they do not employ others, and they essentially are not a business at all in any real sense of the word.

What we are most interested in are small businesses that provide jobs, and how those businesses will be impacted. We want statistics that isolate small businesses that employ people, genuine small businesses. The expert on small business in America is the Small Business Administration. The SBA does a good job of summarizing the income and employment limits a business must stay under to qualify as a "small business". None are as low as $250,000, some allow hundreds of employees, and all such businesses would pay significantly higher taxes under Obama's plan.

Obama claims that these companies, even though their taxes will rise sharply, will pay lower taxes than under Reagan. That is a lie as well. The rate could potentially be well north of 50% if you count Social Security contributions (a point that Obama conveniently glosses over). Then consider the added expense of health plans (which Obama wants to force upon all businesses). It is the highest tax rate since the Carter years, and we all know how that turned out: record unemployment. Tack on the cost of health care and you have a small business disaster waiting to happen.

Why is this dangerous, and why is it easy for Obama to mislead the average citizen? The concept of taxing a business based on its net income sounds good to the average American when you throw out what seems like a large number to most of them. Most don't earn that much money, so we are just taxing the rich, right? Wrong: a company's net income is not what the proprietors take home. It is extremely different from earned wages. It is the money left after expenses for the prior business year, such as salaries paid, equipment depreciated, etc. Most businesses reinvest large portions of their profits to grow the business, or in some cases, just to keep up with inflation. If there are no profits, there is no money to invest.

In addition, the distribution of the tax does not take into consideration that businesses are vastly different from one to the next. Some are more capital intensive, some employ more than others, and others operate in areas with a significantly higher cost of living (New York versus Mississippi, for example).
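To make the gross-versus-net distinction concrete, here is a small illustrative calculation in Python; every dollar figure and the assumed rate increase are hypothetical and are not taken from either candidate's plan.

# Illustrative arithmetic only: all figures below are hypothetical, chosen
# to show how a business can cross a $250K net-profit threshold while its
# owner takes home far less.
gross_receipts = 2000000    # hypothetical small manufacturer
expenses = 1700000          # salaries, equipment, rent, materials

net_profit = gross_receipts - expenses   # 300000 -> over the $250K line
threshold = 250000
assumed_rate_increase = 0.04             # hypothetical 4-point marginal hike

extra_tax = (net_profit - threshold) * assumed_rate_increase
print(extra_tax)   # 2000.0 -- paid out of money earmarked for reinvestment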
#!/usr/bin/env python

"""
--------------------------------------------------------------------------------
Created: Jackson Lee 9/27/14

This script reads in a tab delimited file of annotations and queries the KEGG
REST API to parse back the original KO ortholog of the entry. If the line
contains an EC reference, the script will first query each line in KEGG REST
as:

http://rest.kegg.jp/find/genes/each+query+term+and+EC+d.d.d.d

e.g. 1-deoxy-D-xylulose 5-phosphate synthase (EC 2.2.1.7)
http://rest.kegg.jp/find/genes/1-deoxy-D-xylulose+5-phosphate+synthase+2.2.1.7

and save the output in query order with the format:

gmx:100301901  DXS1; 1-deoxy-D-xylulose 5-phosphate synthase 1; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
pvu:PHAVU_009G095900g  hypothetical protein; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
pvu:PHAVU_006G159900g  hypothetical protein; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
pvu:PHAVU_003G099600g  hypothetical protein; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
pvu:PHAVU_003G148900g  hypothetical protein; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
pvu:PHAVU_003G287800g  hypothetical protein; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
pvu:PHAVU_003G287900g  hypothetical protein; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
mtr:MTR_2g020590  1-deoxy-D-xylulose 5-phosphate synthase; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
mtr:MTR_3g107740  hypothetical protein; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
mtr:MTR_4g118640  1-deoxy-D-xylulose 5-phosphate synthase; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
mtr:MTR_8g068300  1-Deoxy-D-xylulose 5-phosphate synthase; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
mtr:MTR_8g068270  1-Deoxy-D-xylulose 5-phosphate synthase; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]
mtr:MTR_8g068280  1-Deoxy-D-xylulose 5-phosphate synthase; K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]

This output will then be read in and queried for the exact search term, and
any K terms in the string are grabbed by regex. These terms are aggregated
and the top hit and score written out.

regex: "1-deoxy-D-xylulose 5-phosphate synthase;" and "EC:2.2.1.7" for "; K\d{5}"
result: K01662

Input file format:
(3R)-hydroxymyristoyl-[ACP] dehydratase (EC 4.2.1.-)
(R)-citramalate synthase (EC 2.3.1.182)
(S)-2-haloacid dehalogenase I (EC 3.8.1.2)
(S)-22C3-di-O-geranylgeranylglyceryl phosphate synthase
(S)-3-O-geranylgeranylglyceryl phosphate synthase
(Y14336) putative extracellular protein containing predicted 35aa signal peptide
1-acyl-sn-glycerol-3-phosphate acyltransferase (EC 2.3.1.51)
1-aminocyclopropane-1-carboxylate deaminase (EC 3.5.99.7)
1-deoxy-D-xylulose 5-phosphate reductoisomerase (EC 1.1.1.267)
1-deoxy-D-xylulose 5-phosphate synthase (EC 2.2.1.7)

Output: a translation table of terms and the KEGG REST output
1-deoxy-D-xylulose 5-phosphate synthase (EC 2.2.1.7)\tK01662\t5\t5
--------------------------------------------------------------------------------
usage: query_ko_from_rast_annotations.py -i in.file -d out.directory -o output.file
"""

#-------------------------------------------------------------------------------
# http thread pool code from:
# http://stackoverflow.com/questions/2632520/what-is-the-fastest-way-to-send-100-000-http-requests-in-python
#-------------------------------------------------------------------------------
# Header - Linkers, Libs, Constants
import os
import re
import collections
import sys
from argparse import ArgumentParser, RawDescriptionHelpFormatter
from urlparse import urlparse
from threading import Thread
from Queue import Queue
import httplib

#-------------------------------------------------------------------------------
# function declarations

def doWork():
    while not exitapp:
        id, urlstring, queryline = q.get()
        url = urlparse(urlstring)
        if id % 100 == 0:
            print 'Query: HTTP Thread: ' + str(id) + ' started.'
        try:
            conn = httplib.HTTPConnection(url.netloc)
            conn.request("GET", url.path)
            res = conn.getresponse()
            if res.status == 200:
                with open(outputdirectory + '/' + str(id) + '.KEGG_REST.txt', 'w') as restfile:
                    restfile.write(res.read())
                #print 'Thread: ' + str(id) + ' Query: ' + urlstring + ' ..... ' + res.reason + '\n'
                searchfile.write(str(id) + '\t' + queryline + '\n')
            else:
                print 'HTTP error, Thread: ' + str(id) + ' with error: ' + res.reason
                logfile.write(str(id) + '\t' + urlstring + '\t' + res.reason + '\n')
                raise Exception('HTTP error: ' + res.reason)
        except:
            print 'Thread: ' + str(id) + '. Error. '
            print sys.exc_info()[0]
        q.task_done()

#-------------------------------------------------------------------------------
# Body
print "Running..."

if __name__ == '__main__':
    parser = ArgumentParser(usage="query_ko_from_rast_annotations.py -i in.file -d out.directory -o output.file",
                            description=__doc__,
                            formatter_class=RawDescriptionHelpFormatter)
    parser.add_argument("-i", "--input_file", action="store",
                        dest="inputfilename", help="text input file")
    parser.add_argument("-o", "--output_filename", action="store",
                        dest="outputfilename", help="text output file")
    parser.add_argument("-d", "--output_directory", action="store",
                        dest="outputdirectory", help="text output directory")
    options = parser.parse_args()

    mandatories = ["outputfilename", "outputdirectory"]
    for m in mandatories:
        if not options.__dict__[m]:
            print "\nError: Missing Arguments\n"
            parser.print_help()
            exit(-1)

    outputdirectory = options.outputdirectory
    ec_regex = '\d\.[\d\-]*\.[\d\-]*\.[\d\-]*'

    # only query the KEGG REST API if an input file was given; this allows
    # reusing previously cached REST files on a second run
    if options.__dict__['inputfilename']:
        if not os.path.exists(outputdirectory):
            os.makedirs(outputdirectory)
        else:
            print "\nError: Directory exists!\n"
            parser.print_help()
            exit(-1)

        print "Querying KEGG REST API Service..."
        inputfilename = options.inputfilename
        api_template = "http://rest.kegg.jp/find/genes/"

        with open(inputfilename, 'U') as infile:
            infile_list = [line.strip() for line in infile]

        # undo RAST-style escapes: '2C' -> ',' and '%3B' -> ';'
        infile_list = [line.replace('2C', ',') for line in infile_list]
        infile_list = [line.replace('%3B', ';') for line in infile_list]

        urlpool = []
        for line in infile_list:
            if re.search('EC ' + ec_regex, line) != None:
                # format string for search
                query = line.strip()
                # pull out the EC numbers and drop them from the query text
                ecnum_list = re.findall('EC ' + ec_regex, query)
                ecnum_list = [ecline[3:] for ecline in ecnum_list]
                query = re.sub(' \(EC ' + ec_regex + '\)', '', query)
                # remove url syntax issues
                query = query.replace('+', '')
                query = query.replace('/', ' ')
                query = query.replace('@', ' ')
                query = query.replace(';', ' ')
                query = query.replace('  ', ' ')
                # form url from the query terms plus the EC numbers
                querylist = filter(None, query.split(' ') + ecnum_list)
                urlstring = api_template + '+'.join(querylist)
                # catch the '+-' case and url-encode parentheses
                urlstring = urlstring.replace('+-', '+')
                urlstring = urlstring.replace('(', '%28')
                urlstring = urlstring.replace(')', '%29')
                urlpool.append([urlstring, line])

        # setup threading for http requests and run connections
        concurrent = 100
        exitapp = False
        with open(outputdirectory + '/searchlist.txt', 'w') as searchfile, \
                open(outputdirectory + '/errorlog.txt', 'w') as logfile:
            q = Queue(concurrent * 2)
            for i in range(concurrent):
                t = Thread(target=doWork)
                t.daemon = True
                t.start()
            try:
                for id, urlentry in enumerate(urlpool):
                    q.put([id] + urlentry)
                q.join()
            except KeyboardInterrupt:
                exitapp = True
                sys.exit(1)

    print "Parsing REST files and writing..."
    outputfilename = options.outputfilename
    outfile = open(outputdirectory + '/' + outputfilename, 'w')
    with open(outputdirectory + '/searchlist.txt', 'U') as searchfile:
        for line in searchfile:
            i, query = line.strip().split('\t')
            # form strings for search
            ecnum_list = re.findall('EC ' + ec_regex, query)
            ecnum_list = [ecline[3:] for ecline in ecnum_list]
            querystrings = [re.sub(' \(EC ' + ec_regex + '\)', '', querystring).lower()
                            for querystring in re.split(' / | @ |; ', query)]
            ecstring = '(EC:' + ' '.join(ecnum_list) + ');'
            ko = []
            with open(outputdirectory + '/' + str(i) + '.KEGG_REST.txt', 'U') as inrestfile:
                for restline in inrestfile:
                    restline = restline.strip()
                    # if every enzyme search string, every EC number, and a
                    # KEGG KO number are in the REST output, record the KO
                    if all(querystring in restline.lower() for querystring in querystrings) \
                            and all(ecterm in restline for ecterm in ecnum_list) \
                            and re.search(r'; K\d{5}', restline) != None:
                        ko.append(re.search(r'; K\d{5}', restline).group(0)[2:])
            # determine and record the most common KO number and how common it was
            # (querystring below is the last search fragment, leaked from the
            # list comprehension above -- a Python 2 behavior kept from the original)
            counter = collections.Counter(ko)
            if len(counter) > 0:
                outfile.write(query + '\t' + counter.most_common(1)[0][0] + '\t' +
                              str(counter.most_common(1)[0][1]) + '\t' +
                              str(sum(counter.values())) + '\t' +
                              querystring + '\t' + ecstring + '\n')
            else:
                outfile.write(query + '\t\n')
    outfile.close()
    print "Done!"
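For readers who only want the aggregation idea, here is a condensed, self-contained sketch of the parsing step: it applies the same "; K\d{5}" regex the script uses to two REST hit lines (copied from the example output in the module docstring) and keeps the most common KO.

import collections
import re

rest_lines = [
    'gmx:100301901 DXS1; 1-deoxy-D-xylulose 5-phosphate synthase 1; '
    'K01662 1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]',
    'pvu:PHAVU_009G095900g hypothetical protein; K01662 '
    '1-deoxy-D-xylulose-5-phosphate synthase [EC:2.2.1.7]',
]

# pull out "; K#####" hits and strip the "; " prefix, as the script does
kos = [m.group(0)[2:] for m in
       (re.search(r'; K\d{5}', line) for line in rest_lines) if m]
top_ko, score = collections.Counter(kos).most_common(1)[0]
print('%s %d' % (top_ko, score))   # K01662 2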
The Security Council today extended the United Nations Mission in Liberia (UNMIL) until 30 September 2016, and authorized a reduction in the number of its personnel, as preparations continue for the security transition to the country’s authorities next year.

The Council “affirms its expectation that the Government of Liberia will assume fully its complete security responsibilities from UNMIL no later than 30 June 2016 and encourages Member States and multilateral organizations to continue to provide financial, technical and other assistance to the Government of Liberia in this regard…,” said a resolution that was unanimously adopted by the 15-member body.

In his latest report on UNMIL, Secretary-General Ban Ki-moon noted that it will be a “historic moment” when the Government of Liberia assumes all its security responsibilities from the Mission. “It will be an important milestone, a demonstration of confidence that the country has turned the corner from conflict, disorder and dependence to a future of sustained peace, unity and independence,” he wrote.

While welcoming the important progress made to date by the Government, with support from UNMIL and other partners, Mr. Ban noted that challenges remain that will require additional assistance on all fronts in order to complete the transition by 30 June 2016. “All stakeholders must sustain their focus on implementing the ambitious transition plan developed by the Government and also on the national reconciliation and continuing political reforms that are essential for consolidating peace,” he stated.

By today’s resolution, the Council also decided that the mandate of UNMIL will be the protection of civilians; reform of justice and security institutions; human rights promotion and protection; and protection of UN personnel. It also decided that UNMIL shall put “renewed focus” on supporting the Government to achieve a successful security transition. Further, it was decided that the Mission’s authorized military and police strength will be decreased, by 30 June 2016, by nearly 3,260 personnel.

The Council also affirmed its intention to consider the possible withdrawal of UNMIL and the transition to a future UN presence to continue to assist the Government to consolidate peace and, in this regard, requested the Secretary-General to conduct an assessment mission in order to provide recommendations to the Council by 15 November 2016.

Liberia is working hard to build institutions, pass legislation and put in place mechanisms that will enable it to maintain stability without the presence of a peacekeeping force, the head of United Nations peacekeeping told the Security Council today.
#!/usr/bin/python
# -*- coding: utf-8 -*-

import argparse
import urllib


class TestRobot(object):
    def __init__(self, robot_ip):
        self.robot_ip = robot_ip

    def head_up(self):
        # e.g. http://192.168.10.18/rev.cgi?Cmd=nav&action=18&drive=13&speed=5
        params = urllib.urlencode({'Cmd': 'nav', 'action': 18,
                                   'drive': 13, 'speed': 5})
        print params
        try:
            # use the IP passed on the command line rather than a
            # hard-coded address (the original left a debug URL in place)
            f = urllib.urlopen("http://" + self.robot_ip + "/rev.cgi?%s" % params)
        except IOError, e:
            print "Connection error: ", str(e)
        else:
            print f.read()


def parser():
    parser = argparse.ArgumentParser(description="Test ROVIO robot!",
                                     epilog="(c) 2011, Warsaw University")
    parser.add_argument('robot_ip', help='IP address of the ROVIO robot.')
    return parser


if __name__ == '__main__':
    args = parser().parse_args()
    robot = TestRobot(args.robot_ip)
    robot.head_up()
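The head_up() call above is one hard-wired instance of the robot's rev.cgi navigation interface. A hedged generalization is sketched below; it reuses only the parameters that appear in this script (Cmd=nav with action, drive and speed), and any other action codes would have to come from the ROVIO API documentation.

def nav_command(robot_ip, action, drive=None, speed=None):
    # build the same rev.cgi?Cmd=nav&... request as head_up(), but with
    # caller-supplied values; omitted parameters are simply left out
    params = {'Cmd': 'nav', 'action': action}
    if drive is not None:
        params['drive'] = drive
    if speed is not None:
        params['speed'] = speed
    url = "http://%s/rev.cgi?%s" % (robot_ip, urllib.urlencode(params))
    return urllib.urlopen(url).read()

# e.g. the head-up motion from the class above:
# nav_command('192.168.10.18', action=18, drive=13, speed=5)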
As a small business begins the proposal submission process to federal government agencies or to prime contractors, the past performance requirement is a major challenge. By definition, a start-up company in government contracting has no direct government agency past performance projects to cite in meeting the requirement in requests for proposals (RFPs) for historical references to similar projects in terms of size, duration and complexity.

Past performance data must be specific to the enterprise bidding a contract. It cannot cite historical references to the performance of individuals now in the company when they were with other firms, achievements by predecessor companies, or successful projects that the current company did not perform as its current entity. The purpose of this rigid perspective by the government is to avoid "fronting" a new enterprise with misleading information to obtain a high past performance rating.

So how can a new organization, or one that is new to government contracting, muster a response to the past performance challenge? The answer lies in historical projects that may be similar in the commercial arena, and in a high-quality proposal that clearly demonstrates an understanding of the requirement at hand, a unique and cost-effective project plan, and high-performing personnel and/or products tailored to the statement of work, to offset an interim, light past performance record.

A past performance reference sheet usually accompanies an agency RFP. It normally requires the bidder to fill it out with references to historical projects the company has performed and the contact points for confirmation. The government may request these forms in advance of the main body of the proposal to allow enough time to send them to the references. The past performance form is sent by the government to the references and you never see the result. The input goes directly from your past performance references back to the government.

Many small businesses work through prime contractors to "grow" past performance history (subcontracts count). By teaming with a sizable firm, a small entity can relate its participation to larger projects and ultimately graduate to a good library of references, carefully maintained and kept as a living, growing database of good customer service records that can be cited again and again in proposals.

It is wise to keep customer perceptions of your professionalism and products or services alive by constant vigilance, visits, surveys and other feedback mechanisms, so that you are not surprised at a proposal debriefing when you find that a client you thought rated you highly did not.

The major services maintain past performance records by contract that you can access. Inquire with them about membership at the appropriate web site and review your records regularly. The GSA utilizes service companies to rate contractors. You can get your rating by inquiring with them, much like a credit rating, except pertinent to cost, schedule and technical performance. Monitor your D&B report. It is always out there for prime contractor and government assessment of your financial health, your vendor payment history, your organization profile and your rating.

Ensure your web site, your capability statement and your marketing plans are kept current and dynamically reflective of your successes as you pursue new business, and carefully develop your library of past performance records by project, with accessible profiles to use in your government proposals.
Thanks for the rock-solid information, Ken. May I add some important tidbits that may help those companies who plan to do business with U.S. government agencies? Keep your SAM business information up to date, to ensure the agencies you are doing business with directly have the correct information on file. Once "in" a federal database, your company's information is used extensively by those ordering from your company, as a prerequisite before they place an order. Guess what typically happens when your DUNS number does not populate in SAM on the first or second attempt, or if it does populate, the registration has expired? Remember that your company is typically NOT the only business in the registry. Which business lands in the vendor's box depends largely on real-time first impressions, and secondly on an excellent real-time buyer-to-vendor relationship. Establishing and sustaining favorable impressions early will do wonders in having your company remembered favorably.
import asyncore

import gevent
from gevent.event import Event
from logbook import Logger
from socketio.server import SocketIOServer

from maildump.db import connect, disconnect, create_tables
from maildump.smtp import smtp_handler, SMTPServer
from maildump.web import app
from maildump.web_realtime import broadcast

log = Logger(__name__)
stopper = Event()
socketio_server = None


def start(http_host, http_port, smtp_host, smtp_port, db_path=None):
    global socketio_server

    # Webserver
    log.notice('Starting web server on http://{0}:{1}'.format(http_host, http_port))
    socketio_server = SocketIOServer((http_host, http_port), app,
                                     log='default' if app.debug else None)
    socketio_server.start()

    # SMTP server (registers itself with asyncore; the loop runs in a greenlet)
    log.notice('Starting smtp server on {0}:{1}'.format(smtp_host, smtp_port))
    SMTPServer((smtp_host, smtp_port), smtp_handler)
    gevent.spawn(asyncore.loop)

    # Database
    connect(db_path)
    create_tables()

    # Wait....
    try:
        stopper.wait()
    except KeyboardInterrupt:
        print  # newline after ^C
    else:
        log.debug('Received stop signal')

    # Clean up
    disconnect()
    log.notice('Terminating')


def stop():
    stopper.set()
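For completeness, a minimal launch sketch; the host and port values are illustrative rather than mandated by this module, and whether db_path=None means an in-memory database depends on how connect() is implemented.

if __name__ == '__main__':
    # serve the web UI on 1080 and accept SMTP on 1025 (illustrative ports)
    start(http_host='127.0.0.1', http_port=1080,
          smtp_host='127.0.0.1', smtp_port=1025,
          db_path=None)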
GB3VHF on 2m, being to the east of all of the hills, did not vary in signal strength. GB3BAA and GB3RAL are similarly to the east and were little affected by location. I was a little surprised that RAL was consistently weaker than BAA. GB3MCB is to the south-west, and from any hill north of Pinnacle Hill it is blocked by the other hills, hence its no-show.
import struct
import logging
from threading import Thread
from time import sleep

from base import BaseHandler


# WebSocket implementation
class BroadcastHandler(BaseHandler, Thread):

    @property
    def server(self):
        return self._server

    @property
    def tick_time(self):
        return self._tick_time

    def __init__(self, server, tick, *args, **kwargs):
        super(BroadcastHandler, self).__init__(*args, **kwargs)
        self._server = server
        self._tick_time = tick

    def run(self):
        """Broadcast to all connected clients, once per tick."""
        logging.info("Broadcast every %s ms" % str(self.tick_time))

        # Keep serving broadcast
        self.running = True
        while self.running:
            if len(self.server.connections) > 0:
                self.tick()
            sleep(self.tick_time / 1000.0)

    def tick(self):
        raise NotImplementedError("Child needs to implement this!")

    def sendMessage(self, client, s):
        """Encode and send a WebSocket message"""
        # Empty message to start with
        message = ""

        # always send an entire message as one frame (fin)
        b1 = 0x80

        # in Python 2, strs are bytes and unicodes are strings
        if type(s) == unicode:
            b1 |= self.server.text
            payload = s.encode("UTF8")
        elif type(s) == str:
            b1 |= self.server.text
            payload = s

        # first header byte: FIN flag plus opcode
        message += chr(b1)

        # never mask frames from the server to the client
        b2 = 0

        # How long is our payload?
        length = len(payload)
        if length < 126:
            b2 |= length
            message += chr(b2)
        elif length < (2 ** 16) - 1:
            b2 |= 126
            message += chr(b2)
            l = struct.pack(">H", length)
            message += l
        else:
            l = struct.pack(">Q", length)
            b2 |= 127
            message += chr(b2)
            message += l

        # Append payload to message
        message += payload

        # Send to the client
        client.send(str(message))
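As a worked example of the framing logic in sendMessage: assuming server.text is the standard WebSocket text opcode 0x1, a 5-byte payload fits in the 7-bit length field, so the whole header is just two bytes.

payload = "hello"
b1 = 0x80 | 0x1          # FIN flag + text opcode -> 0x81
b2 = len(payload)        # 5 < 126, so the length fits directly -> 0x05
frame = chr(b1) + chr(b2) + payload
print([hex(ord(c)) for c in frame[:2]])   # ['0x81', '0x5']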
And I thought Legos were cool. Check this out: Create your own corporate labyrinth with The Cubes. Read about ‘em, then buy ‘em.
from api import send_message, send_photo

import requests
import bs4


def on_msg_received(msg, matches):
    url = "http://aulete.com.br/wap/resultado.php"
    headers = {'User-Agent': 'Nokia6630/1.0 (2.3.129) SymbianOS/8.0 '
                             'Series60/2.6 Profile/MIDP-2.0 Configuration/CLDC-1.1'}
    data = {'busca': str(matches.group(1)).encode("iso-8859-1")}

    output = requests.post(url, data, headers=headers)
    ensopado = bs4.BeautifulSoup(output.content, "html.parser")
    definicao_raw = ensopado.find_all("div", id="definicao")
    spans = definicao_raw[0].find_all("span")

    definicao = ""
    if not spans:
        definicao = definicao_raw[0].find(text=True)
        #resultado = definicao_raw[0].findAll(text = True)
        #for frase in resultado:
        #    definicao += frase
    else:
        # Used for comparison when the word has more than one sense
        cabecoAnterior = False
        cabecoCounter = 0
        for span in spans:
            texto = span.findAll(text=True)
            classe = span["class"][0]
            # definicao += "```" + str(span['class']) + " = " + str(span.findAll(text = True)) + "```"

            # Handles the case where there is more than one sense
            if classe == "cabeco":
                if cabecoAnterior is False:
                    if cabecoCounter > 0:
                        definicao += "\r\n"
                    definicao += "*{}*".format(texto[0])
                    cabecoAnterior = True
                    cabecoCounter += 1
                else:
                    definicao += ":_" + texto[0] + "_ "
                    cabecoAnterior = False
            else:
                cabecoAnterior = False

            if classe == "sepsil":
                # Little hack to grab the stressed syllable.
                # It doesn't quite work, it seems... I'll figure out why later.
                tonica = span.find("em", text=True)
                # Adds a space between the term and the syllable breakdown
                definicao += " "
                for sil in texto:
                    if sil == tonica:
                        definicao += "_{}_".format(sil)
                    else:
                        definicao += "{}".format(sil)

            # Not sure what this does, but leaving it here anyway:
            #if classe == "ort":
                # I think it returns nothing; converting to str to be safe,
                # otherwise it could blow up with an error. Also added a
                # line break before the next section.
                #definicao += "[{}]\r\n".format(str(texto))
                # Ended up disabling it because it really wasn't very useful.

            if classe == "catgram":
                definicao += "```{}```\r\n".format(texto[0])
            if classe == "numdef":
                definicao += " *{}* ".format(texto[0])
            if classe == "rubrica" or classe == "regio" or classe == "uso":
                definicao += "_{}_ ".format(texto[0])
            if classe == "def":
                definicao += "{}\r\n".format("".join(texto))
            if classe == "achverb":
                definicao += "\n_{}_\n".format("".join(texto))

    print(definicao)
    #parole = definicao_raw[0].find("span", class_="cabeco", text = True)
    #parole = parole.find(text = True)
    #definicao = "*{}*\nDebug: {}".format(parole, type(parole))
    send_message(msg["chat"]["id"], str(definicao))
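A hedged way to exercise the handler locally, assuming matches is any regex match object whose group(1) holds the word to look up (which is all the handler reads) and that send_message only needs the chat id; the /aulete command pattern is a guess at what the bot framework would register.

import re

fake_msg = {"chat": {"id": 12345}}                 # minimal message stub
matches = re.match(r"^/aulete (.+)$", "/aulete saudade")
if matches:
    on_msg_received(fake_msg, matches)             # prints and sends the definition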
BCT Tires, also known as Beijing Capital Tire Co., Ltd., is one of twenty-four state-owned key tire companies in China. In fact, it’s one of the Six National Brands of Tires in China and is considered one of the top 75 world tire manufacturers. BCT Tires brings its passengers a whole new level of comfort, performance, and quality. And with a variety of models to choose from, it’s the right fit for thrifty shoppers and enthusiasts alike. It specializes mainly in passenger car tires, ultra-high performance tires, light truck tires, and truck & bus tires.
from django.db import models
from django.utils.safestring import mark_safe
from django_crypto_fields.fields import EncryptedCharField, EncryptedTextField
from edc_base.model_validators import CellNumber, TelephoneNumber
from edc_constants.choices import YES_NO


class SubjectContactFieldsMixin(models.Model):

    may_call = models.CharField(
        max_length=25,
        choices=YES_NO,
        verbose_name=mark_safe(
            'Has the participant given permission <b>to be contacted by '
            'telephone or cell</b> by study staff for follow-up purposes '
            'during the study?'))

    may_visit_home = models.CharField(
        max_length=25,
        choices=YES_NO,
        verbose_name=mark_safe(
            'Has the participant given permission for study '
            'staff <b>to make home visits</b> for follow-up purposes?'))

    may_sms = models.CharField(
        max_length=25,
        choices=YES_NO,
        null=True,
        blank=False,
        verbose_name=mark_safe(
            'Has the participant given permission <b>to be contacted by '
            'SMS</b> by study staff for follow-up purposes during the study?'))

    mail_address = EncryptedTextField(
        verbose_name='Mailing address',
        max_length=500,
        null=True,
        blank=True)

    physical_address = EncryptedTextField(
        verbose_name='Physical address with detailed description',
        max_length=500,
        blank=True,
        null=True,
        help_text='')

    subject_cell = EncryptedCharField(
        verbose_name='Cell number',
        validators=[CellNumber, ],
        blank=True,
        null=True,
        help_text='')

    subject_cell_alt = EncryptedCharField(
        verbose_name='Cell number (alternate)',
        validators=[CellNumber, ],
        blank=True,
        null=True)

    subject_phone = EncryptedCharField(
        verbose_name='Telephone',
        validators=[TelephoneNumber, ],
        blank=True,
        null=True)

    subject_phone_alt = EncryptedCharField(
        verbose_name='Telephone (alternate)',
        validators=[TelephoneNumber, ],
        blank=True,
        null=True)

    class Meta:
        abstract = True
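Since the mixin is abstract, a concrete model simply inherits from it; the model name, extra field and app label below are hypothetical, for illustration only.

from django.db import models


class SubjectLocator(SubjectContactFieldsMixin, models.Model):
    # hypothetical concrete model: gains all contact/consent fields above
    subject_identifier = models.CharField(max_length=50)

    class Meta:
        app_label = 'my_app'  # hypothetical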
the Makerie: a special sneak peek!

We can officially say we are kicking off registration on Monday, September 24 (yes ~ this coming Monday!) and thought a little sneak peek of what we've got in store would be a fun way to send you into the weekend. We are beyond elated to announce that we are doing two retreats this year ~ our original Makerie and a brand new event ~ the Makerie Sewing ~ April 18 - 21, 2013.

The original Makerie is just chock full of goodness and we'll wait until Monday to reveal the details. After all, we do love surprises and can't wait for you to see the wonderful workshop offerings! But in the meantime, we wanted to share a bit about this super exciting new event focused on sewing and textiles.

To give you a little background, Liesl Gibson of Oliver + S, who taught for us last year, planted a seed at the 2012 Makerie when she asked if we had ever thought about doing a sewing-specific Makerie and well... here we are! Liesl and I have been working all summer to put together an amazing team of teachers and fabulous workshops, as well as an entirely new event design for this one-time very special sewing event. Not only will you get to choose four classes with four different teachers, but each class will be four hours long, giving you two full days (16 hours total) of incredible instruction. In case you want more time to play, we will also be offering studio time to work on whatever you'd like.

So without further ado... here is who will be teaching at the Makerie Sewing! This team of teachers is really beyond our wildest dreams (still pinching ourselves over here!) and we are incredibly honored to have them come to Boulder in April.

The Makerie Sewing will be limited to an intimate group of participants and we have so much more in store that we can't wait to tell you. So mark your calendars for Monday when all will be revealed and registration will be open! Here's to a fabulous weekend filled with sunshine, leaf peeping, and of course, lots of happy making. See you on Monday!

oh my goodness....it sounds so exciting!!
Just updated my calendar - sounds fantastic!
Oh that's wonderful Sandi! How fun!
# Copyright (c) 2014 Yubico AB
# All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
# Additional permission under GNU GPL version 3 section 7
#
# If you modify this program, or any covered work, by linking or
# combining it with the OpenSSL project's OpenSSL library (or a
# modified version of that library), containing parts covered by the
# terms of the OpenSSL or SSLeay licenses, We grant you additional
# permission to convey the resulting work. Corresponding Source for a
# non-source form of such a combination shall include the source code
# for the parts of OpenSSL used as well as that of the covered work.

"""
Strings for Yubico Authenticator.

Note: String names must not start with underscore (_).
"""

organization = "Yubico"
domain = "yubico.com"
app_name = "Yubico Authenticator"
win_title_1 = "Yubico Authenticator (%s)"
about_1 = "About: %s"
copyright = "Copyright &copy; Yubico"
libraries = "Library versions"
version_1 = "Version: %s"
wait = "Please wait..."
error = "Error"
menu_file = "&File"
menu_help = "&Help"
action_about = "&About"
action_add = "&Add..."
action_password = "Set/Change &password"
action_settings = "&Settings"
action_delete = "&Delete"
action_show = "&Show credentials"
action_close = "&Close Window"
action_quit = "&Quit"
password = "Password"
settings = "Settings"
advanced = "Advanced"
search = "Search"
pass_required = "Password required"
remember = "Remember password"
no_key = "Insert a YubiKey..."
key_busy = "YubiKey already in use!"
key_present = "YubiKey found. Reading..."
key_removed = "YubiKey removed"
key_removed_desc = "There was an error communicating with the device!"
ykstd_slots = "YubiKey standard slots"
enable_slot_1 = "Read from slot %d"
n_digits = "Number of digits"
enable_systray = "Show in system tray"
kill_scdaemon = "Kill scdaemon on show"
reader_name = "Card reader name"
no_creds = "No credentials available"
add_cred = "New credential"
cred_name = "Credential name"
cred_key = "Secret key (base32)"
cred_type = "Credential type"
cred_totp = "Time based (TOTP)"
cred_hotp = "Counter based (HOTP)"
algorithm = "Algorithm"
invalid_name = "Invalid name"
invalid_name_desc = "Name must be at least 3 characters"
invalid_key = "Invalid key"
invalid_key_desc = "Key must be base32 encoded"
set_pass = "Set password"
new_pass = "New password (blank for none)"
ver_pass = "Verify new password"
pass_mismatch = "Passwords do not match"
pass_mismatch_desc = "Please enter the same password twice"
touch_title = "Touch required"
touch_desc = "Touch your YubiKey now"
delete_title = "Confirm credential deletion"
delete_desc_1 = """<span>Are you sure you want to delete the credential?</span>
<br>
This action cannot be undone.
<br><br>
<b>Delete credential: %s</b>
"""
slot = "YubiKey slot"
slot_2 = "Slot %d (%s)"
free = "free"
in_use = "in use"
require_touch = "Require touch"
no_slot = "No slot chosen"
no_slot_desc = "Please choose a slot to write the credential to"
overwrite_slot = "Overwrite slot?"
overwrite_slot_desc_1 = "This will overwrite the credential currently " \
    "stored in slot %d. This action cannot be undone."
overwrite_entry = "Overwrite entry?"
overwrite_entry_desc = "An entry with this username already exists.\n\nDo " \
    "you wish to overwrite it? This action cannot be undone."
qr_scan = "Scan a QR code"
qr_scanning = "Scanning for QR code..."
qr_not_found = "QR code not found"
qr_not_found_desc = "No usable QR code detected. Make sure the QR code is " \
    "fully visible on your primary screen and try again."
qr_not_supported = "Credential not supported"
qr_not_supported_desc = "This credential type is not supported for slot " \
    "based usage."
qr_invalid_type = "Invalid OTP type"
qr_invalid_type_desc = "Only TOTP and HOTP types are supported."
qr_invalid_digits = "Invalid number of digits"
qr_invalid_digits_desc = "An OTP may only contain 6 or 8 digits."
qr_invalid_algo = "Unsupported algorithm"
qr_invalid_algo_desc = "SHA1 and SHA256 are the only supported OTP " \
    "algorithms at this time."
tt_slot_enabled_1 = "Check to calculate TOTP codes using the YubiKey " \
    "standard slot %d credential."
tt_num_digits = "The number of digits to show for the credential."
tt_systray = "When checked, display an icon in the systray, and leave the " \
    "application running there when closed."
tt_kill_scdaemon = "Kills any running scdaemon process when the window is " \
    "shown. This is useful when using this application together with GnuPG " \
    "to avoid GnuPG locking the device."
tt_reader_name = "Changes the default smartcard reader name to look for. " \
    "This can be used to target a specific YubiKey when multiple are used, " \
    "or to target an NFC reader."
ccid_disabled = '<b>CCID (smart card capabilities) is disabled on the ' \
    'inserted YubiKey.</b><br><br>Without CCID enabled, you will only be ' \
    'able to store 2 credentials.<br><br>' \
    '<a href="%s">Learn how to enable CCID</a><br>'
no_space = "No space available"
no_space_desc = "There is not enough space to add another " \
    "credential on your device.\n\nTo create free space to add a " \
    "new credential, delete those you no longer need."
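One observation on the names above, sketched below: the trailing _N on names like win_title_1 and slot_2 appears to track how many %-style arguments the string expects. That convention is inferred from the values themselves, not documented in the module.

# inferred convention: the _N suffix marks the number of format arguments
print(win_title_1 % 'Slot 2')    # -> "Yubico Authenticator (Slot 2)"
print(slot_2 % (1, free))        # -> "Slot 1 (free)"
print(enable_slot_1 % 2)         # -> "Read from slot 2"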
Wheelchair Carry Bag With A Removable Zip Pouch.

This is an excellent quality bag, and is very spacious, with good zipped pockets and pouch. Fully recommended.

Very pleased with the quality of the bag, and it is reasonably priced.

I like these bags as they fit well on the back of my chair. They do, however, wear out quite quickly and only last about 2 years. The zips are not very strong and break, so I'm always careful not to over-fill the bag. Great service from AMC and I will purchase from here again.
'''
Module of Windows API for plyer.cpu.
'''

from ctypes import (
    c_ulonglong, c_ulong, byref, Structure, POINTER,
    Union, windll, create_string_buffer, sizeof, cast,
    c_void_p, c_uint32
)
from ctypes.wintypes import (
    BYTE, DWORD, WORD
)

from plyer.facades import CPU

KERNEL = windll.kernel32
ERROR_INSUFFICIENT_BUFFER = 0x0000007A


class CacheType:
    '''
    Win API PROCESSOR_CACHE_TYPE enum.
    '''

    unified = 0
    instruction = 1
    data = 2
    trace = 3


class RelationshipType:
    '''
    Win API LOGICAL_PROCESSOR_RELATIONSHIP enum.
    '''

    processor_core = 0     # logical proc sharing single core
    numa_node = 1          # logical proc sharing single NUMA node
    cache = 2              # logical proc sharing cache
    processor_package = 3  # logical proc sharing physical package
    group = 4              # logical proc sharing processor group
    all = 0xffff           # logical proc info for all groups


class CacheDescriptor(Structure):
    '''
    Win API CACHE_DESCRIPTOR struct.
    '''

    _fields_ = [
        ('Level', BYTE),
        ('Associativity', BYTE),
        ('LineSize', WORD),
        ('Size', DWORD),
        ('Type', DWORD)
    ]


class ProcessorCore(Structure):
    '''
    Win API ProcessorCore struct.
    '''

    _fields_ = [('Flags', BYTE)]


class NumaNode(Structure):
    '''
    Win API NumaNode struct.
    '''

    _fields_ = [('NodeNumber', DWORD)]


class SystemLPIUnion(Union):
    '''
    Win API SYSTEM_LOGICAL_PROCESSOR_INFORMATION union without name.
    '''

    _fields_ = [
        ('ProcessorCore', ProcessorCore),
        ('NumaNode', NumaNode),
        ('Cache', CacheDescriptor),
        ('Reserved', c_ulonglong)
    ]


class SystemLPI(Structure):
    '''
    Win API SYSTEM_LOGICAL_PROCESSOR_INFORMATION struct.
    '''

    _fields_ = [
        ('ProcessorMask', c_ulong),
        ('Relationship', c_ulong),
        ('LPI', SystemLPIUnion)
    ]


class WinCPU(CPU):
    '''
    Implementation of Windows CPU API.
    '''

    @staticmethod
    def _countbits(mask):
        # make sure the correct ULONG_PTR size is used on 64bit
        # https://docs.microsoft.com/en-us/windows/
        # desktop/WinProg/windows-data-types
        # note: not a pointer per-se, != PULONG_PTR
        ulong_ptr = c_ulonglong if sizeof(c_void_p) == 8 else c_ulong

        # note: c_ulonglong only on 64bit, otherwise c_ulong
        # DWORD == c_uint32
        # https://docs.microsoft.com/en-us/windows/
        # desktop/WinProg/windows-data-types
        lshift = c_uint32(sizeof(ulong_ptr) * 8 - 1)
        assert lshift.value in (31, 63), lshift  # 32 or 64 bits - 1
        lshift = lshift.value
        test = 1 << lshift
        assert test % 2 == 0, test

        count = 0
        i = 0
        while i <= lshift:
            i += 1  # do NOT remove!!!

            # test value has to be %2 == 0,
            # except the last case where the value is 1,
            # so that int(test) == int(float(test))
            # and the mask bit is counted correctly
            assert test % 2 == 0 or float(test) == 1.0, test

            # https://stackoverflow.com/a/1746642/5994041
            # note: useful to print(str(bin(int(...)))[2:])
            count += 1 if (mask & int(test)) else 0
            test /= 2
        return count

    def _logprocinfo(self, relationship):
        get_logical_process_info = KERNEL.GetLogicalProcessorInformation

        # first call with no structure to get the real size of the required
        buff_length = c_ulong(0)
        result = get_logical_process_info(None, byref(buff_length))
        assert not result, result
        error = KERNEL.GetLastError()
        assert error == ERROR_INSUFFICIENT_BUFFER, error
        assert buff_length, buff_length

        # create buffer from the real winapi buffer length
        buff = create_string_buffer(buff_length.value)

        # call again with buffer pointer + the same length as arguments
        result = get_logical_process_info(buff, byref(buff_length))
        assert result, (result, KERNEL.GetLastError())

        # memory size of one LPI struct in the array of LPI structs
        offset = sizeof(SystemLPI)

        values = {
            key: 0 for key in (
                'relationship', 'mask',
                'L1', 'L2', 'L3'
            )
        }

        for i in range(0, buff_length.value, offset):
            slpi = cast(
                buff[i: i + offset],
                POINTER(SystemLPI)
            ).contents

            if slpi.Relationship != relationship:
                continue

            values['relationship'] += 1
            values['mask'] += self._countbits(slpi.ProcessorMask)

            if slpi.LPI.Cache.Level == 1:
                values['L1'] += 1
            elif slpi.LPI.Cache.Level == 2:
                values['L2'] += 1
            elif slpi.LPI.Cache.Level == 3:
                values['L3'] += 1

        return values

    def _sockets(self):
        # physical CPU sockets (or slots) on motherboard
        return self._logprocinfo(
            RelationshipType.processor_package
        )['relationship']

    def _physical(self):
        # cores
        return self._logprocinfo(
            RelationshipType.processor_core
        )['relationship']

    def _logical(self):
        # cores * threads
        # if hyperthreaded core -> more than one logical processor
        return self._logprocinfo(
            RelationshipType.processor_core
        )['mask']

    def _cache(self):
        # L1, L2, L3 cache count
        result = self._logprocinfo(
            RelationshipType.cache
        )
        return {
            key: result[key]
            for key in result
            if key in ('L1', 'L2', 'L3')
        }

    def _numa(self):
        # numa nodes
        return self._logprocinfo(
            RelationshipType.numa_node
        )['relationship']


def instance():
    '''
    Instance for facade proxy.
    '''
    return WinCPU()


# Resources:
# GetLogicalProcessInformation
# https://msdn.microsoft.com/en-us/library/ms683194(v=vs.85).aspx
# SYSTEM_LOGICAL_PROCESSOR_INFORMATION
# https://msdn.microsoft.com/en-us/library/ms686694(v=vs.85).aspx
# LOGICAL_PROCESSOR_RELATIONSHIP enum (0 - 4, 0xffff)
# https://msdn.microsoft.com/2ada52f0-70ec-4146-9ef7-9af3b08996f9
# CACHE_DESCRIPTOR struct
# https://msdn.microsoft.com/38cfa605-831c-45ef-a99f-55f42b2b56e9
# PROCESSOR_CACHE_TYPE
# https://msdn.microsoft.com/23044f67-e944-43c2-8c75-3d2fba87cb3c
# C example
# https://msdn.microsoft.com/en-us/904d2d35-f419-4e8f-a689-f39ed926644c
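A quick inspection sketch (Windows only); it pokes the private helpers directly, which is fine for exploration, while real callers would normally go through the plyer.cpu facade instead.

if __name__ == '__main__':
    cpu = instance()
    print('sockets :', cpu._sockets())
    print('physical:', cpu._physical())
    print('logical :', cpu._logical())
    print('cache   :', cpu._cache())   # {'L1': ..., 'L2': ..., 'L3': ...}
    print('numa    :', cpu._numa())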
You know how pregnant mothers do that nesting thing? Sometimes preparing for the baby seems to be one huge to-do list: clean under the bed, dust the house, empty the cupboards of junk. All those jobs you know you won’t get to in the busy months ahead.

This day I was going through all the baby items I’d collected. Some of them were new, some of them second-hand, gifted to me by friends and family. And of course, to make them fresh and clean for my special bundle, I needed to wash and dry them this beautiful spring day.

I’d been mulling over my epiphany from a few weeks earlier, when I was seven months pregnant. It had prompted much soul-searching. Here I was, about to have a little baby in my arms, and I hadn’t even really given a thought to our long-term future!

Sadly, I’d had my own childhood cut short at 16 when my mother died of cancer. So, I wondered, how could I possibly do motherhood well? I had no mother to guide me on what was ahead! In reality, I did have fantastic role models in my step-mother, mother-in-law, aunts, and older sisters. But nothing in the place left where my mother used to be. It was like a raw-edged, gaping hole.

As I stood there folding my freshly laundered baby items, I imagined a picture of me with an adult – my new son or daughter. We were happy, bright, relating well together and enjoying one another’s company. There was no angst, no bitterness, no sense of misunderstanding. This is where I wanted to arrive with my adult offspring.

Suddenly I ‘saw’ my new future. I had a job to do, and it would be one of the most important assignments of my life. I was about to be a parent. It wasn’t a short-term project, but encompassed a much larger view. It was like the difference between seeing a bedside lamp, and looking at stars.

I realized my husband and I had quite a job ahead. We were beginning a whole brave new journey, and we’d be attempting the miraculous: taking this new-born, defenseless, and totally dependent baby to that lovely, happy, interdependent adult I could imagine. That was to be our task. Our challenge. And our focus. The road ahead would be a long one because, as I calculated that afternoon, we had around 20 years to see it through to the end!

Click HERE for the next part of my story, when the Doctor’s report was quite scary!
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('digikey', '0006_auto_20151028_0846'),
    ]

    operations = [
        migrations.CreateModel(
            name='Order_Details',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False,
                                        auto_created=True, primary_key=True)),
                ('quantity', models.IntegerField()),
            ],
        ),
        migrations.RemoveField(
            model_name='components',
            name='order_id',
        ),
        migrations.RemoveField(
            model_name='components',
            name='quantity',
        ),
        migrations.AddField(
            model_name='order_details',
            name='component',
            field=models.ForeignKey(to='digikey.Components'),
        ),
        migrations.AddField(
            model_name='order_details',
            name='order',
            field=models.ForeignKey(to='digikey.Orders'),
        ),
        migrations.AddField(
            model_name='components',
            name='associated_order',
            field=models.ManyToManyField(to='digikey.Orders',
                                         through='digikey.Order_Details'),
        ),
    ]
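For orientation, a hedged reconstruction of the models this migration leaves behind; any field not named in the operations above is a guess. The point of the change is that the per-order quantity moves off Components and onto an explicit through model.

from django.db import models


class Components(models.Model):
    # the old order_id FK and quantity field are gone; orders are now
    # linked through Order_Details
    associated_order = models.ManyToManyField('Orders',
                                              through='Order_Details')


class Orders(models.Model):
    pass  # unchanged by this migration


class Order_Details(models.Model):
    component = models.ForeignKey(Components)   # pre-Django-2 style, no on_delete
    order = models.ForeignKey(Orders)
    quantity = models.IntegerField()            # per-order quantity lives here now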
Simply stunning: Fiesta's curved apron-front design showcases its gorgeous hand-hammered copper texture. This copper apron sink may just become the conversation starter at every party. The Fiesta bar/prep sink is artisan crafted, forged of high-quality recycled copper. It takes thousands of hammer strikes by the artisan to create this one-of-a-kind, functional work of art.

Outside dimensions: 19" x 7.5". Inside dimensions: 16" x 7".
# -*- coding: utf-8 -*- ## ## This file is part of Invenio. ## Copyright (C) 2015 CERN. ## ## Invenio is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 2 of the ## License, or (at your option) any later version. ## ## Invenio is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Invenio; if not, write to the Free Software Foundation, Inc., ## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """PersistentIdentifier store and registration. Usage example for registering new identifiers:: from flask import url_for from invenio.pid_store import PersistentIdentifier # Reserve a new DOI internally first pid = PersistentIdentifier.create('doi','10.0572/1234') # Get an already reserved DOI pid = PersistentIdentifier.get('doi', '10.0572/1234') # Assign it to a record. pid.assign('rec', 1234) url = url_for("record.metadata", recid=1234, _external=True) doc = "<resource ...." # Pre-reserve the DOI in DataCite pid.reserve(doc=doc) # Register the DOI (note parameters depended on the provider and pid type) pid.register(url=url, doc=doc) # Reassign DOI to new record pid.assign('rec', 5678, overwrite=True), # Update provider with new information pid.update(url=url, doc=doc) # Delete the DOI (you shouldn't be doing this ;-) pid.delete() """ import os from datetime import datetime from invenio.config import CFG_PYLIBDIR from invenio.dbquery import run_sql from invenio.pluginutils import PluginContainer from invenio.pid_provider import PidProvider PIDSTORE_OBJECT_TYPES = ['rec', ] """ Definition of supported object types """ # # Internal configuration values. Normally you will not need to edit # any of the configuration values below. # PIDSTORE_STATUS_NEW = 'N' """ The pid has *not* yet been registered with the service provider. """ PIDSTORE_STATUS_REGISTERED = 'R' """ The pid has been registered with the service provider. """ PIDSTORE_STATUS_DELETED = 'D' """ The pid has been deleted/inactivated with the service proivider. This should very rarely happen, and must be kept track of, as the PID should not be reused for something else. """ PIDSTORE_STATUS_RESERVED = 'K' """ The pid has been reserved in the service provider but not yet fully registered. """ def plugin_builder(plugin_name, plugin_code): if 'provider' in dir(plugin_code): candidate = getattr(plugin_code, 'provider') try: if issubclass(candidate, PidProvider): return candidate except: pass raise ValueError('%s is not a valid PID provider' % plugin_name) _PID_PROVIDERS = PluginContainer( os.path.join(CFG_PYLIBDIR, 'invenio', 'pid_providers', '*.py'), plugin_builder=plugin_builder) class PersistentIdentifier(object): """ Store and register persistent identifiers Assumptions: * Persistent identifiers can be represented as a string of max 255 chars. * An object has many persistent identifiers. * A persistent identifier has one and only one object. 
""" def __init__(self, id=None, pid_type=None, pid_value=None, pid_provider=None, status=None, object_type=None, object_value=None, created=None, last_modified=None): """ :param id: Id of persistent identifier entry :param pid_type: Persistent Identifier Schema :param pid_str: Persistent Identifier :param pid_provider: Persistent Identifier Provider :param status: Status of persistent identifier, e.g. registered, reserved, deleted :param object_type: Object Type - e.g. rec for record :param object_value: Object ID - e.g. a record id :param created: Creation datetime of entry :param last_modified: Last modification datetime of entry """ self.id = id self.pid_type = pid_type self.pid_value = pid_value self.pid_provider = pid_provider self.status = status self.object_type = object_type self.object_value = object_value self.created = created or datetime.now() self.last_modified = last_modified or datetime.now() def __repr__(self): return self.__dict__.__repr__() # # Class methods # @classmethod def create(cls, pid_type, pid_value, pid_provider='', provider=None): """ Internally reserve a new persistent identifier in Invenio. A provider for the given persistent identifier type must exists. By default the system will choose a provider according to the pid type. If desired, the default system provider can be overridden via the provider keyword argument. Returns PID object if successful otherwise None. """ # Ensure provider exists if provider is None: provider = PidProvider.create(pid_type, pid_value, pid_provider) if not provider: raise Exception( "No provider found for %s:%s (%s)" % ( pid_type, pid_value, pid_provider) ) try: obj = cls(pid_type=provider.pid_type, pid_value=provider.create_new_pid(pid_value), pid_provider=pid_provider, status=PIDSTORE_STATUS_NEW) obj._provider = provider run_sql( 'INSERT INTO pidSTORE ' '(pid_type, pid_value, pid_provider, status,' ' created, last_modified) ' 'VALUES (%s, %s, %s, %s, NOW(), NOW())', (obj.pid_type, obj.pid_value, obj.pid_provider, obj.status) ) obj.log("CREATE", "Created") return obj except Exception, e: obj.log("CREATE", e.message) raise e @classmethod def get(cls, pid_type, pid_value, pid_provider='', provider=None): """ Get persistent identifier. Returns None if not found. """ res = run_sql( 'SELECT id, pid_type, pid_value, pid_provider, status, ' 'object_type, object_value, created, last_modified ' 'FROM pidSTORE ' 'WHERE pid_type=%s and pid_value=%s and pid_provider=%s', (pid_type, pid_value, pid_provider) ) try: obj = cls(*res[0]) obj._provider = provider return obj except IndexError: return None @classmethod def exists(cls, pid_type, pid_value): """Check existence of a PID.""" res = run_sql( 'SELECT id from pidSTORE where pid_type=%s and pid_value=%s', (pid_type, pid_value)) return True if res else False # # Instance methods # def has_object(self, object_type, object_value): """ Determine if this persistent identifier is assigned to a specific object. """ if object_type not in PIDSTORE_OBJECT_TYPES: raise Exception("Invalid object type %s." % object_type) return self.object_type == object_type and \ self.object_value == object_value def get_provider(self): """ Get the provider for this type of persistent identifier """ if self._provider is None: self._provider = PidProvider.create( self.pid_type, self.pid_value, self.pid_provider ) return self._provider def assign(self, object_type, object_value, overwrite=False): """ Assign this persistent identifier to a given object Note, the persistent identifier must first have been reserved. 
Also, if an existing object is already assigned to the pid, it will raise an exception unless overwrite=True. """ if object_type not in PIDSTORE_OBJECT_TYPES: raise Exception("Invalid object type %s." % object_type) if not self.id: raise Exception( "You must first create the persistent identifier before you " "can assign objects to it." ) if self.is_deleted(): raise Exception( "You cannot assign objects to a deleted persistent identifier." ) # Check for an existing object assigned to this pid existing_obj_id = self.get_assigned_object(object_type) if existing_obj_id and existing_obj_id != object_value: if not overwrite: raise Exception( "Persistent identifier is already assigned to another " "object" ) else: self.log( "ASSIGN", "Unassigned object %s:%s (overwrite requested)" % ( self.object_type, self.object_value) ) self.object_type = None self.object_value = None elif existing_obj_id and existing_obj_id == object_value: # The object is already assigned to this pid. return True self.object_type = object_type self.object_value = object_value self._update() self.log("ASSIGN", "Assigned object %s:%s" % (self.object_type, self.object_value)) return True def update(self, with_deleted=False, *args, **kwargs): """ Update the persistent identifier with the provider. """ if self.is_new() or self.is_reserved(): raise Exception( "Persistent identifier has not yet been registered." ) if not with_deleted and self.is_deleted(): raise Exception("Persistent identifier has been deleted.") provider = self.get_provider() if provider is None: self.log("UPDATE", "No provider found.") raise Exception("No provider found.") if provider.update(self, *args, **kwargs): if with_deleted and self.is_deleted(): self.status = PIDSTORE_STATUS_REGISTERED self._update() return True return False def reserve(self, *args, **kwargs): """ Reserve the persistent identifier with the provider Note, the reserve method may be called multiple times, even if it was already reserved. """ if not (self.is_new() or self.is_reserved()): raise Exception( "Persistent identifier has already been registered." ) provider = self.get_provider() if provider is None: self.log("RESERVE", "No provider found.") raise Exception("No provider found.") if provider.reserve(self, *args, **kwargs): self.status = PIDSTORE_STATUS_RESERVED self._update() return True return False def register(self, *args, **kwargs): """ Register the persistent identifier with the provider """ if self.is_registered() or self.is_deleted(): raise Exception( "Persistent identifier has already been registered." ) provider = self.get_provider() if provider is None: self.log("REGISTER", "No provider found.") raise Exception("No provider found.") if provider.register(self, *args, **kwargs): self.status = PIDSTORE_STATUS_REGISTERED self._update() return True return False def delete(self, *args, **kwargs): """ Delete the persistent identifier """ if self.is_new(): # New persistent identifier which hasn't been registered yet: just # delete it completely, but keep the log entries. # Remove links to log entries (but otherwise leave the log entries) run_sql('UPDATE pidLOG ' 'SET id_pid=NULL WHERE id_pid=%s', (self.id, )) run_sql("DELETE FROM pidSTORE WHERE id=%s", (self.id, )) self.log("DELETE", "Unregistered PID successfully deleted") else: provider = self.get_provider() if not provider.delete(self, *args, **kwargs): return False self.status = PIDSTORE_STATUS_DELETED self._update() return True def sync_status(self, *args, **kwargs): """Synchronize persistent identifier status.
Used when the provider uses an external service, which might have been modified outside of our system. """ provider = self.get_provider() result = provider.sync_status(self, *args, **kwargs) self._update() return result def get_assigned_object(self, object_type=None): """Return the object id if this pid is assigned to <object_type>.""" if object_type is not None and self.object_type == object_type: return self.object_value return None def is_registered(self): """Returns true if the persistent identifier has been registered """ return self.status == PIDSTORE_STATUS_REGISTERED def is_deleted(self): """Returns true if the persistent identifier has been deleted """ return self.status == PIDSTORE_STATUS_DELETED def is_new(self): """ Returns true if the persistent identifier has not yet been registered or reserved """ return self.status == PIDSTORE_STATUS_NEW def is_reserved(self): """ Returns true if the persistent identifier has been reserved. """ return self.status == PIDSTORE_STATUS_RESERVED def log(self, action, message): if self.pid_type and self.pid_value: message = "[%s:%s] %s" % (self.pid_type, self.pid_value, message) run_sql('INSERT INTO pidLOG (id_pid, timestamp, action, message) ' 'VALUES (%s, NOW(), %s, %s)', (self.id, action, message)) def _update(self): """Update the pidSTORE (self) object status on the DB.""" run_sql( 'UPDATE pidSTORE ' 'SET status=%s, object_type=%s, object_value=%s, ' 'last_modified=NOW() WHERE pid_type=%s and pid_value=%s', (self.status, self.object_type, self.object_value, self.pid_type, self.pid_value) )
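For orientation, here is a minimal sketch of a provider plugin as _PID_PROVIDERS would discover it under CFG_PYLIBDIR/invenio/pid_providers/*.py. The method names are assumptions inferred from how PersistentIdentifier calls its provider in this module; the real invenio.pid_provider base class may already implement some of them.

# pid_providers/local_doi.py -- illustrative sketch only
from invenio.pid_provider import PidProvider

class LocalDOIProvider(PidProvider):
    """Toy provider that 'registers' DOIs without any external service."""
    pid_type = 'doi'

    def create_new_pid(self, pid_value):
        # Return the value to store; a real provider might mint one here.
        return pid_value

    def reserve(self, pid, *args, **kwargs):
        return True

    def register(self, pid, *args, **kwargs):
        return True

    def update(self, pid, *args, **kwargs):
        return True

    def delete(self, pid, *args, **kwargs):
        return True

    def sync_status(self, pid, *args, **kwargs):
        return True

# plugin_builder() looks for this exact module-level name:
provider = LocalDOIProvider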
Some of the best things about Cub Scouting are the activities you get to do: camping, hiking, racing model cars, going on field trips, or doing projects that help your hometown and the people who live there. Cub Scouting means "doing." In February, when Scouting celebrates its "birthday," packs across the country hold blue and gold banquets. In nearly all packs, the banquet is a very special event.
import datetime from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse, reverse_lazy from django.http import HttpResponseRedirect from django.shortcuts import render from django.utils.decorators import method_decorator from django.views.generic.base import TemplateView, RedirectView from django.views.generic.edit import FormView from pointofsale.models import Drink, Account, DrinkOrder from pubsite.models import Participant, get_current_event, Event class SaleView(TemplateView): template_name = "pointofsale/sale.html" success_url = reverse_lazy("pos:sale") insufficient = False @method_decorator(login_required) def dispatch(self, *args, **kwargs): return super(SaleView, self).dispatch(*args, **kwargs) def get_context_data(self, **kwargs): context = super(SaleView, self).get_context_data(**kwargs) # display an error if there was not enough money in the account to buy a drink context['insufficient'] = self.insufficient # get the current event, or leave the context as-is if there is none try: event = get_current_event() except Event.DoesNotExist: return context context['drinks'] = Drink.objects.all() context['accounts'] = Account.objects.filter(participant__event=event) # get the last few drinks that have been bought during the event context['log'] = DrinkOrder.objects.filter(account__participant__event=event).order_by('-time')[:10] return context class ParticipantOverview(TemplateView): template_name = "pointofsale/participants.html" @method_decorator(login_required) def dispatch(self, *args, **kwargs): return super(ParticipantOverview, self).dispatch(*args, **kwargs) def get_context_data(self, **kwargs): context = super(ParticipantOverview, self).get_context_data(**kwargs) # get the latest event and its participants try: participant_list = Participant.objects.filter(event=get_current_event()) # sort the participants according to whether they have an account or not context['not_finished'] = [] context['finished'] = [] for p in participant_list: try: p.account except Account.DoesNotExist: # participant doesn't have an account context['not_finished'].append(p) else: # participant does have an account context['finished'].append(p) except Event.DoesNotExist: pass # return empty context return context class BuyDrinkRedirectView(RedirectView): pattern_name = "pos:sale" permanent = False @method_decorator(login_required) def dispatch(self, *args, **kwargs): return super(BuyDrinkRedirectView, self).dispatch(*args, **kwargs) def get_redirect_url(self, participant, drink, quantity, *args, **kwargs): try: buy_drink(participant, drink, quantity) except InsufficientFundsException: self.pattern_name = "pos:sale_insufficient" except Account.DoesNotExist: # someone tried to buy something for an account which does not exist # let it slide for now, but TODO: handle this gracefully pass return super(BuyDrinkRedirectView, self).get_redirect_url(*args, **kwargs) class AddCreditsRedirectView(RedirectView): pattern_name = "pos:participants" permanent = False @method_decorator(login_required) def dispatch(self, *args, **kwargs): return super(AddCreditsRedirectView, self).dispatch(*args, **kwargs) def get_redirect_url(self, participant, *args, **kwargs): p = Participant.objects.get(pk=participant) try: p.account.credits += 5000 p.account.save() except Account.DoesNotExist: # the participant has no account yet; create one holding the credits a = Account(participant=p, credits=5000) a.save() return super(AddCreditsRedirectView, self).get_redirect_url(*args, **kwargs) class GenerateCSVView(TemplateView): template_name = "pointofsale/csv.html" @method_decorator(login_required) def
dispatch(self, *args, **kwargs): return super(GenerateCSVView, self).dispatch(*args, **kwargs) def get_context_data(self, **kwargs): context = super(GenerateCSVView, self).get_context_data(**kwargs) context['csv'] = "id,amount,name,address,place,IBAN,email,date\n" try: e = get_current_event() participants = Participant.objects.filter(event=e).order_by('account__debit_id') for p in participants: try: id = p.account.debit_id context['csv'] += """{id},{amount},"{name}","{address}","{place}","{iban}","{email}",{date}\n""".format( id=id*2-1, amount=p.price, name=p.user.get_full_name(), address=p.address + " " + p.postal_code, place=p.city, iban=p.iban, email=p.user.email, date=e.start_date) context['csv'] += """{id},{amount},"{name}","{address}","{place}","{iban}","{email}",{date}\n""".format( id=id*2, amount=p.account.get_credits_used()/100.0, name=p.user.get_full_name(), address=p.address + " " + p.postal_code, place=p.city, iban=p.iban, email=p.user.email, date=e.end_date) except Account.DoesNotExist: # The participant has no account and hence no costs, so nothing is added to the csv pass except Event.DoesNotExist: return context # There are no events so there is no CSV to be generated return context def render_to_response(self, context, **kwargs): return super(GenerateCSVView, self).render_to_response(context, content_type="text/plain", **kwargs) # override the MIME type class InsufficientFundsException(Exception): pass def buy_drink(participant, drink, quantity): p = Participant.objects.get(pk=participant) d = Drink.objects.get(pk=drink) quantity = int(quantity) if p.account.get_credits_remaining() < d.price * quantity: raise InsufficientFundsException() for i in range(quantity): DrinkOrder.objects.create(account=p.account, drink=d) # create() already saves the row
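The views above lean on a handful of model fields and helpers (price, credits, debit_id, get_credits_used, get_credits_remaining). A minimal sketch of models consistent with that usage follows; it is an assumption inferred from the view code, not the real pointofsale/models.py, and it uses the pre-Django-2.0 ForeignKey style matching the django.core.urlresolvers import above.

# Illustrative sketch of the assumed models
from django.db import models

class Drink(models.Model):
    name = models.CharField(max_length=64)
    price = models.IntegerField()  # in cents, as suggested by /100.0 in the CSV view

class Account(models.Model):
    participant = models.OneToOneField('pubsite.Participant')
    credits = models.IntegerField(default=0)  # also in cents
    debit_id = models.IntegerField(unique=True)

    def get_credits_used(self):
        # Total value of all drink orders booked against this account.
        return sum(order.drink.price for order in self.drinkorder_set.all())

    def get_credits_remaining(self):
        return self.credits - self.get_credits_used()

class DrinkOrder(models.Model):
    account = models.ForeignKey(Account)
    drink = models.ForeignKey(Drink)
    time = models.DateTimeField(auto_now_add=True)  # used by order_by('-time')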
Red Suits Anjali More! Actress Anjali started her career in Tamil and became popular in the Telugu states as well. She couldn’t become a star in either language, but she is known for her acting skills. She has been fluctuating between fat and fit body shapes quite regularly, yet her looks haven’t lost the charm she naturally has, in any way. In a recent photoshoot she wore a long red dress, and it seems to be the best-suited outfit for her in a long time. The actress is busy in Tamil, as her career in Telugu seems to be almost over!
import datetime import math import sys import time from Keysight34972A import Keysight34972A from Fluke7341 import Fluke7341 from Fluke1502A import Fluke1502A NUM_PROBES = 2 PROBE_LIST = [1, 2] SAMPLE_INTERVAL = 5 BUFFER_SIZE = 5000 STD_HOLD_COUNT = 5000 TEMP_INCREMENT = 0.0 """ QUICK VALUES FOR TESTING SAMPLE_INTERVAL = 2 BUFFER_SIZE = 5 STD_HOLD_COUNT = 5 """ class RingBuffer(): def __init__(self, size): self.size = size self.buffer = [0] * size self.pointer = 0 self.count = 0 """ for i in range(size): if i % 2: self.buffer[i] = -1e16 else: self.buffer[i] = 1e16 """ def update(self, value): self.buffer[self.pointer] = value self.pointer = (self.pointer + 1) % self.size self.count += 1 def reset(self): self.count = 0 def getAverage(self, silent=True): if self.count < self.size: if not silent: print "[WARNING] Buffer has not been filled completely: [{}/{}]".format(self.count, self.size) return sum(self.buffer) / self.size def getSTD(self): std = 0 avg = self.getAverage() for i in range(self.size): std += (self.buffer[i] - avg) ** 2 std /= self.size return math.sqrt(std) def thermalRamp(start, end, increment, daq, bath, thermalProbe): setpoint = start bath.setSetpoint(setpoint) TEMP_INCREMENT = increment timestamp = datetime.datetime.now().isoformat().split('.')[0].replace(':', '-') csvFile = "calibration{}.csv".format(timestamp) probeTitles = ",".join(["probe {}".format(i) for i in PROBE_LIST]) averageTitles = ",".join(["average {}".format(i) for i in PROBE_LIST]) f = open(csvFile, "w") f.write("time, elapsed time, setpoint, bath temp, probe temp,{},{}\n".format(probeTitles, averageTitles)) f.close() # create ring buffer for each thermistor buffers = [RingBuffer(BUFFER_SIZE) for i in range(NUM_PROBES)] minSTDs = [1e9 for i in range(NUM_PROBES)] maxSTDs = [[0] * STD_HOLD_COUNT for i in range(NUM_PROBES)] counts = [0 for i in range(NUM_PROBES)] done = False numMeasurements = 0 t0 = datetime.datetime.now() equilibriumTime = time.time() while not done: try: t1 = time.time() bathTemp = float(bath.readTemp()) probeTemp = float(thermalProbe.readTemp()) resistances = daq.readValues() numMeasurements += 1 t = datetime.datetime.now() timestamp = "{}/{}/{} {}:{}:{}".format(t.month, t.day, t.year, t.hour, t.minute, t.second) # calculate STD for all probes, update count if minimum doesn't change for i in range(NUM_PROBES): buffers[i].update(resistances[i]) std = buffers[i].getSTD() if std < minSTDs[i]: print "new lowest std" minSTDs[i] = std counts[i] = 0 elif int(std) > max(maxSTDs[i]): print "std too high" counts[i] = 0 else: if numMeasurements > BUFFER_SIZE: print "stabilizing" counts[i] += 1 else: print "need more measurements" maxSTDs[i] = maxSTDs[i][1:] + [int(std)] if abs(bathTemp - setpoint) > 0.01: print "bathTemp ({}) != setpoint ({})".format(bathTemp, setpoint) bath.setSetpoint(setpoint) counts = [0 for count in counts] # check if any probes are not at equilibrium allEqualized = True for i in range(NUM_PROBES): if counts[i] < STD_HOLD_COUNT: allEqualized = False break r = ",".join([str(i) for i in resistances]) a = ",".join([str(buffer.getAverage()) for buffer in buffers]) t = datetime.datetime.now() - t0 seconds = t.seconds % 60 minutes = (t.seconds / 60) % 60 hours = (t.seconds / 3600) % 24 elapsedTime = "{}:{}:{}".format(hours, minutes, seconds) f = open(csvFile, "a") f.write("{},{},{},{},{},{},{}".format(timestamp, elapsedTime, setpoint, bathTemp, probeTemp, r, a)) # go to next setpoint if allEqualized and numMeasurements > BUFFER_SIZE: print "equalized" 
f.write(",{}".format(",".join([str(buffer.getAverage()) for buffer in buffers]))) if abs(setpoint - end) < 0.001: done = True else: setpoint += TEMP_INCREMENT bath.setSetpoint(setpoint) for i in range(NUM_PROBES): buffers[i].reset() counts[i] = 0 numMeasurements = 0 equilibriumTime = time.time() - equilibriumTime f.write(",{}".format(equilibriumTime)) equilibriumTime = time.time() f.write("\n") f.close() print counts sampleTime = time.time() - t1 if sampleTime < SAMPLE_INTERVAL: print "Elapsed: {}, Sleeping: {}".format(sampleTime, SAMPLE_INTERVAL - sampleTime) time.sleep(SAMPLE_INTERVAL - sampleTime) except KeyboardInterrupt: done = True if __name__ == "__main__": # Connect to and initialize DAQ daq = Keysight34972A() if not daq.connect(): print "Failed to connect to Keysight34972A".format() exit(1) daq.initialize(Keysight34972A.MODE_RESISTANCE, PROBE_LIST) # Connect to and initialize bath bath = Fluke7341() if not bath.connect("COM5"): print "Failed to connect to Fluke7341" exit(1) thermalProbe = Fluke1502A() if not thermalProbe.connect("COM7"): print "Failed to connect to Fluke1502A" exit(1) changeSetpoint = False setpoint = 21.0 if len(sys.argv) > 1: try: setpoint = float(sys.argv[1]) changeSetpoint = True except ValueError: print "parameter must be a float" bath.disconnect() daq.disconnect() exit() if changeSetpoint: bath.setSetpoint(setpoint) # thermalRamp(1, -10, -0.1, daq, bath, thermalProbe) # thermalRamp(-10, 1, 0.1, daq, bath, thermalProbe) # thermalRamp(0, -4, -0.02, daq, bath, thermalProbe) # thermalRamp(-4, -6, -0.1, daq, bath, thermalProbe) # thermalRamp(-6, -10, -1.0, daq, bath, thermalProbe) # thermalRamp(-10, -6, 1.0, daq, bath, thermalProbe) # thermalRamp(-6, -4, 0.1, daq, bath, thermalProbe) # thermalRamp(-4, 0, 0.02, daq, bath, thermalProbe) thermalRamp(0.0, 1.0, 0.0, daq, bath, thermalProbe) bath.disconnect() daq.disconnect() thermalProbe.disconnect()
In order to make your transition to student life a smooth one, both campuses offer orientation events for first-semester students. The events are typically held during the week before regular lectures and classes begin. In addition to information about your course of study, a lot of practical questions are also answered for you: How do I sign up for a course? How do I get a university email address? Where can I go to eat? Bar evenings, rallies or first-year breakfasts are all designed to help you make contact with fellow students. Because these events are organized by the AStA student executive committee or student council organizations, they can be very helpful for getting valuable tips on your studies, about the university and of course about the best bars in town. At the Koblenz campus, the Orientation Week, also known as the ‘O-phase’, is organized by the OPA (Orientation Phase Committee). The details of the program are posted in good time on Unipedia, the Uniwiki for the Koblenz campus. At the Landau campus, the Orientation Days are run by the Office of Student Affairs, the Alumni Network and the AStA student executive committee. See ‘Orientation Days Landau’ online for more information. Both campuses also offer a special Welcome Days program for international students. You will receive more information on that in an email from your Welcome Center.
#!/usr/bin/env python # -*- coding: utf-8 -*- from CTFd.models import Users, db from tests.helpers import ( create_ctfd, destroy_ctfd, gen_team, gen_user, login_as_user, register_user, ) def test_banned_team(): app = create_ctfd(user_mode="teams") with app.app_context(): register_user(app) team = gen_team(app.db, banned=True) user = Users.query.filter_by(id=2).first() user.team_id = team.id db.session.commit() client = login_as_user(app) routes = ["/", "/challenges", "/api/v1/challenges"] for route in routes: r = client.get(route) assert r.status_code == 403 destroy_ctfd(app) def test_teams_join_get(): """Can a user get /teams/join""" app = create_ctfd(user_mode="teams") with app.app_context(): register_user(app) with login_as_user(app) as client: r = client.get("/teams/join") assert r.status_code == 200 destroy_ctfd(app) def test_teams_join_post(): """Can a user post /teams/join""" app = create_ctfd(user_mode="teams") with app.app_context(): gen_user(app.db, name="user") gen_team(app.db, name="team") with login_as_user(app) as client: r = client.get("/teams/join") assert r.status_code == 200 with client.session_transaction() as sess: data = { "name": "team", "password": "password", "nonce": sess.get("nonce"), } r = client.post("/teams/join", data=data) assert r.status_code == 302 incorrect_data = data incorrect_data["password"] = "" r = client.post("/teams/join", data=incorrect_data) assert r.status_code == 200 destroy_ctfd(app) def test_team_login(): """Can a user login as a team""" app = create_ctfd(user_mode="teams") with app.app_context(): user = gen_user(app.db, name="user") team = gen_team(app.db) user.team_id = team.id team.members.append(user) app.db.session.commit() with login_as_user(app) as client: r = client.get("/team") assert r.status_code == 200 destroy_ctfd(app) def test_team_join_ratelimited(): """Test that team joins are ratelimited""" app = create_ctfd(user_mode="teams") with app.app_context(): gen_user(app.db, name="user") gen_team(app.db, name="team") with login_as_user(app) as client: r = client.get("/teams/join") assert r.status_code == 200 with client.session_transaction() as sess: data = { "name": "team", "password": "wrong_password", "nonce": sess.get("nonce"), } for _ in range(10): r = client.post("/teams/join", data=data) data["password"] = "password" for _ in range(10): r = client.post("/teams/join", data=data) assert r.status_code == 429 assert Users.query.filter_by(id=2).first().team_id is None destroy_ctfd(app)
Ensure your business processes are lean, functional and meet the specific needs of your unique organization. Identify uses of technology that enable your business processes to deliver maximum results. Empower your people to support and operate your business processes. In healthcare today, reducing spend means much more than just managing the price of supplies. The key to sourcing a particular item more effectively is to understand all of the costs associated with its purchase and use, not just the visible pricing elements. Experienced supply chain specialists with extensive product and market knowledge will work with you from initial engagement and identification through implementation. MedRev's supply chain consultants offer superior knowledge, data analysis and communications skills, and extensive consulting experience in driving savings across your supply chain operations. They will help you develop a clear strategy and implement smart solutions that deliver optimal cost, quality and clinical outcomes. 2. Pharmacy And Drug Solution - MedRev Healthcare ensures an efficient, cost-effective corporate pharmacy by improving processes and renewing your commitment to quality.
# coding: utf-8 # # `travelmaps`: Functions and settings to create beautiful global and local travel maps # # [Blog](http://werthmuller.org/blog) # [Repo](http://github.com/prisae/blog-notebooks) # # See the blog post [Travel Maps](http://werthmuller.org/blog/2015/travelmap) for more explanations and some examples. # # - country : Plot/fill countries. # - city : Plot and annotate cities. # - arrow : Plot arrows from city to city. # # These functions are very basic, and include almost no checking or similar at all. Feel free to fork and improve them! # In[2]: import shapefile import numpy as np import matplotlib.pyplot as plt from matplotlib import rcParams, patheffects from matplotlib.collections import LineCollection # In[3]: # Customized plt.xkcd()-settings # http://jakevdp.github.io/blog/2013/07/10/XKCD-plots-in-matplotlib rcParams['font.family'] = ['Humor Sans', 'Comic Sans MS'] rcParams['font.size'] = 8.0 rcParams['path.sketch'] = (1, 100, 2) rcParams['path.effects'] = [patheffects.withStroke(linewidth=2, foreground="w")] rcParams['axes.linewidth'] = 1.0 rcParams['lines.linewidth'] = 1.0 rcParams['figure.facecolor'] = 'white' rcParams['grid.linewidth'] = 0.0 rcParams['axes.unicode_minus'] = False # *Bayesian Methods for Hackers*-colour-cycle # (https://github.com/pkgpl/PythonProcessing/blob/master/results/matplotlibrc.bmh.txt) rcParams['axes.color_cycle'] = ['#348ABD', '#A60628', '#7A68A6', '#467821', '#D55E00', '#CC79A7', '#56B4E9', '#009E73', '#F0E442', '#0072B2'] # Adjust dpi, so figure on screen and savefig looks the same rcParams['figure.dpi'] = 300 rcParams['savefig.dpi'] = 300 # In[4]: def country(countries, bmap, fc=None, ec='none', lw=1, alpha=1, adm=0, gadmpath='data/TravelMap/'): """Colour <countries> with a <bmap> projection. This script is adapted from: http://www.geophysique.be/2013/02/12/ matplotlib-basemap-tutorial-10-shapefiles-unleached-continued I downloaded the countries shapefile from the *Global Administrative Areas* website, [gadm.org](http://gadm.org). => You have to use the same abbreviations for the countries as GADM does, or adjust the script. => You have to download the shapefiles from GADM, and extract them into the <gadmpath> directory. Of course, you can use any other shapefiles you have, and adjust the script accordingly. Parameters ---------- countries : string or list of strings Countries to be plotted. bmap : handle As you get from bmap = Basemap(). fc : None or colour, or list of colours; <None> Face-colour for country; if <None>, it will cycle through colour-cycle. ec : 'none' or colour (scalar or list); <'none'> Edge-colour for country. lw : scalar or list; <1> Linewidth for country. alpha: scalar or list; <1> Transparency. adm : {0, 1, 2, 3}; <0> Administrative area to choose. gadmpath : 'string' Absolute or relative path to shapefiles.
""" # Ensure countries is a list if not isinstance(countries, list): countries = [countries,] # Get current axis cax = plt.gca() # Loop through the countries for country in countries: # Get shapefile for the country; extract shapes and records r = shapefile.Reader(gadmpath+country+'_adm/'+country+'_adm'+str(adm)) shapes = r.shapes() records = r.records() # Loop through the records; for adm0 this is only 1 run n = 0 for record, shape in zip(records,shapes): lons,lats = zip(*shape.points) data = np.array(bmap(lons, lats)).T if len(shape.parts) == 1: segs = [data,] else: segs = [] for i in range(1,len(shape.parts)): index = shape.parts[i-1] index2 = shape.parts[i] segs.append(data[index:index2]) segs.append(data[index2:]) lines = LineCollection(segs,antialiaseds=(1,)) # If facecolor is provided, use; else cycle through colours if fc: if not isinstance(fc, list): lines.set_facecolors(fc) else: lines.set_facecolors(fc[n]) else: lines.set_facecolors(next(cax._get_lines.color_cycle)) # Edge colour if not isinstance(ec, list): lines.set_edgecolors(ec) else: lines.set_edgecolors(ec[n]) # Alpha if not isinstance(alpha, list): lines.set_alpha(alpha) else: lines.set_alpha(alpha[n]) # Line width if not isinstance(lw, list): lines.set_linewidth(lw) else: lines.set_linewidth(lw[n]) # Add to current plot cax.add_collection(lines) n += 1 # In[5]: def city(city, name, bmap, mfc=None, color='b', offs=[.1, .1], halign='left'): """Plot a circle at <city> and annotate with <name>, with a <bmap> projection. Parameters ---------- city : List of two scalars [Northing, Easting]. name : string name to be plotted with city. bmap : handle As you get from bmap = Basemap(). mfc : None or colour; <None> Marker face-colour for city; if <None>, it will cycle through colour-cycle. colour : 'none' or colour; <'b'> Colour for <name>. offs : List of two scalars; <[.1, .1]> Offset for <name> from <city>. halign : {'left', 'right', 'center'}; <'left'> Alignment of <name> relative to <city>. """ # Get current axis cax = plt.gca() # Plot dot # If mfc is provided, use; else cycle through colours if not mfc: mfc = next(cax._get_patches_for_fill.color_cycle) bmap.plot(city[1], city[0], 'o', mfc=mfc, ms=4, mew=1, latlon=True) # Annotate name cax.annotate(name, bmap(city[1]+offs[0], city[0]+offs[1]), horizontalalignment=halign, color=color, fontsize=7, zorder=10) # In[6]: def arrow(start, end, bmap, ec="k", fc="w", rad=-.3): """Plot an arrow from <start> to <end>, with a <bmap> projection. Parameters ---------- start : List of two scalars Start of arrow [Northing, Easting]. end : List of two scalars End of arrow [Northing, Easting]. bmap : handle As you get from bmap = Basemap(). ec : 'none' or colour; <'k'> Edge-colour for arrow. fc : 'none' or colour; <w> Face-colour for arrow. rad : Scalar; <.3]> Curvature of arrow. """ # Get current axis cax = plt.gca() # Plot arrow arrowstyle='Fancy, head_length=.6, head_width=.6, tail_width=.4' cax.annotate('', bmap(end[1], end[0]), bmap(start[1], start[0]), arrowprops=dict(arrowstyle=arrowstyle, alpha=.6, patchA=None, patchB=None, shrinkA=3, shrinkB=3, fc=fc, ec=ec, connectionstyle="arc3, rad="+str(rad), ))
"The Shortest Path Between the Tone You Have and the Tone You Want" We specialize in precision ALL ANALOG audio switching systems and MIDI accessories for the performing musician and the recording studio. WHY SHOULD YOU SIGN UP? Because we are putting together a great new PDF on creating the perfect floorboard system. It is not finished but should be ready in the coming weeks. This document is loaded with cool tips about what you need to do to have that perfect floorboard system. SIGN UP AND WE WILL SEND YOU A LINK WHEN THE DOCUMENT IS FINISHED. We'll also send out an occasional email if we have a sale coming up or if there is something new we think you'd like to hear about. GO AHEAD! We don't share, you can unsubscribe at any time right from the email, and we won't trash your inbox. WE PROMISE! Welcome to Sound Sculpture Musical Instrument Products. You have found the official site of the world's most advanced effect switching systems and related accessories. Our family of products are used around the world by touring musicians who depend on our products year after year to integrate their systems into reliable and friendly workhorses. Please enjoy your stay and feel free to contact us by email or phone if you have any questions or if you need to discuss your requirements for upgrading your rack and floor effect systems with our products. We are here to help! We are celebrating over 30 years of providing essential products with 100% all analog audio paths. Browse through our pages and be amazed by the solutions you've been searching for. January 11, 2018- HIGH SIERRA USERS - We've updated BladeEdit to work on High Sierra. You can download the latest version on our Download Page. COMING SOON - We will be launching a kickstarter project (www.kickstarter.com) to officially release our new Abyss advanced A/B switch. This is an opportunity for you to get the Abyss at a greatly reduced price and to be the first to get one! The exact launch date will be put here very soon. Sign up for our newsletter to get up to the minute information. ‍The Switchblade series is the only all analog effect routing system that allows you to not only route effects in series and parallel in any order, but also gives you full control over the levels at each and every connection. ‍‍‍With decades of experience building road worthy, reliable systems for the performing musician, it is no wonder that the top artists in the world look to Sound Sculpture for their effect switching systems and related accessories. Electronic Musician Magazine awards Sound Sculpture the Editor's Choice Award for the Switchblade design. World class guitar techs have found our products to be reliable and a pleasure to use. Many of these tech's we consider our friends and always love to hear their stories from the road. One of our newest products, the Volcano has proven to be a best seller and offers remote control over volume levels anywhere in a system either under MIDI or by directly connecting an expression pedal. Other MIDI "Power Tools" are available from us to help you with special needs. ‍‍The remarkable new Switchblade 8F is the perfect Switching system for your floor effects board. With all the functionality of its bigger brothers with some surprising new features, this Switchblade will knock your socks off!
# stdlib import asyncio import json import time from typing import Dict as TypeDict from typing import Optional # third party from aries_cloudcontroller import AriesAgentController # syft absolute from syft.grid.duet.exchange_ids import DuetCredentialExchanger class AriesDuetTokenExchanger(DuetCredentialExchanger): def __init__(self, agent_controller: AriesAgentController) -> None: super().__init__() self.agent_controller: AriesAgentController = agent_controller self.responder_id: Optional[asyncio.Future] = None self.proof_request: Optional[TypeDict] = None self.is_verified: Optional[asyncio.Future] = None self.duet_didcomm_connection_id: Optional[str] = None self._register_agent_listeners() # The DuetCredentialExchanger expects this method to be implemented. # In this case we are establishing a DIDComm connection, challenging the connection # with an optional authentication policy, then with successful connections, sending # the duet token identifier over this channel. def run( self, credential: str, ) -> str: self.responder_id = asyncio.Future() self.duet_token = credential if self.join: self._accept_duet_didcomm_invite() else: self._create_duet_didcomm_invitation() loop = asyncio.get_event_loop() if self.duet_didcomm_connection_id is not None: self.await_active(self.duet_didcomm_connection_id) else: print("duet_didcomm_connection_id not set") print("Sending Duet Token", self.duet_didcomm_connection_id, credential) if self.is_verified: if self.is_verified.result() is True: print("Connection is Verified") loop.run_until_complete( self.agent_controller.messaging.send_message( self.duet_didcomm_connection_id, credential ) ) else: print("Proof request not verified") else: print("No Proof Requested") loop.run_until_complete( self.agent_controller.messaging.send_message( self.duet_didcomm_connection_id, credential ) ) loop.run_until_complete(self.responder_id) token = self.responder_id.result() print("TOKEN ", token) return token def _accept_duet_didcomm_invite(self) -> None: # Keep prompting until a valid invitation is received, then break while True: invite = input("♫♫♫ > Duet Partner's Aries Invitation: ") # nosec loop = asyncio.get_event_loop() try: response = loop.run_until_complete( self.agent_controller.connections.receive_invitation(invite) ) print(response["connection_id"]) connection_id = response["connection_id"] break except Exception: print(" > Error: Invalid invitation.
Please try again.") self.duet_didcomm_connection_id = connection_id def _create_duet_didcomm_invitation(self) -> None: loop = asyncio.get_event_loop() response = loop.run_until_complete( self.agent_controller.connections.create_invitation() ) connection_id = response["connection_id"] invite_message = json.dumps(response["invitation"]) print() print("♫♫♫ > " + "STEP 1:" + " Send the aries invitation to your Duet Partner!") print() print(invite_message) print() self.duet_didcomm_connection_id = connection_id # Should be converted to an asyncio Future def await_active(self, connection_id: str) -> None: print("Waiting for active connection", connection_id) while True: loop = asyncio.get_event_loop() response = loop.run_until_complete( self.agent_controller.connections.get_connection(connection_id) ) is_ready = "active" == response["state"] if is_ready: print("Connection Active") if self.proof_request: self.is_verified = asyncio.Future() self.challenge_connection(connection_id) loop.run_until_complete(self.is_verified) break else: time.sleep(2) def challenge_connection(self, connection_id: str) -> None: loop = asyncio.get_event_loop() proof_request_web_request = { "connection_id": connection_id, "proof_request": self.proof_request, "trace": False, } response = loop.run_until_complete( self.agent_controller.proofs.send_request(proof_request_web_request) ) print("Challenge") print(response) pres_ex_id = response["presentation_exchange_id"] print(pres_ex_id) def _register_agent_listeners(self) -> None: print("REGISTER LISTENERS") listeners = [ {"handler": self.messages_handler, "topic": "basicmessages"}, {"topic": "issue_credential", "handler": self.cred_handler}, {"handler": self.connection_handler, "topic": "connections"}, {"topic": "present_proof", "handler": self.proof_handler}, ] self.agent_controller.register_listeners(listeners, defaults=True) def cred_handler(self, payload: TypeDict) -> None: connection_id = payload["connection_id"] exchange_id = payload["credential_exchange_id"] state = payload["state"] role = payload["role"] print("\n---------------------------------------------------\n") print("Handle Issue Credential Webhook") print(f"Connection ID : {connection_id}") print(f"Credential exchange ID : {exchange_id}") print("Agent Protocol Role : ", role) print("Protocol State : ", state) print("\n---------------------------------------------------\n") print("Handle Credential Webhook Payload") if state == "offer_received": print("Credential Offer Received") proposal = payload["credential_proposal_dict"] print( "The proposal dictionary is likely how you would understand and " + "display a credential offer in your application" ) print("\n", proposal) print("\n This includes the set of attributes you are being offered") attributes = proposal["credential_proposal"]["attributes"] print(attributes) # YOUR LOGIC HERE elif state == "request_sent": print( "\nA credential request object contains the commitment to the agents " + "master secret using the nonce from the offer" ) # YOUR LOGIC HERE elif state == "credential_received": print("Received Credential") # YOUR LOGIC HERE elif state == "credential_acked": # YOUR LOGIC HERE credential = payload["credential"] print("Credential Stored\n") print(credential) print( "\nThe referent acts as the identifier for retrieving the raw credential from the wallet" ) # Note: You would probably save this in your application database credential_referent = credential["referent"] print("Referent", credential_referent) def connection_handler(self, payload:
TypeDict) -> None: state = payload["state"] connection_id = payload["connection_id"] their_role = payload["their_role"] routing_state = payload["routing_state"] print("----------------------------------------------------------") print("Connection Webhook Event Received") print("Connection ID : ", connection_id) print("State : ", state) print("Routing State : ", routing_state) print("Their Role : ", their_role) print("----------------------------------------------------------") def proof_handler(self, payload: TypeDict) -> None: role = payload["role"] connection_id = payload["connection_id"] pres_ex_id = payload["presentation_exchange_id"] state = payload["state"] loop = asyncio.get_event_loop() print( "\n---------------------------------------------------------------------\n" ) print("Handle present-proof") print("Connection ID : ", connection_id) print("Presentation Exchange ID : ", pres_ex_id) print("Protocol State : ", state) print("Agent Role : ", role) print("Initiator : ", payload["initiator"]) print( "\n---------------------------------------------------------------------\n" ) if state == "presentation_received": verified_response = loop.run_until_complete( self.agent_controller.proofs.verify_presentation(pres_ex_id) ) if self.is_verified is not None: self.is_verified.set_result(verified_response["verified"] == "true") print("Attributes Presented") for (name, val) in verified_response["presentation"]["requested_proof"][ "revealed_attrs" ].items(): # This is the actual data that you want. It's a little hidden print("Attribute : ", name) print("Raw Value : ", val["raw"]) else: print("is_verified Future has not been created") # Receive basic messages def messages_handler(self, payload: TypeDict) -> None: print("Handle Duet ID", payload["content"]) if self.responder_id is not None: self.responder_id.set_result(payload["content"]) else: print("responder_id Future has not been created") # Used for other Aries connections. E.g. with an issuer def receive_invitation(self, invitation: str) -> str: # Receive Invitation loop = asyncio.get_event_loop() response = loop.run_until_complete( self.agent_controller.connections.receive_invitation(invitation) ) # Print out accepted Invite and Alice's connection ID print("Connection", response) return response["connection_id"] def create_invitation(self) -> str: # Create Invitation loop = asyncio.get_event_loop() invite = loop.run_until_complete( self.agent_controller.connections.create_invitation() ) # connection_id = invite["connection_id"] invite_message = json.dumps(invite["invitation"]) return invite_message def configure_challenge(self, proof_request: TypeDict) -> None: self.proof_request = proof_request
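A usage sketch follows, showing how this exchanger might be wired into a Duet launch. The AriesAgentController constructor arguments, the URLs/keys, and the credential_exchanger hook are assumptions based on the aries_cloudcontroller and syft APIs, not guaranteed signatures; adjust to your own ACA-Py deployment and syft version.

# Illustrative usage sketch only; names flagged inline are assumptions
import syft as sy
from aries_cloudcontroller import AriesAgentController

agent_controller = AriesAgentController(
    admin_url="http://localhost:8021",  # hypothetical ACA-Py admin endpoint
    api_key="adminApiKey",              # hypothetical admin API key
)
exchanger = AriesDuetTokenExchanger(agent_controller=agent_controller)

# Optionally require the partner to present a proof before the token is sent:
# exchanger.configure_challenge(proof_request={...})

# On the launching side (the joining side would call sy.join_duet instead):
duet = sy.launch_duet(credential_exchanger=exchanger)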
The 2019 Application and Matching Process includes surgical fellowships in Advanced Gastrointestinal (GI), Bariatric, Flexible Endoscopy, Hepato-Pancreato-Biliary, and Advanced GI Minimally Invasive Surgery. The application process for these fellowships is now closed; no further changes may be made to an applicant's confirmed program list. The 2018 Application and Matching Process includes surgical fellowships in Non-ACGME Advanced Colorectal and Thoracic surgery. That Fellowship Council application and matching process has now closed and the results have been archived.
# -*- coding: utf-8 -*- # # Copyright © 2012 - 2015 Michal Čihař <[email protected]> # # This file is part of python-suseapi # <https://github.com/openSUSE/python-suseapi> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ''' Testing of user information connector ''' from unittest import TestCase from mockldap import MockLdap from suseapi.userinfo import UserInfo def start_ldap_mock(): """ Starts LDAP mocking. """ mockldap = MockLdap({ 'o=Novell': {'o': 'Novell'}, 'cn=mcihar,o=Novell': { 'mail': ['[email protected]'], 'ou': ['TestDept'], 'cn': ['mcihar'], 'uid': ['mcihar'], }, 'cn=foobar,o=Novell': { 'mail': ['[email protected]'], 'ou': ['L3 Maintenance'], 'cn': ['foobar'], 'uid': ['foobar'], }, }) mockldap.start() return mockldap class UserInfoTest(TestCase): ''' User information tests. ''' def test_department(self): ''' Test department lookups. ''' mockldap = start_ldap_mock() try: userinfo = UserInfo('ldap://ldap', 'o=novell') # By mail with fixup self.assertEqual( 'L3/Maintenance', userinfo.get_department('[email protected]') ) # By UID self.assertEqual( 'TestDept', userinfo.get_department('mcihar') ) # By UID from cache self.assertEqual( 'TestDept', userinfo.get_department('mcihar') ) # By email self.assertEqual( 'TestDept', userinfo.get_department('[email protected]') ) # Hardcoded entries self.assertEqual( 'Security team', userinfo.get_department('[email protected]') ) # Non existing entry self.assertEqual( 'N/A', userinfo.get_department('nobody') ) finally: mockldap.stop()
Published on 9 September 2006 in Action Alert, Islam and Society. Tags: bulgaria, hijab-ban, muslim_women, religious-discrimination. IHRC is reporting that Bulgaria has become the latest country to ban Muslim girls from wearing the headscarf (“hijab”) to school. Worse, the country’s Education Minister has declared that Muslim girls who wish to wear the headscarf should attend separate Muslim schools, segregating them from their fellow citizens. Hot on the heels of bans in France, Germany and Belgium, this looks like a worrying trend. Many Muslim women believe it is a religious obligation to cover their hair in public. Whether right or wrong, this is not the issue. It doesn’t harm or cause any difficulty to anyone else, and they should have the right to an education whilst remaining free to practice their religion. Please heed the IHRC’s call to write to your MP, MEP and the Foreign Secretary demanding representations be made to the government of Bulgaria (which is set to accede to the EU next year). In other news… the Guardian reports that Muslim women face discrimination at the workplace, despite good academic performance. I wouldn’t be surprised if the wearing of headscarves had something to do with it.
#!/usr/bin/python -OO # This file is part of Archivematica. # # Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com> # # Archivematica is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Archivematica is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Archivematica. If not, see <http://www.gnu.org/licenses/>. # @package Archivematica # @subpackage archivematicaClientScript # @author Mark Jordan <email> # @version svn: $Id$ import sys import json import urllib # The base URL will be specific to each CONTENTdm server; everything including and # following 'dmwebservices' is the same. try: CollectionListUrl = 'http://%s/dmwebservices/index.php?q=dmGetCollectionList/json' % (sys.argv[1]) f = urllib.urlopen(CollectionListUrl) collectionListString = f.read() collectionList = json.loads(collectionListString) except Exception: print "Cannot retrieve CONTENTdm collection list from " + sys.argv[1] sys.exit(1) # We only want two of the elements of each 'collection': its alias and its name. cleanCollectionList = {} for collection in collectionList: cleanCollectionList[collection['name']] = collection['alias'] print(cleanCollectionList)
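For orientation, here is an illustrative sketch (an assumption, not captured from a real server) of the JSON shape dmGetCollectionList typically returns and the dictionary the loop above derives from it:

# Illustrative only; field values are made up
sample_response = [
    {"alias": "/postcards", "name": "Postcard Collection", "path": "/usr/local/Collections/postcards"},
    {"alias": "/maps", "name": "Historic Maps", "path": "/usr/local/Collections/maps"},
]
# ...which the loop above reduces to:
# {"Postcard Collection": "/postcards", "Historic Maps": "/maps"}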
Get the Winning Bid! Let us take the stress and frustration out of bidding at auction. Our team of experts are only a click away from securing your dream property for the best possible price. This one’s simple: tell us where and we’ll be there! This service will give you access to our expert bidding skills and negotiating powers on auction day. With over 30 years combined experience, we’re your best shot at getting the winning bid. The process of buying was stress free! It was my trusted friend of 30 plus years who suggested that I get in touch with David Melatti of Astute Buyers Advocates. She advised that the process of buying was so stress free and that the service was impeccable. David and his team secured my dream home for $9000 less than I expected! "David was recommended by a good friend who had used his Auction Bidding Services. She was very impressed by the way he confidently bid and secured the property. Hot Auction Bidders made the process so easy for us, provided us with a huge amount of otherwise unattainable information, and gave us fantastic advice so we felt completely confident going in to the auction. And best of all – you saved us almost $20,000!! David took the worry out of finding the right house, negotiating and teaching me the art of auctions and real estate as a whole. I can’t thank him and his brother John enough for saving me $40,000 below market value. Simple Saving Tips We Can ALL Benefit From! Sign up for ‘How to avoid swimming with sharks’ for all our tips and tricks on how to win the auction bidding game!
#!/usr/bin/python # # Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Testcases to test behavior of Keyczart. @author: [email protected] (Arkajit Dey) """ import unittest from keyczar import readers from keyczar import writers from keyczar import keyczart from keyczar import keyczar from keyczar import keyinfo class KeyczartTest(unittest.TestCase): def setUp(self): self.mock = readers.MockReader('TEST', keyinfo.ENCRYPT, keyinfo.AES) self.mock.AddKey(42, keyinfo.PRIMARY) self.mock.AddKey(77, keyinfo.ACTIVE) self.mock.AddKey(99, keyinfo.INACTIVE) keyczart.mock = self.mock # enable testing def testCreate(self): keyczart.main(['create', '--name=testCreate', '--purpose=crypt', '--asymmetric=rsa']) self.assertEquals('testCreate', self.mock.kmd.name) self.assertEquals(keyinfo.DECRYPT_AND_ENCRYPT, self.mock.kmd.purpose) self.assertEquals(keyinfo.RSA_PRIV, self.mock.kmd.type) def testAddKey(self): self.assertEquals(3, self.mock.numkeys) keyczart.main(['addkey', '--status=primary']) self.assertEquals(4, self.mock.numkeys) # The next version number will be 100, since the previous max was 99 self.assertEquals(keyinfo.PRIMARY, self.mock.GetStatus(100)) self.assertEquals(keyinfo.ACTIVE, self.mock.GetStatus(42)) def testAddKeySizeFlag(self): keyczart.main(['addkey', '--size=256']) self.assertEquals(256, self.mock.GetKeySize(100)) def testAddKeyCrypterCreatesCrypter(self): self.dummy_location = None def dummyCreateCrypter(location): self.dummy_location = location return self.mock keyczart._CreateCrypter = dummyCreateCrypter keyczart.main(['addkey', '--crypter=foo']) self.assertEquals(self.dummy_location, 'foo') def testPubKey(self): pubmock = readers.MockReader('PUBTEST', keyinfo.DECRYPT_AND_ENCRYPT, keyinfo.RSA_PRIV) pubmock.AddKey(33, keyinfo.PRIMARY, 1024) # small key size for fast tests keyczart.mock = pubmock # use pubmock instead self.assertEquals(None, pubmock.pubkmd) keyczart.main(['pubkey']) self.assertNotEqual(None, pubmock.pubkmd) self.assertEquals('PUBTEST', pubmock.pubkmd.name) self.assertEquals(keyinfo.ENCRYPT, pubmock.pubkmd.purpose) self.assertEquals(keyinfo.RSA_PUB, pubmock.pubkmd.type) self.assertTrue(pubmock.HasPubKey(33)) def testPromote(self): keyczart.main(['promote', '--version=77']) self.assertEquals(keyinfo.PRIMARY, self.mock.GetStatus(77)) self.assertEquals(keyinfo.ACTIVE, self.mock.GetStatus(42)) def testDemote(self): keyczart.main(['demote', '--version=77']) self.assertEquals(keyinfo.INACTIVE, self.mock.GetStatus(77)) def testRevoke(self): self.assertTrue(self.mock.ExistsVersion(99)) keyczart.main(['revoke', '--version=99']) self.assertFalse(self.mock.ExistsVersion(99)) def testWriteIsBackwardCompatible(self): class MockWriter(writers.Writer): num_created = 0 def WriteMetadata(self, metadata, overwrite=True): return def WriteKey(self, key, version_number, encrypter=None): return def Remove(self, version_number): return def Close(self): return @classmethod def CreateWriter(cls, location): MockWriter.num_created += 1 return MockWriter() generic_keyczar = 
keyczar.GenericKeyczar(self.mock) generic_keyczar.Write('foo') self.assertEquals(1, MockWriter.num_created, 'Write("string") should have created a new writer') def tearDown(self): keyczart.mock = None def suite(): suite = unittest.TestSuite() suite.addTests(unittest.TestLoader().loadTestsFromTestCase(KeyczartTest)) return suite if __name__ == "__main__": unittest.main(defaultTest='suite')
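Outside the test suite, the same operations map onto the keyczart command line driven through main(). A hedged sketch follows, assuming the standard --location flag for on-disk key sets; the remaining flags mirror the tests above, and version numbers depend on the keys already in the set.

# Illustrative sketch: driving keyczart against a real key set directory
from keyczar import keyczart

keyczart.main(['create', '--location=/tmp/keyset', '--purpose=crypt', '--name=Test'])
keyczart.main(['addkey', '--location=/tmp/keyset', '--status=primary'])
keyczart.main(['promote', '--location=/tmp/keyset', '--version=1'])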
Coming soon .... A video tour through the gallery space here in Muskoka and what you will experience when you visit . What i can tell you already is that this will be a unique relaxing one on one experience, tailored to you and your specific needs. Even if you are not in the market for artwork, Peter loves to share his creative design knowledge and experience.
#!/usr/bin/python2.4 # # Copyright 2009 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Update emoji4unicode.xml from a NamesList.txt file. During the development of Unicode 6.0/ISO 10646 AMD8, Emoji symbols may change code points, names, and annotations. This script reads emoji4unicode.xml and a NamesList.txt file, updates the XML data according to the NamesList, and writes a modified XML file to ../generated/emoji4unicode.xml. """ __author__ = "Markus Scherer" import os.path import xml.dom.minidom import emoji4unicode import nameslist def main(): here = os.path.dirname(__file__) e4u_filename = os.path.join(here, "..", "data", "emoji4unicode.xml") doc = xml.dom.minidom.parse(e4u_filename) root = doc.documentElement id_to_symbol = {} for symbol in root.getElementsByTagName("e"): id_to_symbol[symbol.getAttribute("id")] = symbol nameslist_filename = os.path.join(here, "..", "data", "unicode", "uc60-a-FDAM8-SanJose.lst") for record in nameslist.Read(nameslist_filename): if "uni" not in record: continue id = nameslist.GetEmojiID(record) if not id: continue # Extract the old data from the emoji4unicode.xml <e> symbol element. symbol = id_to_symbol[id] old_uni = symbol.getAttribute("unicode") old_name = symbol.getAttribute("name") old_annotations = [] for element in symbol.getElementsByTagName("ann"): old_annotations.append(element.firstChild.nodeValue.strip()) # Extract the new data from the NamesList record. new_uni = record["uni"] new_name = record["name"] new_annotations = record["data"] # Update the proposed Unicode code point. if old_uni and not old_uni.startswith("+"): print ("*** e-%s: setting proposed code point %s but old %s was not proposed" % (id, new_uni, old_uni)) symbol.setAttribute("unicode", u"+" + new_uni) # Update the proposed character name. # Keep the previous name in an oldname attribute. if old_name == new_name: if symbol.getAttribute("oldname"): symbol.removeAttribute("oldname") else: symbol.setAttribute("oldname", old_name) symbol.setAttribute("name", new_name) # Append new annotations. for ann in new_annotations: # Skip the Emoji symbol ID alias, and annotations that are not new. if not ann.startswith(u"= e-") and ann not in old_annotations: ann_element = doc.createElement("ann") ann_element.appendChild(doc.createTextNode(ann)) symbol.appendChild(ann_element) out_filename = os.path.join(here, "..", "generated", "emoji4unicode.xml") emoji4unicode.Write(doc, out_filename) doc.unlink() if __name__ == "__main__": main()
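For reference, a sketch of the minimal NamesList record shape that main() relies on. The field names ('uni', 'name', 'data') are taken from their use in this script; real records from nameslist.Read() may carry more fields, and the values below are illustrative assumptions.

# Illustrative record shape only
sample_record = {
    "uni": "1F604",  # proposed code point
    "name": "SMILING FACE WITH OPEN MOUTH AND SMILING EYES",
    "data": [u"= e-330"],  # annotation lines; the e-XXX alias is what GetEmojiID() extracts
}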
2016 Election Update – Who Is Bernie Sanders? Since the presidential election cycle started, we have spent a great deal of time spotlighting the Republican candidates, particularly Donald Trump, who is still, as of this writing, the Number One pick on the Republican side with the highest rating; according to the latest poll by Public Policy Polling in NH on August 25, Mr. Trump sits comfortably at 35%, followed by John Kasich with a mere 11%. On the Democrat side, there is as much unexpected “turmoil” as in the Republican Party. Bernie Sanders – whom I’ve labeled (loosely) the Donald Trump of the Democrat Party – has been making a lot of waves, causing a lot of headaches for Hillary Clinton’s campaign; through no fault of Bernie, however: Hillary has been struggling for traction since the FBI announced that a criminal probe into her use of a private server while secretary of state is under way. Meanwhile, Bernie has been cruising freely, collecting more and more support among the disenfranchised, the disenchanted and all those who dislike what Washington stands for. Who is Bernie Sanders? According to his website (https://berniesanders.com/), Bernie Sanders is a Democratic candidate for President of the United States. He is serving his second term in the U.S. Senate after winning re-election in 2012. Sanders previously served as mayor of Vermont’s largest city for eight years before defeating an incumbent Republican to be the sole congressperson for the state in the US House of Representatives. Who is he exactly? On his website, the slogan “Ready to Start a Political Revolution?” is displayed prominently near the top. The slogan says it all: Bernie Sanders wants to revolutionize politics in America. To judge by the larger and larger crowds that have attended his rallies, it seems that Bernie is on track (at least for now) to succeed. Of course, Hillary’s Email-gate problem is helping, as more and more of her supporters, and mostly “those on the fence,” are jumping on Bernie’s bandwagon in droves. In Washington, Bernie is known as one of the few Independent Senators (there are only two; the other is Angus King, US Senator for Maine) who side with Democrats on some issues and with Republicans on others. For the past few years, Sanders has been making the rounds on TV networks, giving interviews on various issues which are hot topics in Washington. The country being predominantly a two-party affair, the media has been blatantly, overtly biased against third-party candidates, a very big disadvantage for anyone running for public office as an Independent candidate; however, Bernie has proved thus far that he is a winner despite the ever-present looming disadvantage; he not only managed to win the Senate seat in the State of Vermont in 2006 but also got re-elected in 2012 with a very large margin of the popular vote, over 70%. But Can He Win the Nomination For President? Although an Independent Senator in Washington, DC representing the state of Vermont, Bernie is running for the office of the presidency as a Democrat. It is already a very smart move on the part of Mr. Sanders. He is very aware of the political landscape on the national stage: third-party candidates do not fare well, and the media is complicit in squashing any chance they may have. Even the most extreme candidates on the Republican side, such as Ted Cruz, Rand Paul and Scott Walker, know better than to run as third-party candidates. So Bernie is right to run as a Democrat, the party he most closely identifies with. Mr.
Sanders describes himself as a democratic socialist and favors policies similar to those of Europe's social democratic parties, most specifically those instituted by the Nordic countries. Although most US media references to (European) socialism are unflattering, the crowds attending Bernie's rallies see no objection to his philosophy regarding the social, economic and political issues that have plagued the country for so long.

In the Senate, Bernie stands out as a leading progressive voice, fighting against income inequality and for universal healthcare, parental leave, LGBT rights, campaign finance reform, and an end to racial discrimination in the criminal justice system. Like most conservatives, Mr. Sanders is not fond of US foreign policy; he was one of the few who voted against the Iraq War. Bernie's distaste for US foreign policy is limited to its widespread involvement across the globe; he does not relinquish the right to use force when necessary to defend the nation.

Even the most superficial analysis of Bernie's agenda for the country would lead one to conclude that Mr. Sanders is the right person for the job of the presidency. There are, however, many factors that could hamper his success as the next president, one of which – although an asset in many ways – is his age. Health issues could prevent, or in the best-case scenario slow down, his march toward the White House. And in the case of a win, Bernie may not enjoy the good health needed to implement his agenda in the Oval Office.

This is by no means a verdict on Bernie's campaign. Far from it! The US electorate pays very little attention (if any at all) to such factors when electing an individual to office; past elections offer ample proof that US voters favor candidates who are better at mudslinging their opponents. That could be Bernie's Achilles heel: he refuses to pick fights with other candidates. Mr. Sanders judges it more important to outline his vision for the country than to squabble with his rivals; it is working, for now anyway.

If one were to judge by the fact that two unorthodox candidates (Donald Trump on the Republican side and Bernie Sanders on the Democratic side) have attracted and mobilized so many at such an early stage, one would conclude that the seasoned politicians are at a great disadvantage. For the time being, they are. On the Republican side, fear has set in; every strategy to temper Trump's surge in the polls has failed. On the Democratic side, Hillary seems unfazed by Bernie's surge – at her own political peril – but is fearful of a Biden bid, according to sources close to Mrs. Clinton.

Can Bernie win the nomination for president? Analysis of the upcoming debate slated for October 13 in Nevada will provide plenty of information about the candidates. It may even reveal who is going to be the nominee for the Democratic Party and which Republican counterpart he may face in the general election. This is the only blog which will reveal such information this early in the presidential election cycle.
from __future__ import absolute_import

from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMemberType, Project, Team


@register(Project)
class ProjectSerializer(Serializer):
    def get_attrs(self, item_list, user):
        organization = item_list[0].team.organization

        team_map = dict(
            (t.id, t) for t in Team.objects.get_for_user(
                organization=organization,
                user=user,
            )
        )

        result = {}
        for project in item_list:
            try:
                team = team_map[project.team_id]
            except KeyError:
                access_type = None
            else:
                access_type = team.access_type

            result[project] = {
                'access_type': access_type,
            }
        return result

    def serialize(self, obj, attrs, user):
        from sentry import features

        feature_list = []
        if features.has('projects:quotas', obj, actor=user):
            feature_list.append('quotas')
        if features.has('projects:user-reports', obj, actor=user):
            feature_list.append('user-reports')

        return {
            'id': str(obj.id),
            'slug': obj.slug,
            'name': obj.name,
            'isPublic': obj.public,
            'dateCreated': obj.date_added,
            'features': feature_list,
            'permission': {
                'owner': attrs['access_type'] <= OrganizationMemberType.OWNER,
                'admin': attrs['access_type'] <= OrganizationMemberType.ADMIN,
            },
        }
# coding: utf8
from __future__ import unicode_literals


def get_pos_from_wiktionary():
    import re
    from gensim.corpora.wikicorpus import extract_pages

    regex = re.compile(r"==={{(\w+)\|el}}===")
    regex2 = re.compile(r"==={{(\w+ \w+)\|el}}===")

    # get words based on the Wiktionary dump
    # check only for specific parts
    # ==={{κύριο όνομα|el}}===
    expected_parts = [
        "μετοχή",
        "ρήμα",
        "επίθετο",
        "επίρρημα",
        "ουσιαστικό",
        "κύριο όνομα",
        "άρθρο",
    ]

    wiktionary_file_path = (
        "/data/gsoc2018-spacy/spacy/lang/el/res/elwiktionary-latest-pages-articles.xml"
    )

    proper_names_dict = {
        "ουσιαστικό": "nouns",
        "επίθετο": "adjectives",
        "άρθρο": "dets",
        "επίρρημα": "adverbs",
        "κύριο όνομα": "proper_names",
        "μετοχή": "participles",
        "ρήμα": "verbs",
    }
    expected_parts_dict = {}
    for expected_part in expected_parts:
        expected_parts_dict[expected_part] = []

    for title, text, pageid in extract_pages(wiktionary_file_path):
        if text.startswith("#REDIRECT"):
            continue
        title = title.lower()
        all_regex = regex.findall(text)
        all_regex.extend(regex2.findall(text))
        for a in all_regex:
            if a in expected_parts:
                expected_parts_dict[a].append(title)

    for i in expected_parts_dict:
        with open("_{0}.py".format(proper_names_dict[i]), "w") as f:
            f.write("from __future__ import unicode_literals\n")
            f.write('{} = set("""\n'.format(proper_names_dict[i].upper()))
            words = sorted(expected_parts_dict[i])
            line = ""
            to_write = []
            for word in words:
                # Wrap at 79 characters without dropping the overflowing
                # word, and flush the final partial line afterwards.
                if len(line + " " + word) > 79:
                    to_write.append(line)
                    line = word
                else:
                    line = (line + " " + word).strip()
            if line:
                to_write.append(line)
            f.write("\n".join(to_write))
            f.write('\n""".split())')
Malware authors have two options to get a user to install a malicious app: invest the time it takes to build some new and useful functionality, or impersonate an app with a brand and leverage its implied usefulness and the trust of the brand's following. Any popular brand with a community is a good target, such as WhatsApp, MyEtherWallet, FlappyBirds – if you remember that one – and, oddly, flashlight apps. In fact, a fraudulent app that uses someone else's valuable brand is one of the lowest-effort methods to attack or take advantage of users via popular stores such as Google Play.

Activity with the MyEtherWallet app is a recent example of this tactic. On March 4th, @myetherwallet tweeted @GooglePlay that there appeared to be another fraudulent app claiming to be MyEtherWallet. Shortly after, the app at https://play.google.com/store/apps/details?id=com.ether.etherwallet was taken down by Google.

We were able to identify these fake apps with no heuristics. We just had to ask what was similar. Many of our customers worry about just this situation – fake versions of their apps being created and taking advantage of their brand's popularity and credibility to dupe users into downloading them.

One way we're ferreting out the fake apps is by combining human and machine learning. Appthority's analysis engine clusters similar apps together for the purpose of finding fraudulent apps, or any apps that entice users with a known app brand but that are used for malicious purposes. We're answering the questions: which apps are legitimate, and which are fraudulent? To do that, we're using a combination of machine learning, extensive preprocessing of data, and a scalable platform to measure the risk of fake apps. Here, we got creative and began looking for apps which are similar, versus relying on the more traditional security analyses that use signatures and heuristics to look for specific information and compare app names and developers. Looking for similarity enables the automatic discovery of variations in fraudulent apps and in the metadata describing the apps.

In our cryptowallet analysis, we found several more fake MyEtherWallet apps, two of which were still live in Google Play with package names com.crypwallet and com.mewmyeth. In this case, both apps had the same name, MyEtherWallet. In many fraudulent-app cases the titles differ, and different titles do not negatively affect our ability to discover those fraudulent apps. This does, however, show that users cannot rely on an app bearing a popular brand name actually coming from that brand's company.

As our Mobile Threat Team locates these fraudulent apps, we inform our customers and provide protection not just for those organizations but for everyone in the ecosystem by working with stores to take down the apps. Finding these apps requires knowledge of every app in the store, not just the apps in a customer's user base, so that we can identify and measure risk before an app is even installed.

App store security is reasonably good at catching known malware and quickly taking down apps as new malware appears. But, for enterprises, it's important to reduce the threats from mobile beyond what app stores are looking for. In this case, using creative methods to find and remove apps that play off of and tarnish a brand's reputation is just good business.
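The post doesn't publish Appthority's actual model, but the similarity idea can be sketched. The following is a minimal, hypothetical illustration – not the vendor's pipeline – that flags store listings whose titles are suspiciously close to a protected brand name, using character n-gram TF-IDF and cosine similarity; the listing data, package names and threshold here are made up for the example.

```python
# A minimal sketch of title-similarity screening, assuming scikit-learn.
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity

# Hypothetical (package_name, title) pairs scraped from a store listing.
apps = [
    ("com.myetherwallet.mewwallet", "MyEtherWallet"),  # legitimate
    ("com.crypwallet", "MyEtherWallet"),               # impostor, same title
    ("com.mewmyeth", "My EtherWallet Pro"),            # impostor, variant title
    ("com.whatsapp", "WhatsApp Messenger"),            # unrelated brand
]
brand = "MyEtherWallet"

# Character n-grams catch the small spelling variations that word-level
# tokenization misses ("My EtherWallet Pro" vs "MyEtherWallet").
vectorizer = TfidfVectorizer(analyzer="char_wb", ngram_range=(2, 4))
matrix = vectorizer.fit_transform([brand] + [title for _, title in apps])

# Similarity of every app title to the brand name (row 0 of the matrix).
scores = cosine_similarity(matrix[0:1], matrix[1:]).ravel()
for (package, title), score in zip(apps, scores):
    if score > 0.8:  # illustrative threshold; tuning is the human's job
        print(f"{package!r} ({title!r}) resembles {brand!r}: {score:.2f}")
```

In practice the legitimate listing matches its own brand too, so a whitelist of known-good package names (plus the human review the article describes) decides which of the flagged candidates are actually fraudulent.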
#!/usr/bin/env python

import Adafruit_Nokia_LCD as LCD
import Adafruit_GPIO.SPI as SPI

from PIL import Image
from PIL import ImageFont
from PIL import ImageDraw

import sys
from re import sub
import os

os.chdir('/home/root/gpio')

# Edison software SPI config:
SCLK = 35  # 10
DIN = 26   # 11
DC = 25    # 32
RST = 45   # 46
CS = 31    # 23

disp = LCD.PCD8544(DC, RST, SCLK, DIN, CS)

with open('/home/root/gpio/contrast.txt', "r") as f:
    contrast = int(sub('\\n', '', f.read()))

disp.begin(contrast=contrast)
disp.clear()

font = ImageFont.truetype('/home/root/gpio/fonts/Minecraftia-Regular.ttf', 8)
#font = ImageFont.load_default()

# new line positions
h = [-2, 6, 14, 22, 30, 38]

# initialise
image = Image.new('1', (LCD.LCDWIDTH, LCD.LCDHEIGHT))
draw = ImageDraw.Draw(image)
draw.rectangle((0, 0, LCD.LCDWIDTH, LCD.LCDHEIGHT), outline=255, fill=255)

# report lines
n = min(5, len(sys.argv) - 1)
for i in range(n):
    draw.text((0, h[i] + 0), sys.argv[i + 1], font=font)

# Battery bar
ind = 9
Y = 40
with open('battery_charge.txt', "r") as f:
    charge_val = int(f.read())
charge = int(50 * (float(charge_val) / 100))

draw.polygon([(0, 1 + Y), (2, 1 + Y), (2, 0 + Y), (4, 0 + Y), (4, 1 + Y),
              (6, 1 + Y), (6, 7 + Y), (0, 7 + Y)], outline=0, fill=255)
draw.text((61, Y - 1), str(charge_val) + '%', font=font)
draw.rectangle((ind, Y + 1, ind + 50, 7 + Y), outline=0, fill=255)
draw.rectangle((ind, Y + 1, ind + charge, 7 + Y), outline=0, fill=0)

disp.image(image)
disp.display()
Hope you're enjoying summer! Today's post is a recipe for a Biscoff cake which I made. I personally love it, but some don't, so if you like those biscuits you get when you go to a cafe, keep reading!

1- Heat the oven to 180C and line two cake tins with baking paper to avoid sticking, then leave to the side.

2- In a bowl, mix together the butter and sugar with an electric whisk until smooth.

3- Add the flour, baking powder and egg, then whisk or beat – whatever works; I used a bit of both. If the mixture is a bit dry, add some milk. I had to, but see how it goes! The mixture should be smooth and creamy.

4- Evenly distribute the mixture between both tins and put in the oven for 25-30 minutes, or until golden brown on top and a skewer stuck in comes out clean.

REMEMBER: BEFORE ICING YOU'VE GOT TO LET THE CAKE COOL OR IT WILL BE A DISASTER! I waited 10 minutes with them in the tin and then 45 minutes to an hour for them to cool.

In a bowl, start off by mixing just the butter to cream it up, and then on a slow speed keep mixing while you add the icing sugar. When all mixed up it should look smooth, like buttercream. Then add the Biscoff spread and mix again to combine. Now have a taste test to see if you need to add more spread or more icing sugar, until you're happy with your buttercream.

Decorate how you desire. I didn't have any piping tubes, but if you do, they're great. I just used a palette knife to spread some in the middle and on top, and made a wall around the cake out of the biscuits.

I hope you enjoyed this, and if you did, remember to let me know and I'll do more recipes, as I love baking! Thanks for reading. Bye for now.
#!/usr/bin/env python
'''planar-average of a cube file  [email protected]'''
import numpy as np
import sys


class Readcube:
    def __init__(self, filename, zdef=0):
        try:
            f = open(filename)
        except IOError:
            sys.exit('File not found.')
        [f.readline() for i in xrange(2)]
        # na: number of atoms
        na = int(f.readline().split()[0])
        # ng: 3D grid points, ns: spacing vector
        ng = np.array([[0, 0, 0]])
        ns = np.zeros((3, 3))
        for i in xrange(3):
            s = f.readline().split()
            ng[:, i] = int(s[0])
            ns[i] = float(s[1]), float(s[2]), float(s[3])
        # read the positions
        pos = np.zeros((na, 3))
        for i in xrange(na):
            s = f.readline().split()
            pos[i, :] = s[2:]
        # real space lattice vector
        rvec = ns * ng.T
        dat = str2array(f.readlines())
        f.close()
        # comply to the cube format
        dat = dat.reshape(ng[0, :])
        self.na = na
        self.ng = ng
        self.ns = ns
        self.dat = dat
        self.rvec = rvec
        self.pos = pos
        self.zdef = zdef


def str2array(str):
    return np.fromstring(''.join(str), sep=' ')


class Readpot:
    def __init__(self, filename):
        try:
            f = open(filename)
        except IOError:
            sys.exit('File not found.')
        f.readline()
        head = f.readline().split()
        ng = [0, 0, 0]
        ng[0], ng[1], ng[2] = int(head[0]), int(head[1]), int(head[2])
        na = int(head[6])
        ntype = int(head[7])  # number of atom types
        head = f.readline().split()
        scale = float(head[1])
        # rvec: real-space lattice vector
        rvec = np.zeros([3, 3])
        for i in xrange(3):
            s = f.readline().split()
            rvec[i, :] = float(s[0]), float(s[1]), float(s[2])
        rvec *= scale
        [f.readline() for i in xrange(ntype + 1)]
        # direct coordinates
        pos = np.zeros((na, 3))
        for i in xrange(na):
            s = f.readline().split()
            pos[i, :] = s[1:4]
        dat = f.readlines()
        f.close()
        self.dat = dat
        self.na = na
        self.ng = ng
        self.rvec = rvec
        self.pos = pos


class Shift1d:
    def __init__(self, z, y, zdef):
        N = z.size
        Z = z.max() - z.min()
        # fractional zdef
        fzdef = zdef / Z
        # fftshift
        # g: reciprocal vector
        g = np.fft.fftfreq(N) * N
        yg = np.fft.fft(y)
        ygs = yg * np.exp(-2j * np.pi * g * (0.5 - fzdef))
        ys = np.fft.ifft(ygs)
        # centering the defect
        self.zs = np.mgrid[-0.5:0.5:1. / N] * Z
        self.fzdef = fzdef
        self.g = g
        self.ys = ys


if __name__ == "__main__":
    cube = Readcube(sys.argv[1], float(sys.argv[2]))
    ngrid = cube.ng[0]
    # print number of atoms, and fft grid
    print(cube.na, ngrid)
    dir = 2  # 0->x, 1->y, 2->z
    avg_1d = np.zeros(ngrid[dir])
    for i in xrange(ngrid[dir]):
        avg_1d[i] = np.average(cube.dat[:, :, i])
    zlen = np.linalg.norm(cube.rvec[dir, :])
    z = np.linspace(0, zlen, ngrid[dir], endpoint=False)
    if float(sys.argv[2]) == 0:
        dump = zip(z, avg_1d)
    else:
        shift = Shift1d(z, avg_1d, cube.zdef)
        dump = zip(z, shift.ys.real)
    np.savetxt(sys.argv[1].rsplit(".")[0] + "_1d.dat", dump)
    avg = np.average(avg_1d)
    print(avg)
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os

import pytest

import export_model_sample

PROJECT_ID = os.getenv("BUILD_SPECIFIC_GCLOUD_PROJECT")
MODEL_ID = (
    "3422489426196955136"  # permanent_swim_run_videos_action_recognition_edge_model
)
GCS_URI = "gs://ucaip-samples-test-output/tmp/export_model_sample"


@pytest.fixture(scope="function", autouse=True)
def teardown(storage_client):
    yield

    bucket = storage_client.get_bucket("ucaip-samples-test-output")
    blobs = bucket.list_blobs(prefix="tmp/export_model_sample")
    for blob in blobs:
        blob.delete()


def test_export_model_sample(capsys):
    export_model_sample.export_model_sample(
        project=PROJECT_ID,
        model_id=MODEL_ID,
        gcs_destination_output_uri_prefix=GCS_URI,
    )
    out, _ = capsys.readouterr()
    assert "output_info" in out
EDIT 15th OCT: WE HAVE INCREASED CAPACITY SLIGHTLY. PLEASE EMAIL [email protected] IF INTERESTED.

EDIT 12th OCT: WE HAVE SOLD OUT BUT ARE TRYING TO ARRANGE LOGISTICS AND VENUE TO INCREASE CAPACITY. PLEASE CHECK BACK COB MONDAY 15th OCTOBER.

The IdeaGen Hackathon is a 4-day event that brings together students from all faculties in a competitive and collaborative environment. Students will work together in groups of four to develop innovative ideas intended to epitomise the Historic Heart Precinct.

Join us and meet some of our many featured guests at IdeaGen! Don't miss out on this exciting opportunity to expand YOUR network connections within the industry!! Get your tickets and come on down to our information session to learn about the event, form your groups and, best of all... free pizza and drinks!!

21st Oct - 2pm - 4pm: Downtown tour hosted by Historic Heart @ City of Perth Library, Cathedral Square. The two-hour tailored tour will take participants deep into the history of Perth. The tour guides will unfold hidden stories and narratives behind the heritage of Perth's East End.

23rd Oct - 3pm - 6pm: Academic Day @ 410.201. Participants will go through a competitive process. Judges will hand out interim prizes and the final four teams will be determined.

25th Oct - 4pm - 6pm: Final Pitch @ QT Perth, 2nd Floor Meeting Room. The final four teams will enter the final round of selection, pitching their ideas to state and federal government representatives. Winners will walk away with prizes and the title of champion of IdeaGen 2018!

Make YOUR mark with Historic Heart! P.S. Tickets ARE limited so get yours now!
#!/usr/bin/python
import os
import tempfile
import unittest

import common
from autotest.client import utils
from autotest.client.shared.test_utils import mock

import utils_misc
import cartesian_config
import build_helper


class TestUtilsMisc(unittest.TestCase):

    def test_cpu_vendor_intel(self):
        cpu_info = """processor : 0
vendor_id : GenuineIntel
cpu family : 6
model : 58
model name : Intel(R) Core(TM) i7-3770 CPU @ 3.40GHz
"""
        vendor = utils_misc.get_cpu_vendor(cpu_info, False)
        self.assertEqual(vendor, 'GenuineIntel')

    def test_cpu_vendor_amd(self):
        cpu_info = """processor : 3
vendor_id : AuthenticAMD
cpu family : 21
model : 16
model name : AMD A10-5800K APU with Radeon(tm) HD Graphics
"""
        vendor = utils_misc.get_cpu_vendor(cpu_info, False)
        self.assertEqual(vendor, 'AuthenticAMD')

    def test_vendor_unknown(self):
        cpu_info = "this is an unknown cpu"
        vendor = utils_misc.get_cpu_vendor(cpu_info, False)
        self.assertEqual(vendor, 'unknown')

    def test_get_archive_tarball_name(self):
        tarball_name = utils_misc.get_archive_tarball_name('/tmp',
                                                           'tmp-archive',
                                                           'bz2')
        self.assertEqual(tarball_name, 'tmp-archive.tar.bz2')

    def test_get_archive_tarball_name_absolute(self):
        tarball_name = utils_misc.get_archive_tarball_name('/tmp',
                                                           '/var/tmp/tmp',
                                                           'bz2')
        self.assertEqual(tarball_name, '/var/tmp/tmp.tar.bz2')

    def test_get_archive_tarball_name_from_dir(self):
        tarball_name = utils_misc.get_archive_tarball_name('/tmp', None, 'bz2')
        self.assertEqual(tarball_name, 'tmp.tar.bz2')

    def test_git_repo_param_helper(self):
        config = """git_repo_foo_uri = git://git.foo.org/foo.git
git_repo_foo_branch = next
git_repo_foo_lbranch = local
git_repo_foo_commit = bc732ad8b2ed8be52160b893735417b43a1e91a8
"""
        config_parser = cartesian_config.Parser()
        config_parser.parse_string(config)
        params = config_parser.get_dicts().next()

        h = build_helper.GitRepoParamHelper(params, 'foo', '/tmp/foo')
        self.assertEqual(h.name, 'foo')
        self.assertEqual(h.branch, 'next')
        self.assertEqual(h.lbranch, 'local')
        self.assertEqual(h.commit, 'bc732ad8b2ed8be52160b893735417b43a1e91a8')

    def test_normalize_data_size(self):
        n1 = utils_misc.normalize_data_size("12M")
        n2 = utils_misc.normalize_data_size("1024M", "G")
        n3 = utils_misc.normalize_data_size("1024M", "T")
        n4 = utils_misc.normalize_data_size("1000M", "G", 1000)
        n5 = utils_misc.normalize_data_size("1T", "G", 1000)
        n6 = utils_misc.normalize_data_size("1T", "M")
        self.assertEqual(n1, "12.0")
        self.assertEqual(n2, "1.0")
        self.assertEqual(n3, "0.0009765625")
        self.assertEqual(n4, "1.0")
        self.assertEqual(n5, "1000.0")
        self.assertEqual(n6, "1048576.0")


class FakeCmd(object):

    def __init__(self, cmd):
        self.fake_cmds = [
            {"cmd": "numactl --hardware",
             "stdout": """
available: 1 nodes (0)
node 0 cpus: 0 1 2 3 4 5 6 7
node 0 size: 18431 MB
node 0 free: 17186 MB
node distances:
node   0
  0:  10
"""},
            {"cmd": "ps -eLf | awk '{print $4}'",
             "stdout": """
1230
1231
1232
1233
1234
1235
1236
1237
"""},
            {"cmd": "taskset -p 0x1 1230", "stdout": ""},
            {"cmd": "taskset -p 0x2 1231", "stdout": ""},
            {"cmd": "taskset -p 0x4 1232", "stdout": ""},
            {"cmd": "taskset -p 0x8 1233", "stdout": ""},
            {"cmd": "taskset -p 0x10 1234", "stdout": ""},
            {"cmd": "taskset -p 0x20 1235", "stdout": ""},
            {"cmd": "taskset -p 0x40 1236", "stdout": ""},
            {"cmd": "taskset -p 0x80 1237", "stdout": ""},
        ]

        self.stdout = self.get_stdout(cmd)

    def get_stdout(self, cmd):
        for fake_cmd in self.fake_cmds:
            if fake_cmd['cmd'] == cmd:
                return fake_cmd['stdout']
        raise ValueError("Could not locate '%s' on fake cmd db" % cmd)


def utils_run(cmd):
    return FakeCmd(cmd)
all_nodes_contents = "0\n" online_nodes_contents = "0\n" class TestNumaNode(unittest.TestCase): def setUp(self): self.god = mock.mock_god(ut=self) self.god.stub_with(utils, 'run', utils_run) all_nodes = tempfile.NamedTemporaryFile(delete=False) all_nodes.write(all_nodes_contents) all_nodes.close() online_nodes = tempfile.NamedTemporaryFile(delete=False) online_nodes.write(online_nodes_contents) online_nodes.close() self.all_nodes_path = all_nodes.name self.online_nodes_path = online_nodes.name self.numa_node = utils_misc.NumaNode(-1, self.all_nodes_path, self.online_nodes_path) def test_get_node_cpus(self): self.assertEqual(self.numa_node.get_node_cpus(0), '0 1 2 3 4 5 6 7') def test_pin_cpu(self): self.assertEqual(self.numa_node.pin_cpu("1230"), "0") self.assertEqual(self.numa_node.dict["0"], ["1230"]) self.assertEqual(self.numa_node.pin_cpu("1231"), "1") self.assertEqual(self.numa_node.dict["1"], ["1231"]) self.assertEqual(self.numa_node.pin_cpu("1232"), "2") self.assertEqual(self.numa_node.dict["2"], ["1232"]) self.assertEqual(self.numa_node.pin_cpu("1233"), "3") self.assertEqual(self.numa_node.dict["3"], ["1233"]) self.assertEqual(self.numa_node.pin_cpu("1234"), "4") self.assertEqual(self.numa_node.dict["4"], ["1234"]) self.assertEqual(self.numa_node.pin_cpu("1235"), "5") self.assertEqual(self.numa_node.dict["5"], ["1235"]) self.assertEqual(self.numa_node.pin_cpu("1236"), "6") self.assertEqual(self.numa_node.dict["6"], ["1236"]) self.assertEqual(self.numa_node.pin_cpu("1237"), "7") self.assertEqual(self.numa_node.dict["7"], ["1237"]) self.assertTrue("free" not in self.numa_node.dict.values()) def test_free_cpu(self): self.assertEqual(self.numa_node.pin_cpu("1230"), "0") self.assertEqual(self.numa_node.dict["0"], ["1230"]) self.assertEqual(self.numa_node.pin_cpu("1231"), "1") self.assertEqual(self.numa_node.dict["1"], ["1231"]) self.numa_node.free_cpu("0") self.assertEqual(self.numa_node.dict["0"], []) self.assertEqual(self.numa_node.dict["1"], ["1231"]) def test_bitlist_to_string(self): string = 'foo' bitlist = [0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1] self.assertEqual(utils_misc.string_to_bitlist(string), bitlist) def test_string_to_bitlist(self): bitlist = [0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 0] string = 'bar' self.assertEqual(utils_misc.bitlist_to_string(bitlist), string) def tearDown(self): self.god.unstub_all() os.unlink(self.all_nodes_path) os.unlink(self.online_nodes_path) if __name__ == '__main__': unittest.main()
########################################################################
# $Header: /var/local/cvsroot/4Suite/Ft/Lib/Random.py,v 1.8 2006/01/13 06:12:55 mbrown Exp $
"""
Thread-safe random number generation

Random number generation capabilities, speed, and thread safety in
stdlib vary from version to version of Python. In addition, attempts
to use an OS-specific random number source can result in unexpected
exceptions being raised. Also, a bug in Python 2.3.0 can lead to a
reduction in entropy, and a bug in Python 2.4.0 and 2.4.1 can result
in exceptions related to open filehandles on some multithreaded Posix
platforms.

This module works around as many of these issues as it can by defining
random number generator classes that can be used safely by multiple
threads, using the best random number sources available. They support
all versions of Python from 2.1 up, and fall back on more reliable
generators when exception conditions occur. In addition, convenience
functions equivalent to random.random() and os.urandom() are exposed.

Copyright 2006 Fourthought, Inc. (USA).
Detailed license and copyright information: http://4suite.org/COPYRIGHT
Project home, documentation, distributions: http://4suite.org/
"""

__all__ = ['urandom', 'FtRandom', 'FtSystemRandom', 'DEFAULT_RNG',
           'Random', 'GetRandomBytes']

import random, threading, os, sys
from sys import version_info

py230 = version_info[0:3] == (2, 3, 0)
py23up = version_info[0:2] > (2, 2)
py24up = version_info[0:2] > (2, 3)
py242up = version_info[0:3] > (2, 4, 1)
posix = os.name == 'posix'
win32 = sys.platform == 'win32'

_lock = threading.Lock()

#=============================================================================
# Thread-safe implementation of os.urandom()
# (still raises NotImplementedError when no OS-specific random number source)
#
if win32 and py24up:
    urandom = os.urandom
elif posix:
    if py242up:
        urandom = os.urandom
    else:
        # Python 2.4.2's os.urandom()
        def urandom(n):
            """urandom(n) -> str

            Return a string of n random bytes suitable for cryptographic use.
            """
            try:
                _urandomfd = os.open("/dev/urandom", os.O_RDONLY)
            except:
                raise NotImplementedError("/dev/urandom (or equivalent) not found")
            bytes = ""
            while len(bytes) < n:
                bytes += os.read(_urandomfd, n - len(bytes))
            os.close(_urandomfd)
            return bytes
    if hasattr(random, '_urandom'):
        random._urandom = urandom
else:
    def urandom(n):
        """urandom(n) -> str

        Return a string of n random bytes suitable for cryptographic use.
        """
        raise NotImplementedError("There is no OS-specific random number source.")

#=============================================================================
# FtRandom: a non-crypto-safe PRNG (Mersenne Twister or Wichmann-Hill, made
# thread-safe). By default, seeded from an OS-specific random number source,
# if available.
#
if posix and not py24up:
    # posix py2.3 down: use urandom if possible
    from binascii import hexlify
    def _best_seed(self, a=None):
        """Initialize internal state from hashable object.

        None or no argument seeds from current time or from an operating
        system specific randomness source if available. If a is not None
        or an int or long, hash(a) is used instead.
""" if a is None: try: a = long(hexlify(urandom(16)), 16) except NotImplementedError: # posix py2.3.0: use system clock, but avoid buggy stdlib if py230: import time a = long(time.time() * 256) super(FtRandom, self).seed(a) elif py230: # win32 py2.3.0: use system clock, but avoid buggy stdlib def _best_seed(self, a=None): import time a = long(time.time() * 256) super(FtRandom, self).seed(a) else: # posix or win32 py2.4 up: urandom if possible, fall back on system clock # win32 py2.3 down: system clock only _best_seed = random.Random.seed # random.Random.gauss() is not thread-safe def _gauss(self, *args, **kwargs): """Gaussian distribution. mu is the mean, and sigma is the standard deviation. Thread-safe. """ _lock.acquire() rv = super(self.__class__, self).gauss(*args, **kwargs) _lock.release() return rv if py23up: # Mersenne Twister, already thread-safe _random = random.Random.random def _getrandbytes(self, k): """getrandbytes(k) -> x. Returns k random bytes as a str.""" bytes = "" while len(bytes) < k: n = super(FtRandom, self).random() bytes += chr(int(n * 256)) return bytes else: # Wichmann-Hill, made thread-safe def _random(self): """Get the next random number in the range [0.0, 1.0).""" _lock.acquire() n = super(FtRandom, self).random() _lock.release() return n def _getrandbytes(self, k): """getrandbytes(k) -> x. Returns k random bytes as a str.""" bytes = "" _lock.acquire() while len(bytes) < k: n = super(FtRandom, self).random() bytes += chr(int(n * 256)) _lock.release() return bytes if py24up: _getrandbits = random.Random.getrandbits else: # This is the py2.4 implementation from binascii import hexlify def _getrandbits(self, k): """getrandbits(k) -> x. Generates a long int with k random bits.""" if k <= 0: raise ValueError('number of bits must be greater than zero') if k != int(k): raise TypeError('number of bits should be an integer') bytes = (k + 7) // 8 # bits / 8 and rounded up x = long(hexlify(self.getrandbytes(bytes)), 16) return x >> (bytes * 8 - k) # trim excess bits class FtRandom(random.Random, object): """ The best available OS-agnostic PRNG, thread-safe. Implements getrandbits() in all versions of Python. Also adds getrandbytes(), which returns a str of bytes. """ seed = _best_seed gauss = _gauss random = _random getrandbits = _getrandbits getrandbytes = _getrandbytes def __init__(self, *args, **kwargs): return super(FtRandom, self).__init__(*args, **kwargs) #============================================================================= # FtSystemRandom: a PRNG that uses an OS-specific random number source, if # available, falling back on an instance of FtRandom. It is as crypto-safe as # the OS-specific random number source, when such a source is available. # Calls to seed() and jumpahead() only affect the fallback FtRandom instance. # if win32 and not py24up: # don't bother trying OS-specific sources on win32 before py2.4 def _random(self): """Get the next random number in the range [0.0, 1.0).""" return self._fallback_prng.random() def _getrandbits(self, k): """getrandbits(k) -> x. Generates a long int with k random bits.""" return self._fallback_prng.getrandbits(k) def _getrandbytes(self, k): """getrandbytes(k) -> x. Returns k random bytes as a str.""" return self._fallback_prng.getrandbytes(k) else: # Functions that read random numbers from OS-specific sources # Use random() and getrandbits() from random.SystemRandom. # We've already replaced random._urandom with our urandom, so it's OK. try: # py2.4 up... 
        from random import SystemRandom as _SystemRandom
        _sr_random = _SystemRandom.random.im_func
        _sr_getrandbits = _SystemRandom.getrandbits.im_func
    except ImportError:
        # py2.3 down, posix (since we tested for win32 above)...
        # These are based on the py2.4 implementation.
        from binascii import hexlify
        _BPF = 53        # Number of bits in a float
        _RECIP_BPF = 2**-_BPF
        def _sr_random(self):
            """Get the next random number in the range [0.0, 1.0)."""
            return (long(hexlify(urandom(7)), 16) >> 3) * _RECIP_BPF
        def _sr_getrandbits(self, k):
            """getrandbits(k) -> x.  Generates a long int with k random bits."""
            if k <= 0:
                raise ValueError('number of bits must be greater than zero')
            if k != int(k):
                raise TypeError('number of bits should be an integer')
            bytes = (k + 7) // 8                    # bits / 8 and rounded up
            x = long(hexlify(urandom(bytes)), 16)
            return x >> (bytes * 8 - k)             # trim excess bits

    # Wrapper functions that try OS-specific sources first, then fall back
    def _random(self):
        """Get the next random number in the range [0.0, 1.0)."""
        try:
            return _sr_random(self)
        except NotImplementedError:
            return self._fallback_prng.random()
    def _getrandbits(self, *args, **kwargs):
        """getrandbits(k) -> x.  Generates a long int with k random bits."""
        try:
            return _sr_getrandbits(self, *args, **kwargs)
        except NotImplementedError:
            return self._fallback_prng.getrandbits(*args, **kwargs)
    def _getrandbytes(self, k):
        """getrandbytes(k) -> x.  Returns k random bytes as a str."""
        try:
            return urandom(k)
        except NotImplementedError:
            return self._fallback_prng.getrandbytes(k)

class FtSystemRandom(FtRandom):
    """
    A PRNG that uses an OS-specific random number source, if available,
    falling back on an instance of FtRandom.

    Calls to seed(), jumpahead(), getstate() and setstate() only affect
    the fallback FtRandom instance.

    Implements getrandbits() in all versions of Python. Also adds
    getrandbytes(), which returns a str of bytes.
    """
    random = _random
    getrandbits = _getrandbits
    getrandbytes = _getrandbytes
    def __init__(self, *args, **kwargs):
        self._fallback_prng = FtRandom()
        return super(FtSystemRandom, self).__init__(*args, **kwargs)

    def seed(self, *args, **kwargs):
        """Seed the fallback PRNG (an instance of FtRandom)"""
        return self._fallback_prng.seed(*args, **kwargs)

    def jumpahead(self, *args, **kwargs):
        """Make the fallback PRNG (an instance of FtRandom) jump ahead"""
        return self._fallback_prng.jumpahead(*args, **kwargs)

    def getstate(self):
        """Return internal state; can be passed to setstate() later."""
        return self._fallback_prng.getstate()

    def setstate(self, state):
        """Restore internal state from object returned by getstate()."""
        self._fallback_prng.setstate(state)
        return

#=============================================================================
# convenience functions
#
DEFAULT_RNG = FtSystemRandom()

def Random():
    """Returns a random float, n, where 0 <= n < 1"""
    return DEFAULT_RNG.random()

def GetRandomBytes(numBytes):
    """
    Returns a string of random bytes from the best RNG available.
    Equivalent to os.urandom(), but failsafe.
    """
    return DEFAULT_RNG.getrandbytes(numBytes)
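Not part of the original module: a minimal usage sketch of the convenience API. The import path matches the file's location in the 4Suite tree (Ft/Lib/Random.py, per the CVS header); Python 2 syntax is used to match the module's era.

```python
# Minimal usage sketch for the 4Suite random module (Python 2).
from Ft.Lib.Random import Random, GetRandomBytes, FtSystemRandom

print Random()                  # float n with 0 <= n < 1, from DEFAULT_RNG
print repr(GetRandomBytes(16))  # 16 random bytes as a str; never raises,
                                # unlike os.urandom() on exotic platforms

rng = FtSystemRandom()          # a dedicated instance also works per thread
print rng.getrandbits(32)       # long int with 32 random bits
```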
I like baking breads in different shapes, using different methods, and with different sorts of pans. Sometimes the recipe ingredients are similar – I mean, flour, water, salt, yeast, and maybe some sugar are the basic ingredients. And those ingredients can be used to make a lot of different breads.

This time around, I was playing around with an interesting gadget – the Lekue Silicone Bread Maker that I reviewed here. It's really more of a baking pan than a "maker" that mixes dough, but it's not just a pan, either. When the bread maker is open, it's bowl-shaped, so you can mix the bread ingredients in it. The company also suggests kneading the dough right in the bowl. You can do that, if you like, but I prefer to knead on a countertop.

After the dough is kneaded, you let it rise in the closed pan, covered with a towel. Then it's shaped and placed back in the pan for the final rise. And finally, it's baked right in the pan. The crust was similar to the crust I've gotten from baking bread in a clay pot, no doubt because of the steam trapped inside the bread maker. The bread also ended up being a unique shape – sort of a football/submarine hybrid.

If you don't happen to have this particular pan, you can bake this bread in a standard bread pan or free-form on a baking sheet. The Lekue bread maker doesn't require anything to keep the bread from sticking, but if you decide to bake in another pan, consider using cornmeal under the loaf.

Combine all ingredients and knead by hand (or with your favorite machine) until smooth and elastic. Cover the dough and set aside until doubled in size, about an hour. If you're using the Lekue bread maker, close it, then cover it with a towel to cover the open ends. If you're using a standard bowl, cover it with plastic wrap.

When the bread has risen, form it into a log-shaped loaf and put it back into the Lekue bread maker (or whatever pan you'll be using). Allow it to rise until doubled in size, about 30 minutes. When the dough has doubled, bake in the closed Lekue pan (or with your chosen method) until nicely browned, about 40 minutes.

This silicone bread maker isn't just for bread. It's microwave-safe, so I used it for cooking vegetables in the microwave. It's a particularly good shape for microwaving corn on the cob, if that's something you want to do. You could also use it for cooking or baking other items in the microwave or oven, either open or closed. Or use it as a bowl. I tried cooking bread in the microwave, just for giggles, and it wasn't particularly successful – you'll never get the desired browning. But we didn't really expect that to work, did we?

Disclaimer: I received the bread maker as a sample from the manufacturer.
# Copyright (c) 2011, 2012 Free Software Foundation

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from gstudio.models import *
from objectapp.models import *
from reversion.models import Version

MAP = (
    ('objecttype', 'Objecttype'),
    ('gbobject', 'Gbobject')
)


def get_slug(name):
    """Return the URI of the node."""
    try:
        # Note: it is best to use only the meta information given for
        # Version data; individual object information is best retrieved
        # from the main table. Version.object.meta also contains a lot of
        # valuable information about the model.
        node = NID.objects.get(title=str(name))
        # Retrieve only the relevant tupleset for the versioned objects.
        vrs = Version.objects.filter(type=0, object_id=node.id)
        vrs = vrs[0]
    except:
        return "The item was not found."
    return vrs.object.get_absolute_url()


def get_nodetype(name):
    """Return the model name the id belongs to."""
    try:
        # ALGO: get object id, go to version model, return for the given id.
        node = NID.objects.get(title=str(name))
        # Retrieve only the relevant tupleset for the versioned objects.
        vrs = Version.objects.filter(type=0, object_id=node.id)
        # The returned value is a list, so splice it.
        vrs = vrs[0]
    except:
        return "The item was not found."
    return vrs.object._meta.module_name


def get_node(name):
    """Return a reference to the model object."""
    nt = get_nodetype(name)
    node = NID.objects.get(title=str(name))
    this_id = node.id
    if nt == 'gbobject':
        return Gbobject.objects.get(id=this_id)
    if nt == 'objecttype':
        return Objecttype.objects.get(id=this_id)
    if nt == 'metatype':
        return Metatype.objects.get(id=this_id)
Everyone in New Zealand was shocked and saddened by the killings in two Christchurch mosques. MTA is doing what it can to help.

The past few weeks have been extremely rough for all of New Zealand, but particularly for the people of Christchurch. We have all been affected by the murders of 50 men, women and children in the two mosques and the wounding of a further 50. MTA has reached out to members in Christchurch to offer support to them, their staff and families. We are offering them free access to confidential counselling via the EAP (Employer Assistance Programme). Other members (and their staff and families) in wider New Zealand who may have known those who died or were injured can also use this service. Please contact EAP directly on 0800 327 669, dial 2 to make a booking, and say you're from MTA. If there is anything else we can do, please call our MTA number 0800 00 1144. We have also expressed our sympathy to the leaders of the two mosques affected and provided them with MTA vouchers. We hope this practical donation is useful to the Muslim community over the coming months.

The government reform of trade training kicked off with a call for responses to its discussion document. MTA has made its submission and you can read more about that in next month's Radiator. In brief, we called for improvement to both student and employer support, for industry to have a strong voice in training, and for funding to reflect employers' contributions to training. We also want the new system to recognise that skills and qualifications aren't always the same thing, and both need to be catered for. MTA is also asking members for information that will help us to develop our case over the coming year. There will be a lot more discussion to come, and your experience and knowledge are needed. Please check out our survey on the Toolbox (sign in to the Member page at www.mta.org.nz). The survey will remain live for another month or so.

My thanks to the chair of the Eastland region, Grant Lower, for arranging a meeting in Waipukurau last month. It was fantastic, and not just because there were three desserts on offer! MTA President Dave Harris and I met up with around 30 members who were keen to hear what we were up to and give some feedback. We talked about the work MTA was doing in trades training, including setting up our own group apprenticeship scheme; member wellbeing; and the development of our app for WoF inspections. The app is in the final stages of testing. When it goes live, technicians will be able to use the app to exchange inspection information with NZTA's WoF Online. You can also email a copy of the check sheet to the customer. Data from all your vehicle inspections is also saved into a cloud database. News of the app was a big hit with everyone at the meeting. It will certainly save on double handling of data: using a paper check sheet and then entering the information online.

The evening also brought together MTA members who were interested in joining the region's executive committee. Three people put up their hands for the Eastland committee and will be attending their first meeting in a month or so. I encourage anyone who wants to lend their experience and knowledge to the sector to get in touch with their regional coordinator. Lots of meetings and social get-togethers are held in each region, and new blood and fresh ideas are always welcome.

VTNZ is building up its Certified Repairer network, which is timely, as some MTA members tell me they are considering getting out of WoF inspections.
The NZTA crackdown, increasing costs of specialist equipment needed to carry out warrants, and the scarcity of WoF inspectors are all creating pressure on some workshops. The Certified Repairer network means that members can turn all their vehicle inspections over to VTNZ and in return VTNZ refers customers needing repairs on to the repairers.
from __future__ import absolute_import

import traceback
import time
import sys
import subprocess
import platform
import tempfile
import os
import re
import glob
import functools
import datetime
import itertools

from concurrent.futures import ThreadPoolExecutor

from PyQt4 import QtCore, QtGui
Qt = QtCore.Qt

from maya import cmds

from sgfs import SGFS
from sgpublish import uiutils as ui_utils
from sgpublish import utils
from sgpublish.exporter import maya as io_maya
from sgpublish.exporter.ui.publish import maya as ui_publish
from sgpublish.exporter.ui.publish.generic import PublishSafetyError


def basename(src_path=None):
    basename = os.path.basename(src_path or cmds.file(q=True, sceneName=True) or 'untitled')
    basename = os.path.splitext(basename)[0]
    basename = re.sub(r'_*[rv]\d+', '', basename)
    return basename


class SceneExporter(io_maya.Exporter):

    def __init__(self, **kwargs):
        kwargs.setdefault('filename_hint', basename())
        kwargs.setdefault('publish_type', 'maya_scene')
        super(SceneExporter, self).__init__(**kwargs)

    def export_publish(self, publisher, **kwargs):
        # Save the file into the directory.
        src_path = cmds.file(q=True, sceneName=True)
        src_ext = os.path.splitext(src_path)[1]
        try:
            dst_path = os.path.join(publisher.directory, os.path.basename(src_path))
            maya_type = 'mayaBinary' if src_ext == '.mb' else 'mayaAscii'
            cmds.file(rename=dst_path)
            cmds.file(save=True, type=maya_type)
        finally:
            cmds.file(rename=src_path)

        # Set the primary path.
        publisher.path = dst_path


class PublishWidget(ui_publish.Widget):

    def safety_check(self, **kwargs):
        if not super(PublishWidget, self).safety_check(**kwargs):
            return False

        # Make sure they want to proceed if there are changes to the file.
        if cmds.file(q=True, modified=True):
            res = QtGui.QMessageBox.warning(self,
                "Unsaved Changes",
                "Would you like to save your changes before publishing this"
                " file? The publish will have the changes either way.",
                QtGui.QMessageBox.Save | QtGui.QMessageBox.No | QtGui.QMessageBox.Cancel,
                QtGui.QMessageBox.Save
            )
            if res & QtGui.QMessageBox.Cancel:
                return False
            if res & QtGui.QMessageBox.Save:
                cmds.file(save=True)

        return True


class Dialog(QtGui.QDialog):

    def __init__(self, exceptions=None):
        super(Dialog, self).__init__()
        self._setup_ui()

    def _setup_ui(self):
        self.setWindowTitle('Scene Publisher')
        self.setLayout(QtGui.QVBoxLayout())

        self._exporter = SceneExporter()

        self._publish_widget = PublishWidget(self._exporter)
        self._publish_widget.layout().setContentsMargins(0, 0, 0, 0)
        self.layout().addWidget(self._publish_widget)

        self._publish_widget.beforeScreenshot.connect(self.hide)
        self._publish_widget.afterScreenshot.connect(self.show)

        button = QtGui.QPushButton('Publish')
        button.clicked.connect(self._on_submit)
        self.layout().addLayout(ui_utils.vbox(button))

        self._publish_widget.beforePlayblast.connect(self._before_playblast)
        self._publish_widget.afterPlayblast.connect(self._after_playblast)

        self._msgbox = None

    def _before_playblast(self):
        self.hide()

    def _after_playblast(self):
        self.show()

    def _on_submit(self, *args):
        # DO IT.
        # This runs the safety check.
        try:
            publisher = self._publish_widget.export()
        except PublishSafetyError:
            return

        # It was an export, instead of a publish.
        if not publisher:
            return

        ui_utils.announce_publish_success(
            publisher,
            message="Version {publisher.version} of \"{publisher.name}\" has"
                " been published. Remember to version up!"
        )

        self.close()


def __before_reload__():
    # We have to manually clean this, since we aren't totally sure it will
    # always fall out of scope.
    global dialog
    if dialog:
        dialog.close()
        dialog.destroy()
        dialog = None


dialog = None


def run():
    global dialog

    if dialog:
        dialog.close()

    # Be cautious if the scene was never saved.
    filename = cmds.file(query=True, sceneName=True)
    if not filename:
        res = QtGui.QMessageBox.warning(None, 'Unsaved Scene',
            'This scene has not been saved. Continue anyways?',
            QtGui.QMessageBox.Yes | QtGui.QMessageBox.No,
            QtGui.QMessageBox.No
        )
        if res & QtGui.QMessageBox.No:
            return

    workspace = cmds.workspace(q=True, rootDirectory=True)
    if filename and not filename.startswith(workspace):
        res = QtGui.QMessageBox.warning(None, 'Mismatched Workspace',
            'This scene is not from the current workspace. Continue anyways?',
            QtGui.QMessageBox.Yes | QtGui.QMessageBox.No,
            QtGui.QMessageBox.No
        )
        if res & QtGui.QMessageBox.No:
            return

    dialog = Dialog()
    dialog.show()
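Not from the original source: a minimal sketch of launching this tool from Maya's script editor. The module name used below is a stand-in; the real import path depends on where this file sits in your sgpublish checkout.

```python
# Hypothetical invocation from Maya's script editor; `scene_publisher` is a
# stand-in name for this module, not a published sgpublish entry point.
import scene_publisher

scene_publisher.run()  # opens (or re-opens) the Scene Publisher dialog
```

The module-level `dialog` global plus the `__before_reload__` hook is the usual pattern for tools that get hot-reloaded inside a long-lived Maya session: the old dialog is explicitly closed and destroyed so a reload never leaves an orphaned window behind.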
Well, this is very cool. For the Directors Guild magazine, DGA Quarterly, "The Sopranos" creator David Chase analyzed every major shot of the series' oft-debated final scene. He discusses the focus on Members Only Guy at the counter, why he wanted to spend so much time on Meadow parallel-parking, and what he expected people to take from it. The whole thing's fascinating, but these last two passages in particular are very evocative, and both do and don't address the question of whether Tony died as the screen cut to black.

"I said to Gandolfini, the bell rings and you look up. That last shot of Tony ends on 'don't stop,' it's mid-song. I'm not going to go into [it]. I thought the possibility would go through a lot of people's minds or maybe everybody's mind that he was killed. He might have gotten shot three years ago in that situation. But he didn't. Whether this is the end here, or not, it's going to come at some point for the rest of us. Hopefully we're not going to get shot by some rival gang mob or anything like that. I'm not saying that [happened]. But obviously he stood more of a chance of getting shot by a rival gang mob than you or I do because he put himself in that situation. All I know is the end is coming for all of us."

This is the most clarity with which he's ever discussed the meaning and intent of that final scene, and what he says suggests a kind of Schrödinger's Tony: either Tony is dead at the end of the scene, or he isn't, but it doesn't matter anyway, because death comes for us all. It is, like almost anything Chase writes for or about that show, fascinating, so go read it.
__author__    = "Andre Merzky"
__copyright__ = "Copyright 2012-2013, The SAGA Project"
__license__   = "MIT"


import saga.adaptors.cpi.decorators as cpi_dec
import saga.adaptors.cpi.namespace  as cpi_ns
import saga.adaptors.cpi.attributes as cpi_att

SYNC  = cpi_dec.CPI_SYNC_CALL
ASYNC = cpi_dec.CPI_ASYNC_CALL


# keep order of inheritance!  super() below uses MRO
class Entry (cpi_ns.entry.Entry, cpi_att.Attributes) :

    # ----------------------------------------------------------------
    #
    # initialization methods
    #
    def __init__ (self, api, adaptor) :
        self._cpi_nsentry = super (Entry, self)
        self._cpi_nsentry.__init__ (api, adaptor)

    @SYNC
    def init_instance (self, url, flags, session) :
        pass

    @ASYNC
    def init_instance_async (self, url, flags, session) :
        pass

    # ----------------------------------------------------------------
    #
    # advert methods
    #
    @SYNC
    def set_ttl (self, ttl, ttype=None) :
        pass

    @ASYNC
    def set_ttl_async (self, ttl, ttype=None) :
        pass

    @SYNC
    def get_ttl (self, ttype) :
        pass

    @ASYNC
    def get_ttl_async (self, ttype) :
        pass

    @SYNC
    def store_object (self, object, ttype) :
        pass

    @ASYNC
    def store_object_async (self, object, ttype) :
        pass

    @SYNC
    def retrieve_object (self, ttype) :
        pass

    @ASYNC
    def retrieve_object_async (self, ttype) :
        pass

    @SYNC
    def delete_object (self, ttype) :
        pass

    @ASYNC
    def delete_object_async (self, ttype) :
        pass
With the world's leading phone manufacturers constantly updating their flagship products, many smartphone owners upgrade on an annual basis. While this can serve as a boon to one's ability to communicate with friends and family, enjoy apps and surf the web, regular phone upgrades also stand to put a dent in your finances. As such, finding ways to make upgrading affordable is in everyone's financial best interest. To this end, many people have found success with selling their old phones and putting the profits toward new ones. However, before selling your current phone, there are a number of preparations you'll need to tend to.

Many people practically live their lives through their smartphones. For most, smartphones function as PDAs, digital navigators, web-browsing devices, personal organizers and calendars. In other words, the typical smartphone holds quite a bit of sensitive personal information – and should that information fall into the wrong hands, the results can be downright disastrous. As is the case when selling a computer or tablet, it's strongly recommended that you wipe your smartphone's memory in advance of selling it. If you're unclear on how to do this, consult the phone's owner's manual or the website of its manufacturer.

Having your phone unlocked can instantly increase its value. For example, if you unlock an Apple iPhone 6, you can expect to receive quite a bit more for it than you otherwise would have. Unlocking a smartphone essentially frees it from an extensive range of manufacturer-imposed restraints, providing owners with a broader selection of carrier choices and apps.

Needless to say, the better the condition of your phone, the more money you stand to receive for it. This sometimes prompts sellers to misrepresent the condition of their phones when selling them over the web. However, engaging in such behavior can land you in hot water with assorted websites and result in you being forced to administer a refund. Since honesty is always the best policy, take care to give the phone a thorough inspection, keeping an eye out for both internal and external defects, prior to putting it up for sale.

The hottest smartphones generally don't come cheap. Purchasing the latest phone from a prominent manufacturer like Apple or LG stands to put a sizable dent in your wallet. Fortunately, smartphone owners can effectively offset this cost by selling their current phones for a fair price. When preparing your phone for sale, remember to wipe the device's memory, consider having it unlocked and carefully inspect it for defects.
# Copyright (c) 1996-2015 PSERC. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

"""Power flow data for 30 bus, 6 generator case.
"""

from numpy import array


def case30():
    """Power flow data for 30 bus, 6 generator case.

    Please see L{caseformat} for details on the case file format.

    Based on data from ...

    Alsac, O. & Stott, B., I{"Optimal Load Flow with Steady State Security"},
    IEEE Transactions on Power Apparatus and Systems, Vol. PAS 93, No. 3,
    1974, pp. 745-751.

    ... with branch parameters rounded to nearest 0.01, shunt values divided
    by 100 and shunt on bus 10 moved to bus 5, load at bus 5 zeroed out.
    Generator locations, costs and limits and bus areas were taken from ...

    Ferrero, R.W., Shahidehpour, S.M., Ramesh, V.C., I{"Transaction analysis
    in deregulated power systems using game theory"}, IEEE Transactions on
    Power Systems, Vol. 12, No. 3, Aug 1997, pp. 1340-1347.

    Generator Q limits were derived from Alsac & Stott, using their Pmax
    capacities. V limits and line |S| limits taken from Alsac & Stott.

    @return: Power flow data for 30 bus, 6 generator case.
    @see: U{http://www.pserc.cornell.edu/matpower/}
    """
    ppc = {"version": '2'}

    ##-----  Power Flow Data  -----##
    ## system MVA base
    ppc["baseMVA"] = 100.0

    ## bus data
    # bus_i type Pd Qd Gs Bs area Vm Va baseKV zone Vmax Vmin
    ppc["bus"] = array([
        [1, 3, 0, 0, 0, 0, 1, 1, 0, 135, 1, 1.05, 0.95],
        [2, 2, 21.7, 12.7, 0, 0, 1, 1, 0, 135, 1, 1.1, 0.95],
        [3, 1, 2.4, 1.2, 0, 0, 1, 1, 0, 135, 1, 1.05, 0.95],
        [4, 1, 7.6, 1.6, 0, 0, 1, 1, 0, 135, 1, 1.05, 0.95],
        [5, 1, 0, 0, 0, 0.19, 1, 1, 0, 135, 1, 1.05, 0.95],
        [6, 1, 0, 0, 0, 0, 1, 1, 0, 135, 1, 1.05, 0.95],
        [7, 1, 22.8, 10.9, 0, 0, 1, 1, 0, 135, 1, 1.05, 0.95],
        [8, 1, 30, 30, 0, 0, 1, 1, 0, 135, 1, 1.05, 0.95],
        [9, 1, 0, 0, 0, 0, 1, 1, 0, 135, 1, 1.05, 0.95],
        [10, 1, 5.8, 2, 0, 0, 3, 1, 0, 135, 1, 1.05, 0.95],
        [11, 1, 0, 0, 0, 0, 1, 1, 0, 135, 1, 1.05, 0.95],
        [12, 1, 11.2, 7.5, 0, 0, 2, 1, 0, 135, 1, 1.05, 0.95],
        [13, 2, 0, 0, 0, 0, 2, 1, 0, 135, 1, 1.1, 0.95],
        [14, 1, 6.2, 1.6, 0, 0, 2, 1, 0, 135, 1, 1.05, 0.95],
        [15, 1, 8.2, 2.5, 0, 0, 2, 1, 0, 135, 1, 1.05, 0.95],
        [16, 1, 3.5, 1.8, 0, 0, 2, 1, 0, 135, 1, 1.05, 0.95],
        [17, 1, 9, 5.8, 0, 0, 2, 1, 0, 135, 1, 1.05, 0.95],
        [18, 1, 3.2, 0.9, 0, 0, 2, 1, 0, 135, 1, 1.05, 0.95],
        [19, 1, 9.5, 3.4, 0, 0, 2, 1, 0, 135, 1, 1.05, 0.95],
        [20, 1, 2.2, 0.7, 0, 0, 2, 1, 0, 135, 1, 1.05, 0.95],
        [21, 1, 17.5, 11.2, 0, 0, 3, 1, 0, 135, 1, 1.05, 0.95],
        [22, 2, 0, 0, 0, 0, 3, 1, 0, 135, 1, 1.1, 0.95],
        [23, 2, 3.2, 1.6, 0, 0, 2, 1, 0, 135, 1, 1.1, 0.95],
        [24, 1, 8.7, 6.7, 0, 0.04, 3, 1, 0, 135, 1, 1.05, 0.95],
        [25, 1, 0, 0, 0, 0, 3, 1, 0, 135, 1, 1.05, 0.95],
        [26, 1, 3.5, 2.3, 0, 0, 3, 1, 0, 135, 1, 1.05, 0.95],
        [27, 2, 0, 0, 0, 0, 3, 1, 0, 135, 1, 1.1, 0.95],
        [28, 1, 0, 0, 0, 0, 1, 1, 0, 135, 1, 1.05, 0.95],
        [29, 1, 2.4, 0.9, 0, 0, 3, 1, 0, 135, 1, 1.05, 0.95],
        [30, 1, 10.6, 1.9, 0, 0, 3, 1, 0, 135, 1, 1.05, 0.95]
    ])

    ## generator data
    # bus, Pg, Qg, Qmax, Qmin, Vg, mBase, status, Pmax, Pmin, Pc1, Pc2,
    # Qc1min, Qc1max, Qc2min, Qc2max, ramp_agc, ramp_10, ramp_30, ramp_q, apf
    ppc["gen"] = array([
        [1, 23.54, 0, 150, -20, 1, 100, 1, 80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [2, 60.97, 0, 60, -20, 1, 100, 1, 80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [22, 21.59, 0, 62.5, -15, 1, 100, 1, 50, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [27, 26.91, 0, 48.7, -15, 1, 100, 1, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [23, 19.2, 0, 40, -10, 1, 100, 1, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        [13, 37, 0, 44.7, -15, 1,
100, 1, 40, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] ]) ## branch data # fbus, tbus, r, x, b, rateA, rateB, rateC, ratio, angle, status, angmin, angmax ppc["branch"] = array([ [1, 2, 0.02, 0.06, 0.03, 130, 130, 130, 0, 0, 1, -360, 360], [1, 3, 0.05, 0.19, 0.02, 130, 130, 130, 0, 0, 1, -360, 360], [2, 4, 0.06, 0.17, 0.02, 65, 65, 65, 0, 0, 1, -360, 360], [3, 4, 0.01, 0.04, 0, 130, 130, 130, 0, 0, 1, -360, 360], [2, 5, 0.05, 0.2, 0.02, 130, 130, 130, 0, 0, 1, -360, 360], [2, 6, 0.06, 0.18, 0.02, 65, 65, 65, 0, 0, 1, -360, 360], [4, 6, 0.01, 0.04, 0, 90, 90, 90, 0, 0, 1, -360, 360], [5, 7, 0.05, 0.12, 0.01, 70, 70, 70, 0, 0, 1, -360, 360], [6, 7, 0.03, 0.08, 0.01, 130, 130, 130, 0, 0, 1, -360, 360], [6, 8, 0.01, 0.04, 0, 32, 32, 32, 0, 0, 1, -360, 360], [6, 9, 0, 0.21, 0, 65, 65, 65, 0, 0, 1, -360, 360], [6, 10, 0, 0.56, 0, 32, 32, 32, 0, 0, 1, -360, 360], [9, 11, 0, 0.21, 0, 65, 65, 65, 0, 0, 1, -360, 360], [9, 10, 0, 0.11, 0, 65, 65, 65, 0, 0, 1, -360, 360], [4, 12, 0, 0.26, 0, 65, 65, 65, 0, 0, 1, -360, 360], [12, 13, 0, 0.14, 0, 65, 65, 65, 0, 0, 1, -360, 360], [12, 14, 0.12, 0.26, 0, 32, 32, 32, 0, 0, 1, -360, 360], [12, 15, 0.07, 0.13, 0, 32, 32, 32, 0, 0, 1, -360, 360], [12, 16, 0.09, 0.2, 0, 32, 32, 32, 0, 0, 1, -360, 360], [14, 15, 0.22, 0.2, 0, 16, 16, 16, 0, 0, 1, -360, 360], [16, 17, 0.08, 0.19, 0, 16, 16, 16, 0, 0, 1, -360, 360], [15, 18, 0.11, 0.22, 0, 16, 16, 16, 0, 0, 1, -360, 360], [18, 19, 0.06, 0.13, 0, 16, 16, 16, 0, 0, 1, -360, 360], [19, 20, 0.03, 0.07, 0, 32, 32, 32, 0, 0, 1, -360, 360], [10, 20, 0.09, 0.21, 0, 32, 32, 32, 0, 0, 1, -360, 360], [10, 17, 0.03, 0.08, 0, 32, 32, 32, 0, 0, 1, -360, 360], [10, 21, 0.03, 0.07, 0, 32, 32, 32, 0, 0, 1, -360, 360], [10, 22, 0.07, 0.15, 0, 32, 32, 32, 0, 0, 1, -360, 360], [21, 22, 0.01, 0.02, 0, 32, 32, 32, 0, 0, 1, -360, 360], [15, 23, 0.1, 0.2, 0, 16, 16, 16, 0, 0, 1, -360, 360], [22, 24, 0.12, 0.18, 0, 16, 16, 16, 0, 0, 1, -360, 360], [23, 24, 0.13, 0.27, 0, 16, 16, 16, 0, 0, 1, -360, 360], [24, 25, 0.19, 0.33, 0, 16, 16, 16, 0, 0, 1, -360, 360], [25, 26, 0.25, 0.38, 0, 16, 16, 16, 0, 0, 1, -360, 360], [25, 27, 0.11, 0.21, 0, 16, 16, 16, 0, 0, 1, -360, 360], [28, 27, 0, 0.4, 0, 65, 65, 65, 0, 0, 1, -360, 360], [27, 29, 0.22, 0.42, 0, 16, 16, 16, 0, 0, 1, -360, 360], [27, 30, 0.32, 0.6, 0, 16, 16, 16, 0, 0, 1, -360, 360], [29, 30, 0.24, 0.45, 0, 16, 16, 16, 0, 0, 1, -360, 360], [8, 28, 0.06, 0.2, 0.02, 32, 32, 32, 0, 0, 1, -360, 360], [6, 28, 0.02, 0.06, 0.01, 32, 32, 32, 0, 0, 1, -360, 360] ]) ##----- OPF Data -----## ## area data # area refbus ppc["areas"] = array([ [1, 8], [2, 23], [3, 26], ]) ## generator cost data # 1 startup shutdown n x1 y1 ... xn yn # 2 startup shutdown n c(n-1) ... c0 ppc["gencost"] = array([ [2, 0, 0, 3, 0.02, 2, 0], [2, 0, 0, 3, 0.0175, 1.75, 0], [2, 0, 0, 3, 0.0625, 1, 0], [2, 0, 0, 3, 0.00834, 3.25, 0], [2, 0, 0, 3, 0.025, 3, 0], [2, 0, 0, 3, 0.025, 3, 0] ]) return ppc
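# A minimal usage sketch: hand the dict returned above to PYPOWER's power
# flow solver. This assumes PYPOWER is installed; runpf() and ppoption()
# come from its public pypower.api module, and runpf() returns a
# (results, success) pair.
if __name__ == '__main__':
    from pypower.api import runpf, ppoption

    ppc = case30()
    results, success = runpf(ppc, ppoption(OUT_ALL=0))  # quiet solver output
    if success:
        # Column 7 (VM) of the solved bus matrix holds voltage magnitudes.
        print("Slack bus voltage: %.4f p.u." % results["bus"][0, 7])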
Distant Star starts off around the time of Pinochet's bloody 1973 coup and continues into the 1990s. The unnamed narrator, whom I presume to be the Arturo B. briefly mentioned in the preface, is so busy with Chilean poetry that he is taken completely by surprise when many students are arrested, killed, or go missing in the coup. He has taken a sudden and obsessive interest in a poet named Alberto Ruiz-Tagle, who turns out to be an officer of the Air Force named Carlos Wieder. The narrator begins some intense detective work, with the help of his friends, to find the answers he is looking for. I love a good horrific and violent book, and this book is exactly that. Although the writing style reminds me of Juan Rulfo, who isn't my favorite, I can certainly see the appeal and value it for what it's worth. It's a quick 150-page read that is unique and violent in its own specific way. Please support my blog and buy it now from Amazon!
#!/usr/bin/env python
# Convert a JSON array file into a CSV file.

import csv
import os
import sys
import time

import ijson

os.chdir('/home/julien_ha/data')

# Start timer
start_time = time.time()

input_file = sys.argv[1]
output_file = input_file.replace('.json', '.csv')

FIELDS = ["blockHash", "blockNumber", "from", "gas", "gasPrice", "hash",
          "input", "nonce", "timestamp", "to", "transactionIndex", "value"]

# Parse the JSON file and collect every item of the top-level array.
with open(input_file) as f1:
    d1 = list(ijson.items(f1, "item"))

# In Python 3, CSV files must be opened in text mode with newline=''
# (the original "wb+" mode only worked with the Python 2 csv module).
with open(output_file, "w", newline='') as out:
    f = csv.writer(out)
    # Write CSV header; remove this line if you don't need it.
    f.writerow(FIELDS)
    for t in d1:
        f.writerow([t[field] for field in FIELDS])

# Print time duration
print("Done in --- %s seconds ---" % (time.time() - start_time))
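# Note that the loop above first collects the entire array in memory, which
# defeats the point of a streaming parser like ijson. A minimal streaming
# sketch (same assumed field list as above) that writes each row as soon as
# it is parsed:
def json_array_to_csv(input_path, output_path, fields):
    """Stream items of a top-level JSON array straight into a CSV file."""
    with open(input_path) as src, open(output_path, "w", newline='') as dst:
        writer = csv.writer(dst)
        writer.writerow(fields)
        for item in ijson.items(src, "item"):
            # Missing keys are written as empty cells rather than crashing.
            writer.writerow([item.get(f, "") for f in fields])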
# -*- coding: utf-8 -*-

# Copyright © 2014-2018 GWHAT Project Contributors
# https://github.com/jnsebgosselin/gwhat
#
# This file is part of GWHAT (Ground-Water Hydrograph Analysis Toolbox).
# Licensed under the terms of the GNU General Public License.

# ---- Imports: standard libraries
from urllib.request import urlopen
from urllib.error import HTTPError, URLError
import csv
import time
import os
import os.path as osp

# ---- Imports: third parties
import numpy as np
from PyQt5.QtCore import QObject
from PyQt5.QtCore import pyqtSignal as QSignal

# ---- Imports: local libraries
from gwhat.common.utils import calc_dist_from_coord
from gwhat.meteo.weather_stationlist import WeatherSationList
from gwhat import __rootdir__

DATABASE_FILEPATH = osp.join(__rootdir__, 'climate_station_database.npy')
MAX_FAILED_FETCH_TRY = 3

PROV_NAME_ABB = [('ALBERTA', 'AB'),
                 ('BRITISH COLUMBIA', 'BC'),
                 ('MANITOBA', 'MB'),
                 ('NEW BRUNSWICK', 'NB'),
                 ('NEWFOUNDLAND', 'NL'),
                 ('NORTHWEST TERRITORIES', 'NT'),
                 ('NOVA SCOTIA', 'NS'),
                 ('NUNAVUT', 'NU'),
                 ('ONTARIO', 'ON'),
                 ('PRINCE EDWARD ISLAND', 'PE'),
                 ('QUEBEC', 'QC'),
                 ('SASKATCHEWAN', 'SK'),
                 ('YUKON TERRITORY', 'YT')]

# ---- Base functions

URL_TOR = ("ftp://[email protected]/"
           + "Pub/Get_More_Data_Plus_de_donnees/Station%20Inventory%20EN.csv")


def read_stationlist_from_tor():
    """Read and format the `Station Inventory EN.csv` file from the Tor ftp."""
    try:
        data = urlopen(URL_TOR).read()
    except (HTTPError, URLError):
        return None
    try:
        data = data.decode('utf-8-sig').splitlines()
    except (UnicodeDecodeError, UnicodeError):
        return None
    data = list(csv.reader(data, delimiter=','))

    FIELDS_KEYS_TYPE = [('Name', 'Name', str),
                        ('Province', 'Province', str),
                        ('Climate ID', 'ID', str),
                        ('Station ID', 'Station ID', str),
                        ('DLY First Year', 'DLY First Year', int),
                        ('DLY Last Year', 'DLY Last Year', int),
                        ('Latitude (Decimal Degrees)', 'Latitude', float),
                        ('Longitude (Decimal Degrees)', 'Longitude', float),
                        ('Elevation (m)', 'Elevation', float)]

    df = {}
    columns = None
    for i, row in enumerate(data):
        if len(row) == 0:
            continue
        if row[0] == 'Name':
            columns = row
            data = np.array(data[i+1:])

            # Remove stations with no daily data.
            dly_first_year = data[:, columns.index('DLY First Year')]
            data = data[~(dly_first_year == ''), :]
            break
    else:
        return None

    for field, key, atype in FIELDS_KEYS_TYPE:
        arr = data[:, columns.index(field)]
        if atype == float:
            arr[arr == ''] = np.nan
        else:
            arr[arr == ''] = 'NA'
        df[key] = arr.astype(atype)

    # Sanitize the station names (the original code called replace() without
    # assigning the result back, so the names were never actually cleaned).
    for i in range(len(df['Name'])):
        df['Name'][i] = df['Name'][i].replace('\\', ' ').replace('/', ' ')

    # Determine the station status.
    df['Status'] = np.zeros(len(df['Name'])).astype(str)
    df['Status'][df['DLY Last Year'] >= 2017] = 'Active'
    df['Status'][df['DLY Last Year'] < 2017] = 'Closed'

    # Format the province values.
    for name, abb in PROV_NAME_ABB:
        df['Province'][df['Province'] == name] = abb

    return df


# ---- API
class WeatherStationFinder(QObject):
    sig_progress_msg = QSignal(str)
    sig_load_database_finished = QSignal(bool)

    def __init__(self, filelist=None, *args, **kwargs):
        super(WeatherStationFinder, self).__init__(*args, **kwargs)
        self._data = None

    # ---- Load and fetch database
    @property
    def data(self):
        """Content of the ECCC database."""
        return self._data

    def load_database(self):
        """
        Load the climate station list from a file if it exists, otherwise
        fetch it from the ECCC Tor ftp server.
""" if os.path.exists(DATABASE_FILEPATH): self.sig_progress_msg.emit( "Loading the climate station database from file.") ts = time.time() self._data = np.load(DATABASE_FILEPATH).item() te = time.time() print("Station list loaded sucessfully in %0.2f sec." % (te-ts)) self.sig_load_database_finished.emit(True) else: self.fetch_database() def fetch_database(self): """ Fetch and read the list of climate stations with daily data from the ECCC Tor ftp server and save the result on disk. """ print("Fetching station list from ECCC Tor ftp server...") ts = time.time() self._data = None failed_fetch_try = 0 while True: self.sig_progress_msg.emit("Fetching the climate station database" " from the ECCC server...") self._data = read_stationlist_from_tor() if self._data is None: failed_fetch_try += 1 if failed_fetch_try <= MAX_FAILED_FETCH_TRY: print("Failed to fetch the database from " " the ECCC server (%d/%d)." % (failed_fetch_try, MAX_FAILED_FETCH_TRY)) time.sleep(3) else: msg = "Failed to fetch the database from the ECCC server." print(msg) self.sig_progress_msg.emit(msg) break else: np.save(DATABASE_FILEPATH, self._data) te = time.time() print("Station list fetched sucessfully in %0.2f sec." % (te-ts)) break self.sig_load_database_finished.emit(True) # ---- Utility functions def get_stationlist(self, status=None, prov=None, prox=None, yrange=None): """ Return a list of the stations in the ECCC database that fulfill the conditions specified in arguments. """ N = len(self.data['Name']) results = np.ones(N) if prov: results = results * np.isin(self.data['Province'], prov) if status: results = results * (self.data['Status'] == status) if prox: lat1, lon1, max_dist = prox lat2, lon2 = self.data['Latitude'], self.data['Longitude'] dists = calc_dist_from_coord(lat1, lon1, lat2, lon2) results = results * (dists <= max_dist) if yrange: arr_ymin = np.max(np.vstack([self.data['DLY First Year'], np.ones(N)*yrange[0]]), axis=0) arr_ymax = np.min(np.vstack([self.data['DLY Last Year'], np.ones(N)*yrange[1]]), axis=0) results = results * ((arr_ymax-arr_ymin+1) >= yrange[2]) indexes = np.where(results == 1)[0] stations = np.vstack((self.data['Name'][indexes], self.data['Station ID'][indexes], self.data['DLY First Year'][indexes], self.data['DLY Last Year'][indexes], self.data['Province'][indexes], self.data['ID'][indexes], self.data['Latitude'][indexes], self.data['Longitude'][indexes], self.data['Elevation'][indexes], )).transpose().tolist() stationlist = WeatherSationList() stationlist.add_stations(stations) return stationlist if __name__ == '__main__': stn_browser = WeatherStationFinder() stn_browser.load_database() stnlist = stn_browser.get_stationlist(prov=['QC', 'ON'], prox=(45.40, -73.15, 25), yrange=(1960, 2015, 10))
← Does Your TV Give You the Warm Fuzzies?
A study from the Norwegian Institute of Public Health suggests that mom is going to be blamed for something else when the kids get old enough to complain about their upbringing. In the first research project in the world to analyze children's eating habits in combination with maternal psychological variables, researchers found that emotionally unstable mothers tend to give their kids more sweet and fatty foods, leading to more weight gain.
And this was no small study. Nearly 28,000 mothers were included in the analysis, which focused on psychological factors such as anxiety, sadness, low self-confidence and a generally negative view of the world. In combination, those factors are referred to as "negative affectivity," and mothers who exhibit it typically have lower stress thresholds and give up more quickly when faced with obstacles — when their kids are out of control, they're more likely to give up and let the cretins have their way.
Strangely, though, researchers found no link between a mother's personality and healthy eating habits. Evidently, being a more confident and positive mom does not necessarily equal more fruits and veggies on the kids' plates.
The painful rub of all this is that earlier studies have found that being a more controlling parent (and that means mom and dad — because, no, dad doesn't get a pass on this) also leads to more sugar in kids' diets. Setting aside the negative-emotion component from the Norwegian study, what's an everyday parent to do? Two words: modeling and flexibility.
The Framingham Children's Study, conducted nearly nine years ago, yielded one of the most interesting results of any study on this topic before or since. Here it is: when parents exhibit "disinhibited eating" (lack of control) but preach "dietary restraint" (strict control), their kids get fatter. What at first sounds paradoxical actually makes a lot of sense. Often, the people who try the hardest (and talk the most) about controlling calories also have the hardest time actually doing it, and it's a vicious cycle: increasing strictness eventually leads to losing more control, which leads to becoming even stricter, more loss of control, and on and on. And kids, the sponges that they are, internalize the chaos and put on the pounds.
The remedy: break the cycle with a sensible dose of flexibility, and back it up with a helping of consistent modeling. Lighten up, literally. Easier said than done, no doubt, but it beats the alternatives.
If you don't have kids yet, I guess the lesson here is to try having them with someone with a fast metabolism. Not only will your kids need to eat more to get fat, your spouse is also less likely to end up having a negative effect on the kids via their own behavior.
# 259. 3Sum Smaller (LeetCode, Medium)
#
# Given an array of n integers nums and a target, find the number of index
# triplets i, j, k with 0 <= i < j < k < n that satisfy the condition
# nums[i] + nums[j] + nums[k] < target.
#
# For example, given nums = [-2, 0, 1, 3], and target = 2.
#
# Return 2, because there are two triplets whose sums are less than 2:
#
# [-2, 0, 1]
# [-2, 0, 3]
#
# Follow up:
# Could you solve it in O(n^2) runtime?

# 2017.05.23
# Sort, then fix the smallest element and scan the rest with two pointers.
class Solution(object):
    def threeSumSmaller(self, nums, target):
        """
        :type nums: List[int]
        :type target: int
        :rtype: int
        """
        if len(nums) < 3:
            return 0
        res = 0
        nums.sort()
        n = len(nums)
        i = 0
        while i < n - 2:
            j, k = i + 1, n - 1
            while j < k:
                cur = nums[i] + nums[j] + nums[k]
                if cur < target:
                    # Key point: nums is sorted, so for this (i, j) every
                    # index between j+1 and k also yields a sum < target.
                    res += k - j
                    j += 1
                else:
                    k -= 1
            i += 1
        return res
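# A quick sanity check using the example from the problem statement above:
if __name__ == '__main__':
    assert Solution().threeSumSmaller([-2, 0, 1, 3], 2) == 2
    assert Solution().threeSumSmaller([], 0) == 0  # fewer than 3 numbers
    print("all checks passed")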
attention. When he was able to catch her eye, she quickly looked away. Finally he followed her into the kitchen and blurted out his invitation. To his amazement, she readily consented. "Oh," said the waitress, "I thought you wanted more coffee."
# -*- encoding: utf-8 -*-

from django import forms
from django.contrib import admin
from django.utils.text import format_lazy
from django.utils.translation import ugettext_lazy as _

from flexible_reports.models.table import AllSortOptions, SortInGroup

from ..models import Column, ColumnOrder, Table
from .helpers import AverageTextarea, SmallerTextarea, SortableHiddenMixin


class ColumnForm(forms.ModelForm):
    class Meta:
        widgets = {
            'label': SmallerTextarea,
            'template': AverageTextarea,
            'footer_template': SmallerTextarea,
            'attrs': SmallerTextarea
        }


class ColumnOrderForm(forms.ModelForm):
    def __init__(self, parent, *args, **kw):
        super(ColumnOrderForm, self).__init__(*args, **kw)
        self.fields['column'].queryset = Column.objects.filter(parent=parent)


class ColumnOrderInline(SortableHiddenMixin, admin.TabularInline):
    extra = 0
    model = ColumnOrder
    fields = ['column', 'desc', 'position']

    def formfield_for_foreignkey(self, db_field, request=None, **kwargs):
        field = super(ColumnOrderInline, self).formfield_for_foreignkey(
            db_field, request, **kwargs)
        if db_field.name == 'column':
            if request._parent_ is not None:
                field.queryset = field.queryset.filter(
                    parent=request._parent_,
                    sortable=True)
            else:
                field.queryset = field.queryset.none()
        return field


class ColumnInline(SortableHiddenMixin, admin.StackedInline):
    extra = 0
    model = Column
    form = ColumnForm
    fields = ['label', 'attr_name', 'template', 'attrs', 'sortable',
              'exclude_from_export', 'strip_html_on_export',
              'display_totals', 'footer_template', 'position']


class TableForm(forms.ModelForm):
    class Meta:
        fields = ['label',
                  'base_model',
                  'sort_option',
                  'group_prefix',
                  'attrs',
                  'empty_template',
                  ]
        widgets = {
            'label': SmallerTextarea,
            'empty_template': SmallerTextarea,
            'attrs': SmallerTextarea
        }


@admin.register(Table)
class TableAdmin(admin.ModelAdmin):
    list_display = ['label', 'base_model', 'short_sort_option', 'columns']
    inlines = [ColumnInline, ColumnOrderInline]
    form = TableForm

    def columns(self, obj):
        return ", ".join([x.label for x in obj.column_set.all()])
    columns.short_description = _("Columns")

    def short_sort_option(self, obj):
        if obj.sort_option == SortInGroup.id:
            return format_lazy(
                "{label}{group_name}{group_prefix})",
                label=SortInGroup.label,
                group_name=_(" (group name: "),
                group_prefix=obj.group_prefix
            )
        return AllSortOptions[obj.sort_option].label
    short_sort_option.short_description = _("Sort option")
    short_sort_option.admin_order_field = "sort_option"

    def get_form(self, request, obj=None, **kwargs):
        # Stash the edited Table on the request so that the inlines above
        # can restrict their column querysets to this parent.
        request._parent_ = obj
        return super(TableAdmin, self).get_form(request, obj, **kwargs)
Small, sweet, with a mild nutty flavour. Rich in protein and low in fat. Adzuki beans are commonly used in soups and desserts.
# Copyright (c) 2015 - present Facebook, Inc. # All rights reserved. # # This source code is licensed under the BSD style license found in the # LICENSE file in the root directory of this source tree. An additional grant # of patent rights can be found in the PATENTS file in the same directory. from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import argparse import logging import os import shutil import subprocess import tempfile import traceback import time from inferlib import config, issues, utils, bucklib from . import util import re MODULE_NAME = __name__ MODULE_DESCRIPTION = '''Run analysis of code built with a command like: buck [options] [target] Analysis examples: infer -- buck build HelloWorld''' LANG = ['clang', 'java'] KEEP_GOING_OPTION = "--keep-going" def gen_instance(*args): return BuckAnalyzer(*args) def string_in_quotes(value): return value.strip('\'') def create_argparser(group_name=MODULE_NAME): """This defines the set of arguments that get added by this module to the set of global args defined in the infer top-level module Do not use this function directly, it should be invoked by the infer top-level module""" parser = argparse.ArgumentParser(add_help=False) group = parser.add_argument_group( '{grp} module'.format(grp=MODULE_NAME), description=MODULE_DESCRIPTION, ) group.add_argument('--use-flavors', action='store_true', help='Run Infer analysis through the use of flavors. ' 'Currently this is supported only for the cxx_* ' 'targets of Buck - e.g. cxx_library, cxx_binary - ' 'and not for Java. Note: this flag should be used ' 'in combination with passing the #infer flavor ' 'to the Buck target.') group.add_argument('--xcode-developer-dir', help='Specify the path to Xcode developer directory ' '(requires --use-flavors to work)') group.add_argument('--blacklist-regex', help='Specify the regex for files to skip during ' 'the analysis (requires --use-flavors to work)') group.add_argument('--Xbuck', action='append', default=[], type=string_in_quotes, help='Pass values as command-line arguments to ' 'invocations of `buck build`.' 
'NOTE: value should be wrapped in single quotes') return parser class BuckAnalyzer: def __init__(self, args, cmd): self.args = args self.cmd = cmd self.keep_going = KEEP_GOING_OPTION in self.args.Xbuck util.log_java_version() logging.info(util.run_cmd_ignore_fail(['buck', '--version'])) def capture(self): try: if self.args.use_flavors: return self.capture_with_flavors() else: return self.capture_without_flavors() except subprocess.CalledProcessError as exc: if self.args.debug: traceback.print_exc() return exc.returncode def create_cxx_buck_configuration_args(self): # return a string that can be passed in input to buck # and configures the paths to infer/clang/plugin/xcode facebook_clang_plugins_root = config.FCP_DIRECTORY clang_path = os.path.join( facebook_clang_plugins_root, 'clang', 'install', 'bin', 'clang', ) plugin_path = os.path.join( facebook_clang_plugins_root, 'libtooling', 'build', 'FacebookClangPlugin.dylib', ) args = [ '--config', '*//infer.infer_bin={bin}' .format(bin=config.BIN_DIRECTORY), '--config', '*//infer.clang_compiler={clang}'.format(clang=clang_path), '--config', '*//infer.clang_plugin={plugin}'.format(plugin=plugin_path), '--config', '*//cxx.pch_enabled=false', ] + self.args.Xbuck if self.args.xcode_developer_dir is not None: args.append('--config') args.append('apple.xcode_developer_dir={devdir}'.format( devdir=self.args.xcode_developer_dir)) if self.args.blacklist_regex: args.append('--config') args.append('*//infer.blacklist_regex={regex}'.format( regex=self.args.blacklist_regex)) return args def _get_analysis_result_paths(self): # TODO(8610738): Make targets extraction smarter buck_results_cmd = [ self.cmd[0], 'targets', '--show-output' ] + self.cmd[2:] + self.create_cxx_buck_configuration_args() buck_results_cmd = \ [x for x in buck_results_cmd if x != KEEP_GOING_OPTION] proc = subprocess.Popen(buck_results_cmd, stdout=subprocess.PIPE) (buck_output, _) = proc.communicate() if proc.returncode != 0: return None # remove target name prefixes from each line and split them into a list out = [x.split(None, 1)[1] for x in buck_output.strip().split('\n')] return [os.path.dirname(x) if os.path.isfile(x) else x for x in out if os.path.exists(x)] @staticmethod def _merge_infer_dep_files(root_paths, merged_out_path): potential_dep_files = [os.path.join(p, config.INFER_BUCK_DEPS_FILENAME) for p in root_paths] dep_files = filter(os.path.exists, potential_dep_files) utils.merge_and_dedup_files_into_path(dep_files, merged_out_path) @staticmethod def _merge_infer_report_files(root_paths, merged_out_path): potential_report_files = [os.path.join(p, config.JSON_REPORT_FILENAME) for p in root_paths] report_files = filter(os.path.exists, potential_report_files) all_results = issues.merge_reports_from_paths(report_files) utils.dump_json_to_path(all_results, merged_out_path) @staticmethod def _find_deps_and_merge(merged_out_path): """This function is used to compute the infer-deps.txt file that contains the location of the infer-out folders with the captured files created by buck. 
This is needed when keep-going is passed to buck and there are compilation failures, because in that case buck doesn't create this file.""" infer_out_folders = [] start_time = time.time() print('finding captured files in buck-out...') for root, dirs, files in os.walk(config.BUCK_OUT_GEN): regex = re.compile('.*infer-out.*') folders = \ [os.path.join(root, d) for d in dirs if re.match(regex, d)] for d in folders: if d not in infer_out_folders: infer_out_folders.append(d) with open(merged_out_path, 'w') as fmerged_out_path: for dir in infer_out_folders: fmerged_out_path.write('\t' + '\t' + dir + '\n') elapsed_time = time.time() - start_time print('time elapsed in finding captured files in buck-out: % 6.2fs' % elapsed_time) def _move_buck_out(self): """ If keep-going is passed, we may need to compute the infer-deps file with the paths to the captured files. To make sure that this is done in a consistent way, we need to start the analysis with an empty buck-out folder.""" if not os.path.exists(config.BUCK_OUT_TRASH): os.makedirs(config.BUCK_OUT_TRASH) tmp = tempfile.mkdtemp( dir=config.BUCK_OUT_TRASH, prefix=config.BUCK_OUT) print('moving files in ' + config.BUCK_OUT + ' to ' + tmp) for filename in os.listdir(config.BUCK_OUT): if filename != config.TRASH: shutil.move(os.path.join(config.BUCK_OUT, filename), tmp) def _run_buck_with_flavors(self): # TODO: Use buck to identify the project's root folder if not os.path.isfile('.buckconfig'): print('Please run this command from the folder where .buckconfig ' 'is located') return os.EX_USAGE env_vars = utils.read_env() infer_args = env_vars['INFER_ARGS'] if infer_args != '': infer_args += '^' # '^' must be CommandLineOption.env_var_sep infer_args += '--fcp-syntax-only' env_vars['INFER_ARGS'] = infer_args env = utils.encode_env(env_vars) command = self.cmd command += ['-j', str(self.args.multicore)] if self.args.load_average is not None: command += ['-L', str(self.args.load_average)] command += self.create_cxx_buck_configuration_args() try: subprocess.check_call(command, env=env) return os.EX_OK except subprocess.CalledProcessError as e: if self.keep_going: print('Buck failed, but continuing the analysis ' 'because --keep-going was passed') return -1 else: raise e def capture_with_flavors(self): if self.keep_going: self._move_buck_out() ret = self._run_buck_with_flavors() if not ret == os.EX_OK and not self.keep_going: return ret result_paths = self._get_analysis_result_paths() if result_paths is None: # huho, the Buck command to extract results paths failed return os.EX_SOFTWARE merged_reports_path = os.path.join( self.args.infer_out, config.JSON_REPORT_FILENAME) merged_deps_path = os.path.join( self.args.infer_out, config.INFER_BUCK_DEPS_FILENAME) self._merge_infer_report_files(result_paths, merged_reports_path) if not ret == os.EX_OK and self.keep_going: self._find_deps_and_merge(merged_deps_path) else: self._merge_infer_dep_files(result_paths, merged_deps_path) infer_out = self.args.infer_out json_report = os.path.join(infer_out, config.JSON_REPORT_FILENAME) bugs_out = os.path.join(infer_out, config.BUGS_FILENAME) issues.print_and_save_errors(infer_out, self.args.project_root, json_report, bugs_out, self.args.pmd_xml, console_out=not self.args.quiet) return os.EX_OK def capture_without_flavors(self): # Java is a special case, and we run the analysis from here buck_wrapper = bucklib.Wrapper(self.args, self.cmd) return buck_wrapper.run()
Mummy, Wife and Chaos: It's amazing how much you can get done with 3 children on so little sleep!
Last night a few mums from my children's school had a "we made it through another year/end of term/night of freedom" get together. It's been a long time since I strolled in at 3.45am. Yes, am. It was however brilliant and much needed! I even got to show off another fabby Fearne Cotton dress! Disclaimer: the photos you are about to witness are "laid back night off mummy". Hold no grudges!
Of course when you have children, the morning after a "much needed night off" is never a gentle wake up. True to form, 6.45am was wakey wakey time, and with the hubby off this weekend we headed off not long after to another National Trust beauty: Cliveden. We have a membership and so far it's been great; the kids love getting off to see new things and the children's activities are brilliant. Cliveden didn't let us down and we stayed all day, only leaving when the rain started and we were all beginning to struggle. The food was reasonably priced, the toilets were clean and plentiful, and not once were the kids bored! The maze was fun... We did it!
-Topshop oversized t-shirt, but I've tucked it in, and a Next statement necklace.
-Birkenstocks. Again these are ancient but have seen me through many summers. I'll be honest and say I thought my Birks days were over but... well, never say never!
All in all an amazing day, and Cliveden made it possible to entertain 3 kiddies on 3hrs sleep!
Good to know that my Mums Nights Out aren't the only ones that end in the small hours, lol! And wow... I wish I looked as good as you do on only 3 hours sleep!!
I was exhausted! The fresh air did me the world of good!
# This is an external library for calculating levels. # It's basically a copy of https://github.com/Plancke/hypixel-php/blob/master/src/util/Leveling.php # But written in Python. from math import sqrt, floor EXP_FIELD = 0 LVL_FIELD = 0 BASE = 10000 GROWTH = 2500 HALF_GROWTH = 0.5 * GROWTH REVERSE_PQ_PREFIX = -(BASE - 0.5 * GROWTH)/GROWTH REVERSE_CONST = REVERSE_PQ_PREFIX * REVERSE_PQ_PREFIX GROWTH_DIVIDES_2 = 2/GROWTH def getLevel(exp): return floor(1+REVERSE_PQ_PREFIX + sqrt(REVERSE_CONST+GROWTH_DIVIDES_2*exp)) def getExactLevel(exp): return getLevel(exp) + getPercentageToNextLevel(exp) def getExpFromLevelToNext(level): return GROWTH * (level-1) + BASE def getTotalExpToLevel(level): lv = floor(level) x0 = getTotalExpToFullLevel(lv) if level == lv: return x0 else: return (getTotalExpToFullLevel(lv+1) - x0) * (level % 1) + x0 def getTotalExpToFullLevel(level): return (HALF_GROWTH * (level-2) + BASE) * (level-1) def getPercentageToNextLevel(exp): lv = getLevel(exp) x0 = getTotalExpToLevel(lv) return (exp-x0) / (getTotalExpToLevel(lv+1) - x0) def getExperience(EXP_FIELD, LVL_FIELD): exp = int(EXP_FIELD) exp += getTotalExpToFullLevel(LVL_FIELD+1) return exp
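# A quick usage sketch (the numbers follow from the constants above: level 1
# starts at 0 network XP and level 2 starts at BASE = 10000 XP):
if __name__ == '__main__':
    assert getLevel(0) == 1
    assert getLevel(9999) == 1    # just below the level-2 threshold
    assert getLevel(10001) == 2   # just above it
    assert getExpFromLevelToNext(1) == BASE
    # 30000 XP lands exactly halfway between levels 3 and 4, so this
    # prints 3.5.
    print("exact level at 30000 XP: %.3f" % getExactLevel(30000))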
With our superb management, potent technical capability and strict quality control procedures, we continue to provide our customers with reliable quality, reasonable prices and outstanding service for our Ultra Bright LED Desk Lamps, and we keep improving. We are looking forward to more and more overseas friends joining our family for further development in the near future! To be the stage for realizing the dreams of our employees! To build a happier, more united and more professional team! To reach a mutual profit for our customers, our suppliers, society and ourselves!
Please feel free to send us your specifications and we will respond as soon as possible. We have a professional engineering team to serve your every detailed need, and free samples can be sent so you can learn more about our products. To make sure we can meet your requirements, please feel free to contact us: you can send us emails or call us directly. Additionally, we welcome visits to our factory from all over the world for a better understanding of our corporation and merchandise. In our trade with merchants of many countries, we always adhere to the principle of equality and mutual advantage. It is our hope to promote, by joint efforts, both trade and friendship to our mutual benefit. We look forward to receiving your inquiries.
# --
# DjangoPowerDNS - A PowerDNS web interface
# Copyright (C) 2017 McLive
# --
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU AFFERO General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# --

from rest_framework import permissions

from dpdns.models import APIKey


class HasAPIAccess(permissions.BasePermission):
    message = 'Invalid or missing API Key.'

    def has_permission(self, request, view):
        # The key is expected in an "Api-Key" HTTP header, which Django
        # exposes as HTTP_API_KEY in request.META.
        api_key = request.META.get('HTTP_API_KEY', '')
        return APIKey.objects.filter(key=api_key).exists()

    def has_object_permission(self, request, view, obj):
        # Guard against a missing or invalid key: .first() returns None
        # instead of raising DoesNotExist the way .get() would.
        api_key = request.META.get('HTTP_API_KEY', '')
        key = APIKey.objects.filter(key=api_key).first()
        if key is None:
            return False
        # Only grant access to objects belonging to the key's domain.
        return key.domain == obj
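# A minimal sketch of how this permission might be wired into a DRF view.
# "DomainViewSet" and "DomainSerializer" are hypothetical names used here
# only for illustration; permission_classes is the standard DRF hook.
#
#     from rest_framework import viewsets
#
#     class DomainViewSet(viewsets.ModelViewSet):
#         serializer_class = DomainSerializer
#         queryset = Domain.objects.all()
#         permission_classes = [HasAPIAccess]
#
# Clients would then authenticate each request by sending an
# "Api-Key: <key>" header.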
At the Murchison Falls, the river Nile plunges through a narrow crevice and over a 40-metre drop. In the eastern sector of the park, before the Murchison Falls themselves, are the Karuma Falls, where the Nile cascades over a breathtaking 23 km of rapids, creating some of the most exciting white-water rafting. Uganda's appeal as a tourist destination emerges out of the variety of its game stock and its unexploited scenic beauty. Uganda has exceptional natural resources for tourism, with a variety of landscapes, ecosystems, climates and cultures. Some of its features, such as the sheer variety of bird species, are exceptional by international standards, while others are unique. The Ugandan experience has novelty and rarity values not easily found elsewhere in Africa.
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'certified'}

DOCUMENTATION = r'''
---
module: bigip_profile_http
short_description: Manage HTTP profiles on a BIG-IP
description:
  - Manage HTTP profiles on a BIG-IP.
version_added: 2.7
options:
  name:
    description:
      - Specifies the name of the profile.
    type: str
    required: True
  parent:
    description:
      - Specifies the profile from which this profile inherits settings.
      - When creating a new profile, if this parameter is not specified, the
        default is the system-supplied C(http) profile.
    type: str
    default: /Common/http
  description:
    description:
      - Description of the profile.
    type: str
  proxy_type:
    description:
      - Specifies the proxy mode for the profile.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    type: str
    choices:
      - reverse
      - transparent
      - explicit
  dns_resolver:
    description:
      - Specifies the name of a configured DNS resolver; this option is
        mandatory when C(proxy_type) is set to C(explicit).
      - Format of the name can either be prepended with a partition
        (C(/Common/foo)), or specified just as an object name (C(foo)).
      - To remove the entry a value of C(none) or C('') can be set; however,
        the profile C(proxy_type) must not be set as C(explicit).
    type: str
  insert_xforwarded_for:
    description:
      - When specified, the system inserts an X-Forwarded-For header in an
        HTTP request with the client IP address, to use with connection
        pooling.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    type: bool
  redirect_rewrite:
    description:
      - Specifies whether the system rewrites the URIs that are part of HTTP
        redirect (3XX) responses.
      - When set to C(none) the system will not rewrite the URI in any HTTP
        redirect responses.
      - When set to C(all) the system rewrites the URI in all HTTP redirect
        responses.
      - When set to C(matching) the system rewrites the URI in any HTTP
        redirect responses that match the request URI.
      - When set to C(nodes), if the URI contains a node IP address instead
        of a host name, the system changes it to the virtual server address.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    type: str
    choices:
      - none
      - all
      - matching
      - nodes
  encrypt_cookies:
    description:
      - Cookie names for the system to encrypt.
      - To remove the entry completely a value of C(none) or C('') should be
        set.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    type: list
  encrypt_cookie_secret:
    description:
      - Passphrase for cookie encryption.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    type: str
  update_password:
    description:
      - C(always) will update passwords if the C(encrypt_cookie_secret) is
        specified.
      - C(on_create) will only set the password for newly created profiles.
    type: str
    choices:
      - always
      - on_create
    default: always
  header_erase:
    description:
      - The name of a header, in an HTTP request, which the system removes
        from the request.
      - To remove the entry completely a value of C(none) or C('') should be
        set.
      - The format of the header must be in C(KEY:VALUE) format, otherwise an
        error is raised.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    type: str
    version_added: 2.8
  header_insert:
    description:
      - A string that the system inserts as a header in an HTTP request.
      - To remove the entry completely a value of C(none) or C('') should be
        set.
      - The format of the header must be in C(KEY:VALUE) format, otherwise an
        error is raised.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    type: str
    version_added: 2.8
  server_agent_name:
    description:
      - Specifies the string used as the server name in traffic generated by
        BIG-IP.
      - To remove the entry completely a value of C(none) or C('') should be
        set.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    type: str
    version_added: 2.8
  include_subdomains:
    description:
      - When set to C(yes), applies the HSTS policy to the HSTS host and its
        sub-domains.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    type: bool
    version_added: 2.8
  maximum_age:
    description:
      - Specifies the maximum length of time, in seconds, that HSTS
        functionality requests that clients only use HTTPS to connect to the
        current host and any sub-domains of the current host's domain name.
      - The accepted value range is C(0 - 4294967295) seconds; a value of
        C(0) seconds re-enables plaintext HTTP access, while specifying
        C(indefinite) will set it to the maximum value.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    type: str
    version_added: 2.8
  hsts_mode:
    description:
      - When set to C(yes), enables the HSTS settings.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    type: bool
    version_added: 2.8
  accept_xff:
    description:
      - Enables or disables trusting the client IP address, and statistics
        from the client IP address, based on the request's XFF
        (X-forwarded-for) headers, if they exist.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    type: bool
    version_added: 2.9
  xff_alternative_names:
    description:
      - Specifies alternative XFF headers instead of the default
        X-forwarded-for header.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    type: list
    version_added: 2.9
  fallback_host:
    description:
      - Specifies an HTTP fallback host.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    type: str
    version_added: 2.9
  fallback_status_codes:
    description:
      - Specifies one or more HTTP error codes from server responses that
        should trigger a redirection to the fallback host.
      - The accepted valid error codes are as defined by RFC 2616.
      - The codes can be specified as individual items or as valid ranges,
        e.g. C(400-417) or C(500-505).
      - Mixing response code ranges across error types is invalid, e.g.
        defining C(400-505) will raise an error.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    type: list
    version_added: 2.9
  oneconnect_transformations:
    description:
      - Enables the system to perform HTTP header transformations for the
        purpose of keeping server-side connections open.
        This feature requires configuration of a OneConnect profile.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    type: bool
    version_added: 2.9
  request_chunking:
    description:
      - Specifies how to handle chunked and unchunked requests.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    type: str
    choices:
      - rechunk
      - selective
      - preserve
    version_added: 2.9
  response_chunking:
    description:
      - Specifies how to handle chunked and unchunked responses.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    type: str
    choices:
      - rechunk
      - selective
      - preserve
    version_added: 2.9
  enforcement:
    description:
      - Specifies protocol enforcement settings for the HTTP profile.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    suboptions:
      truncated_redirects:
        description:
          - Specifies what happens if a truncated redirect is seen from a
            server.
          - If C(yes), the redirect will be forwarded to the client;
            otherwise the malformed HTTP will be silently ignored.
          - When creating a new profile, if this parameter is not specified,
            the default is provided by the parent profile.
        type: bool
      excess_client_headers:
        description:
          - Specifies the behavior when too many client headers are received.
          - If set to C(pass-through), the system will switch to pass-through
            mode; when set to C(reject), the connection will be rejected.
          - When creating a new profile, if this parameter is not specified,
            the default is provided by the parent profile.
        type: str
        choices:
          - reject
          - pass-through
      excess_server_headers:
        description:
          - Specifies the behavior when too many server headers are received.
          - If set to C(pass-through), the system will switch to pass-through
            mode; when set to C(reject), the connection will be rejected.
          - When creating a new profile, if this parameter is not specified,
            the default is provided by the parent profile.
        type: str
        choices:
          - reject
          - pass-through
      oversize_client_headers:
        description:
          - Specifies the behavior when too-large client headers are
            received.
          - If set to C(pass-through), the system will switch to pass-through
            mode; when set to C(reject), the connection will be rejected.
          - When creating a new profile, if this parameter is not specified,
            the default is provided by the parent profile.
        type: str
        choices:
          - reject
          - pass-through
      oversize_server_headers:
        description:
          - Specifies the behavior when too-large server headers are
            received.
          - If set to C(pass-through), the system will switch to pass-through
            mode; when set to C(reject), the connection will be rejected.
          - When creating a new profile, if this parameter is not specified,
            the default is provided by the parent profile.
        type: str
        choices:
          - reject
          - pass-through
      pipeline:
        description:
          - Enables HTTP/1.1 pipelining, allowing clients to make requests
            even when prior requests have not received a response.
          - In order for this to succeed, however, destination servers must
            include support for pipelining.
          - If set to C(pass-through), pipelined data will cause the BIG-IP
            to immediately switch to pass-through mode and disable the HTTP
            filter.
          - When creating a new profile, if this parameter is not specified,
            the default is provided by the parent profile.
        type: str
        choices:
          - allow
          - reject
          - pass-through
      unknown_method:
        description:
          - Specifies whether to allow, reject or switch to pass-through mode
            when an unknown HTTP method is parsed.
          - When creating a new profile, if this parameter is not specified,
            the default is provided by the parent profile.
        type: str
        choices:
          - allow
          - reject
          - pass-through
      max_header_count:
        description:
          - Specifies the maximum number of headers allowed in an HTTP
            request/response.
          - The valid value range is between 16 and 4096 inclusive.
          - When set to C(default) the value of this parameter will be C(64).
          - When creating a new profile, if this parameter is not specified,
            the default is provided by the parent profile.
        type: str
      max_header_size:
        description:
          - Specifies the maximum header size specified in bytes.
          - The valid value range is between 0 and 4294967295 inclusive.
          - When set to C(default) the value of this parameter will be
            C(32768) bytes.
          - When creating a new profile, if this parameter is not specified,
            the default is provided by the parent profile.
        type: str
      max_requests:
        description:
          - Specifies the number of requests that the system accepts on a
            per-connection basis.
          - The valid value range is between 0 and 4294967295 inclusive.
          - When set to C(default) the value of this parameter will be C(0),
            which means the system will not limit the number of requests per
            connection.
          - When creating a new profile, if this parameter is not specified,
            the default is provided by the parent profile.
        type: str
      known_methods:
        description:
          - Specifies which HTTP methods count as being known; removing
            RFC-defined methods from this list will cause the HTTP filter to
            not recognize them.
          - "The default list provided with the system includes: C(CONNECT),
            C(DELETE), C(GET), C(HEAD), C(LOCK), C(OPTIONS), C(POST),
            C(PROPFIND), C(PUT), C(TRACE), C(UNLOCK). The list can be
            appended by specifying the C(default) keyword as one of the list
            elements."
          - The C(default) keyword can also be used to restore the default
            C(known_methods) on the system.
          - When creating a new profile, if this parameter is not specified,
            the default is provided by the parent profile.
        type: list
    type: dict
    version_added: 2.9
  sflow:
    description:
      - Specifies sFlow settings for the HTTP profile.
      - When creating a new profile, if this parameter is not specified, the
        default is provided by the parent profile.
    suboptions:
      poll_interval:
        description:
          - Specifies the maximum interval in seconds between two pollings.
          - The valid value range is between 0 and 4294967295 seconds
            inclusive.
          - For this setting to take effect the C(poll_interval_global)
            parameter must be set to C(no).
          - When creating a new profile, if this parameter is not specified,
            the default is provided by the parent profile.
        type: int
      poll_interval_global:
        description:
          - Specifies whether the global HTTP poll-interval setting overrides
            the object-level C(poll_interval) setting.
          - When creating a new profile, if this parameter is not specified,
            the default is provided by the parent profile.
        type: bool
      sampling_rate:
        description:
          - Specifies the ratio of packets observed to the samples generated.
            For example, a sampling rate of C(2000) specifies that 1 sample
            will be randomly generated for every 2000 packets observed.
          - The valid value range is between 0 and 4294967295 packets
            inclusive.
          - For this setting to take effect the C(sampling_rate_global)
            parameter must be set to C(no).
          - When creating a new profile, if this parameter is not specified,
            the default is provided by the parent profile.
        type: int
      sampling_rate_global:
        description:
          - Specifies whether the global HTTP sampling-rate setting overrides
            the object-level sampling-rate setting.
- When creating a new profile, if this parameter is not specified, the default is provided by the parent profile. type: bool type: dict version_added: 2.9 partition: description: - Device partition to manage resources on. type: str default: Common state: description: - When C(present), ensures that the profile exists. - When C(absent), ensures the profile is removed. type: str choices: - present - absent default: present extends_documentation_fragment: f5 author: - Wojciech Wypior (@wojtek0806) ''' EXAMPLES = r''' - name: Create HTTP profile bigip_profile_http: name: my_profile insert_xforwarded_for: yes redirect_rewrite: all state: present provider: user: admin password: secret server: lb.mydomain.com delegate_to: localhost - name: Remove HTTP profile bigip_profile_http: name: my_profile state: absent provider: server: lb.mydomain.com user: admin password: secret delegate_to: localhost - name: Add HTTP profile for transparent proxy bigip_profile_http: name: my_profile proxy_type: transparent provider: password: secret server: lb.mydomain.com user: admin delegate_to: localhost ''' RETURN = r''' parent: description: Specifies the profile from which this profile inherits settings. returned: changed type: str sample: /Common/http description: description: Description of the profile. returned: changed type: str sample: My profile proxy_type: description: Specify proxy mode of the profile. returned: changed type: str sample: explicit hsts_mode: description: Enables the HSTS settings. returned: changed type: bool sample: no maximum_age: description: The maximum length of time, in seconds, that HSTS functionality requests that clients only use HTTPS. returned: changed type: str sample: indefinite include_subdomains: description: Applies the HSTS policy to the HSTS host and its sub-domains. returned: changed type: bool sample: yes server_agent_name: description: The string used as the server name in traffic generated by BIG-IP. returned: changed type: str sample: foobar header_erase: description: The name of a header, in an HTTP request, which the system removes from request. returned: changed type: str sample: FOO:BAR header_insert: description: The string that the system inserts as a header in an HTTP request. returned: changed type: str sample: FOO:BAR insert_xforwarded_for: description: Insert X-Forwarded-For-Header. returned: changed type: bool sample: yes redirect_rewrite: description: Rewrite URI that are part of 3xx responses. returned: changed type: str sample: all encrypt_cookies: description: Cookie names to encrypt. returned: changed type: list sample: ['MyCookie1', 'MyCookie2'] dns_resolver: description: Configured dns resolver. returned: changed type: str sample: '/Common/FooBar' accept_xff: description: Enables or disables trusting the client IP address, and statistics from the client IP address. returned: changed type: bool sample: yes xff_alternative_names: description: Specifies alternative XFF headers instead of the default X-forwarded-for header. returned: changed type: list sample: ['FooBar', 'client1'] fallback_host: description: Specifies an HTTP fallback host. returned: changed type: str sample: 'foobar.com' fallback_status_codes: description: HTTP error codes from server responses that should trigger a redirection to the fallback host. returned: changed type: list sample: ['400-404', '500', '501'] oneconnect_transformations: description: Enables or disables HTTP header transformations. 
returned: changed type: bool sample: no request_chunking: description: Specifies how to handle chunked and unchunked requests. returned: changed type: str sample: rechunk response_chunking: description: Specifies how to handle chunked and unchunked responses. returned: changed type: str sample: rechunk enforcement: description: Specifies protocol enforcement settings for the HTTP profile. type: complex returned: changed contains: truncated_redirects: description: Specifies what happens if a truncated redirect is seen from a server. returned: changed type: bool sample: yes excess_server_headers: description: Specifies the behavior when too many server headers are received. returned: changed type: str sample: pass-through oversize_client_headers: description: Specifies the behavior when too-large client headers are received. returned: changed type: str sample: reject oversize_server_headers: description: Specifies the behavior when too-large server headers are received. returned: changed type: str sample: reject pipeline: description: Allows, rejects or switches to pass-through mode when dealing with pipelined data. returned: changed type: str sample: allow unknown_method: description: Allows, rejects or switches to pass-through mode when an unknown HTTP method is parsed. returned: changed type: str sample: allow max_header_count: description: The maximum number of headers allowed in HTTP request/response. returned: changed type: str sample: 4096 max_header_size: description: The maximum header size specified in bytes. returned: changed type: str sample: default max_requests: description: The number of requests that the system accepts on a per-connection basis. returned: changed type: str sample: default known_methods: description: The list of known HTTP methods. returned: changed type: list sample: ['default', 'FOO', 'BAR'] sample: hash/dictionary of values sflow: description: Specifies sFlow settings for the HTTP profile. type: complex returned: changed contains: poll_interval: description: Specifies the maximum interval in seconds between two pollings. returned: changed type: int sample: 30 poll_interval_global: description: Enables/Disables overriding HTTP poll-interval setting. returned: changed type: bool sample: yes sampling_rate: description: Specifies the ratio of packets observed to the samples generated. returned: changed type: int sample: 2000 sampling_rate_global: description: Enables/Disables overriding HTTP sampling-rate setting. 
returned: changed type: bool sample: yes sample: hash/dictionary of values ''' import re from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.basic import env_fallback try: from library.module_utils.network.f5.bigip import F5RestClient from library.module_utils.network.f5.common import F5ModuleError from library.module_utils.network.f5.common import AnsibleF5Parameters from library.module_utils.network.f5.common import fq_name from library.module_utils.network.f5.common import f5_argument_spec from library.module_utils.network.f5.common import flatten_boolean from library.module_utils.network.f5.common import transform_name from library.module_utils.network.f5.compare import cmp_simple_list from library.module_utils.network.f5.urls import check_header_validity except ImportError: from ansible.module_utils.network.f5.bigip import F5RestClient from ansible.module_utils.network.f5.common import F5ModuleError from ansible.module_utils.network.f5.common import AnsibleF5Parameters from ansible.module_utils.network.f5.common import fq_name from ansible.module_utils.network.f5.common import f5_argument_spec from ansible.module_utils.network.f5.common import flatten_boolean from ansible.module_utils.network.f5.common import transform_name from ansible.module_utils.network.f5.compare import cmp_simple_list from ansible.module_utils.network.f5.urls import check_header_validity class Parameters(AnsibleF5Parameters): api_map = { 'defaultsFrom': 'parent', 'insertXforwardedFor': 'insert_xforwarded_for', 'redirectRewrite': 'redirect_rewrite', 'encryptCookies': 'encrypt_cookies', 'encryptCookieSecret': 'encrypt_cookie_secret', 'proxyType': 'proxy_type', 'explicitProxy': 'explicit_proxy', 'headerErase': 'header_erase', 'headerInsert': 'header_insert', 'serverAgentName': 'server_agent_name', 'includeSubdomains': 'include_subdomains', 'maximumAge': 'maximum_age', 'mode': 'hsts_mode', 'acceptXff': 'accept_xff', 'xffAlternativeNames': 'xff_alternative_names', 'fallbackHost': 'fallback_host', 'fallbackStatusCodes': 'fallback_status_codes', 'oneconnectTransformations': 'oneconnect_transformations', 'requestChunking': 'request_chunking', 'responseChunking': 'response_chunking', } api_attributes = [ 'insertXforwardedFor', 'description', 'defaultsFrom', 'redirectRewrite', 'encryptCookies', 'encryptCookieSecret', 'proxyType', 'explicitProxy', 'headerErase', 'headerInsert', 'hsts', 'serverAgentName', 'acceptXff', 'xffAlternativeNames', 'fallbackHost', 'fallbackStatusCodes', 'oneconnectTransformations', 'requestChunking', 'responseChunking', 'enforcement', 'sflow', ] returnables = [ 'parent', 'description', 'insert_xforwarded_for', 'redirect_rewrite', 'encrypt_cookies', 'proxy_type', 'explicit_proxy', 'dns_resolver', 'hsts_mode', 'maximum_age', 'include_subdomains', 'server_agent_name', 'header_erase', 'header_insert', 'accept_xff', 'xff_alternative_names', 'fallback_host', 'fallback_status_codes', 'oneconnect_transformations', 'request_chunking', 'response_chunking', 'truncated_redirects', 'excess_client_headers', 'excess_server_headers', 'oversize_client_headers', 'oversize_server_headers', 'pipeline', 'unknown_method', 'max_header_count', 'max_header_size', 'max_requests', 'known_methods', 'poll_interval', 'poll_interval_global', 'sampling_rate', 'sampling_rate_global', ] updatables = [ 'description', 'insert_xforwarded_for', 'redirect_rewrite', 'encrypt_cookies', 'encrypt_cookie_secret', 'proxy_type', 'dns_resolver', 'hsts_mode', 'maximum_age', 'include_subdomains', 'server_agent_name', 
'header_erase', 'header_insert', 'accept_xff', 'xff_alternative_names', 'fallback_host', 'fallback_status_codes', 'oneconnect_transformations', 'request_chunking', 'response_chunking', 'truncated_redirects', 'excess_client_headers', 'excess_server_headers', 'oversize_client_headers', 'oversize_server_headers', 'pipeline', 'unknown_method', 'max_header_count', 'max_header_size', 'max_requests', 'known_methods', 'poll_interval', 'poll_interval_global', 'sampling_rate', 'sampling_rate_global', ] class ApiParameters(Parameters): @property def poll_interval(self): return self._values['sflow']['pollInterval'] @property def poll_interval_global(self): return self._values['sflow']['pollIntervalGlobal'] @property def sampling_rate(self): return self._values['sflow']['samplingRate'] @property def sampling_rate_global(self): return self._values['sflow']['samplingRateGlobal'] @property def truncated_redirects(self): return self._values['enforcement']['truncatedRedirects'] @property def excess_client_headers(self): return self._values['enforcement']['excessClientHeaders'] @property def excess_server_headers(self): return self._values['enforcement']['excessServerHeaders'] @property def oversize_client_headers(self): return self._values['enforcement']['oversizeClientHeaders'] @property def oversize_server_headers(self): return self._values['enforcement']['oversizeServerHeaders'] @property def pipeline(self): return self._values['enforcement']['pipeline'] @property def unknown_method(self): return self._values['enforcement']['unknownMethod'] @property def max_header_count(self): return self._values['enforcement']['maxHeaderCount'] @property def max_header_size(self): return self._values['enforcement']['maxHeaderSize'] @property def max_requests(self): return self._values['enforcement']['maxRequests'] @property def known_methods(self): return self._values['enforcement'].get('knownMethods', None) @property def dns_resolver(self): if self._values['explicit_proxy'] is None: return None if 'dnsResolver' in self._values['explicit_proxy']: return self._values['explicit_proxy']['dnsResolver'] @property def dns_resolver_address(self): if self._values['explicit_proxy'] is None: return None if 'dnsResolverReference' in self._values['explicit_proxy']: return self._values['explicit_proxy']['dnsResolverReference'] @property def include_subdomains(self): if self._values['hsts'] is None: return None return self._values['hsts']['includeSubdomains'] @property def hsts_mode(self): if self._values['hsts'] is None: return None return self._values['hsts']['mode'] @property def maximum_age(self): if self._values['hsts'] is None: return None return self._values['hsts']['maximumAge'] class ModuleParameters(Parameters): @property def accept_xff(self): result = flatten_boolean(self._values['accept_xff']) if result is None: return None if result == 'yes': return 'enabled' return 'disabled' @property def fallback_status_codes(self): if self._values['fallback_status_codes'] is None: return None p1 = r'(?!([4][0-1][0-7]))\d{3}' p2 = r'(?!(50[0-5]))\d{3}' for code in self._values['fallback_status_codes']: match_4xx = re.search(p1, code) if match_4xx: match_5xx = re.search(p2, code) if match_5xx: raise F5ModuleError( 'Invalid HTTP error code or error code range specified.' 
) return self._values['fallback_status_codes'] @property def oneconnect_transformations(self): result = flatten_boolean(self._values['oneconnect_transformations']) if result is None: return None if result == 'yes': return 'enabled' return 'disabled' @property def proxy_type(self): if self._values['proxy_type'] is None: return None if self._values['proxy_type'] == 'explicit': if self.dns_resolver is None or self.dns_resolver == '': raise F5ModuleError( 'A proxy type cannot be set to {0} without providing DNS resolver.'.format(self._values['proxy_type']) ) return self._values['proxy_type'] @property def dns_resolver(self): if self._values['dns_resolver'] is None: return None if self._values['dns_resolver'] == '' or self._values['dns_resolver'] == 'none': return '' result = fq_name(self.partition, self._values['dns_resolver']) return result @property def dns_resolver_address(self): resolver = self.dns_resolver if resolver is None: return None tmp = resolver.split('/') link = dict(link='https://localhost/mgmt/tm/net/dns-resolver/~{0}~{1}'.format(tmp[1], tmp[2])) return link @property def insert_xforwarded_for(self): result = flatten_boolean(self._values['insert_xforwarded_for']) if result is None: return None if result == 'yes': return 'enabled' return 'disabled' @property def parent(self): if self._values['parent'] is None: return None result = fq_name(self.partition, self._values['parent']) return result @property def encrypt_cookies(self): if self._values['encrypt_cookies'] is None: return None if self._values['encrypt_cookies'] == [''] or self._values['encrypt_cookies'] == ['none']: return list() return self._values['encrypt_cookies'] @property def explicit_proxy(self): if self.dns_resolver is None: return None result = dict( dnsResolver=self.dns_resolver, dnsResolverReference=self.dns_resolver_address ) return result @property def include_subdomains(self): result = flatten_boolean(self._values['include_subdomains']) if result is None: return None if result == 'yes': return 'enabled' return 'disabled' @property def maximum_age(self): if self._values['maximum_age'] is None: return None if self._values['maximum_age'] == 'indefinite': return 4294967295 if 0 <= int(self._values['maximum_age']) <= 4294967295: return int(self._values['maximum_age']) raise F5ModuleError( "Valid 'maximum_age' must be in range 0 - 4294967295, or 'indefinite'." 
) @property def hsts_mode(self): result = flatten_boolean(self._values['hsts_mode']) if result is None: return None if result == 'yes': return 'enabled' return 'disabled' @property def header_erase(self): header_erase = self._values['header_erase'] if header_erase is None: return None if header_erase in ['none', '']: return self._values['header_erase'] check_header_validity(header_erase) return header_erase @property def header_insert(self): header_insert = self._values['header_insert'] if header_insert is None: return None if header_insert in ['none', '']: return self._values['header_insert'] check_header_validity(header_insert) return header_insert @property def excess_client_headers(self): if self._values['enforcement'] is None: return None return self._values['enforcement']['excess_client_headers'] @property def excess_server_headers(self): if self._values['enforcement'] is None: return None return self._values['enforcement']['excess_server_headers'] @property def oversize_client_headers(self): if self._values['enforcement'] is None: return None return self._values['enforcement']['oversize_client_headers'] @property def oversize_server_headers(self): if self._values['enforcement'] is None: return None return self._values['enforcement']['oversize_server_headers'] @property def pipeline(self): if self._values['enforcement'] is None: return None return self._values['enforcement']['pipeline'] @property def unknown_method(self): if self._values['enforcement'] is None: return None return self._values['enforcement']['unknown_method'] @property def truncated_redirects(self): if self._values['enforcement'] is None: return None result = flatten_boolean(self._values['enforcement']['truncated_redirects']) if result is None: return None if result == 'yes': return 'enabled' return 'disabled' @property def max_header_count(self): if self._values['enforcement'] is None: return None if self._values['enforcement']['max_header_count'] is None: return None if self._values['enforcement']['max_header_count'] == 'default': return 64 if 16 <= int(self._values['enforcement']['max_header_count']) <= 4096: return int(self._values['enforcement']['max_header_count']) raise F5ModuleError( "Valid 'max_header_count' must be in range 16 - 4096, or 'default'." ) @property def max_header_size(self): if self._values['enforcement'] is None: return None if self._values['enforcement']['max_header_size'] is None: return None if self._values['enforcement']['max_header_size'] == 'default': return 32768 if 0 <= int(self._values['enforcement']['max_header_size']) <= 4294967295: return int(self._values['enforcement']['max_header_size']) raise F5ModuleError( "Valid 'max_header_size' must be in range 0 - 4294967295, or 'default'." ) @property def max_requests(self): if self._values['enforcement'] is None: return None if self._values['enforcement']['max_requests'] is None: return None if self._values['enforcement']['max_requests'] == 'default': return 0 if 0 <= int(self._values['enforcement']['max_requests']) <= 4294967295: return int(self._values['enforcement']['max_requests']) raise F5ModuleError( "Valid 'max_requests' must be in range 0 - 4294967295, or 'default'." 
) @property def known_methods(self): if self._values['enforcement'] is None: return None defaults = ['CONNECT', 'DELETE', 'GET', 'HEAD', 'LOCK', 'OPTIONS', 'POST', 'PROPFIND', 'PUT', 'TRACE', 'UNLOCK'] known = self._values['enforcement']['known_methods'] if known is None: return None if len(known) == 1: if known[0] == 'default': return defaults if known[0] == '': return [] if 'default' in known: to_return = [method for method in known if method != 'default'] to_return.extend(defaults) return to_return result = [method for method in known] return result @property def poll_interval(self): if self._values['sflow'] is None: return None if self._values['sflow']['poll_interval'] is None: return None if 0 <= self._values['sflow']['poll_interval'] <= 4294967295: return self._values['sflow']['poll_interval'] raise F5ModuleError( "Valid 'poll_interval' must be in range 0 - 4294967295 seconds." ) @property def sampling_rate(self): if self._values['sflow'] is None: return None if self._values['sflow']['sampling_rate'] is None: return None if 0 <= self._values['sflow']['sampling_rate'] <= 4294967295: return self._values['sflow']['sampling_rate'] raise F5ModuleError( "Valid 'sampling_rate' must be in range 0 - 4294967295 packets." ) @property def poll_interval_global(self): if self._values['sflow'] is None: return None result = flatten_boolean(self._values['sflow']['poll_interval_global']) return result @property def sampling_rate_global(self): if self._values['sflow'] is None: return None result = flatten_boolean(self._values['sflow']['sampling_rate_global']) return result class Changes(Parameters): def to_return(self): result = {} try: for returnable in self.returnables: result[returnable] = getattr(self, returnable) result = self._filter_params(result) except Exception: pass return result class UsableChanges(Changes): @property def explicit_proxy(self): result = dict() if self._values['dns_resolver'] is not None: result['dnsResolver'] = self._values['dns_resolver'] if self._values['dns_resolver_address'] is not None: result['dnsResolverReference'] = self._values['dns_resolver_address'] if not result: return None return result @property def hsts(self): result = dict() if self._values['hsts_mode'] is not None: result['mode'] = self._values['hsts_mode'] if self._values['maximum_age'] is not None: result['maximumAge'] = self._values['maximum_age'] if self._values['include_subdomains'] is not None: result['includeSubdomains'] = self._values['include_subdomains'] if not result: return None return result @property def enforcement(self): to_filter = dict( excessClientHeaders=self._values['excess_client_headers'], excessServerHeaders=self._values['excess_server_headers'], knownMethods=self._values['known_methods'], maxHeaderCount=self._values['max_header_count'], maxHeaderSize=self._values['max_header_size'], maxRequests=self._values['max_requests'], oversizeClientHeaders=self._values['oversize_client_headers'], oversizeServerHeaders=self._values['oversize_server_headers'], pipeline=self._values['pipeline'], truncatedRedirects=self._values['truncated_redirects'], unknownMethod=self._values['unknown_method'] ) result = self._filter_params(to_filter) if result: return result @property def sflow(self): to_filter = dict( pollInterval=self._values['poll_interval'], pollIntervalGlobal=self._values['poll_interval_global'], samplingRate=self._values['sampling_rate'], samplingRateGlobal=self._values['sampling_rate_global'], ) result = self._filter_params(to_filter) if result: return result class 
ReportableChanges(Changes): returnables = [ 'parent', 'description', 'insert_xforwarded_for', 'redirect_rewrite', 'encrypt_cookies', 'proxy_type', 'explicit_proxy', 'dns_resolver', 'hsts_mode', 'maximum_age', 'include_subdomains', 'server_agent_name', 'header_erase', 'header_insert', 'accept_xff', 'xff_alternative_names', 'fallback_host', 'fallback_status_codes', 'oneconnect_transformations', 'request_chunking', 'response_chunking', 'enforcement', 'sflow' ] @property def insert_xforwarded_for(self): if self._values['insert_xforwarded_for'] is None: return None elif self._values['insert_xforwarded_for'] == 'enabled': return 'yes' return 'no' @property def hsts_mode(self): if self._values['hsts_mode'] is None: return None elif self._values['hsts_mode'] == 'enabled': return 'yes' return 'no' @property def include_subdomains(self): if self._values['include_subdomains'] is None: return None elif self._values['include_subdomains'] == 'enabled': return 'yes' return 'no' @property def maximum_age(self): if self._values['maximum_age'] is None: return None if self._values['maximum_age'] == 4294967295: return 'indefinite' return int(self._values['maximum_age']) @property def truncated_redirects(self): result = flatten_boolean(self._values['truncated_redirects']) return result @property def max_header_count(self): if self._values['max_header_count'] is None: return None if self._values['max_header_count'] == 64: return 'default' return str(self._values['max_header_count']) @property def max_header_size(self): if self._values['max_header_size'] is None: return None if self._values['max_header_size'] == 32768: return 'default' return str(self._values['max_header_size']) @property def max_requests(self): if self._values['max_requests'] is None: return None if self._values['max_requests'] == 0: return 'default' return str(self._values['max_requests']) @property def known_methods(self): defaults = ['CONNECT', 'DELETE', 'GET', 'HEAD', 'LOCK', 'OPTIONS', 'POST', 'PROPFIND', 'PUT', 'TRACE', 'UNLOCK'] known = self._values['known_methods'] if known is None: return None if not known: return [''] if set(known) == set(defaults): return ['default'] if set(known).issuperset(set(defaults)): result = [item for item in known if item not in defaults] result.append('default') return result return known @property def enforcement(self): to_filter = dict( excess_client_headers=self._values['excess_client_headers'], excess_server_headers=self._values['excess_server_headers'], known_methods=self.known_methods, max_header_count=self.max_header_count, max_header_size=self.max_header_size, max_requests=self.max_requests, oversize_client_headers=self._values['oversize_client_headers'], oversize_server_headers=self._values['oversize_server_headers'], pipeline=self._values['pipeline'], truncated_redirects=self.truncated_redirects, unknown_method=self._values['unknown_method'] ) result = self._filter_params(to_filter) if result: return result @property def accept_xff(self): result = flatten_boolean(self._values['accept_xff']) return result @property def oneconnect_transformations(self): result = flatten_boolean(self._values['oneconnect_transformations']) return result @property def sflow(self): to_filter = dict( poll_interval=self._values['poll_interval'], poll_interval_global=self._values['poll_interval_global'], sampling_rate=self._values['sampling_rate'], sampling_rate_global=self._values['sampling_rate_global'], ) result = self._filter_params(to_filter) if result: return result class Difference(object): def __init__(self, want, 
have=None): self.want = want self.have = have def compare(self, param): try: result = getattr(self, param) return result except AttributeError: return self.__default(param) def __default(self, param): attr1 = getattr(self.want, param) try: attr2 = getattr(self.have, param) if attr1 != attr2: return attr1 except AttributeError: return attr1 @property def parent(self): if self.want.parent != self.have.parent: raise F5ModuleError( "The parent http profile cannot be changed" ) @property def dns_resolver(self): if self.want.dns_resolver is None: return None if self.want.dns_resolver == '': if self.have.dns_resolver is None or self.have.dns_resolver == 'none': return None elif self.have.proxy_type == 'explicit' and self.want.proxy_type is None: raise F5ModuleError( "DNS resolver cannot be empty or 'none' if an existing profile proxy type is set to {0}.".format(self.have.proxy_type) ) elif self.have.dns_resolver is not None: return self.want.dns_resolver if self.have.dns_resolver is None: return self.want.dns_resolver @property def header_erase(self): if self.want.header_erase is None: return None if self.want.header_erase in ['none', '']: if self.have.header_erase in [None, 'none']: return None if self.want.header_erase != self.have.header_erase: return self.want.header_erase @property def header_insert(self): if self.want.header_insert is None: return None if self.want.header_insert in ['none', '']: if self.have.header_insert in [None, 'none']: return None if self.want.header_insert != self.have.header_insert: return self.want.header_insert @property def server_agent_name(self): if self.want.server_agent_name is None: return None if self.want.server_agent_name in ['none', '']: if self.have.server_agent_name in [None, 'none']: return None if self.want.server_agent_name != self.have.server_agent_name: return self.want.server_agent_name @property def encrypt_cookies(self): if self.want.encrypt_cookies is None: return None if self.have.encrypt_cookies in [None, []]: if not self.want.encrypt_cookies: return None else: return self.want.encrypt_cookies if set(self.want.encrypt_cookies) != set(self.have.encrypt_cookies): return self.want.encrypt_cookies @property def encrypt_cookie_secret(self): if self.want.encrypt_cookie_secret != self.have.encrypt_cookie_secret: if self.want.update_password == 'always': result = self.want.encrypt_cookie_secret return result @property def xff_alternative_names(self): result = cmp_simple_list(self.want.xff_alternative_names, self.have.xff_alternative_names) return result @property def fallback_status_codes(self): result = cmp_simple_list(self.want.fallback_status_codes, self.have.fallback_status_codes) return result @property def known_methods(self): result = cmp_simple_list(self.want.known_methods, self.have.known_methods) return result class ModuleManager(object): def __init__(self, *args, **kwargs): self.module = kwargs.get('module', None) self.client = F5RestClient(**self.module.params) self.want = ModuleParameters(params=self.module.params) self.have = ApiParameters() self.changes = UsableChanges() def _set_changed_options(self): changed = {} for key in Parameters.returnables: if getattr(self.want, key) is not None: changed[key] = getattr(self.want, key) if changed: self.changes = UsableChanges(params=changed) def _update_changed_options(self): diff = Difference(self.want, self.have) updatables = Parameters.updatables changed = dict() for k in updatables: change = diff.compare(k) if change is None: continue else: if isinstance(change, dict): 
changed.update(change) else: changed[k] = change if changed: self.changes = UsableChanges(params=changed) return True return False def should_update(self): result = self._update_changed_options() if result: return True return False def exec_module(self): changed = False result = dict() state = self.want.state if state == "present": changed = self.present() elif state == "absent": changed = self.absent() reportable = ReportableChanges(params=self.changes.to_return()) changes = reportable.to_return() result.update(**changes) result.update(dict(changed=changed)) self._announce_deprecations(result) return result def _announce_deprecations(self, result): warnings = result.pop('__warnings', []) for warning in warnings: self.client.module.deprecate( msg=warning['msg'], version=warning['version'] ) def present(self): if self.exists(): return self.update() else: return self.create() def absent(self): if self.exists(): return self.remove() return False def update(self): self.have = self.read_current_from_device() if not self.should_update(): return False if self.module.check_mode: return True self.update_on_device() return True def remove(self): if self.module.check_mode: return True self.remove_from_device() if self.exists(): raise F5ModuleError("Failed to delete the resource.") return True def create(self): self._set_changed_options() if self.module.check_mode: return True self.create_on_device() return True def exists(self): uri = "https://{0}:{1}/mgmt/tm/ltm/profile/http/{2}".format( self.client.provider['server'], self.client.provider['server_port'], transform_name(self.want.partition, self.want.name) ) resp = self.client.api.get(uri) try: response = resp.json() except ValueError: return False if resp.status == 404 or 'code' in response and response['code'] == 404: return False return True def create_on_device(self): params = self.changes.api_params() params['name'] = self.want.name params['partition'] = self.want.partition uri = "https://{0}:{1}/mgmt/tm/ltm/profile/http/".format( self.client.provider['server'], self.client.provider['server_port'] ) resp = self.client.api.post(uri, json=params) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] in [400, 403, 404]: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) return response['selfLink'] def update_on_device(self): params = self.changes.api_params() uri = "https://{0}:{1}/mgmt/tm/ltm/profile/http/{2}".format( self.client.provider['server'], self.client.provider['server_port'], transform_name(self.want.partition, self.want.name) ) resp = self.client.api.patch(uri, json=params) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] in [400, 404]: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) def remove_from_device(self): uri = "https://{0}:{1}/mgmt/tm/ltm/profile/http/{2}".format( self.client.provider['server'], self.client.provider['server_port'], transform_name(self.want.partition, self.want.name) ) response = self.client.api.delete(uri) if response.status == 200: return True raise F5ModuleError(response.content) def read_current_from_device(self): uri = "https://{0}:{1}/mgmt/tm/ltm/profile/http/{2}".format( self.client.provider['server'], self.client.provider['server_port'], transform_name(self.want.partition, self.want.name) ) resp = self.client.api.get(uri) try: response = 
resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] == 400: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) return ApiParameters(params=response) class ArgumentSpec(object): def __init__(self): self.supports_check_mode = True self.chunk = ['rechunk', 'selective', 'preserve'] self.choices = ['pass-through', 'reject'] self.select = ['allow', 'pass-through', 'reject'] argument_spec = dict( name=dict(required=True), parent=dict(default='/Common/http'), description=dict(), accept_xff=dict(type='bool'), xff_alternative_names=dict(type='list'), fallback_host=dict(), fallback_status_codes=dict(type='list'), oneconnect_transformations=dict(type='bool'), request_chunking=dict(choices=self.chunk), response_chunking=dict(choices=self.chunk), proxy_type=dict( choices=[ 'reverse', 'transparent', 'explicit' ] ), dns_resolver=dict(), insert_xforwarded_for=dict(type='bool'), redirect_rewrite=dict( choices=[ 'none', 'all', 'matching', 'nodes' ] ), encrypt_cookies=dict(type='list'), encrypt_cookie_secret=dict(no_log=True), update_password=dict( default='always', choices=['always', 'on_create'] ), header_erase=dict(), header_insert=dict(), server_agent_name=dict(), hsts_mode=dict(type='bool'), maximum_age=dict(), include_subdomains=dict(type='bool'), enforcement=dict( type='dict', options=dict( truncated_redirects=dict(type='bool'), excess_client_headers=dict(choices=self.choices), excess_server_headers=dict(choices=self.choices), oversize_client_headers=dict(choices=self.choices), oversize_server_headers=dict(choices=self.choices), pipeline=dict(choices=self.select), unknown_method=dict(choices=self.select), max_header_count=dict(), max_header_size=dict(), max_requests=dict(), known_methods=dict(type='list'), ) ), sflow=dict( type='dict', options=dict( poll_interval=dict(type='int'), poll_interval_global=dict(type='bool'), sampling_rate=dict(type='int'), sampling_rate_global=dict(type='bool'), ) ), state=dict( default='present', choices=['present', 'absent'] ), partition=dict( default='Common', fallback=(env_fallback, ['F5_PARTITION']) ) ) self.argument_spec = {} self.argument_spec.update(f5_argument_spec) self.argument_spec.update(argument_spec) def main(): spec = ArgumentSpec() module = AnsibleModule( argument_spec=spec.argument_spec, supports_check_mode=spec.supports_check_mode, ) try: mm = ModuleManager(module=module) results = mm.exec_module() module.exit_json(**results) except F5ModuleError as ex: module.fail_json(msg=str(ex)) if __name__ == '__main__': main()
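# --- Usage note (a sketch, not part of the module above) ---
# The trickiest normalization in this module is 'known_methods': the
# user-facing 'default' keyword expands to the eleven built-in HTTP methods
# on the way to the API (ModuleParameters.known_methods) and is collapsed
# back when reporting (ReportableChanges.known_methods). A minimal,
# self-contained sketch of that round trip follows; the helper names here
# are illustrative, not module API.

DEFAULT_METHODS = ['CONNECT', 'DELETE', 'GET', 'HEAD', 'LOCK', 'OPTIONS',
                   'POST', 'PROPFIND', 'PUT', 'TRACE', 'UNLOCK']


def expand_known_methods(known):
    # Mirrors ModuleParameters.known_methods: expand 'default' for the API.
    if known is None:
        return None
    if known == ['default']:
        return list(DEFAULT_METHODS)
    if known == ['']:
        return []
    if 'default' in known:
        return [m for m in known if m != 'default'] + list(DEFAULT_METHODS)
    return list(known)


def collapse_known_methods(known):
    # Mirrors ReportableChanges.known_methods: collapse back for reporting.
    if known is None:
        return None
    if not known:
        return ['']
    if set(known) == set(DEFAULT_METHODS):
        return ['default']
    if set(known).issuperset(DEFAULT_METHODS):
        return [m for m in known if m not in DEFAULT_METHODS] + ['default']
    return known


assert collapse_known_methods(expand_known_methods(['default', 'FOO'])) == ['FOO', 'default']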
Brazil-based International Meal Co. has purchased a dozen Margaritaville restaurants and plans to grow the chain. Margaritaville Enterprises sold the restaurants this month for an undisclosed price. Margaritaville's Orlando-based hospitality division ran the restaurants, employing about 40 corporate employees here. About 25 of those employees will now work for International Meal Co., while the rest will remain with Margaritaville and handle services such as marketing. No workers are losing their jobs, the companies said. "Not only does it stand for more growth domestically, now those employees have the opportunity to go around the world as part of a larger operation," Margaritaville Enterprises Chief Executive Officer John Cohlan said. International Meal Co. Chief Executive Officer Javier Gavilián said he is considering moving a Miami office with 15 employees to Orlando as well. The purchase includes 11 Margaritaville restaurants, including one at Universal CityWalk. It also includes LandShark Bar and Grilles in Atlantic City, N.J., and Myrtle Beach, S.C. "The motivation here was really about growth," Cohlan said. "They're better positioned to grow Margaritaville than we really would be domestically." Cohlan and Gavilián declined to discuss sales for the chain, founded by singer Jimmy Buffett and named after one of his signature songs. International Meal Co. will have exclusive rights to develop and own Margaritavilles in the United States and Latin America. This year, Margaritaville will open restaurants in Brazil and the Dominican Republic and in Pigeon Forge, Tenn. Gavilián said he thinks U.S. airports hold a lot of promise for the brand. International Meal Co. is no stranger to Central Florida businesses. It had already operated Margaritaville franchises in Puerto Rico and Panama. Last year, it signed an agreement with Orlando-based Darden Restaurants to operate Red Lobsters, LongHorn Steakhouses and Olive Gardens in Latin America.
# coding:utf-8
import sys
reload(sys)
sys.setdefaultencoding('utf8')
# __author__ = '郭 璞'
# __date__ = '2016/10/24'
# __Desc__ = GFW-bypass helper. It backs up the current hosts file first to
#            guard against accidents, and works on both Windows and Linux.

import platform
import os
import urllib2


def downloadHosts(url):
    """Download a fresh hosts file to ./hosts.txt."""
    f = open('./hosts.txt', 'wb')
    data = urllib2.urlopen(url).readlines()
    f.writelines(data)
    f.close()


def crosswall(systemtype='Windows'):
    """Back up and replace the system hosts file, then refresh DNS."""
    try:
        if systemtype == 'Windows':
            os.system(r'copy %SystemRoot%\System32\drivers\etc\hosts %SystemRoot%\System32\drivers\etc\hosts_bak')
            os.system(r'copy hosts.txt %SystemRoot%\System32\drivers\etc\hosts')
            os.system('ipconfig /flushdns')
            os.system('pause')
            print 'It\'s done on Windows! Try your browser!'
        elif systemtype == 'Linux':
            # These commands need root; run the script with sudo on Linux.
            os.system('cp /etc/hosts /etc/hosts_bak')
            os.system('mv ./hosts.txt /etc/hosts')
            os.system('sudo /etc/init.d/networking restart')
            print 'It\'s done on Linux! Try your browser!'
    except Exception as e:
        print e


if __name__ == '__main__':
    url = 'https://raw.githubusercontent.com/racaljk/hosts/master/hosts'
    downloadHosts(url=url)
    print 'Hosts update success!'
    crosswall(platform.system())
    print 'Hosts replaced success! Try to cross the wall!'
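# --- Porting note: a minimal Python 3 sketch, not part of the original
# script above (which is Python 2: urllib2, print statements, reload(sys)).
# It performs only the download step, against the same hosts URL, using
# nothing beyond the standard library.

from urllib.request import urlopen

def download_hosts(url, dest='./hosts.txt'):
    # Stream the remote hosts file straight into a local file.
    with urlopen(url) as response, open(dest, 'wb') as f:
        f.write(response.read())

if __name__ == '__main__':
    download_hosts('https://raw.githubusercontent.com/racaljk/hosts/master/hosts')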
Just think. The mighty Ragnaros, elemental lord of fire, gets banished after it takes 40 people to destroy him in the Molten Core. After three expansions (which is plenty of time to get back to health), he is restored and energized with Deathwing's power. Then he hits you with this spell and you take 5 damage. Wow. Inflicts 5 Fire damage to an enemy.
import abc
from nala.structures.data import Edge
from nltk.stem import PorterStemmer


class EdgeGenerator:
    """
    Abstract class for generating edges between two entities.
    Each edge represents a possible relationship between the two entities.

    Subclasses that inherit this class should:
    * Be named [Name]EdgeGenerator
    * Implement the abstract method generate
    * Append new items to the list field "edges" of each Part in the dataset
    """

    @abc.abstractmethod
    def generate(self, dataset):
        """
        :type dataset: nala.structures.data.Dataset
        """
        return


class SimpleEdgeGenerator(EdgeGenerator):
    """
    Generates an edge between two entities if they are contained in the
    same sentence.

    Implements the abstract class EdgeGenerator.

    :type entity1_class: str
    :type entity2_class: str
    :type relation_type: str
    """

    def __init__(self, entity1_class, entity2_class, relation_type):
        self.entity1_class = entity1_class
        self.entity2_class = entity2_class
        self.relation_type = relation_type

    def generate(self, dataset):
        from itertools import product, chain
        for part in dataset.parts():
            part.edges = []
            for ann_1, ann_2 in product(
                    (ann for ann in chain(part.annotations, part.predicted_annotations)
                     if ann.class_id == self.entity1_class),
                    (ann for ann in chain(part.annotations, part.predicted_annotations)
                     if ann.class_id == self.entity2_class)):
                index_1 = part.get_sentence_index_for_annotation(ann_1)
                index_2 = part.get_sentence_index_for_annotation(ann_2)
                if index_1 == index_2 and index_1 is not None:
                    part.edges.append(
                        Edge(ann_1, ann_2, self.relation_type,
                             part.sentences[index_1], index_1, part))


class WordFilterEdgeGenerator(EdgeGenerator):
    """
    Generates an edge between two entities if they are contained in the
    same sentence and that sentence contains at least one of the given
    filter words.

    Implements the abstract class EdgeGenerator.

    :type entity1_class: str
    :type entity2_class: str
    :type relation_type: str
    :type words: list[str]
    """

    def __init__(self, entity1_class, entity2_class, relation_type, words):
        # relation_type was referenced below but missing from the original
        # signature, which raised a NameError.
        self.entity1_class = entity1_class
        self.entity2_class = entity2_class
        self.relation_type = relation_type
        self.words = words

    def generate(self, dataset):
        from itertools import product
        for part in dataset.parts():
            for ann_1, ann_2 in product(
                    (ann for ann in part.annotations if ann.class_id == self.entity1_class),
                    (ann for ann in part.annotations if ann.class_id == self.entity2_class)):
                index_1 = part.get_sentence_index_for_annotation(ann_1)
                index_2 = part.get_sentence_index_for_annotation(ann_2)
                if index_1 == index_2 and index_1 is not None:
                    # was the undefined name 'index1' in the original
                    for token in part.sentences[index_1]:
                        if token.word in self.words:
                            part.edges.append(
                                Edge(ann_1, ann_2, self.relation_type,
                                     part.sentences[index_1], index_1, part))
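# --- Usage sketch (assumes a populated nala Dataset named 'dataset';
# building one is outside this snippet). The class ids 'e_1', 'e_2' and the
# relation type 'r_1' are placeholders for whatever your corpus defines.

generator = SimpleEdgeGenerator('e_1', 'e_2', 'r_1')
generator.generate(dataset)  # fills part.edges for every part

total = sum(len(part.edges) for part in dataset.parts())
print('generated', total, 'candidate edges')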
A man lives alone for many years, in a lonely house in the hills. Houses start encroaching on his land, building up around him, closer and closer until they build on both sides, then in his back yard, then in front. His door is sealed, there are walls all around and no way out. I try to get him out but can't. With an old-fashioned razor, he cuts through the wall into the next house, through that room's door, and on, to the outside. Free. But whenever he sees walls, doors, anything reminding him of his prison-room, he cuts it. He is quite mad. People remind him, so he cuts people, too. They die. But he is reincarnated in a factory as a rubbery gray gas: he lurks in every closed space, and when any of them are opened, a gray smoke boils out. A gray smoke--with a razor, slashing blind and wild. The killing goes on. We call in an exorcist. But there's a problem with banishing this malevolent spirit. He's not an invader. This is his home. He was here before all the rest--how can we drive out his spirit, when he has the prior and the deepest claim, mad and dead though he is? As the priest begins the rite anyway, a rite I'm not sure will work, a rite I'm not sure is right, I wake. This is the question--even if it's mad and dead, CAN I exorcise the real, original me? Do I have the strength? And even if I do... do I have the right? My childhood home was full of black widow spiders--big, fast, aggressive, and poisonous. To this day, I never open dark, sealed spaces casually--I always check before reaching in, just as desert folks check their shoes for scorpions in the morning. Also... I didn't know it when I dreamed this, but when I was very small my parents regularly took me along to visit my uncle inside one of the worst madhouses in California. He escaped, was caught, was given shock. Finally he was released. He lived with us a while when I was two or three, and he told me of electroshock and other tortures. So I grew up hiding all sorts of feelings out of fear they'd lock me up too and torture me too, if I didn't conform. I repressed more and more, the repressed me got desperate, vicious... a ghost of itself. And this dream suggests that at some point your true, original self can die, become purely destructive. And then you have to banish it, and start over with what you've got--what's living. Not an easy or pleasant decision to make. And especially difficult in a culture where psychology emphasizes accepting, integrating and re-incorporating lost and alienated parts. I only noticed just now that I drew the ghost with a knife, not a razor--and a peculiar knife at that. It's an ATHELM--a sharp, triangular, strictly ritual dagger used in some witch rites, symbolizing the male principle, reason/logic, winter, the north... the Sword principle, in the Tarot. Jung calls it the thinking orientation, as opposed to feeling, intuition and sensation. What's all this arcane stuff mean? It means my dream tried to tell me just what this mad part is. I analyze things to death--cut them up cruelly.
import MySQLdb
import os
import sys
import urlparse


def get_db_variables():
    urlparse.uses_netloc.append("mysql")
    # Default to an empty config; filled in from the environment below.
    # (The original checked "'DATABASES' not in locals()", which could never
    # be true at that point and left DATABASES unbound on an early error.)
    DATABASES = {}
    try:
        if 'CLEARDB_DATABASE_URL' in os.environ:
            url = urlparse.urlparse(os.environ['CLEARDB_DATABASE_URL'])
            # Ensure default database exists.
            DATABASES['default'] = DATABASES.get('default', {})
            # Update with environment configuration.
            DATABASES['default'].update({
                'NAME': url.path[1:],
                'USER': url.username,
                'PASSWORD': url.password,
                'HOST': url.hostname,
                'PORT': url.port,
            })
            if url.scheme == 'mysql':
                DATABASES['default']['ENGINE'] = 'django.db.backends.mysql'
    except Exception:
        print 'Unexpected error:', sys.exc_info()
    return DATABASES


def str_for_mysql(s):
    if isinstance(s, basestring):
        s = s.replace("'", "''")
    # Add any more string formatting steps here
    return s


def date_for_mysql(d):
    d = d.strftime("%Y-%m-%d %H:%M")
    # Add any more date formatting steps here
    return d


class DB(object):
    conn = None

    def connect(self):
        db_params = get_db_variables()
        self.conn = MySQLdb.connect(db_params['default']['HOST'],
                                    db_params['default']['USER'],
                                    db_params['default']['PASSWORD'],
                                    db_params['default']['NAME'],
                                    charset='utf8')
        print "DB >> Opened connection to database."

    def query(self, sql):
        try:
            cursor = self.conn.cursor()
            cursor.execute(sql)
        except (AttributeError, MySQLdb.OperationalError):
            # Reconnect once if the connection was never opened or dropped.
            self.connect()
            cursor = self.conn.cursor()
            cursor.execute(sql)
        return cursor

    def commit(self):
        try:
            self.conn.commit()
        except (AttributeError, MySQLdb.OperationalError):
            self.connect()
            self.conn.commit()

    def close(self):
        try:
            self.conn.close()
            print "DB >> Closed connection to database."
        except (AttributeError, MySQLdb.OperationalError):
            pass
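# --- Usage sketch for the helper above. It assumes CLEARDB_DATABASE_URL is
# set in the environment so connect() can succeed; the 'users' table and the
# values are made up for illustration. Note that parameterized queries
# (cursor.execute(sql, args)) are the safer alternative to the string
# interpolation style that str_for_mysql supports here.

db = DB()
safe_name = str_for_mysql("O'Brien")  # doubles the quote: O''Brien
db.query("INSERT INTO users (name) VALUES ('%s')" % safe_name)
db.commit()
cursor = db.query("SELECT id, name FROM users")
for row in cursor.fetchall():
    print row
db.close()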
Combine the thyme, shallots, garlic and three tablespoons of olive oil. Stand one hour. Preheat the oven to 160 degC. Halve the tomatoes lengthwise and place, cut side down, in a shallow baking pan. Drizzle with the remaining oil and sprinkle with salt and pepper. Roast for 20 minutes. Carefully flip over and dot the thyme mixture over the top. Continue roasting for 15 to 20 minutes.