text
stringlengths
29
850k
import os import shutil import getopt import sys inputDir = ".\\input" outputDir = ".\\output" inputFile = ".\\result.txt" modules = {} basicblocks = {} #Conf def help(): print "Possible arguments: GenBpFiles.py [-h] [-d DIR] [-o FILE]" print " -h Prints this message to you" print " -d DIR Directory that contains basicblocks files" print " -i FILE File that contains basicblocks to remove" print " -o DIR Result directory" try: opts, args = getopt.getopt(sys.argv[1:], "hd:i:o:", []) except: help() sys.exit() for opt, arg in opts: if opt in("-h"): help() sys.exit() if opt in("-d"): inputDir = arg if opt in("-i"): inputFile = arg if opt in("-o"): outputDir = arg #input file print "Reading input file %s" % inputFile f = open(inputFile) #module list line = f.readline() modules = {} while line != "" and line[2] != "|": moduleName = line[:line.find("|")] moduleCode = line[line.find("|")+1:line.find("|")+3] modules[moduleCode] = moduleName if moduleName not in basicblocks: basicblocks[moduleName] = {} line = f.readline() #basicblock while line.strip() != "": moduleCode = line[0:2] bb = line[3:11] moduleName = modules[moduleCode] if bb not in basicblocks[moduleName]: basicblocks[moduleName][bb] = 1 else: basicblocks[moduleName][bb] += 1 line = f.readline() f.close() #Modifying basicblocks if not os.path.isdir(outputDir): os.makedirs(outputDir) for fname in os.listdir(inputDir): f = open(inputDir + "/" + fname) moduleLine = f.readline() module = moduleLine.strip().lower() if len(basicblocks[module]) == 0: print "File %s remains unchanged" % fname f.close() shutil.copy2(inputDir + "/" + fname, outputDir + "/" + fname) continue print "Modifying %s" % fname #basicblock fout = open(outputDir + "/" + fname, "w") fout.write(moduleLine) line = f.readline() while line.strip() != "": bb = line[0:8] if bb not in basicblocks[module]: fout.write(line) line = f.readline() f.close() fout.close()
Hosted by Wendy Clymer, 916-524-3730. Close to downtown Sacramento, freeways, hotels, shopping centers and parks. Mixed use commercial lot next to a busy street. Great investment property. Don't miss it.
# -*- coding: utf-8 -*-
"""AlexNet built with TFLearn.

References:
    - Alex Krizhevsky, Ilya Sutskever & Geoffrey E. Hinton. ImageNet
      Classification with Deep Convolutional Neural Networks. NIPS, 2012.
    - [AlexNet Paper](http://papers.nips.cc/paper/4824-imagenet-classification-with-deep-convolutional-neural-networks.pdf)
"""

#%% import modules
from __future__ import division, print_function, absolute_import

import tflearn
from tflearn.layers.conv import conv_2d, max_pool_2d
from tflearn.layers.core import dropout, fully_connected, input_data
from tflearn.layers.estimator import regression
from tflearn.layers.normalization import local_response_normalization


#%% Building 'AlexNet'
def alexnet(n_rows, n_cols, lr=0.001):
    """Assemble the AlexNet graph and wrap it in a ``tflearn.DNN``.

    Args:
        n_rows: input image height.
        n_cols: input image width.
        lr: learning rate for the momentum optimizer.

    Returns:
        An untrained ``tflearn.DNN`` model with a 4-way softmax head.
    """
    net = input_data(shape=[None, n_rows, n_cols, 1], name='input')

    # Conv block 1
    net = conv_2d(net, 96, 11, strides=4, activation='relu')
    net = max_pool_2d(net, 3, strides=2)
    net = local_response_normalization(net)

    # Conv block 2
    net = conv_2d(net, 256, 5, activation='relu')
    net = max_pool_2d(net, 3, strides=2)
    net = local_response_normalization(net)

    # Conv block 3: three stacked convolutions before pooling
    net = conv_2d(net, 384, 3, activation='relu')
    net = conv_2d(net, 384, 3, activation='relu')
    net = conv_2d(net, 256, 3, activation='relu')
    net = max_pool_2d(net, 3, strides=2)
    net = local_response_normalization(net)

    # Fully connected head with dropout
    net = fully_connected(net, 2048, activation='tanh')
    net = dropout(net, 0.5)
    net = fully_connected(net, 2048, activation='tanh')
    net = dropout(net, 0.5)
    net = fully_connected(net, 4, activation='softmax')

    net = regression(net, optimizer='momentum',
                     loss='categorical_crossentropy',
                     learning_rate=lr, name='target')

    model = tflearn.DNN(net, checkpoint_path='model_alexnet',
                        max_checkpoints=1, tensorboard_verbose=2,
                        tensorboard_dir='logs')
    # model.fit(X, Y, n_epoch=1000, validation_set=0.1, shuffle=True, show_metric=True,
    #           batch_size=64, snapshot_step=200, snapshot_epoch=False, run_id='alexnet')
    return model
4th, The Hub Mall, Western Express Highway, Goregaon(East), Mumbai, India. We appreciate your feedback, suggestions and concerns. Please fill in the form, and send it to us. We will address it as soon as we can.
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from dataclasses import dataclass, field

from fairseq.data.encoders import register_tokenizer
from fairseq.dataclass import FairseqDataclass


@dataclass
class MosesTokenizerConfig(FairseqDataclass):
    """Configuration for the Moses tokenizer wrapper."""

    source_lang: str = field(default="en", metadata={"help": "source language"})
    target_lang: str = field(default="en", metadata={"help": "target language"})
    moses_no_dash_splits: bool = field(
        default=False, metadata={"help": "don't apply dash split rules"}
    )
    moses_no_escape: bool = field(
        default=False,
        metadata={"help": "don't perform HTML escaping on apostrophe, quotes, etc."},
    )


@register_tokenizer("moses", dataclass=MosesTokenizerConfig)
class MosesTokenizer(object):
    """Thin wrapper around sacremoses tokenization / detokenization."""

    def __init__(self, cfg: MosesTokenizerConfig):
        self.cfg = cfg
        try:
            # Imported lazily so sacremoses is only required when this
            # tokenizer is actually selected.
            from sacremoses import MosesDetokenizer, MosesTokenizer

            self.tok = MosesTokenizer(cfg.source_lang)
            self.detok = MosesDetokenizer(cfg.target_lang)
        except ImportError:
            raise ImportError(
                "Please install Moses tokenizer with: pip install sacremoses"
            )

    def encode(self, x: str) -> str:
        """Tokenize *x*, honoring the dash-split / escape config flags."""
        cfg = self.cfg
        return self.tok.tokenize(
            x,
            aggressive_dash_splits=not cfg.moses_no_dash_splits,
            return_str=True,
            escape=not cfg.moses_no_escape,
        )

    def decode(self, x: str) -> str:
        """Detokenize a whitespace-separated token string."""
        return self.detok.detokenize(x.split())
a boy or man; "that chap is your host"; "there's a fellow at the door"; "he's a likable cuss"; "he's a good bloke" crack due to dehydration; "My lips chap in this dry weather" The Chap are an experimental pop band from London. Their music is a mix of rock and pop with small quantities of almost every other genre thrown in for good measure. They are known for their energetic live shows which often end with exploding violins and cellos. (CHAPS) The Clearing House Automated Payment System or CHAPS is a British company established in London in 1984, which offers same-day sterling and euro fund transfers. CHAPS is a member of the trade organisation APACS, and the EU-area settlement system TARGET. (CHAPS (health organisation)) CHAPS (formerly the Community HIV/AIDS Prevention Strategy) is a partnership of UK gay men's health promotion organisations. It is currently funded to operate in England and Wales by the Department of Health and is administered by Terrence Higgins Trust. (Chaps) Pronounced "shaps," these are leggings worn by cowboys as protection against the weather and brush, and are usually made of leather. (Chaps) Long leather leggings worn by cowboys over their pants for protection against cactus and other range plants. (CHAPS) A real time funds clearing system used to process payments. (CHAPS) An electronic transfer of money from the factor to the client. This method will deliver cleared funds to your account on the day of the transfer. The charge for this transfer varies between companies but expect to pay in the region of £30 plus VAT for each transfer. (CHAPS) Clearing House Automated Payment System. Another electronic means by which you can receive money from your Factor. Same day transfer & clearance of funds. There is a charge for this service & it varies from Factor to Factor so make sure & ask how much. (CHAPS) an electronic payment system that guarantees same day payment. (Chaps) Leather leggings worn over the jeans from hips to boots. 
They are an aid in protecting the legs from brush on the ranch and in keeping the rider’s seat firmly in the saddle. (Chaps) Motorcycle clothing accessory designed for leg protection. They’re usually made of leather and are fastened around the waist, with an open back. They snap at the ankles and zip down the legs.
'''
Created on 2013-11-05

@author: nicolas
'''
import MessageField
# NOTE(review): this project module is literally named "Exception" and shadows
# the builtin inside this file; it provides A429Exception / A429NoData.
import Exception


class Field(MessageField.Field):
    '''
    This subclass of A429MsgField is part of an ensemble of classes
    that can be used as an utility for packing and unpacking A429 messages.
    LabelField is more specifically dedicated to managing bits in
    discrete ARINC 429 messages.

    A Field represents a single named bit; its value is tri-state:
    True, False, or None (no data set).
    '''

    def __repr__(self):
        # Includes the current bit value when one has been set.
        if self._value is not None:
            return '<%s.%s object at 0x%x, value %s [%s]>'%(self.__module__,
                                                            self.__class__.__name__,
                                                            id(self),
                                                            str(self._value),
                                                            repr(MessageField.Field))
        else:
            return '<%s.%s object at 0x%x [%s]>'%(self.__module__,
                                                  self.__class__.__name__,
                                                  id(self),
                                                  repr(MessageField.Field))

    def __init__(self,bitIndex,bitName,meaningWhenSet,meaningWhenNotSet):
        '''
        Simply declare a 1 bit field at the specified position.

        Note: LSB index is 1.

        :param bitIndex: position of the bit in the 32-bit word (LSB = 1)
        :param bitName: human-readable name of the bit
        :param meaningWhenSet: label describing the bit when it is 1
        :param meaningWhenNotSet: label describing the bit when it is 0
        '''
        MessageField.Field.__init__(self,bitIndex, 1,bitName)
        self._value = None                          # tri-state: True/False/None
        self._meaningWhenSet = meaningWhenSet
        self._meaningWhenNotSet = meaningWhenNotSet

    def is_data_set(self):
        '''Return True when a bit value has been set (or unpacked).'''
        return self._value is not None

    def setData(self,bitValue):
        ''' set the bit value

        This function expect the bit value passed as a boolean
        (anything else raises A429Exception).
        '''
        if type(bitValue) != type(bool()):
            raise Exception.A429Exception('Bit are expected as bool')
        else:
            self._value = bitValue

    def getData(self):
        ''' get the bit value (raises A429NoData when no value was set) '''
        if self._value is None:
            raise Exception.A429NoData(self.name)
        else:
            return self._value

    def clear(self):
        '''
        Clear the label value (back to the "no data" state)
        '''
        self._value = None

    def pack(self):
        '''
        Return the 32 bits word corresponding to an A429 message with
        the bit data (all other bits at zero).

        Raises A429NoData when no value has been set.
        '''
        if self._value is None:
            raise Exception.A429NoData(self.name)
        else:
            return MessageField.Field.pack(self,int(self._value))

    def unpack(self,A429word):
        """ set the bit value given a 32 bit ARINC 429 message value """
        self._value = bool(MessageField.Field.unpack(self,A429word))

    def __eq__(self, other):
        '''
        Define the == operator to compare field definition AND parity convention
        (full __dict__ comparison, so the current value is compared too).
        '''
        if isinstance(other, Field):
            return self.__dict__ == other.__dict__
        else:
            return NotImplemented

    def __ne__(self, other):
        '''
        Define the != operator as the negation of __eq__,
        propagating NotImplemented.
        '''
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result

    def serialize(self, stream, serializeState = False , parentElement = None):
        '''
        Serialize field to XML.

        Adds type and bit-meaning attributes to the element produced by the
        base class; when serializeState is True the current value is stored
        as the element text.
        '''
        from xml.etree.ElementTree import Element, SubElement, Comment, ElementTree
        fieldElement = super(Field,self).serialize(stream,serializeState,parentElement)
        fieldElement.set('type',__name__)
        fieldElement.set('meaningWhenSet', self._meaningWhenSet)
        fieldElement.set('meaningWhenNotSet', self._meaningWhenNotSet)
        if serializeState:
            fieldElement.text = str(self._value)
        return fieldElement
The single-fanged Enyo sports good firepower capability, a missile hardpoint and some extremely strong armor plating, making it one of the best support frigates out there. Ideal for use as point ships to draw enemy fire from more vulnerable friendlies. Unlike most Gallente ship manufacturers, Roden Shipyards tend to favor missiles over drones and their ships generally possess stronger armor. Their electronics capacity, however, tends to be weaker than ships from their competitors. Specialized as a frigate-class drone carrier, the Ishkur carries less in the way of firepower than most other Gallente gunboats. With a fully stocked complement of drones and a skilled pilot, however, no one should make the mistake of thinking this vessel easy prey. As the largest drone developer and manufacturer in space, CreoDron has a vested interest in drone carriers. While sacrificing relatively little in the way of defensive capability, the Ishkur can chew its way through surprisingly strong opponents - provided, of course, that the pilot uses top-of-the-line CreoDron drones.
from django.core.exceptions import ImproperlyConfigured
from rest_framework.permissions import BasePermission

# HTTP methods treated as read-only.
SAFE_METHODS = ['GET', 'HEAD', 'OPTIONS']


class OAuth2ScopePermission(BasePermission):
    """
    Make sure request is authenticated and token has right scope set.

    Scope declarations are read from the view:
      - required_scopes: must always be satisfied by the token
      - read_scopes:     checked for safe (read-only) methods
      - write_scopes:    checked for unsafe methods
    """

    def has_permission(self, request, view):
        # request.auth holds the access token installed by the OAuth2
        # authentication class; falsy means unauthenticated.
        token = request.auth
        read_only = request.method in SAFE_METHODS

        if not token:
            return False

        if hasattr(token, 'scope'):
            scopes = self.get_scopes(request, view)

            if scopes['required'] is not None:
                is_valid = token.is_valid(scopes['required'])
                if not is_valid:
                    return False
            else:
                # View did not define any required scopes; only a matching
                # method-specific scope below can grant access.
                is_valid = False

            # Check for method specific scopes.  When a method-specific
            # scope list exists it decides the outcome; otherwise we fall
            # through to the required-scopes result computed above.
            if read_only:
                if scopes['read'] is not None:
                    return token.is_valid(scopes['read'])
            else:
                if scopes['write'] is not None:
                    return token.is_valid(scopes['write'])
            return is_valid

        # Reached only when the token object has no `scope` attribute, i.e.
        # a non-OAuth2 authentication class produced it.
        assert False, ('OAuth2ScopePermission requires the '
                       '`oauth_api.authentication.OAuth2Authentication` '
                       'class to be used.')

    def get_scopes(self, request, view):
        # Collect scope declarations from the view; at least one kind must be
        # configured, otherwise the protected resource is misconfigured.
        required = getattr(view, 'required_scopes', None)
        read = getattr(view, 'read_scopes', None)
        write = getattr(view, 'write_scopes', None)

        if not required and not read and not write:
            raise ImproperlyConfigured(
                'OAuth protected resources requires scopes. Please add required_scopes, read_scopes or write_scopes.'
            )

        return {
            'required': required,
            'read': read,
            'write': write,
        }
Let us know if you have a suggestion for the parking lot information on this page. Precise ParkLink is a leader in the Canadian Parking Industry. With 30 years of experience in the Canadian parking market, they have become a complete turnkey parking solution as they bridge the gap between parking operations, parking equipment, parking construction and maintenance, and parking revenue financial services- a claim no other Canadian parking organization can make. Ready To Learn More About Our Solutions? © 2019 Precise ParkLink Inc. All rights reserved.
#!/usr/bin/env python import sys import string import commands import os from optparse import OptionParser import glob base_dir = os.getcwd() exe_base = "/afs/cern.ch/user/x/xju/work/upsilon/code/MyXAODTools/bsubs/monojet/" def check_dir(dir_): if not os.path.exists(dir_): os.mkdir(dir_) def submit(exe, out_log_name): print "executable:", exe print "log file:", out_log_name bad_jobs = 0 good_jobs = 0 input_dir = base_dir + "/split_and_merge/" input_all = glob.glob(input_dir+"x*") check_dir(base_dir+"/histograms/") for input_name in input_all: out_name = base_dir+"/histograms/merged_"+os.path.basename(input_name)+"_hist.root" run_cmd = exe + " " +input_name+" "+out_name bsubs_cmd = "bsub -q wisc -R 'pool>4000' -C 0 -o " + \ base_dir+ "/"+ out_log_name+" "+run_cmd #print bsubs_cmd status,output=commands.getstatusoutput(bsubs_cmd) if status != 0: bad_jobs += 1 else: good_jobs += 1 print "Good jobs: "+ str(good_jobs)+", "+str(bad_jobs)+" failed!" def submit_tree(exe, out_log_name): print "executable:", exe print "log file:", out_log_name bad_jobs = 0 good_jobs = 0 input_dir = base_dir + "/histograms/" input_all = glob.glob(input_dir+"merged*") for input_name in input_all: out_name = base_dir+"/histograms/hist_qcd_"+os.path.basename(input_name) run_cmd = exe + " " +input_name+" "+out_name bsubs_cmd = "bsub -q wisc -R 'pool>4000' -C 0 -o " + \ base_dir+ "/"+ out_log_name+" "+run_cmd #print bsubs_cmd status,output=commands.getstatusoutput(bsubs_cmd) if status != 0: bad_jobs += 1 else: good_jobs += 1 print "Good jobs: "+ str(good_jobs)+", "+str(bad_jobs)+" failed!" 
if __name__ == "__main__":
    # Command-line driver: choose which submission mode to run.
    parser = OptionParser(description="submit jobs for monojet",
                          usage="%prog log_name")
    parser.add_option("--new_file", dest="new_file", default=False,
                      action="store_true", help="create new file")
    parser.add_option("--read_ntuple", dest="read_ntuple", default=False,
                      action="store_true",
                      help="read ntuple produced by jetsmearing")
    options, args = parser.parse_args()

    if not args:
        parser.print_help()
        exit(1)

    log_name = args[0]
    if options.new_file:
        submit(exe_base + "run_jetsmearing.sh", log_name)
    elif options.read_ntuple:
        submit_tree(exe_base + "run_read_minitree.sh", log_name)
    else:
        parser.print_help()
        exit(2)
Whenever people imagine "renting", a majority think about more compact housing—such as apartments and townhouses, which usually tend not to provide tenants with their own identity. Any person moving to 69165, however, will want to recognize that there will also be houses for rent in the area. A wide array of very good reasons exist for looking at houses for rent in 69165—in contrast to apartment units or even buying a home—and by making use of some good search tools you`ll be able to quickly trim down a list of houses to discover the best place to fit your needs. A lot of people favor houses for rent in 69165 considering they provide you with a whole lot more space, compared to most apartments or condominiums. As rentals, they make it possible for tenants to have much more freedom and flexibility; namely the ability to leave after only six months to a year and never being held liable for serious maintenance and other fixes. Landlords or management companies are typically those that will handle maintenance and other fixes, but look over your lease agreement thoroughly to make sure there won`t be any altercation about who covers what in case something breaks. When you search in 69165, houses for rent are seen mostly in outer locations, versus in the heart of downtown. Nevertheless, you may sometimes discover good houses for rent downtown if you`re determined to be there. Houses for rent in 69165 give much more room compared to apartments or condominiums, for that reason they`re terrific for households with children and groups of multiple housemates. Whereas most forms of housing share some amenities and may or may not include an outdoor area, single homes sit independently from nearby properties and have their own facilities, for instance, laundry and maybe even a pool. Like most folks, you almost certainly wish to be near to 69165`s main attractions. There`s no need to worry, seeing that houses for rent in 69165 can be found closeby these areas too. 
Regardless of what your dream home’s prerequisites are—RentFinder helps you discover houses for rent in 69165 that satisfy them all. Are you a parent with young kids? RentFinder enables you to find properties near K-12 schools via the map on the sidebar of this page. Will a dog or cat be relocating in with you? The tool enables you to pinpoint pet-friendly locations too, and then you’ll be able to match up those locations with houses you might be thinking about. If it’s important to make sure that you are located in an area where it’s quick and easy to eat your fix of pizza, this web page’s right-hand section will help you do that too. Next, after finding a house for rent in 69165 that goes hand-in-hand with all your criteria, always go check it out ahead of giving any payment for it or signing a lease. You should also go check it out on your own, instead of just browsing web-based pictures of the property. As soon as you finally check out a property, scope out the overall condition it is kept in. The landlord is generally responsible for caring for it, so its present state will offer a hint as to how they will treat things if you are a lessee. Does it appear the home’s lawn has been given attention? Do you happen upon any peeling wallpaper or busted up walls? Do all of the appliances it comes with (washer and dryer, stove, freezer, or microwave oven) switch on? Do the front and back doors lock or bolt firmly? It is equally advisable to peek at the nearby homes on the street. No matter if the homes are not under the same landlord, how they are maintained can reflect on that location by and large. Undecided about the features you are in search of in a house for rent in 69165? Let RentFinder help you right this moment. Simply click the following button to get going on your search!
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Generates Spectrograms based on Raw data recorded with GNURadio """ import os import numpy as np import scipy as sp from scipy import signal from scipy.misc import imsave # While using sp.signal.specgram there are two fields that regard the FFT size: # * nfft: Length of the FFT used, if a zero padded FFT is desired. If None, # the FFT length is nperseg. Defaults to None. # * nperseg: Length of each segment. Defaults to None, but if window is str or # tuple, is set to 256, and if window is array_like, is set to the # length of the window. # Length of window segments to later avg. avg over 120FFTs = 0.768ms NFFT = 64 # Per file, 605 pics are generated, from which 10% is going to be used for test NUM_TRAIN_IMG = 545 NUM_TEST_IMG = 60 # TODO: change NFFT name? # 5e5 samples for 50ms time window COUNT = int(5e5) # TODO do I use this? DIR_PATH = os.path.join('..', '..', 'data', 'final_pu', 'with_dc') TRAIN_PATH = os.path.join(DIR_PATH, '..', '..', 'pic_set', 'train') TEST_PATH = os.path.join(DIR_PATH, '..', '..', 'pic_set', 'test') # plt.gray() # Count = 1.25 e 9 samples for 2500 pictures back to back # + 500 samples for overflow avoidance (not expected to use them) ############################################################################### # File naming convention ############################################################################### # The file will have the following name formating; FILE = "scn_{scn}_snr_{snr}.dat" # where scn is the Scenario under consideration, which will be taken from the # following structure: SCN = [scenario for scenario in range(10)] # SNR regards the Signal-to-noise ratio of the recorded signal, taking # values from the following structure SNR = ['-5', '-2_5', '0', '2_5', '5', '10', '15'] # TODO: I need to check first if the file to be analyzed exists, otherwise # this is pointless # Type of measurement TYPE = ['with_dc', 'no_dc'] # Check if the dirs for the images exists. 
# If not, create them.
## Checking for train dir
print("Checking for directories...")
if not os.path.exists(TRAIN_PATH):
    print("Creating directory at ", os.path.realpath(TRAIN_PATH))
    os.makedirs(TRAIN_PATH)
## Checking for test dir
if not os.path.exists(TEST_PATH):
    print("Creating directory at ", os.path.realpath(TEST_PATH))
    os.makedirs(TEST_PATH)

for typ in TYPE:
    # Needed to locate the different type of measurement
    DIR_PATH = os.path.join('..', '..', 'data', 'final_pu', typ)
    for scn in SCN:
        ## Checking for class scenario directory
        ### Train
        TRAIN_SCN_PATH = os.path.join(TRAIN_PATH, 'scn_{}'.format(scn))
        if not os.path.exists(TRAIN_SCN_PATH):
            print("Creating directory at ", os.path.realpath(TRAIN_SCN_PATH))
            os.makedirs(TRAIN_SCN_PATH)
        ### Test
        TEST_SCN_PATH = os.path.join(TEST_PATH, 'scn_{}'.format(scn))
        if not os.path.exists(TEST_SCN_PATH):
            print("Creating directory at ", os.path.realpath(TEST_SCN_PATH))
            os.makedirs(TEST_SCN_PATH)
        for snr in SNR:
            AF = open(os.path.join(DIR_PATH,
                                   'scn_{}_snr_{}.dat'.format(scn, snr)),
                      'rb')
            for j in range(605):  # Number of spectrograms to generate
                # One picture = 64 rows; each row is the time-averaged
                # spectrum of one 7700-sample chunk of the recording.
                for i in range(64):
                    # From https://stackoverflow.com/questions/39834345/scipy-signal-spectrogram-output-not-as-expected
                    # #segments = 1 + floor((datalen - NFFT) / (NFFT - overlap))
                    # With NFFT = 64 and 120 segments (to record ~50 ms of
                    # data) the required datalen is ~7700 samples.
                    # NOTE(review): no explicit seek() is needed — fromfile()
                    # advances the file position, and its `count` is in
                    # elements of the dtype (np.complex64), not bytes.
                    data = sp.fromfile(AF, dtype=sp.complex64, count=7700)
                    # spectrogram(...) also returns the frequency bins and
                    # the times (f, t, Sxx), which are unused here.
                    _, _, Sxx = signal.spectrogram(data, fs=10e6,
                                                   mode='magnitude',
                                                   return_onesided=False,
                                                   nperseg=NFFT,
                                                   detrend=False,
                                                   noverlap=0)
                    # The spectrum comes out unshifted, so center DC, convert
                    # to dB, and average over the time axis.
                    # NOTE(review): this relies on scipy.fftpack being
                    # reachable via the bare `scipy` namespace — confirm
                    # (np.fft.fftshift would be the self-contained choice).
                    Sxx = sp.fftpack.fftshift(Sxx, axes=0)
                    Sxx = 20 * np.log10(Sxx)
                    avgd = np.average(Sxx, axis=1)
                    if i == 0:
                        stacked = np.array(avgd)
                    else:
                        stacked = np.vstack([stacked, avgd])
                # First NUM_TRAIN_IMG pictures go to train, the rest to test.
                # Indices are offset per snr/typ so file names stay unique.
                if j < NUM_TRAIN_IMG:
                    imsave(os.path.join(TRAIN_SCN_PATH,
                                        'image_{}.jpg'.format(
                                            j
                                            + NUM_TRAIN_IMG * SNR.index(snr)
                                            + NUM_TRAIN_IMG * len(SNR)
                                            * TYPE.index(typ))),
                           stacked)
                else:
                    imsave(os.path.join(TEST_SCN_PATH,
                                        'image_{}.jpg'.format(
                                            (j - NUM_TRAIN_IMG)
                                            + NUM_TEST_IMG * SNR.index(snr)
                                            + NUM_TEST_IMG * len(SNR)
                                            * TYPE.index(typ))),
                           stacked)
            AF.close()
# The End
Cliff House, designed by Khosla Associates, is located in Kerala, India. It’s truly a dream holiday house with a quite unusual design that many people will fall in love with. It features an asymmetrical angled roof and is made of a mix of concrete, polished cement, timber, and natural kota stone. The house features amazing views of the Arabian Sea coast and a coconut plantation. The home covers an area of 1397 sqm and features everything needed for comfortable holidays with family and friends. The interiors are cozy and clean thanks to the fact that furniture and accessories are kept to a minimum. They are also very well connected with the outdoor areas.
#!/usr/bin/python # -*- coding: utf-8 -*- #Project Details: This file is part of the final project of statistics #Team members : Eliana Osorio, Sebastian Idarraga, Hector F. Jimenez #File Details: Contains Colors, And Core functions # url: github.com/heticor915/UTP2016-1/IS512Statistics # License Details: # Copyright (C) 2016 Hector F. Jimenez S. # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; Applies version 2 of the License. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import core as core import sys, subprocess #if __name__=='__main__': #Print the banner while True: subprocess.call(['clear'],shell=False)#clean term core.banner() #Generate a Random banner core.menu() #Dmenu... opcion=raw_input("[:::]> ") #Casting the option entered by the user if(((opcion>'6') or (opcion<'1') ) and opcion!=''): #Validator...break the infinite while cicle core.usage(); #Print proper usage raw_input(); #Wait for enter break if (opcion!='\n'): opcion=int(opcion) core.operative(opcion) #Do the homework :P raw_input(); #Debug
Know Ansbach HS Class of 1948 graduates that are NOT on this List? Help us Update the 1948 Class List by adding missing names. More 1948 alumni from Ansbach HS have posted profiles on Classmates.com®. Click here to register for free at Classmates.com® and view other 1948 alumni. Missing some friends from AHS that graduated with you in 1948? Check the list below that shows the Ansbach class of '48.
import numpy as np
import numpy.random as npr
import matplotlib.pyplot as plt
from pybasicbayes.util.text import progprint_xrange
from pylds.models import DefaultLDS

npr.seed(0)  # reproducible simulation and inference

# Set parameters
D_obs = 1      # observation dimension
D_latent = 2   # latent state dimension
D_input = 1    # exogenous input dimension
T = 2000       # number of time steps

# Simulate from one LDS driven by random Gaussian inputs
truemodel = DefaultLDS(D_obs, D_latent, D_input)
inputs = np.random.randn(T, D_input)
data, stateseq = truemodel.generate(T, inputs=inputs)

# Fit with another LDS that also observes the inputs
input_model = DefaultLDS(D_obs, D_latent, D_input)
input_model.add_data(data, inputs=inputs)

# Fit a separate model without the inputs
noinput_model = DefaultLDS(D_obs, D_latent, D_input=0)
noinput_model.add_data(data)


# Run the Gibbs sampler
def update(model):
    # One Gibbs sweep; returns the current training log likelihood.
    model.resample_model()
    return model.log_likelihood()

input_lls = [update(input_model) for _ in progprint_xrange(100)]
noinput_lls = [update(noinput_model) for _ in progprint_xrange(100)]

# Plot the training log likelihoods of both models
plt.figure()
plt.plot(input_lls, label="with inputs")
plt.plot(noinput_lls, label="wo inputs")
plt.xlabel('iteration')
plt.ylabel('training likelihood')
plt.legend()

# Predict forward in time from the first T_given observations
T_given = 1800
T_predict = 200
given_data = data[:T_given]
given_inputs = inputs[:T_given]

preds = \
    input_model.sample_predictions(
        given_data, inputs=given_inputs, Tpred=T_predict,
        inputs_pred=inputs[T_given:T_given + T_predict])

# Plot the predictions against the truth
plt.figure()
plt.plot(np.arange(T), data, 'b-', label="true")
plt.plot(T_given + np.arange(T_predict), preds, 'r--', label="prediction")
ylim = plt.ylim()
plt.plot([T_given, T_given], ylim, '-k')  # mark the prediction boundary
plt.xlabel('time index')
plt.xlim(max(0, T_given - 200), T)
plt.ylabel('prediction')
plt.ylim(ylim)
plt.legend()

# Smooth the data under both fitted models
input_ys = input_model.smooth(data, inputs)
noinput_ys = noinput_model.smooth(data)

plt.figure()
plt.plot(data, 'b-', label="true")
plt.plot(input_ys, 'r-', lw=2, label="with input")
plt.xlabel("Time")
plt.xlim(max(0, T_given - 200), T)
plt.ylabel("Smoothed Data")
plt.legend()

plt.show()
Listing of China Keyboard Cable Manufacturers & suppliers. All qualified products of keyboard cable made in China & Taiwan, connecting reliable Chinese manufacturers, suppliers, exporters, factories & contract manufacturing companies with global buyers. China manufacturers' listings of keyboard cable in B2BChinaSources.com for global buyers. High quality and quality assurance. Keyboard cable supplied by Chinese manufacturers with the most competitive prices and superb quality. Listings will lead you right to crucial data such as company introduction, business type, main products, target market, contact information, certifications and more. Keyboard cable is made by creditable manufacturers, and you are welcome to Add Your Products Here to attract more potential clients if you happen to be doing business in the same field.
# GojiDNS - Developed by South Patron CC - http://www.southpatron.com/
#
# This file is part of GojiDNS.
#
# GojiDNS is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GojiDNS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GojiDNS. If not, see <http://www.gnu.org/licenses/>.

# NOTE(review): uses the pre-Django-1.10 string-view `patterns()` API; this
# module requires an old Django release.
from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView as TV, RedirectView as RV
from django.contrib import admin

admin.autodiscover()

# Public (unauthenticated) views.
urlpatterns = patterns('goji.views.public',
    url( r'^$', 'index', name = 'goji-public-index' ),
    url( r'^v/login$', 'login', name = 'goji-public-login' ),
    url( r'^v/logout$', 'logout', name = 'goji-public-logout' ),
    url( r'^v/register$', 'register', name = 'goji-public-register' ),
    url( r'^v/authenticate$', 'authenticate', name = 'goji-public-authenticate' ),
    url( r'^v/resend_authentication$', 'resend_authentication', name = 'goji-public-resend-authentication' ),
    url( r'^v/reset_password$', 'reset_password', name = 'goji-public-reset-password' ),
    # The code-bearing variant must precede the plain confirm_email route.
    url( r'^v/confirm_email/(?P<code>\S+)$', 'confirm_email', name = 'goji-public-confirm-email-code' ),
    url( r'^v/confirm_email$', 'confirm_email', name = 'goji-public-confirm-email' ),
    url( r'^faq$', 'faq', name = 'goji-public-faq' ),
    # Static template-only pages.
    url( r'^legal$', TV.as_view( template_name = 'pages/public/general/legal.html' ), name = 'goji-public-legal' ),
    url( r'^features$', TV.as_view( template_name = 'pages/public/general/features.html' ), name = 'goji-public-features' ),
)

# Members-only (authenticated) views.
urlpatterns += patterns('goji.views.members',
    url( r'^members$', 'domain_list', name = 'goji-domain-list' ),
    url( r'^members/domain/(?P<domain>\S+)/resource/(?P<rid>\d+)/delete$', 'domain_resource_delete', name = 'goji-domain-resource-delete' ),
    url( r'^members/domain/(?P<domain>\S+)/resource/(?P<rid>\d+)$', 'domain_resource_edit', name = 'goji-domain-resource-edit' ),
    url( r'^members/domain/(?P<domain>\S+)/resource/add$', 'domain_resource_add', name = 'goji-domain-resource-add' ),
    url( r'^members/domain/(?P<domain>\S+)/edit$', 'domain_edit', name = 'goji-domain-edit' ),
    url( r'^members/domain/(?P<domain>\S+)$', 'domain', name = 'goji-domain' ),
    url( r'^members/domain_add$', 'domain_add', name = 'goji-domain-add' ),
    url( r'^members/domain_clone$', 'domain_clone', name = 'goji-domain-clone' ),
    url( r'^members/domain_delete/(?P<domain>\S+)$', 'domain_delete', name = 'goji-domain-delete' ),
    url( r'^members/profile$', 'profile', name = 'goji-profile' ),
    url( r'^members/network_status$', 'network_status', name = 'goji-network-status' ),
    url( r'^members/change_password$', 'change_password', name = 'goji-change-password' ),
    url( r'^members/contact-us$', 'contact_us', name = 'goji-contact-us' ),
)

# Admin site and its documentation.
urlpatterns += patterns('',
    url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    url(r'^admin/', include(admin.site.urls))
)
Pre-Purchase The Division 2 and Get a FREE Game! Anyone who pre-purchases The Division 2 will receive one of three other Ubisoft games FREE on March 15. Choose from among Far Cry Primal, Watch Dogs 2, or Tom Clancy’s Ghost Recon Wildlands. Click here to pre-purchase now. Any player that already pre-purchased is also eligible and will receive an email with instructions on how to redeem.
import datetime
import itertools
import logging
from copy import deepcopy

import anyjson as json
from django.db import models
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_unicode
from django import template
import django.template.loader
from django.utils.safestring import mark_safe
from django.conf import settings

from mypage.pages.managers import PageManager, SessionPageManager
from mypage.pages.layout import Layout
from mypage.widgets.models import Widget
from mypage.widgets.models import get_object
from mypage.widgets.templatetags.splitobjectlist import split_list

# Each entry is (template file, display name, number of widget containers).
DEFAULT_PAGE_TEMPLATES = (
    ('page.html', 'Default', 2),
    ('page3.html', 'Default 3', 3),
)

DEFAULT_SKIN_CHOICES = (('default', 'Default'),)

log = logging.getLogger('mypage.pages.models')


def page_template_choices():
    """Return (value, name) choices for Page.template from settings.

    Drops the container-count element of each PAGE_TEMPLATES entry.
    """
    # TODO: do this function lazy to support multi-site process
    page_templates = getattr(settings, 'PAGE_TEMPLATES', DEFAULT_PAGE_TEMPLATES)
    page_template_choices = [ (val, name) for val, name, containers in page_templates ]
    return page_template_choices


def skin_choices():
    """Return (value, name) choices for Page.skin from settings."""
    # TODO: migrate and remove
    return getattr(settings, 'SKIN_CHOICES', DEFAULT_SKIN_CHOICES)


class Page(models.Model):
    "Page containing multiple widgets."

    # NOTE: choices are evaluated once at import time, not per request.
    template = models.CharField(max_length=100, default='page.html',
            choices=page_template_choices())
    site = models.ForeignKey(Site, default=lambda: settings.SITE_ID)
    # TODO migrate to layout.template_config and remove
    skin = models.CharField(max_length=100, blank=True, default='',
            choices=skin_choices())
    layout_migrated = models.BooleanField(default=False)
    # Serialized Layout (see ``layout`` property below) stored as JSON text.
    layout_json = models.TextField()

    objects = PageManager()

    class Meta:
        verbose_name = _('Page')
        verbose_name_plural = _('Pages')

    def __unicode__(self):
        return u'Page: %d' % self.pk

    @property
    def widgets(self):
        """QuerySet of the Widget rows referenced by this page's layout.

        Cached on the instance (``_widgets``) after the first access.
        """
        if not hasattr(self, '_widgets'):
            self._widgets = Widget.objects.filter(
                pk__in=map(lambda wil: wil.widget_id, self.layout.widgets))
        return self._widgets

    def update_template(self, new_template):
        """Switch to ``new_template``, rearranging widget containers to fit.

        :raises KeyError: if ``new_template`` is not a configured template.
        Does not save; caller is responsible for persisting the change.
        """
        if new_template == self.template:
            return
        cs = None
        for val, name, containers in getattr(settings, 'PAGE_TEMPLATES', DEFAULT_PAGE_TEMPLATES):
            if val == new_template:
                cs = containers
                break
        else:
            raise KeyError('%r is not a valid choice for template' % new_template)
        # Redistribute widgets into the new template's container count.
        self.layout.arrange_containers(cs)
        self.template = new_template

    def get_widgets(self):
        """Return widgets downcast to their concrete subclass instances."""
        return [ i.get_child() for i in self.widgets.all() ]

    def layout_get(self):
        # Lazily deserialize layout_json; cached on the instance.
        if not hasattr(self, '_layout'):
            self._layout = Layout(self, json.deserialize(self.layout_json))
        return self._layout

    def layout_set(self, value):
        self.layout_json = json.serialize(value)
    layout = property(layout_get, layout_set)

    def add_widget(self, widget, container=0, position=None):
        """Insert ``widget`` into the layout and save the page."""
        self.layout.insert_widget(widget, container=container, position=position)
        self.save()
        # NOTE(review): stray ')' in the log format string, kept as-is.
        log.info('Add widget %d into page %d)', widget.pk, self.pk)

    def add_widgets(self, widgets):
        # add_widget() saves per widget; the trailing save() is redundant
        # but harmless — kept for behavior parity.
        for w in widgets:
            self.add_widget(w)
        self.save()

    def remove_widget(self, widget):
        """Remove ``widget`` from the layout and save the page."""
        self.layout.remove_widget(widget)
        log.info('Remove widget %d from page %d)', widget.pk, self.pk)
        self.save()

    def remove_widgets(self, widgets):
        for w in widgets:
            self.remove_widget(w)
        self.save()


class UserPage(Page):
    "Page customized by/for one User"
    user = models.ForeignKey(User, db_index=True)
    objects = PageManager()
    # Duplicates Page.site so it can participate in unique_together
    # (multi-table inheritance: constraints must live on this table).
    site_copy = models.ForeignKey(Site, default=lambda: settings.SITE_ID)

    class Meta:
        unique_together = (('site_copy', 'user',),)
        verbose_name = _('User page')
        verbose_name_plural = _('User pages')


class SessionPage(Page):
    "Page customized by/for one AnonymousUser via a session"
    session_key = models.CharField(_('session key'), max_length=40, db_index=True)
    # Last-touched timestamp; presumably used to expire stale session pages
    # (TODO confirm against SessionPageManager).
    updated = models.DateTimeField(null=False, default=datetime.datetime.now)
    site_copy = models.ForeignKey(Site, default=lambda: settings.SITE_ID)
    objects = SessionPageManager()

    class Meta:
        unique_together = (('site_copy', 'session_key',),)
        verbose_name = _('Session page')
        verbose_name_plural = _('Session pages')
is a traditional and historical house in Shiraz, Iran. It is set within the Persian gardens of Eram Garden. It was built between 1879 and 1886 by Mirza Ibrahim Khan. The Qavam family were merchants originally from Qazvin. But they soon became active in the government during the Zand dynasty, followed by the Qajar and Pahlavi dynasties as well. The Qavam "Naranjestan" preserves the elegance and refinement enjoyed by upper-class families during the 19th century. The paintings on the low ceilings of the house are inspired by Victorian-era Europe. The mirrored porch was a focal point of the house, overlooking the Eram Garden, which was designed with fountains, date palms, and flowering plants. During the second Pahlavi era, the house became the headquarters of Pahlavi University's "Asia Institute", directed by Arthur Upham Pope and Richard Nelson Frye. Frye and his family also lived in the house for a while. The house today is a museum and is open to the public. The house and the Eram Garden are within the Shiraz Botanical Garden.
from model.account import Account
import logging
from taskutils.future import future, FutureReadyForResult, GenerateOnAllChildSuccess,\
    setlocalprogress


def CountAccountsWithFutureExperiment():
    """Experiment: count all Account entities with a chain of task-queue futures.

    Returns a (title, Go) pair; calling ``Go`` launches the first future and
    returns its key so progress/results can be monitored.
    """
    def Go():
        def CountRemaining(futurekey, cursor):
            """Count one 100-entity page, chaining a child future for the rest.

            Runs inside a future identified by ``futurekey``.  Page counts are
            combined up the parent chain by addition via
            ``GenerateOnAllChildSuccess``.
            """
            logging.debug("Got here")

            # Fetch one page; ``kontinue`` is fetch_page's "more results" flag.
            accounts, cursor, kontinue = Account.query().fetch_page(
                100, start_cursor = cursor
            )

            numaccounts = len(accounts)

            if kontinue:
                # Child continues from the new cursor; on completion the
                # child's total is added to this page's count (a + b).
                lonallchildsuccessf = GenerateOnAllChildSuccess(futurekey, numaccounts, lambda a, b: a + b)
                future(CountRemaining, parentkey=futurekey, queue="background", onallchildsuccessf = lonallchildsuccessf)(cursor)

            logging.debug("raising")

            # Publish this page's count as intermediate progress.
            setlocalprogress(futurekey, numaccounts)

            if kontinue:
                # More pages pending: mark this future as awaiting its child
                # rather than returning a final result.
                raise FutureReadyForResult("still calculating")
            else:
                logging.debug("leaving")
                # Last page: this count is final for this node.
                return numaccounts

        # Kick off the chain with no cursor (start of the query).
        countfuture = future(CountRemaining, queue="background")(None)
        return countfuture.key
    return "Count Accounts With Future", Go
This is a sizeable plot being approximately 0.117 acres with a pleasant outlook enjoying views over open fields. The building plot is in a private position being well hidden just off the centre of the village. The accommodation provides for a Sitting Room, Study, Kitchen Diner, Ground Floor Bedroom, Bathroom, Two First Floor Bedrooms and Bathroom. The property will have a good sized private rear garden and to the front there is room for parking and turning. Entry is to be taken through a general area for visitor parking which will be retained in the ownership of the vendors but with full rights of access for residential purposes. Cotherstone is a vibrant village with an array of amenities which include a Post Office/shop, two pubs and a primary school. The village lies approximately four miles from the popular market town of Barnard Castle and is conveniently located for access to the A66, Darlington and the Dales. Full planning permission was approved under DM/16/00981/FPA. The application and associated documents can be viewed online at http://www.durham.gov.uk/planning.
## MediaInfoDLL - All info about media files # This software is provided 'as-is', without any express or implied # warranty. In no event will the authors be held liable for any damages # arising from the use of this software. # # Permission is granted to anyone to use this software for any purpose, # including commercial applications, and to alter it and redistribute it # freely, subject to the following restrictions: # # 1. The origin of this software must not be misrepresented; you must not # claim that you wrote the original software. If you use this software # in a product, an acknowledgment in the product documentation would be # appreciated but is not required. # 2. Altered source versions must be plainly marked as such, and must not be # misrepresented as being the original software. # 3. This notice may not be removed or altered from any source distribution. # #+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ # # Python (Windows) example # # To make this example working, you must put MediaInfo.Dll and test.avi # in the same folder # #+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ # # Should be "import MediaInfoDLL" but does not work, why? # How to import MediaInfoDLL.py correctly? 
# Example following # #+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ from MediaInfoDLL import * MI = MediaInfo() Version=MI.Option_Static("Info_Version", "0.7.7.0;MediaInfoDLL_Example_Python;0.7.7.0") if Version=="": print "MediaInfo.Dll: this version of the DLL is not compatible" exit #Information about MediaInfo print "Info_Parameters" print MI.Option_Static(u"Info_Parameters") print print "Info_Capacities" print MI.Option_Static(u"Info_Capacities") print print "Info_Codecs" print MI.Option_Static(u"Info_Codecs") #An example of how to use the library print print "Open" MI.Open(u"Example.ogg") print print "Inform with Complete=false" MI.Option_Static("Complete") print MI.Inform() print print "Inform with Complete=true" MI.Option_Static(u"Complete", u"1") print MI.Inform() print print "Custom Inform" MI.Option_Static(u"Inform", u"General;Example : FileSize=%FileSize%") print MI.Inform() print print "Get with Stream=General and Parameter='FileSize'" print MI.Get(Stream.General, 0, u"FileSize") print print "GetI with Stream=General and Parameter=46" print MI.GetI(Stream.General, 0, 46) print print "Count_Get with StreamKind=Stream_Audio" print MI.Count_Get(Stream.Audio) print print "Get with Stream=General and Parameter='AudioCount'" print MI.Get(Stream.General, 0, u"AudioCount") print print "Get with Stream=Audio and Parameter='StreamCount'" print MI.Get(Stream.Audio, 0, u"StreamCount") print print "Close" MI.Close()
Los Angeles-based pop recording artist Lostboycrow releases his latest single “Waste of Time” featuring singer-songwriter Bea Miller today via Sony’s RED MUSIC. CLICK HERE to listen to the track that premiered on Billboard and is available on all digital streaming platforms now. The Dylan Bauld-produced (Halsey, Flor, Bea Miller) song, co-written by Bauld with both feature artists, serves as the second single release in support of Lostboycrow’s full-length debut album, Santa Fe, due in early 2019. The first song written for the album, “Waste of Time” (feat. Bea Miller) captures the artist’s excitement with heavenly synths, a roaring guitar solo, and an unshakable shared harmony with Bea Miller. Rife with natural creative energy and endless high desert inspiration, Santa Fe, New Mexico served as the backdrop for Lostboycrow‘s full-length debut album, Santa Fe, due in early 2019 via Sony’s RED MUSIC. It closes one chapter and opens another for the artist born Chris Blair. At the onset of his touring career in 2011, New Mexico welcomed him with open arms at cozy coffee houses across the state, and the singer later enchanted audiences and critics nationwide when he toured alongside VÉRITÉ, K. Flay, and Flor, garnering acclaim from news outlets like Billboard, NYLON, and Pigeons & Planes along the way. The years since he first hosted those intimate gigs in Albuquerque set the stage for such transformation in the high desert. Revered for his rapturous voice, off-kilter R&B panache, pure alternative perception, and airtight songwriting, Lostboycrow emerged as the ultimate pop enigma. By 2018, his cumulative streams impressively tallied over 150 million highlighted by the success of “Powers,” Stay A Little Longer,” and “The Lost Boy” (feat. Skizzy Mars), which received placements on notable Spotify playlists New Music Friday, Indie Pop, and Pop Chillout, to name a few. The singer now boasts nearly 2 million monthly listeners and 54,000 subscribers on Spotify alone.
from django.conf import settings
from django import forms

from accounts.models import Profile

# Each form edits one settings-configured slice of the Profile model's fields.


class GeneralDetailsForm(forms.ModelForm):
    """General details, excluding identity fields the user must not edit here.

    NOTE(review): the set difference makes the resulting field ORDER
    nondeterministic across processes — confirm templates don't rely on it.
    """
    class Meta:
        model = Profile
        fields = list(set(settings.GENERAL_DETAILS_FIELD_LIST) -
                      set(['first_name', 'last_name', 'register_number', 'college_email_id']))


class PersonalDetailsForm(forms.ModelForm):
    class Meta:
        model = Profile
        fields = settings.PERSONAL_DETAILS_FIELD_LIST


class FamilyDetailsForm(forms.ModelForm):
    class Meta:
        model = Profile
        fields = settings.FAMILY_DETAILS_FIELD_LIST


class ContactDetailsForm(forms.ModelForm):
    class Meta:
        model = Profile
        fields = settings.CONTACT_DETAILS_FIELD_LIST


class EducationDetailsForm(forms.ModelForm):
    class Meta:
        model = Profile
        fields = settings.EDUCATION_DETAILS_FIELD_LIST


class MiscDetailsForm(forms.ModelForm):
    class Meta:
        model = Profile
        fields = settings.MISC_DETAILS_FIELD_LIST
When most people think of Edmonton’s music scene, the blues is not the first genre that comes to mind. But Canadian blues musician Harpdog Brown grew up watching traveling blues legends in his hometown. Friday night at the Rockstar Bar, he had the house packed and the dance floor bouncing, both literally and figuratively. Seriously, the floor was actually bouncing. Hosted by the Sault Blues Society, Friday’s show wasn’t the first time Harpdog’s performed in the Sault. He first stumbled across a blues jam at the Water Tower Inn when he and his band were passing through town a few years back. And since connecting with the Sault Blues Society, he’s been somewhat of a regular. Look for SooToday’s full interview with Harpdog Brown on our upcoming episode of Tuned. For more information about the Sault Blues Society and any upcoming performances, please click here.
import copy
import json

from django.contrib.auth.models import User
from django.core import signing
from django.db import models

from .jobportals.portals import PORTAL_FORMS
from .data import (
    DATAFILE_STATES,
    DATAFILE_STATE_CHOICES,
    DATAFILE_TRANSFER_IN_PROGRESS,
    EXTERNAL_SUBMISSION_STATE_CHOICES,
    EXTERNAL_SUBMISSION_PENDING_SUBMISSION,
    ACTIONS_TEXT,
    STORAGE_ACCOUNT_PENDING_VALIDATION,
    STORAGE_ACCOUNT_READY,
    STORAGE_ACCOUNT_STATES,
    STORAGE_ACCOUNT_STATE_CHOICES,
)


class BaseModel(models.Model):
    """Abstract base adding created/modified timestamps to every model."""
    created_at = models.DateTimeField(auto_now_add=True)
    modified_at = models.DateTimeField(auto_now=True)

    class Meta:
        abstract = True


class UserStorageAccount(BaseModel):
    """A storage backend (S3, GDrive, Dropbox, ...) attached to a user.

    Concrete providers subclass this via multi-table inheritance;
    ``get_concrete`` locates the subtype row for a base row.
    """
    root_folder_key = ''

    owner = models.ForeignKey(User)
    name = models.CharField(max_length=255, blank=True)
    validation_state = models.CharField(max_length=255,
                                        choices=STORAGE_ACCOUNT_STATE_CHOICES,
                                        default=STORAGE_ACCOUNT_PENDING_VALIDATION)

    _concrete = None

    def get_concrete(self):
        """
        :return: the concrete provider instance of this account, or None
        """
        for attr in ['s3provider', 'gdriveprovider', 'b2dropprovider',
                     'dropboxprovider', 'dummyprovider', 'wlwebdavprovider']:
            try:
                return getattr(self, attr)
            # BUG FIX: was a bare ``except:`` (also swallowed
            # KeyboardInterrupt/SystemExit).  The reverse one-to-one accessor
            # raises ObjectDoesNotExist when this row isn't of that subtype.
            except Exception:
                pass
        return None

    @property
    def display_name(self):
        """User-given name, else the concrete provider's description."""
        if self.name != '':
            return self.name
        if self.get_concrete() is not None:
            return self.get_concrete().__unicode__()
        return self.__unicode__()

    @property
    def utilization(self):
        """Total bytes of all Datafiles stored on this account."""
        s = Datafile.objects.filter(folder__storage_account=self).aggregate(
            models.Sum('size'))['size__sum']
        # aggregate() yields None when there are no rows.
        return 0 if s is None else s

    @property
    def readable_validation_state(self):
        return STORAGE_ACCOUNT_STATES.get(self.validation_state, 'Unknown')

    @property
    def validated(self):
        return self.validation_state == STORAGE_ACCOUNT_READY

    @property
    def quota(self):
        """Capacity in bytes, or None when unknown (only GDrive reports one)."""
        inst = self.get_concrete()
        if inst is None:
            return None
        if inst.type == 'GDRIVE_PROVIDER' and inst.quota_bytes != 0:
            return inst.quota_bytes
        return None

    @property
    def sync_in_progress(self):
        """True while the most recent SyncOperation for this account is ongoing."""
        sync_op = SyncOperation.get_latest_for_account(self)
        return False if sync_op is None else sync_op.ongoing

    def get_root_folder(self):
        return Folder.objects.get(storage_account=self, parent=None)

    def __unicode__(self):
        return u'StorageAccount%d' % self.pk


class S3Provider(UserStorageAccount):
    type = 'S3_PROVIDER'
    root_folder_key = '/'

    access_key_id = models.CharField(max_length=255)
    secret_access_key = models.CharField(max_length=255)
    bucket_name = models.CharField(max_length=255)

    def __unicode__(self):
        return u'S3Provider (bucket: %s, access_key: %s)' % (self.bucket_name, self.access_key_id)


class GDriveProvider(UserStorageAccount):
    type = 'GDRIVE_PROVIDER'
    root_folder_key = 'root'

    # Serialized OAuth credentials blob.
    credentials = models.CharField(max_length=4096)
    quota_bytes = models.BigIntegerField(default=0)

    def __unicode__(self):
        return u'GDriveProvider'


class B2DropProvider(UserStorageAccount):
    type = 'B2DROP_PROVIDER'
    root_folder_key = '/'

    username = models.CharField(max_length=255)
    password = models.CharField(max_length=255)

    def __unicode__(self):
        return u'B2DropProvider'


class WLWebdavProvider(UserStorageAccount):
    type = 'WL_WEBDAV_PROVIDER'
    root_folder_key = '/'

    def __unicode__(self):
        return u'WLWebdavProvider'


class DropboxProvider(UserStorageAccount):
    type = 'DROPBOX'
    root_folder_key = '/'

    access_user_id = models.CharField(max_length=255)
    access_token = models.CharField(max_length=255)
    quota_bytes = models.BigIntegerField(default=0)

    def __unicode__(self):
        return u'DropboxProvider'


class DummyProvider(UserStorageAccount):
    type = 'DUMMY'
    root_folder_key = '/'

    def __unicode__(self):
        return u'DummyProvider'


class Dataset(BaseModel):
    """A named, publishable collection of a user's datafiles."""
    owner = models.ForeignKey(User)
    name = models.CharField(max_length=1024)
    published = models.BooleanField(default=False)
    # Signed token granting access to the published dataset.
    publish_key = models.CharField(max_length=1024, default='')

    def publish(self, expires=None):
        """Mark published and mint a signed access key.

        NOTE(review): ``expires`` is currently ignored — confirm whether
        expiry was meant to be encoded into the signed key.
        """
        self.published = True
        self.publish_key = signing.dumps({'pk': self.pk})
        self.save()

    def unpublish(self):
        self.published = False
        self.publish_key = ''
        self.save()

    def __unicode__(self):
        return self.name


class DatasetFile(BaseModel):
    """Membership of one Datafile in one Dataset."""
    owner = models.ForeignKey(User)
    dataset = models.ForeignKey(Dataset)
    datafile = models.ForeignKey('Datafile')

    def __unicode__(self):
        return self.datafile.filename

    class Meta:
        unique_together = ('dataset', 'datafile')


class Folder(BaseModel):
    """A directory on a storage account; root folders have parent=None."""
    owner = models.ForeignKey(User)
    parent = models.ForeignKey('Folder', null=True)
    name = models.CharField(max_length=1024)

    storage_account = models.ForeignKey(UserStorageAccount)
    # Provider-side identifier for this folder.
    storage_key = models.CharField(max_length=1024)

    @property
    def full_path(self):
        # TODO: Optimize this. MPTT?
        if self.parent is None:
            return self.name
        return self.parent.full_path + '/' + self.name

    @property
    def rel_path(self):
        """
        :return: the path relative to the provider root ('' for the root)
        """
        # TODO: What if there is a '/' in the path??
        # TODO: Optimize this. MPTT?
        if self.parent is None:
            return ''
        elif self.parent.parent is None:
            return self.name
        else:
            return self.parent.rel_path + '/' + self.name

    def __unicode__(self):
        return self.name


class Datafile(BaseModel):
    """A single file stored inside a Folder on some storage account."""
    filename = models.CharField(max_length=1024)
    owner = models.ForeignKey(User)
    folder = models.ForeignKey(Folder)
    upload_state = models.CharField(
        max_length=255,
        choices=DATAFILE_STATE_CHOICES,
        default=DATAFILE_TRANSFER_IN_PROGRESS)
    # storage_account = models.ForeignKey(UserStorageAccount)
    storage_key = models.CharField(max_length=1024)
    # NOTE(review): IntegerField caps at ~2 GiB on most backends; a
    # BigIntegerField would need a migration, so only flagging it here.
    size = models.IntegerField(null=True, default=None)
    external_link = models.URLField(max_length=8192, blank=True)

    @property
    def storage_account(self):
        return self.folder.storage_account

    @property
    def full_path(self):
        return self.folder.full_path + '/' + self.filename

    @property
    def rel_path(self):
        return self.folder.rel_path + '/' + self.filename

    @property
    def readable_upload_state(self):
        return DATAFILE_STATES.get(self.upload_state, 'Unknown')

    def __unicode__(self):
        return self.filename


class UserAction(BaseModel):
    """Audit-log entry: an action type plus JSON-encoded arguments."""
    user = models.ForeignKey(User)
    action_type = models.CharField(max_length=255)
    args = models.TextField()

    @property
    def text(self):
        return self.__unicode__()

    @classmethod
    def log(cls, user, action_type, args):
        """Create and save a log entry; returns the saved instance."""
        # TODO: validate that args match action_type?
        obj = cls(user=user, action_type=action_type, args=json.dumps(args))
        obj.save()
        return obj

    def __unicode__(self):
        try:
            args = json.loads(self.args)
        except ValueError:
            # Corrupt/legacy rows: render with just the username.
            args = {}
        args.update({'user': self.user.username})
        r = ACTIONS_TEXT[self.action_type] % args
        return r

    class Meta:
        ordering = ['-created_at']


class SyncOperation(BaseModel):
    """One synchronization run against a storage account."""
    storage_account = models.ForeignKey(UserStorageAccount)
    ongoing = models.BooleanField(default=False)

    @classmethod
    def get_latest_for_account(cls, storage_account):
        """
        Returns the most recent SyncOperation for a given UserStorageAccount

        :param storage_account:
        :return: the latest SyncOperation, or None if there hasn't been any
        """
        try:
            return cls.objects.filter(storage_account=storage_account).order_by('-created_at')[0]
        # BUG FIX: was a bare ``except:``; only "no rows" is expected here.
        except IndexError:
            return None


class ExternalCredentials(BaseModel):
    """Stored login for a third-party job portal."""
    provider_name = models.CharField(max_length=1024)
    owner = models.ForeignKey(User)
    username = models.CharField(max_length=1024)
    password = models.CharField(max_length=1024)

    def __unicode__(self):
        return 'ExternalCredentials(%s, %s)' % (self.provider_name, self.username)


class ExternalJobPortal(BaseModel):
    name = models.CharField(max_length=1024)

    def __unicode__(self):
        return self.name


class ExternalJobPortalFormGroup(BaseModel):
    """Tree node grouping a portal's forms."""
    portal = models.ForeignKey(ExternalJobPortal)
    parent = models.ForeignKey('ExternalJobPortalFormGroup', null=True)
    name = models.CharField(max_length=1024)

    def __unicode__(self):
        return self.name


class ExternalJobPortalForm(BaseModel):
    """A submittable form belonging to an external job portal."""
    portal = models.ForeignKey(ExternalJobPortal)
    parent = models.ForeignKey(ExternalJobPortalFormGroup, null=True)
    name = models.CharField(max_length=1024)
    original_url = models.URLField()
    submit_url = models.URLField()
    template_name = models.CharField(max_length=1024)

    @classmethod
    def load_initial(cls):
        """Seed portals and forms from the static PORTAL_FORMS fixture.

        Idempotent: uses update_or_create keyed on the fixture's pks.
        """
        # deepcopy: we replace each entry's 'portal' dict with a model instance.
        portal_forms = copy.deepcopy(PORTAL_FORMS)

        for portal_form in portal_forms:
            # Save the portal
            portal, created = ExternalJobPortal.objects.update_or_create(
                pk=portal_form['portal']['pk'], defaults=portal_form['portal'])
            if created:
                print('Created portal %d' % portal.pk)

            portal_form['portal'] = portal
            new_portal_form, created = cls.objects.update_or_create(
                pk=portal_form['pk'], defaults=portal_form)
            if created:
                print('Created portal_form %d' % new_portal_form.pk)
            new_portal_form.save()

    def __unicode__(self):
        return self.name


class ExternalJobPortalSubmission(BaseModel):
    """One user submission to an external portal form."""
    owner = models.ForeignKey(User)
    target = models.ForeignKey(ExternalJobPortalForm)
    data = models.TextField()
    job_key = models.CharField(max_length=1024, blank=True)

    @property
    def state(self):
        """Latest state-change record, or the pending constant if none exist.

        NOTE(review): the two branches return different types (a
        StateChange instance vs. the PENDING constant) — callers appear to
        tolerate this; preserved as-is.
        """
        try:
            states = ExternalJobPortalSubmissionStateChange.objects.filter(external_submission=self)
            states = states.order_by('-created_at')
            state = states[0]
        # BUG FIX: was a bare ``except:``; only "no rows" is expected here.
        except IndexError:
            state = EXTERNAL_SUBMISSION_PENDING_SUBMISSION
        return state

    def update_state(self, new_state):
        """Append a new state-change record for this submission."""
        state_change = ExternalJobPortalSubmissionStateChange(external_submission=self, state=new_state)
        state_change.save()

    def __unicode__(self):
        return 'ExternalSubmission(%d)' % self.pk


class ExternalJobPortalSubmissionStateChange(BaseModel):
    """Timestamped state transition of an ExternalJobPortalSubmission."""
    external_submission = models.ForeignKey(ExternalJobPortalSubmission)
    state = models.CharField(max_length=256, choices=EXTERNAL_SUBMISSION_STATE_CHOICES)
Malleable: The fascial web constantly changes shape and composition to adapt to mechanical stress. KMI creates positional and functional changes to increase the potential of the body to assist fascia to reorganize and self-correct. Physically and functionally continuous: Fascia invests through every tissue, provides the environment for every cell of the body, surrounds the organs, and gives the whole body its shape. The entire body can be affected by a local change. Able to register information and transmit it through the body: The fascial web is a body-wide mechanosensory organ (Schleip) telling us where we are in space. Mechanoreceptors communicate with the central and peripheral nervous system, which helps control coordinated movements. Responsive to gravity: We are all under the effect of gravity and fascia organizes our structure according to the directional pull of this force. Connective tissue is a remarkably versatile bit of biology. Connective tissue forms every supportive tissue in our bodies from the fluid blood to the solid bone, and a host of sheets, straps, and slings in between. Muscular tissue moves us around, but it works through the connective tissue of fascia, tendons, and the ligaments at every turn, and it is the connective tissue complex that holds us in the shape we are in. When we are injured or stressed, no matter what the source, there is a neuromuscular response – usually involving some combination of contraction, retraction, immobility, and often rotation. These patterns put some muscles under strain when they develop painful trigger points and also pull at this fascial fabric, requiring it to shift, thicken, glue itself to surrounding structures, and otherwise compensate for the excess sustained muscular holding. Anatomy Trains: The map we use to help integrate the body. Anatomy trains are long continuous chains of myofascia, examining the connective tissue system together with muscle fiber direction.
By tracking these continuous lines of force, the entire body is covered through the series. The “Anatomy Trains Myofascial Meridians” concept was developed by Thomas Myers, who was a student of Dr. Rolf. The Anatomy Trains system approaches the body holistically, with the underlying principle that strategies for healing should simultaneously be local and global.
''' Created on 2016年2月9日 @author: Darren ''' ''' Dual Palindromes Mario Cruz (Colombia) & Hugo Rickeboer (Argentina) A number that reads the same from right to left as when read from left to right is called a palindrome. The number 12321 is a palindrome; the number 77778 is not. Of course, palindromes have neither leading nor trailing zeroes, so 0220 is not a palindrome. The number 21 (base 10) is not palindrome in base 10, but the number 21 (base 10) is, in fact, a palindrome in base 2 (10101). Write a program that reads two numbers (expressed in base 10): N (1 <= N <= 15) S (0 < S < 10000) and then finds and prints (in base 10) the first N numbers strictly greater than S that are palindromic when written in two or more number bases (2 <= base <= 10). Solutions to this problem do not require manipulating integers larger than the standard 32 bits. PROGRAM NAME: dualpal INPUT FORMAT A single line with space separated integers N and S. SAMPLE INPUT (file dualpal.in) 3 25 OUTPUT FORMAT N lines, each with a base 10 number that is palindromic when expressed in at least two of the bases 2..10. The numbers should be listed in order from smallest to largest. SAMPLE OUTPUT (file dualpal.out) 26 27 28 ''' def convert(num,base): res="" while num>0: temp=num%base if temp>9: res=chr(ord("A")-10+temp)+res else: res=str(temp)+res num//=base return res def dualpal(N,S): res=[] while len(res)<N: S+=1 count=0 for base in range(2,11): cand=convert(S, base) if cand==cand[::-1]: count+=1 if count>=2: res.append(S) break print(res) dualpal(15, 9900)
Huge Comic Book Auction LOADED with 1000's of Comic Books from Star Wars, X-Men, Super Man, Spider Man, Justice League and More. From DC to Marvel, Independent and Rare. This Auction offers a little bit of everything for everybody who is a fan of Comics. 10% Buyers Premium in effect. Please SEE LOT #100 for complete Auction Terms & Conditions.
from robotpy_ext.autonomous import timed_state, StatefulAutonomous # Only for auto complete # from components.drive import Drive from components.forklift import ToteForklift from components.alignment import Alignment class StackAutonomous(StatefulAutonomous): MODE_NAME = 'Stack Auto' DEFAULT = False drive = Drive tote_forklift = ToteForklift align = Alignment def initialize(self): self.register_sd_var('back', .5) self.register_sd_var('fwd', .5) def on_enable(self): super().on_enable() self.drive.reset_gyro_angle() def on_iteration(self, tm): super().on_iteration(tm) # This gets executed afterwards self.drive.angle_rotation(0) @timed_state(duration =.5, next_state='get_tote2', first=True) def calibrate(self, initial_call): if initial_call: self.tote_forklift.set_pos_stack1() if self.tote_forklift.isCalibrated: self.next_state('get_tote2') @timed_state(duration=1.3, next_state='reverse') def get_tote2(self, initial_call): if initial_call: self.align.align() @timed_state(duration=3, next_state='drop') def reverse(self): self.drive.move(self.back, 0, 0, 0) @timed_state(duration=1.3, next_state='strafeRight') def drop(self): self.tote_forklift.set_pos_bottom() @timed_state(duration = 2, next_state='get_tote3') def strafeRight(self): self.drive.move(0, 1, 0, 0) @timed_state(duration = 3, next_state='get_tote4') def get_tote3(self): self.align.align() @timed_state(duration = 1.3, next_state='reverse2') def get_tote4(self): self.align.align() @timed_state(duration = 3, next_state='strafe') def reverse2(self): self.drive.move(self.back, 0, 0, 0) @timed_state(duration=1) def strafe(self): self.drive.move(0, -1, 0, 0)
DURHAM — The town has put 24 public works projects totaling more than $35 million in front of U.S. Sen. Judd Gregg and Sen.-elect Jeanne Shaheen in hopes they will keep them in mind during debate on a new economic stimulus package. U.S. Sen. Judd Gregg on Monday said the state could receive as much as $300 million of the $800 billion or so that will go before the next Congress. Town Administrator Todd Selig said the town submitted the project list after Shaheen's office contacted local officials and asked them to prepare a wish list of projects they would like considered under the stimulus package. In preparing the list, Selig said he went through the town's capital improvement plan and selected projects he felt the town was ready to implement immediately. Water and wastewater projects dominate the list, including improving sewer lines running to UNH. The projects' combined cost is $5,815,480, with the Spruce Hole well being the most expensive project at $2,751,000. Other suggested projects include building a new public library (at an estimated cost of $3,490,000), building a new Town Hall complex (at an estimated cost of $4,250,000), constructing two multipurpose athletic fields at Packers Fall (at an estimated cost of $1,490,000) and building a new fire station (at an estimated cost of $4,500,000). The remaining requests cover bridge and sewer line repairs. Residents wishing to view the letter the town sent to Shaheen and Gregg and the full set of projects can view them on the town website: http://www.ci.durham.nh.us/generalpdfs/DPW_Economic_Stimulus_Package.pdf.
""" Utility module to help debug Python scripts -------------------------------------------------------------------------- File: utilsDebug.py Overview: Python module to supply functions to help debug Python scripts. Gotchas: None. Copyright: None. -------------------------------------------------------------------------- """ # Python modules: import sys # Third party modules: # In-house modules: # Instantiations: #----------------------------------------------------------------------------- # Details: Class to implement simple stack function trace. Instantiation the # class as the first function you want to trace. Example: # obj = utilsDebug.CDebugFnVerbose("validate_arguments()") # Gotchas: This class will not work in properly in a multi-threaded # environment. # Authors: Illya Rudkin 28/11/2013. # Changes: None. #-- class CDebugFnVerbose(object): # Public static properties: bVerboseOn = False # True = turn on function tracing, False = turn off. # Public: #++------------------------------------------------------------------------ # Details: CDebugFnVerbose constructor. # Type: Method. # Args: vstrFnName - (R) Text description i.e. a function name. # Return: None. # Throws: None. #-- # CDebugFnVerbose(vstrFnName) #++------------------------------------------------------------------------ # Details: Print out information on the object specified. # Type: Method. # Args: vstrText - (R) Some helper text description. # vObject - (R) Some Python type object. # Return: None. # Throws: None. #-- def dump_object(self, vstrText, vObject): if not CDebugFnVerbose.bVerboseOn: return sys.stdout.write( "%d%s> Dp: %s" % (CDebugFnVerbose.__nLevel, self.__get_dots(), vstrText)) print(vObject) #++------------------------------------------------------------------------ # Details: Print out some progress text given by the client. # Type: Method. # Args: vstrText - (R) Some helper text description. # Return: None. # Throws: None. 
#-- def dump_text(self, vstrText): if not CDebugFnVerbose.bVerboseOn: return print(("%d%s> Dp: %s" % (CDebugFnVerbose.__nLevel, self.__get_dots(), vstrText))) # Private methods: def __init__(self, vstrFnName): self.__indent_out(vstrFnName) #++------------------------------------------------------------------------ # Details: Build an indentation string of dots based on the __nLevel. # Type: Method. # Args: None. # Return: Str - variable length string. # Throws: None. #-- def __get_dots(self): return "".join("." for i in range(0, CDebugFnVerbose.__nLevel)) #++------------------------------------------------------------------------ # Details: Build and print out debug verbosity text indicating the function # just exited from. # Type: Method. # Args: None. # Return: None. # Throws: None. #-- def __indent_back(self): if CDebugFnVerbose.bVerboseOn: print(("%d%s< fn: %s" % (CDebugFnVerbose.__nLevel, self.__get_dots(), self.__strFnName))) CDebugFnVerbose.__nLevel -= 1 #++------------------------------------------------------------------------ # Details: Build and print out debug verbosity text indicating the function # just entered. # Type: Method. # Args: vstrFnName - (R) Name of the function entered. # Return: None. # Throws: None. #-- def __indent_out(self, vstrFnName): CDebugFnVerbose.__nLevel += 1 self.__strFnName = vstrFnName if CDebugFnVerbose.bVerboseOn: print(("%d%s> fn: %s" % (CDebugFnVerbose.__nLevel, self.__get_dots(), self.__strFnName))) # Private statics attributes: __nLevel = 0 # Indentation level counter # Private attributes: __strFnName = ""
Hope this year brings you everything your heart desires. What were the highlights of 2018 for you? What are you looking forward to most for 2019? Planning on making more meaningful connections with like-minded creatives? Let’s meet over coffee and support each other to make this year the best yet! We will be meeting at Stardust Coffee in Winter Park, Saturday January 26 from 12-2PM. Hope to see you there!
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import (absolute_import, division, print_function, unicode_literals) from builtins import * from random import choice, sample from itertools import product def direct_print(text): print(text, flush=True) def question_guess(number, lower_bound, upper_bound): template = '從 {} 到 {} 猜一個數字:' question = template.format(lower_bound, upper_bound) answer = number return (question, answer) def question_add(arg1, arg2): template = '{0} + {1} = ' question = template.format(arg1, arg2) answer = arg1 + arg2 return (question, answer) def question_sub(arg1, wanted_answer): template = '{0} - {1} = ' question = template.format(arg1 + wanted_answer, arg1) answer = wanted_answer return (question, answer) def question_sub_fixed(arg1, arg2): """ Given arg1 and arg2 return the question of "arg1 - arg2 = " """ template = '{0} - {1} = ' question = template.format(arg1, arg2) answer = arg1 - arg2 return (question, answer) def question_multiply(arg1, arg2): template = '{0} × {1} = ' question = template.format(arg1, arg2) answer = arg1 * arg2 return (question, answer) def question_divide(arg1, wanted_answer): template = '{0} ÷ {1} = ' question = template.format(arg1*wanted_answer, arg1) answer = wanted_answer return (question, answer) def question_highest_digit(original_question, original_answer): blocked_answer = '▢' + str(original_answer)[1:] question = ''.join([ original_question, blocked_answer, ',其中▢應填入什麼數字?', ]) answer = int(str(original_answer)[0]) return (question, answer) def question_highest_wrapper(recipe, *args): original = generate_question(recipe, *args) question, answer = question_highest_digit(*original) return (question, answer) def is_correct(users_answer, answer, precision): delta = abs(users_answer - answer) return delta <= precision def new_level_greet(level_id, precision): template = '第 {} 關 (容許誤差:{})' greet = template.format(level_id, precision) bar = '=' * 20 return '\n'.join([bar, greet, bar]) def 
correct_greet(answer, users_answer): if answer == users_answer: greet = '太棒了,答案就是 {}!'.format(answer) else: greet = '算你答對,正確答案是 {}!'.format(answer) return greet def too_high_hint(): return '太多了,再少一點!' def too_low_hint(): return '太少了,再多一點!' RECIPE_MAP = { 'guess': question_guess, 'add': question_add, 'sub': question_sub, 'subf': question_sub_fixed, 'multiply': question_multiply, 'divide': question_divide, 'highest': question_highest_wrapper, } def generate_question_list(recipe, *args): question_function = RECIPE_MAP[recipe] new_args = [] for arg in args: if not isinstance(arg, range): new_args.append([arg]) else: new_args.append(arg) question_list = [] for arg_tuple in product(*new_args): question, answer = question_function(*arg_tuple) question_list.append((question, answer)) return question_list def play_question(question, answer, precision, print_method, input_func): while True: users_response = input_func(question) try: users_answer = float(users_response) except: confirmed = confirm_exit(input_func) if confirmed: return False else: continue if is_correct(users_answer, answer, precision): print_method(correct_greet(answer, users_answer)) return True elif users_answer > answer: print_method(too_high_hint()) else: #users_answer < answer print_method(too_low_hint()) def confirm_exit(input_func): answer = input_func("確認結束遊戲?(是請按1;其他鍵表示否)") if answer == '1': return True else: return False class ApproxGame(object): """ Game for mental mathmatics in approximate numbers """ def __init__(self): levels = { # id: (next_level, precision, round_count, recipe, *args) 1: (2, 0, 1, 'guess', range(0, 100), 0, 100), 2: (3, 0, 10, 'add', range(1, 10), range(0, 10)), 3: (4, 0, 5, 'subf', 9, range(1, 10)), 4: (5, 0, 5, 'add', 10, range(1, 10)), 6: (7, 0, 10, 'add', range(10, 100, 10), range(1, 10)), 5: (6, 0, 10, 'sub', range(1, 10), range(0, 10)), 7: (8, 5, 10, 'subf', 99, range(11, 100)), 8: (9, 0, 10, 'add', range(100, 1000, 100), range(10, 100, 10)), 9: (10, 0, 10, 'add', 
range(100, 1000, 100), range(10, 100)), 10: (11, 10, 10, 'subf', 999, range(100, 1000)), 11: (12, 10, 10, 'add', range(10, 100), range(10, 100)), 12: (13, 10, 10, 'sub', range(10, 100), range(0, 100)), 13: (14, 0, 10, 'highest', 'multiply', range(10, 99), range(1, 10)), 14: (15, 0, 10, 'multiply', range(1, 9), range(0, 9)), 15: (16, 10, 10, 'multiply', range(10, 99), range(0, 9)), 16: (17, 50, 10, 'multiply', range(100, 999), range(1, 9)), 17: (18, 100, 5, 'multiply', range(10, 99), range(10, 99)), 18: (19, 0, 10, 'divide', range(1, 9), range(2, 9)), 19: (1, 10, 2, 'divide', range(10, 99), range(2, 9)), } self._levels = levels def play_level(self, level_id, print_method, input_func): level = self._levels[level_id] next_level, precision, round_count = level[0:3] recipe_args = level[3:] print_method(new_level_greet(level_id, precision)) question_list = generate_question_list(*recipe_args) for question, answer in sample(question_list, round_count): correctness = play_question(question, answer, precision, direct_print, input_func) if not correctness: # stop game return None return next_level def run(self, level_id=1): while True: level_id = self.play_level(level_id, direct_print, input) if level_id is None: direct_print("=======\n遊戲結束\n=======\n") break if __name__ == '__main__': game = ApproxGame() game.run()
Are we truly present at worship? Can we focus away from the world in adoration of Father, Son and Holy Spirit? A devotion I read talked about our worship of God needing to be done from our spirits, our true selves. This week’s hymn asks God’s Spirit to give our souls divine truth, to awaken our spirits, to clear our sights, and to reign in our consciences. John 4:23-24 Yet a time is coming and has now come when the true worshipers will worship the Father in the Spirit and in truth, for they are the kind of worshipers the Father seeks. God is spirit, and his worshipers must worship in the Spirit and in truth. I help out at a Pay It Forward shop at our church. Parents with new babies come in to find baby clothing at no cost. Sometimes we hold the babies while they shop. Oh the sweetness of a brand new soul given from God. This makes me think of a hymn about the sweet spirit of God. The song acknowledges the presence of the Holy Spirit and how it can make us look and feel. It also asks the sweet Holy Spirit to stay with us and fill us with God’s love so that we might lift our hearts in praise. God’s Spirit revives us! Acts 13:52 And the disciples were continually filled with joy and the Holy Spirit. We might imagine God’s Holy Spirit as His breath in and on us, or feel the Spirit as His light warming us, or sense His presence in and around us as a powerful and graceful state. In this Sunday’s hymn feeling, hearing, and seeing the presence of the Holy Spirit are expressed. In the following Scripture readings Jesus tells his Disciples about receiving the power of the Holy Spirit and Peter calls receiving the Holy Spirit a gift. Acts 1:8 But you shall receive power when the Holy Spirit has come upon you, and you shall be My witnesses in Jerusalem and all Judea and Samaria and to the ends of the earth. 
Acts 2:38 And Peter answered them, repent and be baptized, every one of you, in the name of Jesus Christ for the forgiveness of and release from your sins; and you shall receive the gift of the Holy Spirit. Verse: In the midst of His children the Lord said He would be. It doesn’t take very many, it can be just two or three. And I feel that same sweet spirit that I felt often times before. Surely I can say I’ve been with the Lord. Verse: There’s a holy hush around us as God’s glory feels this place. I’ve touched the hem of His garment, I can almost see His face. I know without a doubt that I’ve been with the Lord. On this fourth Sunday the hymn asks the Holy Spirit to descend upon our heart, to wean us from the earth, to make us love God as we ought, to take the dimness of our souls away, and to let us seek and find God. It also requests that the Holy Spirit teach us to feel God’s presence, to have strength in struggles, to have patience in our praying, and to love as God would have us love. The following scripture readings tell about Jesus when the Holy Spirit descended upon Him after he was baptized by John the Baptist. Luke 3:22 And the Holy Spirit descended upon Him in bodily form like a dove, and a voice came from heaven, saying, You are My Son, My Beloved! In You I am well pleased and find delight! Luke 4:1 Then Jesus, full of and controlled by the Holy Spirit, returned from the Jordan and was led in the Spirit Luke 4:14 Then Jesus went back full of and under the power of the Spirit into Galilee, and the fame of Him spread through the whole region round about. Luke 10:21 In that same hour He rejoiced and gloried in the Holy Spirit and said, I thank You, Father, Lord of heaven and earth, that You have concealed these things from the wise and understanding and learned, and revealed them to babes. Yes, Father, for such was Your gracious will and choice and good pleasure. 
1)Spirit of God, descend upon my heart; wean it from earth; through all its pulses move; stoop to my weakness, mighty as Thou art, and make me love Thee as I ought to love. 2)I ask no dream, no prophet ecstasies, no sudden rending of the veil of clay, no angel visitant, no opening of the skies; but take the dimness of my soul away. 3)Has Thou not bid me love Thee, God and King? All, all Thine own, soul, heart and strength and mind. I see Thy cross; there teach my heart to cling. O let me seek Thee, and O let me find. 4)Teach me to feel that Thou art always nigh; teach me the struggles of the soul to bear. To check the rising doubt, the rebel sigh, teach me the patience of unanswered prayer. 5)Teach me to love Thee as Thine angels love, one holy passion filling all my frame; the kindling of the heaven-descended Dove, my heart an altar, and Thy love the flame. Many Holy Spirit hymns ask the Spirit to come and be present in various ways. On this third Sunday the concept is about the Spirit falling afresh on us in order to melt, mold, fill, and use us. In the following scripture readings Daniel and Mary were filled with the Holy Spirit and used for great things – most importantly Mary, the virgin mother of Christ. Daniel 4:18 This dream I, King Nebuchadnezzar, have seen. And you, O Belteshazzar [Daniel], declare now its interpretation, since all the wise men of my kingdom are not able to make known to me the interpretation; but you are able, for the Spirit of the Holy God is in you. Luke 1:35 Then the angel said to her, The Holy Spirit will come upon you, and the power of the Most High will overshadow you; and so the holy Thing which shall be born of you will be called the Son of God. As I found myself humming and singing last Sunday’s song, “Breathe on Me, Breath of God”, the lyrics became prayerful. I was asking to be filled with life anew, breathed on until my heart is pure, made wholly God’s, and given eternal life with Our Father. 
Music can be so powerful in that way. I also happened to have a devotion that was based on Psalm 51:10-12. In it David is asking God for a right spirit, for Him not to take His Holy Spirit away, and to uphold him with His free spirit. Oh, that we might all welcome God’s spirit into our midst and our lives. Acts 9:31 So the church throughout the whole of Judea and Galilee and Samaria had peace and was edified and walking in the respect and reverential fear of the Lord and in the consolation and exhortation of the Holy Spirit, continued to increase and was multiplied. As we move from the Sundays of Easter to Pentecost, I have been thinking about all the hymns honoring the Holy Spirit that I have heard or sung over the years. I would love to note some scripture and a song for the next several Sundays, which some consider part of the season after Pentecost. John 14:26 But the Comforter, the Holy Spirit, Whom the Father will send in My name, He will teach you all things. And He will cause you to recall everything I have told you. And having said this, He breathed on them and said to them, Receive the Holy Spirit!
# NOTE(review): this module is Python 2 only (`has_key`, the `2**32L`
# long literal, str-based binary data).  `mt` is a project-local package
# (threads + logging); `_yenc` is an optional C accelerator.
import re, string, os, time, mt
from zlib import crc32

# Prefer the fast C yEnc decoder when available; fall back to the pure
# Python path in SegmentDecoder.yenc_decode otherwise.
yenc_found = False
try:
    import _yenc
    yenc_found = True
except:
    pass


class ArticleDecoder(mt.threads.Thread):
    """Background thread that yEnc-decodes downloaded segments.

    Pulls segments from the `nextSeg` callable, writes each decoded part
    to a cache file under `path`, and, when `nextSeg` returns -1 (end
    marker), assembles the cached parts into final files under `save_to`.
    Progress/completion is reported through the optional callbacks.
    """

    def __init__(self, nextSeg, save_to, path, onFinish = None, onSuccess = None, onFail = None, onAssemblyPercent = None):
        # nextSeg: callable returning the next segment, None (idle) or -1 (done).
        # save_to: directory for assembled output files.
        # path:    directory holding per-segment cache files.
        # Remaining args are optional observer callbacks.
        mt.threads.Thread.__init__(self)
        self.daemon = True
        self.decoder = SegmentDecoder()
        self.nextSeg = nextSeg
        self.save_to = save_to
        self.onFinish = onFinish
        self.onSuccess = onSuccess
        self.onAssemblyPercent = onAssemblyPercent
        self.onFail = onFail
        self.path = path

    def run(self):
        """Main loop: decode segments until the -1 end marker arrives."""
        while ( self.running ):
            try:
                seg = self.nextSeg()
                if ( seg == None ):
                    # queue empty -- back off briefly
                    self.sleep(0.1)
                    continue
                if ( seg == -1 ):
                    # this means we're finished here.
                    if ( self.onAssemblyPercent ):
                        self.onAssemblyPercent(0)
                    self.assembleSegments()
                    if ( self.onAssemblyPercent ):
                        self.onAssemblyPercent(100)
                    self.running = False
                    break
                self.decodeSegment(seg)
            except Exception as inst:
                mt.log.error("ArticleDecoder running error: " + str(inst.args))
                self.stop()
        if ( self.onFinish ):
            self.onFinish()

    def assembleSegments(self):
        """Join cached segment files into final output files.

        Cache file names are assumed to be "<filename>.NNN" -- the
        [:-4] slice strips the dot and 3-digit part number added in
        decodeSegment, and lexicographic sort restores part order.
        """
        if ( not self.running ):
            return
        mt.log.debug("Assembling..")

        # generate list of files.
        file_index = {}
        for cache_file in os.listdir(self.path):
            file_name = cache_file[:-4]
            if ( not file_index.has_key(file_name) ):
                file_index[file_name] = []
            file_index[file_name].append(cache_file)

        # check if the save folder exists
        if ( not os.path.isdir(self.save_to) ):
            os.mkdir(self.save_to)

        file_count = len(file_index)
        files_complete = 0
        for file_name in file_index:
            try:
                file = open(os.path.join(self.save_to, file_name), "wb")
                file_index[file_name].sort()
                segments = file_index[file_name]
                mt.log.debug("Assembling File: " + file_name + " Total Segments: " + str(len(segments)))
                for seg in segments:
                    seg_f = open(os.path.join(self.path, seg), "rb")
                    seg_data = seg_f.read()
                    seg_f.close()
                    if ( seg_data ):
                        file.write(seg_data)
                    # cache file is deleted even if it was empty
                    os.remove(os.path.join(self.path, seg))
                file.close()
                mt.log.debug("Assembled file: " + file_name + ".")
            except Exception as inst:
                mt.log.error("File assembly error: " + str(inst.args))

            # report assembly completion status
            if ( self.onAssemblyPercent ):
                files_complete += 1
                percent = int((float(files_complete)/float(file_count))*100.0)
                self.onAssemblyPercent(percent)

    def decodeSegment(self, seg):
        """Decode one segment and write it to the cache directory.

        On success the cache file is named "<decoded_filename>.NNN"
        (3-digit part number) and onSuccess fires; otherwise onFail.
        """
        try:
            if ( self.decoder.yenc_decode(seg) ):
                file_path = os.path.join(self.path, seg.decoded_filename + "." + str("%03d" % (seg.decoded_number,)))
                cache_file = open(file_path, "wb")
                cache_file.write(seg.decoded_data)
                cache_file.close()

                # memory leaks really bad without this.
                del seg.data[:]
                seg.decoded_data = ""

                if ( self.onSuccess ):
                    self.onSuccess(seg)
            else:
                if ( self.onFail ):
                    self.onFail(seg)
        except Exception as inst:
            mt.log.error("ArticleDecoder decode segment(" + seg.msgid + ") error: " + str(inst.args))
            if ( self.onFail ):
                self.onFail(seg)
        finally:
            # always release the raw article lines
            del seg.data[:]


class SegmentDecoder(object):
    """yEnc decoder for a single article segment.

    The segment object is expected to provide: data (list of article
    lines), msgid, number, lastTry(), and receives decoded_filename,
    decoded_number, decoded_crc, decoded_size, decoded_data.
    """

    def __init__(self):
        # Translation table undoing the yEnc +42 byte offset.
        self.YDEC_TRANS = ''.join([chr((i + 256 - 42) % 256) for i in range(256)])

    def yenc_decode(self, seg):
        """Decode seg.data in place; return True on success.

        On the segment's last retry (seg.lastTry()), validation errors
        (missing =yend, CRC/part/filename mismatch) are ignored and
        whatever decoded is kept.
        """
        ignore_errors = seg.lastTry()
        buffer = []
        in_body = False
        end_found = False
        for line in seg.data:
            if (line[:7] == '=ybegin'):
                args = line.split(" ")
                for arg in args:
                    if ( arg.startswith("name=") ):
                        # NOTE(review): takes everything after the LAST '='
                        # on the whole line -- assumes name is the final
                        # =ybegin field and contains no '='.
                        seg.decoded_filename = line.split("=")[-1]
                    if ( arg.startswith("part=") ):
                        seg.decoded_number = int(arg.split("=")[1])
            elif (line[:6] == '=ypart'):
                in_body = True
                continue
            elif (line[:5] == '=yend'):
                args = line.split(" ")
                for arg in args:
                    if ( arg.startswith("pcrc32=") or arg.startswith("crc32=") ):
                        c = arg.split("=")[1]
                        # left-pad the hex CRC to 8 characters
                        seg.decoded_crc = '0' * (8 - len(c)) + c
                        end_found = True
                # stop at the trailer so it is not appended to the body
                break
            if ( in_body ):
                buffer.append(line)

        # no ending found, article must have been cut off in transmit.
        if ( not end_found ) and ( not ignore_errors ):
            mt.log.debug("Article decode error: =yend not found.")
            return False

        # join the data together and decode it.
        data = ''.join(buffer)
        crc = ""
        if ( yenc_found ):
            decoded_data, _yenc_crc, something = _yenc.decode_string(data)
            crc = '%08X' % ((_yenc_crc ^ -1) & 2**32L - 1)
        else:
            # stolen from hellanzb.
            # undo the '=' escape sequences, then the +42 offset
            for i in (0, 9, 10, 13, 27, 32, 46, 61):
                j = '=%c' % (i + 64)
                data = data.replace(j, chr(i))
            decoded_data = data.translate(self.YDEC_TRANS)
            crc = '%08X' % (crc32(decoded_data) & 2**32L - 1)

        # if the article has failed multiple times we'll ignore errors and take
        # whatever we can get from it.
        if ( not ignore_errors ):
            # If a CRC was included, check it.
            if ( seg.decoded_crc != "" ) and ( crc != "" ):
                if ( seg.decoded_crc.upper() != crc ):
                    mt.log.debug("CRC does not match. A: " + seg.decoded_crc.upper() + " B: " + crc)
                    return False
            # check partnum
            if ( seg.decoded_number != seg.number ):
                mt.log.debug("Part number does not match: " + seg.msgid)
                return False
            # ensure we decoded a filename.
            if ( seg.decoded_filename == "" ):
                mt.log.debug(seg.msgid + " does not have a filename.")
                return False
        else:
            # best-effort path: trust the expected part number
            if ( seg.decoded_number != seg.number ):
                seg.decoded_number = seg.number

        seg.decoded_size = len(decoded_data)
        seg.decoded_data = decoded_data
        return True
1. South Africa > Australia. 08 March 2018. 2. Australia: Building Management Services Industry — predominantly controlled by tradies demanding beyond market-related prices. 3. Buy or start up a small- to medium-size Building Management Services company and thereafter focus on building a new business sales team (my speciality), vertical and horizontal acquisitions, and profit-based incentives for suppliers and clients based on EBITDA. 4. Open offices in all states. 5. Investment opportunities for privately owned companies with a national footprint only. 6. List the company within 5–7 years.
import theano
import numpy as np
from theano import Op, Apply
from theano.tensor import as_tensor_variable

try:
    import scipy.linalg
    imported_scipy = True
except ImportError:
    # some ops (e.g. Cholesky, Solve, A_Xinv_b) won't work
    imported_scipy = False


class ComplexExpm(Op):
    """
    Compute the matrix exponential of a square array.

    The complex matrix is carried as a real tensor3 of shape (2, n, n):
    channel 0 is the real part, channel 1 the imaginary part (this
    convention follows from the ``A[0] + 1j * A[1]`` reassembly in
    ``perform``).
    """

    __props__ = ()

    def make_node(self, A):
        # Symbolic graph node: input and output are both rank-3 real
        # tensors of the same dtype.
        assert imported_scipy, (
            "Scipy not available. Scipy is needed for the Expm op")
        A = as_tensor_variable(A)
        assert A.ndim == 3
        expm = theano.tensor.tensor3(dtype=A.dtype)
        return Apply(self, [A, ], [expm, ])

    def perform(self, node, inputs, outputs):
        # Reassemble the complex matrix, exponentiate with SciPy, then
        # split the result back into stacked real/imaginary channels.
        (A,) = inputs
        (expm,) = outputs
        temp = scipy.linalg.expm(A[0, :, :] + 1j * A[1, :, :])
        expm[0] = np.stack([temp.real, temp.imag])

    def grad(self, inputs, outputs):
        # Gradient is delegated to the companion op below.
        (A,) = inputs
        (g_out,) = outputs
        return [ComplexExpmGrad()(A, g_out)]

    def infer_shape(self, node, shapes):
        # expm(M) has the same (2, n, n) shape as its input.
        return [shapes[0]]


def _hconj_internal(x):
    # Hermitian conjugate in the stacked real/imag representation:
    # transpose each channel, then negate the imaginary channel.
    # NOTE(review): not referenced anywhere in this file.
    x_hconj = np.transpose(x, axes=(0, 2, 1)).copy()
    x_hconj[1, :, :] = -x_hconj[1, :, :]
    return x_hconj


class ComplexExpmGrad(Op):
    """
    Gradient of the matrix exponential of a square array.
    """

    __props__ = ()

    def make_node(self, A, gw):
        # Inputs: A (stacked (2, n, n) matrix) and gw (gradient w.r.t.
        # the ComplexExpm output); output matches A's shape.
        assert imported_scipy, (
            "Scipy not available. Scipy is needed for the Expm op")
        A = as_tensor_variable(A)
        assert A.ndim == 3
        out = theano.tensor.tensor3(dtype=A.dtype)
        return Apply(self, [A, gw], [out, ])

    def infer_shape(self, node, shapes):
        return [shapes[0]]

    def perform(self, node, inputs, outputs):
        # Kalbfleisch and Lawless, J. Am. Stat. Assoc. 80 (1985) Equation 3.4
        # Kind of... You need to do some algebra from there to arrive at
        # this expression.
        #
        # Outline: eigendecompose A, form the divided-difference matrix
        # X[i, j] = (e^wi - e^wj) / (wi - wj) with e^wi on the diagonal,
        # and contract it with the incoming gradient in the eigenbasis.
        (A, gA) = inputs
        (out,) = outputs
        w, V = scipy.linalg.eig(A[0, :, :] + 1j * A[1, :, :], right=True)
        U = scipy.linalg.inv(V)
        exp_w = np.exp(w)
        # Off-diagonal entries divide by (wi - wj); the resulting NaN/inf
        # on the diagonal is overwritten by fill_diagonal below.
        X = np.subtract.outer(exp_w, exp_w) / np.subtract.outer(w, w)
        np.fill_diagonal(X, exp_w)
        Y = np.conj(V.dot(U.dot(gA[0, :, :].T - 1j * gA[1, :, :].T).dot(V) * X).dot(U)).T
        out[0] = np.stack([Y.real, Y.imag]).astype(A.dtype)


# Ready-to-use instance of the forward op.
complex_expm = ComplexExpm()
An extra strength insect repellent containing DEET for effective relief from flies and biting insects. Now available in the revolutionary new Equimist spray bottle. The Equimist sprayer allows easy, even coverage even when spraying under the horse. The quiet action is suitable for even the most sensitive horses.
#
# Copyright (c) 2013-2016 Quarkslab.
# This file is part of IRMA project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the top-level directory
# of this distribution and at:
#
#  http://www.apache.org/licenses/LICENSE-2.0
#
# No part of the project, including this file, may be copied,
# modified, propagated, or distributed except according to the
# terms contained in the LICENSE file.

from lib.plugins import PluginBase
from lib.irma.common.utils import IrmaProbeType


class VirusTotalFormatterPlugin(PluginBase):
    """Formatter that flattens raw VirusTotal probe output for display."""

    # =================
    #  plugin metadata
    # =================

    _plugin_name_ = "VirusTotal"
    _plugin_author_ = "IRMA (c) Quarkslab"
    _plugin_version_ = "1.0.0"
    _plugin_category_ = IrmaProbeType.external
    _plugin_description_ = "VirusTotal results Formatter"
    _plugin_dependencies_ = []

    # ===========
    #  Formatter
    # ===========

    @staticmethod
    def can_handle_results(raw_result):
        """Accept only results whose name and category match this plugin."""
        plugin_cls = VirusTotalFormatterPlugin
        if raw_result.get('type', None) != plugin_cls.plugin_category:
            return False
        return raw_result.get('name', None) == plugin_cls.plugin_name

    """ VT AVs list
    'Bkav', 'MicroWorld-eScan', 'nProtect', 'K7AntiVirus', 'NANO-Antivirus',
    'F-Prot', 'Norman', 'Kaspersky', 'ByteHero', 'F-Secure', 'TrendMicro',
    'McAfee-GW-Edition', 'Sophos', 'Jiangmin', 'ViRobot', 'Commtouch',
    'AhnLab-V3', 'VBA32', 'Rising', 'Ikarus', 'Fortinet', 'Panda',
    'CAT-QuickHeal', 'McAfee', 'Malwarebytes', 'K7GW', 'TheHacker',
    'TotalDefense', 'TrendMicro-HouseCall', 'Avast', 'ClamAV', 'BitDefender',
    'Agnitum', 'Comodo', 'DrWeb', 'VIPRE', 'AntiVir', 'Emsisoft',
    'Antiy-AVL', 'Kingsoft', 'Microsoft', 'SUPERAntiSpyware', 'GData',
    'ESET-NOD32', 'AVG', 'Baidu-International', 'Symantec', 'PCTools',
    """

    @staticmethod
    def format(raw_result):
        """Replace the nested VT payload with a short summary string.

        status == 1: scan available -> "detected by X/Y" + permalink.
        status == 0: no scan -> VT's verbose message.
        Missing status (-1): the result is returned untouched.
        """
        status = raw_result.get('status', -1)
        if status == -1:
            return raw_result
        vt_result = raw_result.pop('results', {})
        av_result = vt_result.get('results', {})
        if status == 1:
            # get ratios from virustotal results
            detected = av_result.get('positives', 0)
            total = av_result.get('total', 0)
            raw_result['results'] = "detected by {0}/{1}".format(detected, total)
            raw_result['external_url'] = av_result.get('permalink', None)
        elif status == 0:
            raw_result['results'] = av_result.get('verbose_msg', None)
        return raw_result
Published 04/25/2019 01:53:50 pm at 04/25/2019 01:53:50 pm in Dusk To Dawn Light Bulbs Outdoor. dusk to dawn light bulbs outdoor dawn to dusk light bulbs dusk to dawn light sensor dusk to dawn light problems outdoor dusk to dawn led light bulbs outdoor. dusk to dawn led light bulbs outdoor,dusk to dawn outdoor light bulbs lowes,dusk to dawn flood light bulbs outdoor,dusk to dawn light bulbs outdoor,best dusk to dawn outdoor light bulbs,outdoor dusk to dawn light sensor control for led bulbs, the best dusk to dawn light bulb for on flipboard by dusk to dawn light bulbsamorno w ee smart sensor light bulb with auto onoff indoor outdoor led lighting lamp porch, motion sensor light bulb w smart bulb radar dusk to dawn led motion motion sensor light bulb w smart bulb radar dusk to dawn led motion sensor light bulbs, dusk to dawn light sensor dawn light bulb not working dusk add dusk dusk to dawn light sensor dawn light bulb not working dusk add dusk dawn sensor outdoor, dusk til dawn light bulbs creditsloansandforexinfo dusk til dawn light bulbs dust to dawn light bulbs dust to dawn light bulb dusk, indooroutdoor automatic onoff sensor led bulbs light bulbs watt equivalent a led light bulb soft white dusktilldawn .
# -*- coding: utf-8 -*-
# ****************************************************************************
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.

#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.

#    You should have received a copy of the GNU General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
# ****************************************************************************
# Authors:
#     Bruna Tavares Silva @brunats
#     Christopher Renkavieski @ChrisRenka
# Course:
#     Inteligência Artificial - BCC - CCT UDESC
# Professor:
#     Rafael Parpinelli
# ****************************************************************************
# 3-SAT solver comparison: pure random search vs. simulated annealing.

import time
import csv
import random
import math
from copy import deepcopy
# import numpy as np


# #### function definitions

def leitura(arq):
    """Read a 3-SAT instance in DIMACS CNF format.

    Returns (num_vars, num_clauses, clauses); each clause is a list of
    three signed literals (positive = variable, negative = negation).
    Comment lines ('c'), the '%' terminator and the '0' line are skipped;
    the 'p' header supplies the counts.
    """
    entrada = []
    with open(arq, 'r') as csvfile:
        plots = csv.reader(csvfile, delimiter='\n')
        for row in plots:
            linha = ""
            if (row != []):
                linha = row.pop(0)
                if (linha[0] != 'c'):
                    if (linha[0] == 'p'):
                        # header line: "p cnf <vars> <clauses>"
                        p, tipo, var, clau = linha.split()
                        var = int(var)
                        clau = int(clau)
                    elif (linha[0] != '%' and linha[0] != '0'):
                        a, b, c, zero = linha.split()
                        clausula = [int(a), int(b), int(c)]
                        entrada.append(clausula)
    # print(entrada)
    return var, clau, entrada


def geraRandom(n):
    """Return a random truth assignment; index 0 is padding (1-based vars)."""
    lista = []
    for i in range(0, n + 1):
        lista.append(random.choice([True, False]))
    return lista


def inverte(a):
    """Logical negation, kept as an explicit helper."""
    if (a == True):
        return False
    return True


def avalia(cnf, sol):
    """Return the number of UNSATISFIED clauses of *cnf* under *sol*."""
    total = 0
    for i in cnf:
        cl = False
        for j in i:
            if (j > 0):
                cl = cl or sol[j]
            else:
                cl = cl or inverte(sol[-j])
        if (cl == False):
            total += 1
    return total


def randomSearch(cnf, sol, var, clau, it, num):
    """Pure random search: try *it* fresh random assignments, keep the best.

    Logs the per-iteration unsatisfied ratio to 'random<num>.txt' and
    returns (best_solution, best_score, ratio_history).
    """
    arqNome = 'random{}.txt'.format(num)
    f = open(arqNome, 'w')
    resultado = avalia(cnf, sol)
    s = '0 {}\n'.format(resultado / clau)
    f.write(s)
    lista = []
    lista.append(resultado / clau)
    for i in range(1, it):
        sTemp = geraRandom(var)
        rTemp = avalia(cnf, sTemp)
        s = '{} {}\n'.format(i, rTemp / clau)
        f.write(s)
        lista.append(rTemp / clau)
        if (rTemp < resultado):
            sol = deepcopy(sTemp)
            resultado = rTemp
    f.close()
    return sol, resultado, lista


def reduzLinear(t, ti, passos):
    """Linear cooling: subtract a fixed fraction of ti per step."""
    return t - ti / passos


def reduzExp(ti, passo, alpha):
    """Exponential cooling: t = ti * alpha^passo (currently unused)."""
    return ti * pow(alpha, passo)


def perturba(sol, var):
    """Return a neighbour of *sol* with one random variable flipped."""
    nova = deepcopy(sol)
    flip = random.randint(1, var)
    nova[flip] = inverte(nova[flip])
    return nova


def simAne(cnf, sol, var, clau, it, num):
    """Simulated annealing over single-flip neighbourhoods.

    Worse moves are accepted with probability exp(-deltaE/t) under a
    linearly cooled temperature.  Logs the current ratio per iteration to
    'simAne<num>.txt' and returns (best_solution, best_ratio, history).
    """
    arqNome = 'simAne{}.txt'.format(num)
    f = open(arqNome, 'w')
    ti = 0.010  # initial temperature
    t = ti
    resultado = avalia(cnf, sol) / clau
    s = '0 {}\n'.format(resultado)
    f.write(s)
    lista = []
    lista.append(resultado)
    melhorSol = deepcopy(sol)
    melhorResult = resultado
    for i in range(1, it):
        sTemp = perturba(sol, var)
        rTemp = avalia(cnf, sTemp) / clau
        # the log records the CURRENT state's ratio, not the candidate's
        # s = '{} {}\n'.format(i, rTemp)
        s = '{} {}\n'.format(i, resultado)
        f.write(s)
        lista.append(resultado)
        deltaE = rTemp - resultado
        if (deltaE <= 0):
            # improving (or equal) move: always accept
            sol = deepcopy(sTemp)
            resultado = rTemp
            if (rTemp < melhorResult):
                melhorResult = rTemp
                melhorSol = deepcopy(sTemp)
        elif (random.uniform(0, 1) <= math.exp(-deltaE / t)):
            # worsening move accepted with Boltzmann probability
            sol = deepcopy(sTemp)
            resultado = rTemp
        t = reduzLinear(t, ti, it)
        # t = reduzExp(ti, i, 0.9999)
    f.close()
    return melhorSol, melhorResult, lista


def executa(cnf, var, clau, it):
    """Run 10 independent trials of both searches on *cnf*.

    Bug fix: the original body read the module-level global ``lista``
    instead of the *cnf* parameter; callers happened to pass that same
    global, so behaviour is unchanged but the function is now
    self-contained.  Returns the per-trial history lists.
    """
    melhorRand = []
    melhorSimAne = []
    listaRand = []
    listaSimAne = []
    for i in range(0, 10):
        print(i)
        # both searches start from the same random assignment
        solInicial = geraRandom(var)
        solFinal, rFinal, totalRand = randomSearch(cnf, solInicial, var, clau, it, i)
        melhorRand.append(rFinal)
        listaRand.append(totalRand)
        solFinal, rFinal, totalSimAne = simAne(cnf, solInicial, var, clau, it, i)
        melhorSimAne.append(rFinal)
        listaSimAne.append(totalSimAne)
    print(melhorRand)
    print(melhorSimAne)
    return listaRand, listaSimAne


def media(listaRand, listaSimAne, it):
    """Write per-iteration mean and mean±stddev over the 10 trials."""
    print('Calculando médias e desvios padrão')
    fRand = open('mediaRand.txt', 'w')
    fSimAne = open('mediaSimAne.txt', 'w')
    for j in range(0, it):
        mediaRand = 0.0
        mediaSimAne = 0.0
        for i in range(0, 10):
            mediaRand += listaRand[i][j]
            mediaSimAne += listaSimAne[i][j]
        mediaRand = mediaRand / 10
        mediaSimAne = mediaSimAne / 10

        sdRand = 0.0
        sdSimAne = 0.0
        for i in range(0, 10):
            sdRand += (listaRand[i][j] - mediaRand) * (listaRand[i][j] - mediaRand)
            sdSimAne += (listaSimAne[i][j] - mediaSimAne) * (listaSimAne[i][j] - mediaSimAne)
        sdRand = sdRand / 10
        sdRand = math.sqrt(sdRand)
        sRand = '{} {} {} {}\n'.format(j, mediaRand, mediaRand - sdRand, mediaRand + sdRand)
        fRand.write(sRand)
        sdSimAne = sdSimAne / 10
        sdSimAne = math.sqrt(sdSimAne)
        sSimAne = '{} {} {} {}\n'.format(j, mediaSimAne, mediaSimAne - sdSimAne, mediaSimAne + sdSimAne)
        fSimAne.write(sSimAne)
    fRand.close()
    fSimAne.close()

# #### end of function definitions


arq = 'uf20-01.cnf'
# arq = 'teste.txt'

n_var, n_clau, lista = leitura(arq)
it = 50000

listaRand, listaSimAne = executa(lista, n_var, n_clau, it)
media(listaRand, listaSimAne, it)

'''
solInicial = geraRandom(n_var)
print("Solução inicial:")
print(solInicial)

#solFinal, rFinal = randomSearch(lista, solInicial, n_var, n_clau, it)
#print("Solução final random:")
#print (solFinal)
#print (rFinal)

#solFinal, rFinal = simAne(lista, solInicial, n_var, n_clau, it)
#print("Solução final simulated annealing:")
#print (solFinal)
#print (rFinal*n_clau)
'''
Travertine bathroom floor tiles home designs. Stone cladding travertine sydney supplying tiles and. Bathroom tile ideas travertine white high gloss finish. Bathroom tile ideas travertine white high gloss finish. Sparkling white travertine floor tile with luna pearl granite. Ivory travertine tile bathroom google search bathroom. Travertine tiles prices, colour range, tile sizes we. Travertine tiles in the bathroom: designs with natural. Bathroom : floor tiles price bath tiles white kitchen. Travertine tiles wall floor tiles topps tiles. Best 25 travertine shower ideas on pinterest travertine. Travertine tiles tile design ideas. Ivory travertine tiles honed filled wall and floor tiles. Travertine tile bathroom contemporary with wall mirrors. Travertine tiles in the bathroom: designs with natural. Tiles awesome travertine bathroom tile: travertine. Travertine tiles in the bathroom: designs with natural. Travertine tile grey tile design ideas. Tile: trendy bathroom floor tiles with perfect finishing. White travertine tile disneyworldmapsorg. Travertine #039;eko light#039; travertine tiles marblous group. Best 25 travertine shower ideas on pinterest travertine. Tiles awesome travertine bathroom tile: travertine. Bathroom : travertine kitchen countertops travertine. Travertine tiles in the bathroom: designs with natural. Round bathroom light, travertine tile bathroom travertine. Stone tile shower tiles grey bathroom tiles porcelain. Bathroom tile ideas travertine white high gloss finish. Travertine tiles in the bathroom: designs with natural. Travertine shower ideas (bathroom designs) designing idea. White tumbled travertine mosaic tiles 4x4 natural stone. White tumbled travertine mosaic tiles 1x1 natural stone. White travertine tile disneyworldmapsorg. Travertine tiles in the bathroom: designs with natural. Travertine tiles in the bathroom: designs with natural.
# This file is part of Kirinki.
#
# Kirinki is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Kirinki is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with kirinki. If not, see <http://www.gnu.org/licenses/>.
#
# Django settings for kirinki project.

# NOTE(review): DEBUG=True must not reach production — confirm deployment
# overrides this.
DEBUG = True
TEMPLATE_DEBUG = DEBUG

# People who get code-error notifications when DEBUG=False.
ADMINS = (
    ('Pablo Alvarez de Sotomayor Posadillo', '[email protected]'),
)

# People who get broken-link notifications; reuses the admin list.
MANAGERS = ADMINS

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'kirinki',       # Or path to database file if using sqlite3.
        'USER': 'kuser',         # Not used with sqlite3.
        # NOTE(review): database password committed to source control —
        # consider loading it from the environment or a local settings file.
        'PASSWORD': 'dbpasswd',  # Not used with sqlite3.
        'HOST': 'localhost',     # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',              # Set to empty string for default. Not used with sqlite3.
    }
}

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/Madrid'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'es-es'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True

# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = '/var/www/kirinki/static/'

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''

# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'

# Make this unique, and don't share it with anybody.
# NOTE(review): SECRET_KEY is committed to source control; rotating it and
# loading from the environment would be safer.
SECRET_KEY = '7uj1)e5k#@x%gxi0#)-08l5w%(sqbty^uct7hv1w1cy#-=%@c*'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
#     'django.template.loaders.eggs.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.gzip.GZipMiddleware',
)

ROOT_URLCONF = 'kirinki.urls'

TEMPLATE_CONTEXT_PROCESSORS = (
    "django.core.context_processors.auth",
    "django.core.context_processors.debug",
    "django.core.context_processors.i18n",
    "django.core.context_processors.media",
    "django.contrib.messages.context_processors.messages"
)

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    '/var/www/kirinki/templates'
#    'templates'
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    # Uncomment the next line to enable the admin:
    'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
    'kirinki',
)

# Sessions are written through to the database but served from the cache.
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
SESSION_SAVE_EVERY_REQUEST = True
SESSION_EXPIRE_AT_BROWSER_CLOSE = True

# NOTE(review): memcached's conventional port is 11211 — '21211' looks like
# a typo; confirm against the memcached daemon's configuration.
CACHE_BACKEND = 'memcached://127.0.0.1:21211/'

MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'

EMAIL_HOST = 'kirinki.net'
# NOTE(review): Django documents EMAIL_PORT as an integer (25); confirm the
# string value works with this Django/smtplib version.
EMAIL_PORT = '25'
EMAIL_HOST_USER = '[email protected]'
# NOTE(review): empty password with EMAIL_USE_TLS — confirm credentials are
# injected elsewhere (e.g. a local settings override).
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = True

DEFAULT_CHARSET = 'utf-8'
FILE_CHARSET = 'utf-8'
Are your servers getting the maintenance, support and patching they need? We’re here to help. IQ IT is able to provide your managed dedicated servers with the experience and knowledge of server management, including upgrades, fixes, monitoring, support and patching, so that you can focus on your business. With IQ IT server management, you can count on fast, reliable maintenance and service.
import hashlib
from collections import OrderedDict


class BlockChain(object):
    """A toy hash chain: each entry stores 'payload:previous_key', keyed by
    the MD5 of that payload, so tampering with any entry breaks `verify()`.

    Ported to Python 3: the original used Python 2 print statements and fed
    unicode text straight into hashlib (a TypeError on Python 3). For ASCII
    inputs the UTF-8 encoding below produces identical digests.
    """

    def __init__(self, genesis='None'):
        # Insertion order of this dict is the chain order.
        self.data = OrderedDict()
        # Hash of the genesis payload seeds the chain.
        self.prev = self.make_hash(genesis)

    def make_hash(self, msg):
        """Return the hex MD5 digest of msg (coerced to str, UTF-8 encoded)."""
        hasher = hashlib.md5()
        hasher.update(('%s' % msg).encode('utf-8'))
        return hasher.hexdigest()

    def add(self, data):
        """Append a block whose payload is 'data:<previous block's key>'."""
        data = '%s:%s' % (data, self.prev)
        key = self.make_hash(data)
        self.data[key] = data
        self.prev = key

    def output(self):
        """Print every block as 'key : payload' in chain order."""
        for k in self.data:
            print('%s : %s' % (k, self.data[k]))

    def verify(self):
        """Return True iff every stored key equals the hash of its payload."""
        return all(k == self.make_hash(self.data[k]) for k in self.data)


if __name__ == '__main__':
    # Demo moved under a __main__ guard so importing this module is side-effect free.
    bc = BlockChain(genesis='hi')
    bc.add('hello')
    bc.add('hello world')
    bc.add('hello world!')
    bc.output()
    print(bc.verify())
    # 716e505b51b115aa7554596127627e50 : hello:49f68a5c8493ec2c0bf489821c21fc3b
    # 2d890e63bcecb7e826ac7201aa9a055b : hello world:716e505b51b115aa7554596127627e50
    # c6c09a0ecf532c2ee1f1a5dcd8455b0b : hello world!:2d890e63bcecb7e826ac7201aa9a055b
    # True
Recently I wrote an article about the importance of bench strength for your Advisory Councils. Not only is this a best practice, but it is also a good benchmark for the success and sustainability of your program. To see how effective your Advisory Councils are simply count how many people you have ready and willing to step in if an opening were to occur on your Council today. None Typical of a new council. Remember, recruitment is an ongoing effort. Plan now and you'll be ready when a retirement occurs. 1-3 Good job. Be sure to maintain communications with this group and actively manage the touch points. 4 or More Top notch. You clearly understand the importance of proactively managing your pipeline. We often think about bench strength as our internal talent pool--when in reality the same principles apply to our Customer Advisory Councils. Bench strength ensures you have customers primed and ready to participate when an opening occurs on your Council. Catherine Gibson-Green, Customer Advisory Council Director at AT&T, maintains a list of prospective Council members submitted by the sales force. To see how well a customer would contribute to a Council she offers them opportunities to participate in subcommittees, work groups, focus groups and surveys. In doing so, she continues to engage them, strengthens the relationship and defines the expectation of Council participation. Smart thinking! So who are you cultivating as your next Council member? Acquisitions are expected to pick up in 2011. Should you find yourself as part of an acquired company, here is a tip to help you weather the change. The acquiring company may be familiar with your market, but generally they are not intimate with your customers. If you have strong customer engagement programs, you are an asset during the transition period. 
Instead of sitting on the sideline, involve the acquiring company in your Customer Advisory Council.* They'll have the chance to meet your strategic customers, discuss the new organization, understand challenges and trends in your market and map out future opportunities with your customers. You'll position your programs as strategic and yourself as a trusted adviser. A prospect told me earlier this week that their company didn't have a centralized customer contact list. In the spirit of the season, the CMO sent a note to the sales force asking for customer names, titles and addresses to send a holiday gift. The thought was that this route would provide a backhanded way of getting the data. So what can we learn from this lesson? Relationships are critical...inside and outside your organization. Your sales force needs to trust that value will be gained by sharing customer data. Only when you share the data can you start to have conversations about how and where to strengthen relationships with clients. If you don't have a customer contact list at the corporate level, chances are pretty good that your relationships are not where they need to be. The good news is the holiday season presents a great way to start. Even though the CMO in my story above didn't hit the ball out of the park on the first try, seeds are being planted and will in time produce results. Remember the movie Miracle on 34th Street? The little girl who doesn't believe in Santa, but mutters "I believe…I believe" anyway? I'm reminded of that scene when I think about corporate strategy. Until the little girl sees the present she asked for, she is hopeful, but not a true believer. Employees and leaders are the same way. They want to believe in the strategic planning process. They know they are supposed to believe in the ultimate strategy, but it's hard. Until they see what they've asked for, it's all just a bunch of words and a lot of PowerPoint.
And unfortunately, years of failing to take tangible actions leave companies with wishful thinkers. Research shows most corporations fail to compete not because their strategy is good or bad, but because they are unable to execute their chosen strategy. This shouldn’t come as a surprise. There are countless numbers of books written by really smart people who tell us the importance of execution. They are right. Execution is the game changer…if for no other reason than the culture of success created by following through on a plan and holding each other accountable for performance. The NFL is a great example. In a league of salary caps and parity, where teams are separated by just seven points, some teams flourish and others struggle to win. Without execution there is little chance of meaningful long-term success. But you can take steps now to change that. You can take steps to improve execution in 2011. Where to start? You must first build trust—in the process, in your leadership, and in your organization to make decisions and take actions consistent with your strategy. To build trust in the process, you need to ensure your strategy is tied to a real understanding of the market. Employees are savvy. They know the difference between an outside-in view and the kind that comes from “the ivory tower.” Regardless of how smart your employees think you are, or how much they respect you, at the end of the day they want validation. The best way we have found validate or vet strategy with the market is through interactions with key customers. It could be as simple as a roundtable discussion of your S.W.O.T., or as sophisticated as a facilitated Advisory Council. Either way, you gain rich feedback validating your opportunities and constraints. You get a clear picture of the market and where it is headed. And, you strengthen your customer relationships in the process. 
This feedback and market clarity is critical as you work to build the second level of trust—trust in your leadership. Think about what has happened to the employment contract over the past ten to twenty years. We’ve told employees they should be loyal to their companies, even when their companies are not. We want employees to be advocates for our brand, but we often fail to educate and support this effort. To rebuild trust in leadership, your strategy must be relevant. Creating an outside-in strategy is a good first step. The second is to start a dialogue. The good news is both ends of the generational divide are actually united. Baby boomers and millennials both desire integrity and transparency in leadership. They wish to participate in—not be spoken “at” or “to”. While some companies are testing social media and online communities, there is still a lot to be said for facilitated face-to-face engagements. We particularly like Root Learning’s strategy map process which is grounded in adult learning techniques. It helps employees understand and participate in the strategy process—the need to change, the alternatives considered and the path chosen. By rebuilding trust in the process and in leadership, you have improved your chance of succeeding. Your journey, however, is far from complete. To build trust in the organization you must help the organization let go of the past (i.e. destructive behaviors), while at the same time celebrate successes and hold each other accountable for performance. Performance management is a key component to connect organizational, functional and individual performance objectives to the roadmap and behaviors. The metrics for determining progress will need to be communicated frequently through town halls and internal communication vehicles. We also suggest creating an internal board to monitor employee understanding of the strategy and provide feedback on how communications are perceived by different parts of the organization. 
By understanding how long to stay on message and when and how to adapt your messages, you'll improve the capability of the organization to execute and you'll greatly improve organizational trust. Great execution is no miracle. By taking steps now to rebuild trust in your process, your leadership and your organization, you will create an organizational culture known for execution and success. It seems commoditization is happening faster these days and the only way to combat it is through continual innovation. "If you want to become a more innovative organization," says Michael Schrage in his HBR Blog, "don't hire more innovative employees, acquire more innovative customers. Your capacity to innovate matters less than your customers' and clients' willingness and ability to exploit it." Michael's quote hits home as I recall recent client engagements and the importance we place on recruiting the right customers to participate in their executive programs. Pretty much any customer can tell you their pain points. The difference between any customer and the right customer is their ability to provide solutions, to ask "What if?" When you get these customers together with your executives, around a table or as part of an Executive Sponsor Program, amazing things happen. Do you know who your "right" customers are? You know you are on the right track when the message you've been preaching for years is borne out, at least in part, by the big consulting firms. This past week I read a piece about brand being the reflection of your employees. After all, it is their execution (or lack thereof), that creates brand perception. I totally agree. Then I read a research study about aligning your corporate culture to your business goals to bring your strategy to life. I mostly agree.
If you've read my prior posts about an effective strategic planning process, then you know that points two and three are ensuring your employees understand the plan, and then integrating the plan with your management system (gets to execution and culture). But strategies are useless if they fail to incorporate market and competitive data. (That was point one.) Your brand and culture are useless too, if they fail to incorporate market and competitive data. Unfortunately, businesses trying to help these companies don't. Why? They tend to provide research and tools the way businesses operate...in silos. As a result, execution winds up on the CEO's plate, almost exclusively, because his opinion is the anchor point. The money is in making your customers the anchor points. Listening to the decision makers in your top accounts provides you a means to understand where and how to grow, where to invest and how to shift your brand and culture to be able to execute consistently against expectations. When you develop a strategy based on market insight and then align your resources to the strategy in a meaningful way, you will be on your way to sustainable, predictable, profitable growth. Now, doesn't that sound sweet? A CMO told me the other day, "I know which customers are strategic to my company. My challenge is making my company strategic to those customers." His comment demonstrates the shift in marketing that is currently occurring within high-tech and service companies. In these organizations the marketing function is no longer about awareness, brand or feel-good programs. It is a results-oriented entity enabling the relationships and market position necessary to increase customer value and drive revenue. Has your company made the shift? If not, what's holding you back? I just read an article by Chief Learning Officer that states "Alignment Starts From the Inside Out." 
The article is based on research of 1,500 chief learning and development executives who say their organizations will align more closely to organizational objectives this year. The article describes how performance management, competency development and leadership need to align to organization priorities. I fully agree. Having worked in and with a number of dysfunctional organizations I concluded years ago that companies need an inside-out approach. I even started a consulting firm to help companies gain this alignment. As my blog title suggests, I now question my initial conclusions. Based on discussions with executives it became clear they really didn't have an anchor point to use for the alignment journey. They agreed with my value proposition and asked for proposals on how to achieve alignment, but at the end of the day they had to admit their strategic plans weren't very strategic and their brand destination not well defined. Of course this wasn't true of every company. It was true, though, of organizations that most needed my help and were struggling to transform themselves. So what do well performing organizations do? They continually gain market insight and build relationships with users, influencers and decision makers who keep their organizations on the front end of change. In essence, they use an outside-in approach. I see it time and time again in my work here at the Geehan Group. The Chief Learning Officer article references the use of customer sat metrics to create feedback loops and even states alignment shouldn't be based on internal HR processes--rather on what clients and employees want. At the end of the day I'm pretty sure we're saying the same thing. You don't build a company and then find the market. You find the market and then build the company. Whether you call it inside-out or outside-in, what matters is alignment of resources to a market-based strategy. 
"If sustainable, predictable, profitable growth is the holy grail you are seeking, you have to build relationships with decision makers," says Sean Geehan, founder of the Geehan Group and author of the upcoming book The B2B Executive Playbook. This assertion at a recent Columbus AMA Special Interest Group Meeting prompted an interesting question from the audience. "Are you suggesting we focus all our relationship efforts on decision makers?" asked a B2B marketer. I like this question because I think it is at the heart of a fundamental change we are making not just as marketers, but as selling organizations. More and more companies are asking for assistance in "moving up the food chain"--being able to call on and have relationships with higher level executives. They realize that customer satisfaction and NPS scores are only a portion of what we need to do with regard to customer loyalty and retention. To sell solutions, to solve problems, to become trusted business partners means we have to build relationships at the decision maker level. We have to understand their business aspirations and we have to deliver value. And to answer the gentleman's question, we have to do this while maintaining our relationships with users and influencers. Wow! A big job to be sure. But I think we are up to it. We've mastered expense control and quality management. We've seen our organizations through mergers and divestitures. There's no reason why we can't step up and help our organization's build profitable customer relationships with decision makers. And, as Sean shared in his presentation, this can be accomplished without spending any additional money. We simply need to re-balance the money we are spending today. Where is your organization at today? Where do you want to be? What are you doing to help your organization make the shift? I'd love to hear your comments.
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*- # vi: set ft=python sts=4 ts=4 sw=4 noet : # This file is part of Fail2Ban. # # Fail2Ban is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # Fail2Ban is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Fail2Ban; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # Author: Serg G. Brester (sebres) # # This module was written as part of ban time increment feature. __author__ = "Serg G. Brester (sebres)" __copyright__ = "Copyright (c) 2014 Serg G. Brester" __license__ = "GPL" import threading from .jailthread import JailThread from .failmanager import FailManagerEmpty import os, logging, time, datetime, math, json, random import sys from ..helpers import getLogger from .mytime import MyTime from .utils import Utils # Gets the instance of the logger. logSys = getLogger(__name__) class ObserverThread(JailThread): """Handles observing a database, managing bad ips and ban increment. Parameters ---------- Attributes ---------- daemon ident name status active : bool Control the state of the thread. idle : bool Control the idle state of the thread. sleeptime : int The time the thread sleeps for in the loop. 
""" # observer is event driven and it sleep organized incremental, so sleep intervals can be shortly: DEFAULT_SLEEP_INTERVAL = Utils.DEFAULT_SLEEP_INTERVAL / 10 def __init__(self): # init thread super(ObserverThread, self).__init__(name='f2b/observer') # before started - idle: self.idle = True ## Event queue self._queue_lock = threading.RLock() self._queue = [] ## Event, be notified if anything added to event queue self._notify = threading.Event() ## Sleep for max 60 seconds, it possible to specify infinite to always sleep up to notifying via event, ## but so we can later do some service "events" occurred infrequently directly in main loop of observer (not using queue) self.sleeptime = 60 # self._timers = {} self._paused = False self.__db = None self.__db_purge_interval = 60*60 # observer is a not main thread: self.daemon = True def __getitem__(self, i): try: return self._queue[i] except KeyError: raise KeyError("Invalid event index : %s" % i) def __delitem__(self, i): try: del self._queue[i] except KeyError: raise KeyError("Invalid event index: %s" % i) def __iter__(self): return iter(self._queue) def __len__(self): return len(self._queue) def __eq__(self, other): # Required for Threading return False def __hash__(self): # Required for Threading return id(self) def add_named_timer(self, name, starttime, *event): """Add a named timer event to queue will start (and wake) in 'starttime' seconds Previous timer event with same name will be canceled and trigger self into queue after new 'starttime' value """ t = self._timers.get(name, None) if t is not None: t.cancel() t = threading.Timer(starttime, self.add, event) self._timers[name] = t t.start() def add_timer(self, starttime, *event): """Add a timer event to queue will start (and wake) in 'starttime' seconds """ # in testing we should wait (looping) for the possible time drifts: if MyTime.myTime is not None and starttime: # test time after short sleep: t = threading.Timer(Utils.DEFAULT_SLEEP_INTERVAL, 
                # (continuation of add_timer(): schedule the short re-check timer used in testing mode)
                self._delayedEvent, (MyTime.time() + starttime, time.time() + starttime, event) )
            t.start()
            return
        # add timer event:
        t = threading.Timer(starttime, self.add, event)
        t.start()

    def _delayedEvent(self, endMyTime, endTime, event):
        """Re-check helper for add_timer: fires the event once either the
        (possibly simulated) MyTime deadline or the wall-clock deadline passes,
        otherwise re-arms itself after a short sleep."""
        if MyTime.time() >= endMyTime or time.time() >= endTime:
            self.add_timer(0, *event)
            return
        # repeat after short sleep:
        t = threading.Timer(Utils.DEFAULT_SLEEP_INTERVAL,
            self._delayedEvent, (endMyTime, endTime, event) )
        t.start()

    def pulse_notify(self):
        """Notify wakeup (sets /and resets/ notify event)
        """
        # no wakeup while paused; _notify may be None during shutdown (see stop()):
        if not self._paused:
            n = self._notify
            if n:
                n.set()
                #n.clear()

    def add(self, *event):
        """Add an event to queue and notify thread to wake up.
        """
        ## lock and add new event to queue:
        with self._queue_lock:
            self._queue.append(event)
        self.pulse_notify()

    def add_wn(self, *event):
        """Add an event to queue without notifying thread to wake up.
        """
        ## lock and add new event to queue:
        with self._queue_lock:
            self._queue.append(event)

    def call_lambda(self, l, *args):
        # generic dispatcher used by the 'call' event type: invoke callable l with args
        l(*args)

    def run(self):
        """Main loop for Threading.

        This function is the main loop of the thread.

        Returns
        -------
        bool
            True when the thread exits nicely.
        """
        logSys.info("Observer start...")
        ## first time create named timer to purge database each hour (clean old entries) ...
        self.add_named_timer('DB_PURGE', self.__db_purge_interval, 'db_purge')
        ## Mapping of all possible event types of observer:
        __meth = {
            # universal lambda:
            'call': self.call_lambda,
            # system and service events:
            'db_set': self.db_set,
            'db_purge': self.db_purge,
            # service events of observer self:
            'is_alive' : self.isAlive,
            'is_active': self.isActive,
            'start': self.start,
            'stop': self.stop,
            'nop': lambda:(),
            'shutdown': lambda:()
        }
        try:
            ## check it self with sending is_alive event
            self.add('is_alive')
            ## if we should stop - break a main loop
            while self.active:
                self.idle = False
                ## check events available and execute all events from queue
                while not self._paused:
                    ## lock, check and pop one from begin of queue:
                    try:
                        ev = None
                        with self._queue_lock:
                            if len(self._queue):
                                ev = self._queue.pop(0)
                        if ev is None:
                            break
                        ## retrieve method by name
                        meth = ev[0]
                        # event head may be a callable, a key of __meth, or a method name:
                        if not callable(ev[0]):
                            meth = __meth.get(meth) or getattr(self, meth)
                        ## execute it with rest of event as variable arguments
                        meth(*ev[1:])
                    except Exception as e:
                        #logSys.error('%s', e, exc_info=logSys.getEffectiveLevel()<=logging.DEBUG)
                        logSys.error('%s', e, exc_info=True)
                ## going sleep, wait for events (in queue)
                n = self._notify
                if n:
                    self.idle = True
                    n.wait(self.sleeptime)
                    ## wake up - reset signal now (we don't need it so long as we read from queue)
                    n.clear()
                    if self._paused:
                        continue
                else:
                    ## notify event deleted (shutdown) - just sleep a little bit (waiting for shutdown events, prevent high cpu usage)
                    time.sleep(ObserverThread.DEFAULT_SLEEP_INTERVAL)
                ## stop by shutdown and empty queue :
                if not self.is_full:
                    break
            ## end of main loop - exit
            logSys.info("Observer stopped, %s events remaining.", len(self._queue))
            self._notify = None
            #print("Observer stopped, %s events remaining." % len(self._queue))
        except Exception as e:
            logSys.error('Observer stopped after error: %s', e, exc_info=True)
            #print("Observer stopped with error: %s" % str(e))
        # clear all events - exit, for possible calls of wait_empty:
        with self._queue_lock:
            self._queue = []
        self.idle = True
        return True

    def isAlive(self):
        # health-check handler for the 'is_alive' event the observer sends to itself
        #logSys.debug("Observer alive...")
        return True

    def isActive(self, fromStr=None):
        # logSys.info("Observer alive, %s%s",
        # 	'active' if self.active else 'inactive',
        # 	'' if fromStr is None else (", called from '%s'" % fromStr))
        return self.active

    def start(self):
        # start the underlying thread once; guarded by the queue lock against races
        with self._queue_lock:
            if not self.active:
                super(ObserverThread, self).start()

    def stop(self, wtime=5, forceQuit=True):
        """Stop the observer: enqueue a 'shutdown' event, wake the thread and
        wait up to wtime seconds for the queue to drain (forced if forceQuit)."""
        if self.active and self._notify:
            logSys.info("Observer stop ... try to end queue %s seconds", wtime)
            #print("Observer stop ....")
            # just add shutdown job to make possible wait later until full (events remaining)
            with self._queue_lock:
                self.add_wn('shutdown')
                #don't pulse - just set, because we will delete it hereafter (sometimes not wakeup)
                n = self._notify
                self._notify.set()
                #self.pulse_notify()
                self._notify = None
            # wait max wtime seconds until full (events remaining)
            if self.wait_empty(wtime) or forceQuit:
                n.clear()
                self.active = False; # leave outer (active) loop
                self._paused = True; # leave inner (queue) loop
                self.__db = None
            else:
                self._notify = n
                return self.wait_idle(min(wtime, 0.5)) and not self.is_full
        return True

    @property
    def is_full(self):
        # True while at least one event is still queued
        with self._queue_lock:
            return True if len(self._queue) else False

    def wait_empty(self, sleeptime=None):
        """Wait observer is running and returns if observer has no more events (queue is empty)
        """
        time.sleep(ObserverThread.DEFAULT_SLEEP_INTERVAL)
        if sleeptime is not None:
            e = MyTime.time() + sleeptime
        # block queue with not operation to be sure all really jobs are executed if nop goes from queue :
        if self._notify is not None:
            self.add_wn('nop')
            if self.is_full and self.idle:
                self.pulse_notify()
        while self.is_full:
            if sleeptime is not None and MyTime.time() > e:
                break
            time.sleep(ObserverThread.DEFAULT_SLEEP_INTERVAL)
        # wait idle to be sure the last queue element is processed (because pop event before processing it) :
        self.wait_idle(0.001)
        return not self.is_full

    def wait_idle(self, sleeptime=None):
        """Wait observer is running and returns if observer idle (observer sleeps)
        """
        time.sleep(ObserverThread.DEFAULT_SLEEP_INTERVAL)
        if self.idle:
            return True
        if sleeptime is not None:
            e = MyTime.time() + sleeptime
        while not self.idle:
            if sleeptime is not None and MyTime.time() > e:
                break
            time.sleep(ObserverThread.DEFAULT_SLEEP_INTERVAL)
        return self.idle

    @property
    def paused(self):
        return self._paused;

    @paused.setter
    def paused(self, pause):
        if self._paused == pause:
            return
        self._paused = pause
        # wake after pause ended
        self.pulse_notify()

    @property
    def status(self):
        """Status of observer to be implemented. [TODO]
        """
        return ('', '')

    ## -----------------------------------------
    ## [Async] database service functionality ...
    ## -----------------------------------------

    def db_set(self, db):
        # handler for the 'db_set' event: remember the database to purge later
        self.__db = db

    def db_purge(self):
        # handler for the periodic 'db_purge' event: purge old entries and re-arm the timer
        logSys.debug("Purge database event occurred")
        if self.__db is not None:
            self.__db.purge()
        # trigger timer again ...
        self.add_named_timer('DB_PURGE', self.__db_purge_interval, 'db_purge')

    ## -----------------------------------------
    ## [Async] ban time increment functionality ...
    ## -----------------------------------------

    def failureFound(self, failManager, jail, ticket):
        """ Notify observer a failure for ip was found

        Observer will check ip was known (bad) and possibly increase an retry count
        """
        # check jail active :
        if not jail.isAlive() or not jail.getBanTimeExtra("increment"):
            return
        ip = ticket.getIP()
        unixTime = ticket.getTime()
        logSys.debug("[%s] Observer: failure found %s", jail.name, ip)
        # increase retry count for known (bad) ip, corresponding banCount of it (one try will count than 2, 3, 5, 9 ...) :
        banCount = 0
        retryCount = 1
        timeOfBan = None
        try:
            maxRetry = failManager.getMaxRetry()
            db = jail.database
            if db is not None:
                for banCount, timeOfBan, lastBanTime in db.getBan(ip, jail):
                    banCount = max(banCount, ticket.getBanCount())
                    # exponential weighting of the failure, capped at 2**20 to avoid overflow-sized shifts:
                    retryCount = ((1 << (banCount if banCount < 20 else 20))/2 + 1)
                    # if lastBanTime == -1 or timeOfBan + lastBanTime * 2 > MyTime.time():
                    # 	retryCount = maxRetry
                    break
                retryCount = min(retryCount, maxRetry)
                # check this ticket already known (line was already processed and in the database and will be restored from there):
                if timeOfBan is not None and unixTime <= timeOfBan:
                    logSys.debug("[%s] Ignore failure %s before last ban %s < %s, restored",
                        jail.name, ip, unixTime, timeOfBan)
                    return
            # for not increased failures observer should not add it to fail manager, because was already added by filter self
            if retryCount <= 1:
                return
            # retry counter was increased - add it again:
            logSys.info("[%s] Found %s, bad - %s, %s # -> %s%s", jail.name, ip,
                MyTime.time2str(unixTime), banCount, retryCount,
                (', Ban' if retryCount >= maxRetry else ''))
            # retryCount-1, because a ticket was already once incremented by filter self
            retryCount = failManager.addFailure(ticket, retryCount - 1, True)
            ticket.setBanCount(banCount)
            # after observe we have increased attempt count, compare it >= maxretry ...
            if retryCount >= maxRetry:
                # perform the banning of the IP now (again)
                # [todo]: this code part will be used multiple times - optimize it later.
                try: # pragma: no branch - exception is the only way out
                    while True:
                        ticket = failManager.toBan(ip)
                        jail.putFailTicket(ticket)
                except FailManagerEmpty:
                    failManager.cleanup(MyTime.time())
        except Exception as e:
            logSys.error('%s', e, exc_info=logSys.getEffectiveLevel()<=logging.DEBUG)

    class BanTimeIncr:
        # lightweight value holder handed to the configured ban-time formula ('evformula')
        def __init__(self, banTime, banCount):
            self.Time = banTime
            self.Count = banCount

    def calcBanTime(self, jail, banTime, banCount):
        # evaluate the jail's ban-time formula for the given time/count pair
        be = jail.getBanTimeExtra()
        return be['evformula'](self.BanTimeIncr(banTime, banCount))

    def incrBanTime(self, jail, banTime, ticket):
        """Check for IP address to increment ban time (if was already banned).

        Returns
        -------
        float
            new ban time.
        """
        # check jail active :
        if not jail.isAlive() or not jail.database:
            return banTime
        be = jail.getBanTimeExtra()
        ip = ticket.getIP()
        orgBanTime = banTime
        # check ip was already banned (increment time of ban):
        try:
            if banTime > 0 and be.get('increment', False):
                # search IP in database and increase time if found:
                for banCount, timeOfBan, lastBanTime in \
                    jail.database.getBan(ip, jail, overalljails=be.get('overalljails', False)) \
                :
                    # increment count in ticket (if still not increased from banmanager, test-cases?):
                    if banCount >= ticket.getBanCount():
                        ticket.setBanCount(banCount+1)
                    logSys.debug('IP %s was already banned: %s #, %s', ip, banCount, timeOfBan);
                    # calculate new ban time
                    if banCount > 0:
                        banTime = be['evformula'](self.BanTimeIncr(banTime, banCount))
                        ticket.setBanTime(banTime)
                    # check current ticket time to prevent increasing for twice read tickets (restored from log file besides database after restart)
                    if ticket.getTime() > timeOfBan:
                        logSys.info('[%s] IP %s is bad: %s # last %s - incr %s to %s' % (jail.name, ip, banCount,
                            MyTime.time2str(timeOfBan),
                            datetime.timedelta(seconds=int(orgBanTime)),
                            datetime.timedelta(seconds=int(banTime))));
                    else:
                        ticket.restored = True
                    break
        except Exception as e:
            logSys.error('%s', e, exc_info=logSys.getEffectiveLevel()<=logging.DEBUG)
        return banTime

    def banFound(self, ticket, jail, btime):
        """ Notify observer a ban occurred for ip

        Observer will check ip was known (bad) and possibly increase/prolong a ban time
        Secondary we will actualize the bans and bips (bad ip) in database
        """
        if ticket.restored: # pragma: no cover (normally not restored tickets only)
            return
        try:
            oldbtime = btime
            ip = ticket.getIP()
            logSys.debug("[%s] Observer: ban found %s, %s", jail.name, ip, btime)
            # if not permanent and ban time was not set - check time should be increased:
            if btime != -1 and ticket.getBanTime() is None:
                btime = self.incrBanTime(jail, btime, ticket)
                # if we should prolong ban time:
                if btime == -1 or btime > oldbtime:
                    ticket.setBanTime(btime)
            # if not permanent
            if btime != -1:
                bendtime = ticket.getTime() + btime
                logtime = (datetime.timedelta(seconds=int(btime)),
                    MyTime.time2str(bendtime))
                # check ban is not too old :
                if bendtime < MyTime.time():
                    logSys.debug('Ignore old bantime %s', logtime[1])
                    return False
            else:
                logtime = ('permanent', 'infinite')
            # if ban time was prolonged - log again with new ban time:
            if btime != oldbtime:
                logSys.notice("[%s] Increase Ban %s (%d # %s -> %s)", jail.name,
                    ip, ticket.getBanCount(), *logtime)
                # delayed prolonging ticket via actions that expected this (not later than 10 sec):
                logSys.log(5, "[%s] Observer: prolong %s in %s", jail.name, ip, (btime, oldbtime))
                self.add_timer(min(10, max(0, btime - oldbtime - 5)), self.prolongBan, ticket, jail)
            # add ticket to database, but only if was not restored (not already read from database):
            if jail.database is not None and not ticket.restored:
                # add to database always only after ban time was calculated an not yet already banned:
                jail.database.addBan(jail, ticket)
        except Exception as e:
            logSys.error('%s', e, exc_info=logSys.getEffectiveLevel()<=logging.DEBUG)

    def prolongBan(self, ticket, jail):
        """ Notify observer a ban occurred for ip

        Observer will check ip was known (bad) and possibly increase/prolong a ban time
        Secondary we will actualize the bans and bips (bad ip) in database
        """
        try:
            btime = ticket.getBanTime()
            ip = ticket.getIP()
            logSys.debug("[%s] Observer: prolong %s, %s", jail.name, ip, btime)
            # prolong ticket via actions that expected this:
            jail.actions._prolongBan(ticket)
        except Exception as e:
            logSys.error('%s', e, exc_info=logSys.getEffectiveLevel()<=logging.DEBUG)

# Global observer initial created in server (could be later rewriten via singleton)
class _Observers:
    def __init__(self):
        self.Main = None

Observers = _Observers()
The Government is Using Inflation to Take More of Our Money! The politicians think they have pulled a real sly trick on the American public with buying into and propagating the Boskin Commission’s Core Index method of reporting inflation. By using this method they have avoided raising COLA payments to Seniors on Social Security, Veterans pensions,et al. No one has been able to break this stance and many in the media simply take it as okay and meekly mumble that inflation is under control according to government reporting. This is blindness, self inflicted. To see the practical workings of the fallacy other than simply buying groceries and fuel please consider the following scenario. Since the essential component of the Core Index method of reporting inflation consists of excluding food and fuel in the calculation, lets look at how that works in the real world. Suppose you are a person working or running a business and you decide to eat or fill up your vehicle with gas. The contention of the Boskin Commission is that food and fuel are “volatile “ and therefore should be excluded when reporting inflation. But what if food and fuel prices are not fluctuating up and down but are rapidly rising as we see today? As you order your meal or fill up your tank more money is required from your pocket. This means that unless you are willing to live with a lower standard of living you must raise prices on the products or services you sell or if you are a working person your wages must go up to offset the higher cost of living you are experiencing. As a business owner you must raise prices to keep your bottom line from turning sour. The point here is that food and fuel drive all price increases if these two categories go up. Everybody eats and most everybody travels. So while the government says , inflation is low the reality it is increasing rapidly as food and fuel accelerate and is not reported. 
You would think if there was any concern with this core index reporting by the government they would insist on comparing core index figures with figures reporting inflation rates without core index figures. This would show what discrepancies are apparent and whether the core index has validity. The evasion the government is utilizing to dismiss the fallacy of the core index shows the government is not interested in honest reporting but hiding the inflation they are generating. Prices in food and fuel are not rising simply because of supply and demand. The way the market works, as supply increases prices drop if demand remains stable or drops. Likewise if demand soars and supply is short then prices rise as more people will pay more to get items in short supply. Now compound the issue with printed money saturating the market and you have all the ingredients that fuel inflation. For a benchmark of this phenomena take a look at the price of gold. Gold has been on a steady upward rise as government debt and infusion of printed dollars has hit the market. Paper money ,as more appears, is simply worth less than if it were in short supply. Simple economics for any commodity. The swelling rage toward government spending is commendable and necessary if we are to avoid total collapse. But if we are not going to insist on the honest reporting of inflation we cannot be prepared to stop the printing of money which is going unchallenged. The debt limit extension is a bellwether event which will show whether we have a new breed in Congress or more of the same imbeciles that think the extension of debt is necessary . Necessary for what? To continue deficit spending of course. If the debt limit is extended you can bet it will reach the same crisis in a few months and have to be revisited. If and when it is rejected will be the beginning of cost reduction by government. 
For those seeking more power and more political pandering this will be the battle of last resort and you can be sure those who want to redistribute and enhance their political power will fight tooth and nail to continue to raise the debt limit and go on with their deficit spending. They will talk of greater priorities, moral imperatives and how progress must come from a government that passes more laws, more regulations and more taxes. We have seen where their progress has brought us and it is to the precipice of socialistic failure. It is time to press on for honesty and accuracy in reporting of government spending and it should begin with the sham of the Core Index method of reporting inflation. If this continues to be ignored and glossed over the reality of hyper inflation will appear so rapidly it will be almost impossible to contain. Those of us who remember the Carter years, the inflation and high interest rates when the debt ceiling was much lower and deficits lower can only imagine the reaction to an inflation rate much higher and much more difficult to contain. I fear it really is too late to avoid this ugly event but the cause is not too hard to discern. A mesmerized press, a lying government and a preponderance of spineless ignorant politicians along with a band of compartmentalized academics ignoring the role of ideas and pretending to maintain the status quo while simultaneously calling themselves progressives is holding on to the stage as the play has run its course and the audience simply waits for the curtain to close. The promise of America is on the line and it is a sad testimonial for those who have been fortunate enough to experience her special status that she be allowed to sink into third world status because of a citizenry that is willing to accept a corrupt government reporting system on face value.
The problem with democracy is the danger of a complacent citizenry willing to allow politicians free rein while shackling themselves under the political yoke. Our Founding Fathers talked of necessary uprisings. They too must have seen the obtuse evasions of politicians willing to destroy by dishonesty and fraud. It is said experience keeps a dear school but fools will learn by no other. Analogously freedom cannot survive in a land of complacency. If the people who are paying the bills are content to have the spenders lie to them, take from them and lead them into a financial abyss, do these citizens deserve anything but what they have condoned?
def check_toxicity(x):
    """Map an oral LD50 value (mg/kg) to a toxicity label.

    A lower LD50 means a smaller dose is lethal, i.e. the substance is
    MORE toxic. Bands: <=5 "Very Toxic", <=50 "Toxic", <=300 "Moderately
    Toxic", <=2000 "Not Toxic", anything larger "Unknown". Non-numeric
    values (e.g. the "Varies" placeholder returned by get_ld50) yield
    "Unknown".

    Bug fixed: the original chain used impossible conditions such as
    ``x <= 5 and x >= 50``, so every value above 5 fell through to
    "Unknown"; comparing the string "Varies" numerically also raises
    TypeError on Python 3.
    """
    # Non-numeric sentinel ("Varies" etc.) — numeric comparison would raise.
    if not isinstance(x, (int, float)):
        return "Unknown"
    if x <= 5:
        return "Very Toxic"
    elif x <= 50:
        return "Toxic"
    elif x <= 300:
        return "Moderately Toxic"
    elif x <= 2000:
        return "Not Toxic"
    else:
        return "Unknown"

def get_ld50(x):
    """Return the oral LD50 (mg/kg) for a pesticide class name.

    Returns the string "Varies" when no single value applies and "NONE"
    when the class name is not in the table.
    """
    chemicals = {
        'Bipiridils': 157,
        'Anticoagulants': 280,
        'Botanic prod&biologSdTrF': "Varies",
        'Carbamates-insect-SdTr': 500,
        'Chlorinated Hydrocarbons': 18,
        'Urea derivates': 11000,
        'Uracil': 6000,
        'Mineral Oils': "Varies",
        'Triazines': 672,
        'Organo-Phosphates': 1300,
        'Inorganics': "Varies",
        'Botanic.Produc&Biologic.': "Varies",
        'Carbamates Herbicides': 30000,
        'Amides': 380,
        'Triazoles diazoles-SdTrF': 1453,
        'Disinfectants': 192,
        'Phenoxy Hormone Products': 930,
        'Benzimidazoles-SeedTrF': 385,
        'Carbamates Insecticides': 500,
        'Pyrethroids': 2000,
        'Dithiocarbamates-SeedTrF': 400,
        'Dinitroanilines': 10000,
        'Triazoles, Diazoles': 1453,
        'Diazines, Morpholines': 3900,
        'Organo-phospates-SdTr In': 1300,
        'Narcotics': 127,
        'Plant Growth Regulators': "Varies",
        'Benzimidazoles': 385,
        'Pyrethroids-SeedTr Ins': 2000,
        'Dithiocarbamates': 400,
        'Sulfonyl Ureas': 2000
    }
    return chemicals.get(x, "NONE")

def get_tox(chemical):
    """Convenience wrapper: toxicity label for a pesticide class name."""
    return check_toxicity(get_ld50(chemical))
(Area: NF 904 794) Partial examination of a cairn at Rudh' a' Charnain Mhoir upon the extreme point of Otternish, opposite Berneray Island, revealed the remains of a skeleton and typical iron rivets indicating the unburnt burial of a Norseman with his boat. It can be dated with certainty to before the 11th century. The site is 50 yards to the N of a smaller cairn marked by the OS map 'Human Remains found AD 1870' (NF97NW 2) (E Beveridge 1911). E Beveridge 1911; Proc Soc Antiq Scot 1912; H Shetelig 1940. The remains of this greatly mutilated cairn, now a low, oval, grass-covered mound, c.0.3m high, with stones protruding, are at NF 9043 7935. Visited by OS (J T T) 26 June 1965.
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import collections
import traceback
import zipfile

from . import parsedex
from .jvm import writeclass
from .mutf8 import decode
from .jvm.optimization import options

def read(fname, mode='rb'):
    """Return the entire contents of *fname* (binary by default)."""
    with open(fname, mode) as f:
        return f.read()

def translate(data, opts, classes=None, errors=None):
    """Translate every class in a dex blob to Java classfile bytes.

    Successful translations accumulate in *classes* (name -> bytes) and
    failures in *errors* (name -> traceback string); fresh ordered dicts
    are created when the caller does not pass them in. Returns the pair
    (classes, errors).
    """
    if classes is None:
        classes = collections.OrderedDict()
    if errors is None:
        errors = collections.OrderedDict()

    dex = parsedex.DexFile(data)
    for cls in dex.classes:
        unicode_name = decode(cls.name) + '.class'
        # Skip names already seen, whether they succeeded or failed before.
        if unicode_name in classes or unicode_name in errors:
            print('Warning, duplicate class name', unicode_name)
            continue

        try:
            classes[unicode_name] = writeclass.toClassFile(cls, opts)
        except Exception:
            errors[unicode_name] = traceback.format_exc()

        # Progress report every 1000 classes (successes + failures).
        done = len(classes) + len(errors)
        if done % 1000 == 0:
            print(done, 'classes processed')
    return classes, errors

def writeToJar(fname, classes):
    """Write *classes* (name -> classfile bytes) into a jar at *fname*
    (a path or an open file object)."""
    with zipfile.ZipFile(fname, 'w') as out:
        for unicode_name, data in classes.items():
            info = zipfile.ZipInfo(unicode_name)
            info.external_attr = 0o775 << 16 # set Unix file permissions
            # Don't bother compressing small files
            method = zipfile.ZIP_DEFLATED if len(data) > 10000 else zipfile.ZIP_STORED
            out.writestr(info, data, compress_type=method)

def main():
    """Command-line entry point: translate a .dex or .apk into a .jar."""
    parser = argparse.ArgumentParser(prog='enjarify', description='Translates Dalvik bytecode (.dex or .apk) to Java bytecode (.jar)')
    parser.add_argument('inputfile')
    parser.add_argument('-o', '--output', help='Output .jar file. Default is [input-filename]-enjarify.jar.')
    parser.add_argument('-f', '--force', action='store_true', help='Force overwrite. If output file already exists, this option is required to overwrite.')
    parser.add_argument('--fast', action='store_true', help='Speed up translation at the expense of generated bytecode being less readable.')
    args = parser.parse_args()

    # Gather every dex blob: all classes*.dex members of an apk, or the
    # single input file itself.
    dexs = []
    if args.inputfile.lower().endswith('.apk'):
        with zipfile.ZipFile(args.inputfile, 'r') as z:
            for name in z.namelist():
                if name.startswith('classes') and name.endswith('.dex'):
                    dexs.append(z.read(name))
    else:
        dexs.append(read(args.inputfile))

    # Exclusive mode requires 3.3+, so provide helpful error in this case
    if not args.force:
        try:
            FileExistsError
        except NameError:
            print('Overwrite protection requires Python 3.3+. Either pass -f or --force, or upgrade to a more recent version of Python. If you are using Pypy3 2.4, you need to switch to a nightly build or build from source. Or just pass -f.')
            return

    # Might as well open the output file early so we can detect existing file error
    # before going to the trouble of translating everything
    outname = args.output or args.inputfile.rpartition('/')[-1].rpartition('.')[0] + '-enjarify.jar'
    try:
        outfile = open(outname, mode=('wb' if args.force else 'xb'))
    except FileExistsError:
        print('Error, output file already exists and --force was not specified.')
        print('To overwrite the output file, pass -f or --force.')
        return

    opts = options.NONE if args.fast else options.PRETTY
    classes = collections.OrderedDict()
    errors = collections.OrderedDict()
    for data in dexs:
        translate(data, opts=opts, classes=classes, errors=errors)
    writeToJar(outfile, classes)
    outfile.close()
    print('Output written to', outname)

    for name, error in sorted(errors.items()):
        print(name, error)
    print('{} classes translated successfully, {} classes had errors'.format(len(classes), len(errors)))

if __name__ == "__main__":
    main()
I have always taken it for granted that Vancouver, BC is ‘just right there’ – just 3 hours away, give or take – an amazing example of great urban spaces and an incredible crossroads of many international influences. Not to mention what that means as far as food is concerned! And while I used to find myself up there quite often – it had been 4 years since my last visit. 4 years! So, when my buddy Lori, with an in at fancy Fairmont Hotels, asked if we would like a little getaway to Vancouver, we jumped at the chance. We arrived on a Friday night, donned our ‘going out’ clothes and headed down Robson to Denman in search of, what a friend said, was THE best Izakaya style Japanese restaurant in Vancouver – Kingyo Izakaya (website down as of this posting – 871 Denman Street). We don’t really have a lot of this style of Japanese dining in Seattle (will have to check out Wann in Belltown and report back) – where the atmosphere is welcoming and festive with small plates to share – think Irish pub meets tapas meets excellently executed Japanese food, where they all welcome you in a unison sing-song way (above, top, left), and you are kind of getting close. Kind of. Anywho – the food was excellent as the snacks kept rolling across the table – with everything from fresh tofu (above, bottom left – a treat I had two times this visit – also a rarity around these parts), to superb yellowtail sashimi, to a Japanese take on a Cesar salad, excellent prawns, to meats, to noodles, to… well, a little bit of everything and it was all good! The sake was the perfect accompaniment and the frozen grapes were a nice final touch (above, top, middle). Oh, and I can’t forget to mention – the perfectly yummy cocktail we had across the street at the Central Bistro while we were waiting on our table! The Twisted Ginger (above, bottom, middle) – with ginger, grapefruit, lychee, and a hint of rose along with vodka – it was lovely. 
Our Saturday began, with the lovely lingering perks of the Fairmont Hotel. The pool and the sauna both were wonderfully relaxing – but the real treat was the room service breakfast – who knew a table full of comforting food being rolled into your room, while you stay robe-enveloped, could make for such a lovely time?! (okay – all those folks that stay at that type of place regularly know this – i.e. not really me…). After a fair amount of lounging had been had, we met up with Lori and Brian for some wandering in the city, and a to-do list that included items such as: lay in grass and watch clouds (check), test out some swings (check), peruse public art (check), chocolate tasting at Mink (bottom left – check), stare at the water from as many park-like vantage points as possible (Vancouver has many – but, check). The new convention center is a great piece of public work for all you archi-types who haven’t been since the Olympics (above, bottom right, with Olympic cauldron in front). We ended this lovely day over a lingering meal at the Fairmont Hotel Waterfront’s Herons West Coast Kitchen – a tasting menu for the rest of the table was heavy on the meats, so I happily focused on the Salmon edamame bi bim bap. Huge portions were a surprise, and there were many leftovers. It was a beautiful day of relaxing, meandering, and meals. Apparently Shawn didn’t want the good feelings of the day to end, and after we bid goodnight to our buddies, he surprisingly had other plans for us in the “fancy bar” downstairs. Once I realized the drinks of choice that were ordered, champagne cocktails, I had a sneaking suspicion something serious was happening – but was utterly surprised to see Shawn down on one knee, and with a ring and everything! It was very sweet, and very Shawn – and quite natural to say “yes!” to.
Some of you may know that we like to say “our relationship is Canadian” due to its beginnings on a fateful camping trip to Vancouver Island 6.5 years ago, so this seemed like a very fitting proposal. And so, it looks like I will be marrying my best friend….. Before heading back down the road the next morning to Seattle and the phone calls to spread the news – there was meandering in the farmer’s market on Granville Island, a perennial favorite where a stop at Terra Breads is always a must (top collage, top right). The rustic pastries are always a great balance of sweet crispness, and the market is a perfect spot to stroll through for goodies to bring home. Thanks so much to Lori and Brian for their part in such a special trip!
# -*- coding: UTF-8 -*-
#
# Copyright 2010 Google, Inc.
#
# This file is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License, version 2,
# as published by the Free Software Foundation.
#
# In addition to the permissions in the GNU General Public License,
# the authors give you unlimited permission to link the compiled
# version of this file into combinations with other programs,
# and to distribute those combinations without any restriction
# coming from the use of this file. (The General Public License
# restrictions do apply in other respects; for example, they cover
# modification of the file, and distribution when not linked into
# a combined executable.)
#
# This file is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see the file COPYING. If not, write to
# the Free Software Foundation, 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301, USA.

"""Tests for Tag objects."""

from __future__ import absolute_import
from __future__ import unicode_literals

import unittest

import pygit2
from . import utils


__author__ = '[email protected] (Dave Borowitz)'

# SHA of the annotated tag named 'root' in the bare fixture repository.
TAG_SHA = '3d2962987c695a29f1f80b6c3aa4ec046ef44369'


class TagTest(utils.BareRepoTestCase):
    """Read, create and (attempt to) modify pygit2 Tag objects against the
    bare fixture repository supplied by BareRepoTestCase."""

    def test_read_tag(self):
        # Look up an existing annotated tag by SHA and verify every field.
        tag = self.repo[TAG_SHA]
        self.assertTrue(isinstance(tag, pygit2.Tag))
        self.assertEqual(pygit2.GIT_OBJ_TAG, tag.type)
        self.assertEqual(pygit2.GIT_OBJ_COMMIT, tag.target.type)
        self.assertEqual('root', tag.name)
        self.assertEqual(
            ('Dave Borowitz', '[email protected]', 1288724692, -420),
            tag.tagger)
        self.assertEqual('Tagged root commit.\n', tag.message)

        # The tag's target resolves to the tagged commit object.
        commit = tag.target
        self.assertEqual('Initial test data commit.\n', commit.message)

    def test_new_tag(self):
        name = 'thetag'
        target = 'af431f20fc541ed6d5afede3e2dc7160f6f01f16'
        message = 'Tag a blob.\n'
        tagger = ('John Doe', '[email protected]', 12347, 0)

        target_prefix = target[:5]
        too_short_prefix = target[:3]
        # A 3-character prefix is too short for create_tag to resolve.
        self.assertRaises(ValueError, self.repo.create_tag, name,
                          too_short_prefix, pygit2.GIT_OBJ_BLOB, tagger,
                          message)
        # A 5-character prefix is enough; the new tag's SHA is deterministic.
        sha = self.repo.create_tag(name, target_prefix, pygit2.GIT_OBJ_BLOB,
                                   tagger, message)
        tag = self.repo[sha]

        self.assertEqual('3ee44658fd11660e828dfc96b9b5c5f38d5b49bb', tag.hex)
        self.assertEqual(name, tag.name)
        self.assertEqual(target, tag.target.hex)
        self.assertEqual(tagger, tag.tagger)
        self.assertEqual(message, tag.message)
        self.assertEqual(name, self.repo[tag.hex].name)

    def test_modify_tag(self):
        name = 'thetag'
        target = 'af431f20fc541ed6d5afede3e2dc7160f6f01f16'
        message = 'Tag a blob.\n'
        tagger = ('John Doe', '[email protected]', 12347)

        # Tag objects are immutable: every attribute rejects assignment.
        tag = self.repo[TAG_SHA]
        self.assertRaises(AttributeError, setattr, tag, 'name', name)
        self.assertRaises(AttributeError, setattr, tag, 'target', target)
        self.assertRaises(AttributeError, setattr, tag, 'tagger', tagger)
        self.assertRaises(AttributeError, setattr, tag, 'message', message)


if __name__ == '__main__':
    unittest.main()
Open Mon-Fri 7am-8pm. Closed weekends and public holidays. Toilets. Secure Parking.
import asyncio
import logging
import struct

from bfnet.packets import PacketHandler, Packet, PacketButterfly
from bfnet import util

# Route library logging away (to /dev/null); only the console handler
# attached below to the root logger actually prints records.
logging.basicConfig(filename='/dev/null', level=logging.INFO)
formatter = logging.Formatter('%(asctime)s - [%(levelname)s] %(name)s - %(message)s')

root = logging.getLogger()

consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(formatter)
root.addHandler(consoleHandler)

# Create your event loop.
loop = asyncio.get_event_loop()

my_handler = PacketHandler.get_handler(loop=loop, log_level=logging.DEBUG)


# Create a new packet.
@my_handler.add_packet_type
class Packet0Echo(Packet):
    # Wire id for this packet type; packet 0 is the echo packet.
    id = 0

    def __init__(self, pbf):
        super().__init__(pbf)
        # Set our attributes.
        self.data_to_echo = ""

    def unpack(self, data: dict):
        """
        Unpack the packet: copy the 'echo' payload out of the decoded dict.
        Returns True to signal a successful unpack.
        """
        self.data_to_echo = data["echo"]
        return True

    def gen(self):
        """
        Pack a new packet: produce the dict representation to send.
        """
        return {"echo": self.data_to_echo}


@asyncio.coroutine
def main():
    # Start a TLS server on localhost:8001 using the test certificate/key pair.
    my_server = yield from my_handler.create_server(("127.0.0.1", 8001), ("keys/test.crt", "keys/test.key", None))

    @my_server.set_handler
    @asyncio.coroutine
    def handler(bf: PacketButterfly):
        # Echo loop: write every received packet straight back until the
        # connection yields a falsy packet (closed).
        while True:
            echopacket = yield from bf.read()
            if not echopacket:
                break
            bf.write(echopacket)


if __name__ == '__main__':
    loop.create_task(main())
    try:
        loop.run_forever()
    except KeyboardInterrupt:
        # Close the server.
        my_handler.stop()
        loop.close()
A classy navy diamond cotton and polyester upholstered dining chair with arms detailed with silver studs and piping. Black timber legs. Perfect for the living room.
# Letter counts of the written-out numbers 0-19 ("one" -> 3, "three" -> 5, ...).
# Entry 0 maps to 0 letters so a plain lookup works when no units digit remains.
NUMS = {
    0: 0,  # This value makes one condition less
    1: 3, 2: 3, 3: 5, 4: 4, 5: 4, 6: 3, 7: 5, 8: 5, 9: 4,
    10: 3, 11: 6, 12: 6, 13: 8, 14: 8, 15: 7, 16: 7, 17: 9, 18: 8, 19: 8,
}


def numeral_to_string(num):
    """Return the number of letters used when *num* is written out in
    British English words without spaces or hyphens (with "and" after the
    hundreds), e.g. 342 -> "threehundredandfortytwo" -> 23.
    """
    letters = 0
    if num > 999:
        letters += len("onethousand")
        num %= 1000
    if num > 99:
        letters += NUMS[num // 100]
        # "...hundredand..." unless the value is an exact hundred.
        letters += len("hundredand") if num % 100 else len("hundred")
        num %= 100
    if num > 19:
        tens = num // 10
        # eighty/ninety -> 6, seventy -> 7, forty/fifty/sixty -> 5,
        # twenty/thirty -> 6 letters.
        if tens >= 8:
            letters += 6
        elif tens == 7:
            letters += 7
        elif tens >= 4:
            letters += 5
        else:
            letters += 6
        num %= 10
    # NUMS handles whatever is left (0-19).
    return letters + NUMS[num]


def prob_017():
    """Project Euler 17: total letters used writing out 1..1000."""
    total = 0
    for n in range(1, 1001):
        total += numeral_to_string(n)
    return total


if __name__ == "__main__":
    print(prob_017())
Are there any setup fees? : MindFlash International Inc. There are no setup fees or hidden charges with the HeatMapOnline service plan.
import os
import sys
import glob
import subprocess
import signal
import tempfile
import time
import unittest


class KLTest(unittest.TestCase):
    """One test case per .kl file: runs it with the ``kl`` tool and diffs
    the captured stdout against a sibling ``.out`` reference file."""

    def __init__(self, klFilePath):
        super(KLTest, self).__init__()
        # Path of the KL source file this case executes.
        self.__klFilePath = klFilePath

    def id(self):
        # Test id is the file name without its extension.
        return os.path.split(self.__klFilePath)[1].partition('.')[0]

    def shortDescription(self):
        return self.id()

    def runTest(self):
        """Run the KL file and assert its output matches the .out file."""
        # The stage folder (two levels above the .kl file) holds the built
        # extensions the kl tool has to load.
        stageFolder = os.path.abspath(os.path.join(os.path.split(self.__klFilePath)[0], '..', '..', 'stage'))

        env = {}
        env.update(os.environ)
        # Make sure the stage folder is on the extension search path.
        if not env.has_key('FABRIC_EXTS_PATH'):
            env['FABRIC_EXTS_PATH'] = stageFolder
        else:
            env['FABRIC_EXTS_PATH'] += os.pathsep + stageFolder

        p = None

        # Forward SIGINT/SIGTERM to the child so an interrupted test run
        # does not leave kl processes behind.
        def handler(signum, frame):
            if p:
                os.kill(p.pid, signal.SIGTERM)
            sys.exit(0)

        signal.signal(signal.SIGINT, handler)
        signal.signal(signal.SIGTERM, handler)

        klArgs = ['kl'] + ['--showthread', '--loadexts', self.__klFilePath]

        # NOTE(review): the temp file is closed and reopened by name below;
        # on POSIX a TemporaryFile may already be unlinked at that point.
        # NamedTemporaryFile(delete=False) looks like the intended tool --
        # confirm on all supported platforms.
        logFile = tempfile.TemporaryFile()
        logFilePath = logFile.name
        logFile.file.flush()
        logFile.file.close()
        logFile = open(logFilePath, 'wb')

        p = subprocess.Popen(
            klArgs,
            env = env,
            cwd = os.path.abspath(os.path.split(__file__)[0]),
            shell=True,
            universal_newlines=True,
            stdout = logFile
        )

        # Poll once per second until the child process exits.
        while True:
            time.sleep(1)
            p.poll()
            if not p.returncode is None:
                break

        logFile.close()

        if not os.path.exists(logFilePath):
            self.fail('logFile was not created.')
            return

        currContent = open(logFilePath, 'rb').read()
        print '------- '+self.__klFilePath+' --------'
        print currContent
        print '----------------------------------'

        # The expected output lives next to the .kl file with .out extension.
        outFilePath = self.__klFilePath.rpartition('.')[0]+'.out'
        if not os.path.exists(outFilePath):
            self.fail('.out file does not exist.')

        prevContent = open(outFilePath, 'rb').read()
        # Normalize Windows line endings before comparing.
        prevContent = prevContent.replace('\r', '')
        self.assertEqual(currContent, prevContent)


if __name__ == '__main__':
    # Build one suite with a KLTest per .kl file under ./kl and run it.
    klFolder = os.path.join(os.path.split(os.path.abspath(__file__))[0], 'kl')
    klFiles = glob.glob(os.path.join(klFolder, '*.kl'))
    suite = unittest.TestSuite()
    for klFile in klFiles:
        test = KLTest(klFile)
        suite.addTest(test)
    runner = unittest.TextTestRunner()
    result = runner.run(suite)
We have established a developmentally appropriate curriculum based on a child’s intellectual, social, and physical needs and which is designed to encourage child-initiated learning activities within a supportive environment. Our modern teaching philosophy incorporates the best of the European Kindergarten model with the American school. The KinderHaus is an inquiry-based environment that supports children’s development of problem-solving and creative thinking skills where activities and projects derive from children’s interest and experiences. KinderHaus teachers share the philosophy that children are actively engaging with their environment and are curious about the world around them, ask questions and construct own knowledge through investigation and exploration. We believe that children’s play is an expression of intelligence and growth and that social interactions promote a child’s healthy development. The emergent curriculum is based on children’s input and interests as well as seasonal observations and cultural festivities. In the classroom, teachers use project-based learning to support children’s cooperation during interdisciplinary activities, related to math, science, art, language, movement and music. KinderHaus children develop a love for learning, gain independence and confidence in their own abilities which KinderHaus values as the strong foundation for learning in the primary grades. KinderHaus children become curious, motivated and competent learners. The curriculum offers children activities and materials that stimulate imagination, build independence, and present ample opportunities for exploration and discovery. Through a wide range of activities we help children gain confidence as they become competent learners, adapt to group experiences, and learn to respect the feelings of others. We believe that children’s play is an expression of intelligence and growth, and that young children learn best through hands-on, concrete experiences. 
Learning is both an individual and a social process. Young children are excellent language learners when given the chance for a consistent and early exposure through native speakers. KinderHaus teachers put a strong focus on language acquisition and support each child’s language development. The KinderHaus German Immersion environment enables children to become confident in their ability to speak and understand the German language, converse about developmentally-appropriate school topics, experience aspects of German community and culture while learning in a warm and caring environment. The KinderHaus parent community is an important, vibrant and diverse partner in children’s multilingual upbringing that supports the children in becoming the future global citizens. By learning German, we are adding countless opportunities to children’s lives that include many benefits for personal and professional growth. Regardless of whether or not one of your home language(s) is German, parents can support the language journey of their child by sharing an encouraging attitude towards the German language and culture. KinderHaus truly embraces the different linguistic and cultural backgrounds within our community and celebrates the diversity that becomes visible in our curriculum and classroom. KinderHaus teachers share the belief that young children benefit greatly from building a strong connection to nature and during daily outdoor experiences, we seek to instill a love and respect for nature and a compassion towards animals and our environment. Based on the understanding that children thrive in all developmental areas through close experiences with nature, KinderHaus follows a seasonal curriculum which includes outdoor times in all types of weather. 
Spending time in nearby Prospect Park or the backyard allows teachers and children to take learning to a higher level, collaborating on communal goals when building and crafting with natural items and building self-esteem and independence while being physically active outdoors.
# coding: utf-8
# Module: blog_tags
# Created on: 25.11.2015
# Author: Roman Miroshnychenko aka Roman V.M. ([email protected])
"""Template tags for the blog application: sidebar digests, main-menu
links and paginator rendering."""

import json
from collections import namedtuple
from urllib.parse import quote_plus
from django import template
from django.conf import settings
# NOTE(review): django.core.urlresolvers was removed in Django 2.0
# (moved to django.urls) -- confirm the pinned Django version.
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.core.paginator import EmptyPage
from ..models import Category, Post

register = template.Library()

# objects: items shown in a sidebar widget; more: URL of a "More" page,
# or None when everything already fits into the widget.
SideBarObjects = namedtuple('SideBarObjects', ['objects', 'more'])
# One entry of the site main menu.
MenuLink = namedtuple('MenuLink', ['caption', 'url'])


@register.simple_tag
def get_categories():
    """
    Simple tag

    :return: list of non-empty categories ordered by post count in desc. order
    """
    return Category.objects.ordered_by_post_count()


@register.simple_tag
def get_posts_digest(featured=False, posts_count=3):
    """
    Simple tag

    Get the lists of the latest posts (general of featured)
    for the blog sidebar

    :param featured: if ``True`` featured posts digest is returned
    :param posts_count: the number of posts to include in a digest
    :return: the digest of recent posts and "More" link
    :rtype: :class:`SideBarObjects`
    """
    if featured:
        posts = Post.objects.featured()
        more_link = reverse('blog:featured_posts')
    else:
        posts = Post.objects.published()
        more_link = reverse('blog:home')
    # Show the "More" link only when there are posts beyond the digest.
    more = more_link if posts.count() > posts_count else None
    return SideBarObjects(posts[:posts_count], more)


@register.simple_tag
def get_archive_digest(months_count=6):
    """
    Simple tag

    :param months_count: the number of month to include in a digest
    :return: the list of the most recent months from the blog archive
        for the blog sidebar
    :rtype: :class:`SideBarObjects`
    """
    # Fetch one extra month so we can tell whether a "More" link is needed.
    months = Post.objects.published().dates(
        'date_published', 'month', order='DESC')[:months_count + 1]
    more = reverse('blog:archive') if len(months) > months_count else None
    return SideBarObjects(months[:months_count], more)


@register.simple_tag
def get_blog_menu_links():
    """
    Simple tag

    :return: blog menu links for the site main menu.
    """
    featured = Post.objects.featured()
    # The "Featured Posts" entry gets a None url when nothing is featured.
    featured_link = reverse('blog:featured_posts') if featured.exists() else None
    return (
        MenuLink(_('Recent Posts'), reverse('blog:home')),
        MenuLink(_('Featured Posts'), featured_link),
        MenuLink(_('Categories'), reverse('blog:categories_list')),
        MenuLink(_('Archive'), reverse('blog:archive'))
    )


@register.inclusion_tag('{0}/paginator.html'.format(settings.CURRENT_SKIN),
                        takes_context=True)
def render_paginator(context, adjacent_pages=2):
    """
    Inclusion tag

    Renders paginator for multi-page lists.

    A skin must provide the respective paginator template.
    Adds pagination context variables for use in displaying first,
    adjacent and last page links in addition to those created by the
    object_list generic view.

    :param context: parent template context
    :param adjacent_pages: the number of pages adjacent to the current
    :return: rendered paginator html code
    """
    # Window of page links around the current page; snap the window start
    # to 1 when it would begin near the first page anyway.
    start_page = max(context['page_obj'].number - adjacent_pages, 1)
    if start_page <= 3:
        start_page = 1
    end_page = context['page_obj'].number + adjacent_pages + 1
    if end_page >= context['paginator'].num_pages - 1:
        end_page = context['paginator'].num_pages + 1
    # Clamp the window to the valid page range.
    page_numbers = [n for n in range(start_page, end_page)
                    if n in range(1, context['paginator'].num_pages + 1)]
    page_obj = context['page_obj']
    paginator = context['paginator']
    # next/previous page numbers, or None at either end of the list.
    try:
        next_ = context['page_obj'].next_page_number()
    except EmptyPage:
        next_ = None
    try:
        previous = context['page_obj'].previous_page_number()
    except EmptyPage:
        previous = None
    return {
        'page_obj': page_obj,
        'paginator': paginator,
        'page': context['page_obj'].number,
        'pages': context['paginator'].num_pages,
        'page_numbers': page_numbers,
        'next': next_,
        'previous': previous,
        'has_next': context['page_obj'].has_next(),
        'has_previous': context['page_obj'].has_previous(),
        # Show shortcut links when the first/last page fall outside the window.
        'show_first': 1 not in page_numbers,
        'show_last': context['paginator'].num_pages not in page_numbers,
        'request': context['request'],
        'query': quote_plus(context['query']),
    }
@register.simple_tag(takes_context=True)
def check_blog_url(context):
    """
    Check if a current URL belong to blog application

    Compares the request path against the urls of the blog menu links.

    :param context: template context
    :type context: dict
    :return: check result
    :rtype: bool
    """
    return context['request'].path in [item.url for item in get_blog_menu_links()]


@register.inclusion_tag('common_content/json-ld.html', takes_context=True)
def blog_json_ld(context):
    """
    Renders JSON-LD for the blog

    :param context: parent template context
    :type context: dict
    :return: context for json-ld template
    :rtype: dict
    """
    site_url = '{}://{}'.format(
        context['request'].scheme,
        context['request'].get_host()
    )
    # Fall back to the default logo when no site logo is configured
    # (missing site_logo raises AttributeError on .url).
    try:
        site_logo_url = site_url + context['site_config'].site_logo.url
    except AttributeError:
        site_logo_url = site_url + settings.DEFAULT_LOGO
    json_ld = {
        '@context': 'http://schema.org',
        '@type': 'Blog',
        'name': context['site_config'].site_name,
        'url': site_url,
        'description': context['site_config'].site_tagline,
        'publisher': {
            '@type': 'Organization',
            'name': context['site_config'].site_name,
            'logo': {
                '@type': 'imageObject',
                'url': site_logo_url
            }
        }
    }
    return {'json_ld': json.dumps(json_ld, indent=2)}


@register.inclusion_tag('common_content/json-ld.html', takes_context=True)
def blog_post_json_ld(context):
    """
    Renders JSON-LD for a single blog post

    :param context: parent template context
    :type context: dict
    :return: context for json-ld template
    :rtype: dict
    """
    site_url = '{}://{}'.format(
        context['request'].scheme,
        context['request'].get_host()
    )
    # Fall back to default images when the post/site has none configured.
    try:
        featured_image_url = site_url + context['post'].featured_image.url
    except AttributeError:
        featured_image_url = site_url + settings.DEFAULT_FEATURED_IMAGE
    try:
        site_logo_url = site_url + context['site_config'].site_logo.url
    except AttributeError:
        site_logo_url = site_url + settings.DEFAULT_LOGO
    json_ld = {
        '@context': 'https://schema.org',
        '@type': 'BlogPosting',
        'headline': context['post'].title,
        'description': context['post'].meta_description,
        # Dates are emitted as ISO-style strings, or None when unset.
        'datePublished': (context['post'].date_published.strftime('%Y-%m-%d')
                          if context['post'].date_published else None),
        'dateModified': (context['post'].last_updated.strftime('%Y-%m-%d')
                         if context['post'].last_updated else None),
        'image': {
            '@type': 'imageObject',
            'url': featured_image_url,
        },
        'publisher': {
            '@type': 'Organization',
            'name': context['site_config'].site_name,
            'logo': {
                '@type': 'imageObject',
                'url': site_logo_url
            }
        },
        'author': {
            '@type': 'Person',
            'name': 'Roman Miroshnychenko'  # todo: implement Post.author field
        },
        'keywords': ', '.join([category.name for category in context['post'].categories.all()]),
        'mainEntityOfPage': site_url + context['request'].path,
        'articleBody': context['post'].content
    }
    return {'json_ld': json.dumps(json_ld, indent=2)}
It is over — the so-called friendship that existed between Matt Lauer and his former colleagues Savannah Guthrie and Hoda Kotb has come to an end. Believe it or not, it was just a few weeks ago that Guthrie was holding Kotb’s hand as she read the news to the world. Her very good friend and co-host, Lauer, was fired from TODAY after several women came forward and revealed that the TV personality sexually assaulted them. An emotional Guthrie showed support for the victims while claiming that Lauer is still a friend she loves. Those days are over. A source spoke to Life and Style and revealed that the two women no longer talk to Lauer. The source took it a step further by saying that they are relieved that Lauer is gone. It appears that they never really liked him. They found that he made the atmosphere heavy, it was tough to be around him because he was very demanding and controlling. It is being claimed that the two female co-chairs are getting along very well and they plan to continue hosting the morning show together. The tipster went on to say that staff members at the Peacock Network all agree that Lauer had to leave after the disturbing allegations made against him and no one will miss him. According to the publication, Guthrie and Kotb no longer feel sorry for Lauer after seeing the mountain of evidence against him. Do you enjoy TODAY without Lauer? Chrissy Teigen Posts Adorable Vid Of Daughter Luna Playing With Her Mini Food Truck - Check It Out!
import gevent
import logging
import requests

from gevent.queue import Queue, Full
from gevent.event import Event

from .contrib.client import APICLient
from .exceptions import LBMismatchError

# Worker delays, in seconds.
QUEUE_FULL_DELAY = 5
EMPTY_QUEUE_DELAY = 1
ON_EMPTY_DELAY = 10
FORWARD_WORKER_SLEEP = 5
BACKWARD_WOKER_DELAY = 1
WATCH_DELAY = 1

logger = logging.getLogger(__name__)


class APIRetreiver(object):
    """Streams tender pages from the API 'changes' feed.

    Two greenlets feed an internal queue: a backward worker walks from the
    initial sync point to the start of the feed (then exits), while a
    forward worker keeps following new changes. ``get_tenders()`` yields
    queued pages and restarts dead workers.
    """

    def __init__(self, config, **options):
        """
        :param config: dict with ``api_host``, ``api_version``, ``api_key``
            and optional ``queue_max_size`` (default 250)
        :param options: optional ``api_extra_params`` (dict merged into every
            feed request) and ``filter_callback`` (applied to each page's
            data before queueing; default: identity)
        :raises TypeError: if config is not a dict
        """
        if not isinstance(config, dict):
            raise TypeError(
                "Expected a dict as config, got {}".format(type(config))
            )
        self.api_host = config.get('api_host')
        self.api_version = config.get('api_version')
        self.api_key = config.get('api_key')

        if 'api_extra_params' in options:
            self._extra = options.get('api_extra_params')

        self.tender_queue = Queue(maxsize=config.get('queue_max_size', 250))

        self.filter_callback = options.get('filter_callback', lambda x: x)

        self.forward_worker_dead = Event()
        self.forward_worker_dead.set()
        self.backward_worker_dead = Event()
        self.backward_worker_dead.set()
        # Bug fix: both workers set this event on LBMismatchError, but it
        # was never created, so the recovery path crashed with
        # AttributeError instead of flagging a client re-init.
        self.reinit_clients = Event()

        self._init_clients()

    def _init_clients(self):
        """Create the forward/backward clients sharing one cookie jar.

        Both clients must hit the same load-balancer node; the shared
        cookie is the marker the workers compare against to detect a
        mismatch (LBMismatchError).
        """
        logger.info('Sync: Init clients')
        self.forward_client = APICLient(
            self.api_key,
            self.api_host,
            self.api_version
        )

        self.backward_client = APICLient(
            self.api_key,
            self.api_host,
            self.api_version
        )
        self.origin_cookie = self.forward_client.session.cookies
        self.backward_client.session.cookies = self.origin_cookie

    def _get_sync_point(self):
        """Fetch the first backward page and derive both workers' offsets.

        The first page of data is queued immediately.

        :return: (forward_params, backward_params) request dicts
        """
        logger.info('Sync: initializing sync')
        forward = {'feed': 'changes'}
        backward = {'feed': 'changes', 'descending': '1'}
        if getattr(self, '_extra', ''):
            [x.update(self._extra) for x in [forward, backward]]
        r = self.backward_client.get_tenders(backward)
        # Backward continues into history; forward picks up from "now".
        backward['offset'] = r['next_page']['offset']
        forward['offset'] = r['prev_page']['offset']
        # Fix: this trace was logged at ERROR level; it is diagnostic only.
        logger.debug(forward)
        self.tender_queue.put(filter(self.filter_callback, r['data']))
        logger.info('Sync: initial sync params forward: '
                    '{}, backward: {}'.format(forward, backward))
        return forward, backward

    def _start_sync_workers(self):
        """Compute the sync point and spawn both worker greenlets."""
        forward, backward = self._get_sync_point()
        self.workers = [
            gevent.spawn(self._forward_worker, forward),
            gevent.spawn(self._backward_worker, backward),
        ]

    def _forward_worker(self, params):
        """Follow the feed forward, pushing filtered pages into the queue.

        Runs until killed; sleeps while the feed has no new entries and
        flags ``reinit_clients`` on a load-balancer cookie mismatch.
        """
        worker = "Forward worker:"
        logger.info('{} starting'.format(worker))
        r = self.forward_client.get_tenders(params)
        if self.forward_client.session.cookies != self.origin_cookie:
            raise LBMismatchError

        try:
            while True:
                try:
                    while r['data']:
                        try:
                            self.tender_queue.put(
                                filter(self.filter_callback, r['data'])
                            )
                        except Full:
                            # Wait for consumers to drain the queue, retry.
                            while self.tender_queue.full():
                                gevent.sleep(QUEUE_FULL_DELAY)
                            self.tender_queue.put(
                                filter(self.filter_callback, r['data'])
                            )
                        params['offset'] = r['next_page']['offset']
                        r = self.forward_client.get_tenders(params)
                        if self.forward_client.session.cookies != self.origin_cookie:
                            raise LBMismatchError

                    if r['data']:
                        gevent.sleep(FORWARD_WORKER_SLEEP)
                    logger.warn('{} got empty listing. Sleep'.format(worker))
                    gevent.sleep(ON_EMPTY_DELAY)

                except LBMismatchError:
                    # Fix: message previously said "backward worker".
                    logger.info('LB mismatch error on forward worker')
                    self.reinit_clients.set()
        except Exception as e:
            logger.error("{} down! Error: {}".format(worker, e))
            self.forward_worker_dead.set()
        else:
            logger.error("{} finished.".format(worker))

    def _backward_worker(self, params):
        """Walk the feed backwards until history is exhausted.

        Terminates once an empty page is received.
        """
        worker = "Backward worker: "
        # Fix: typo "staring" in the startup message.
        logger.info('{} starting'.format(worker))
        try:
            while True:
                try:
                    r = self.backward_client.get_tenders(params)
                    if not r['data']:
                        logger.debug('{} empty listing..exiting'.format(worker))
                        break
                    gevent.sleep(BACKWARD_WOKER_DELAY)
                    if self.backward_client.session.cookies != self.origin_cookie:
                        raise LBMismatchError
                    try:
                        self.tender_queue.put(
                            filter(self.filter_callback, r['data'])
                        )
                    except Full:
                        # Wait for consumers to drain the queue, retry.
                        logger.error('full queue')
                        while self.tender_queue.full():
                            gevent.sleep(QUEUE_FULL_DELAY)
                        self.tender_queue.put(
                            filter(self.filter_callback, r['data'])
                        )
                    params['offset'] = r['next_page']['offset']

                except LBMismatchError:
                    logger.info('{} LB mismatch error'.format(worker))
                    if not self.reinit_clients.is_set():
                        self.reinit_clients.set()
        except Exception as e:
            logger.error("{} down! Error: {}".format(worker, e))
            # Bug fix: previously flagged the *forward* worker as dead here,
            # leaving the backward worker's death unrecorded.
            self.backward_worker_dead.set()
        else:
            logger.error("{} finished.".format(worker))

    def _restart_workers(self):
        """Re-create the clients and restart both workers from a fresh
        sync point."""
        self._init_clients()
        gevent.killall(self.workers)
        self._start_sync_workers()
        return self.workers

    def get_tenders(self):
        """Generator yielding queued tender pages.

        Starts the workers, restarts them when they die and drains the
        internal queue as items become available.
        """
        self._start_sync_workers()
        forward, backward = self.workers
        try:
            while True:
                if self.tender_queue.empty():
                    gevent.sleep(EMPTY_QUEUE_DELAY)
                # NOTE(review): the restart conditions are asymmetric --
                # forward restarts even after a successful finish, backward
                # only on failure (it legitimately exits when history is
                # exhausted). Confirm this is intended.
                if (forward.dead or forward.ready()) or \
                        (backward.dead and not backward.successful()):
                    forward, backward = self._restart_workers()
                while not self.tender_queue.empty():
                    yield self.tender_queue.get()
        except Exception as e:
            logger.error(e)
Audizine Forums > Main Model-Line Discussion > B6/B7 S4/RS4 > B6/B7 S4/RS4 Classifieds > Wanted: WTB: B6 s4 fly wheel needed ASAP. View Full Version : Wanted: WTB: B6 s4 fly wheel needed ASAP. Stock b6 s4 fly wheel.
"""empty message Revision ID: 0009_created_by_for_jobs Revises: 0008_archive_template Create Date: 2016-04-26 14:54:56.852642 """ # revision identifiers, used by Alembic. revision = '0009_created_by_for_jobs' down_revision = '0008_archive_template' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('jobs', sa.Column('created_by_id', postgresql.UUID(as_uuid=True), nullable=True)) op.create_index(op.f('ix_jobs_created_by_id'), 'jobs', ['created_by_id'], unique=False) op.create_foreign_key(None, 'jobs', 'users', ['created_by_id'], ['id']) op.get_bind() op.execute('UPDATE jobs SET created_by_id = \ (SELECT user_id FROM user_to_service WHERE jobs.service_id = user_to_service.service_id LIMIT 1)') op.alter_column('jobs', 'created_by_id', nullable=False) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_constraint(None, 'jobs', type_='foreignkey') op.drop_index(op.f('ix_jobs_created_by_id'), table_name='jobs') op.drop_column('jobs', 'created_by_id') ### end Alembic commands ###
GLENN’S TERROR-IFIC TIPS FOR A HAPPY HALLOWEEN! GLENN’S TERROR-IFIC TIPS FOR A HAPPY HALLOWEEN ! We are always talking about setting a theme for your event, but Halloween is a time when you can really go all out ! Whether you want to replicate your favourite scary movie, pick a letter of the alphabet that everyone sticks to, or make it more personal to you or your friends/ organisation – there are hours of fun for your guests before the party even starts. Creating your own outfit and getting ready is half the fun, but the best bit is when you arrive and see what everyone else has conjured up – it’s the best conversation starter! At Sonix, we have a host of entertainment to offer for this time of year, including Bone-Rattling Rock n’ Roll Bingo; Thrilling Treasure Hunt; Crazy Casino Night; Rib-tickling Race night; Murder Mystery night and more ! Contact Fiona for more info [email protected] or call 087-738-2090. For more ideas or to ask about how we can help you or your organisation celebrate Halloween, just contact [email protected] or call 087-738-2090.
# -*- coding: utf8 -*- __author__ = 'sergey' from dedupsqlfs.db.mysql.table import Table class TableLink( Table ): _table_name = "link" def create( self ): c = self.getCursor() # Create table c.execute( "CREATE TABLE IF NOT EXISTS `%s` (" % self.getName()+ "`inode_id` BIGINT UNSIGNED PRIMARY KEY, "+ "`target` BLOB NOT NULL"+ ")"+ self._getCreationAppendString() ) return def insert( self, inode, target): """ :param target: bytes :return: int """ self.startTimer() cur = self.getCursor() cur.execute( "INSERT INTO `%s` " % self.getName()+ " (`inode_id`, `target`) VALUES (%(inode)s, %(target)s)", { "inode": inode, "target": target } ) item = cur.lastrowid self.stopTimer('insert') return item def find_by_inode( self, inode): """ :param inode: int :return: int """ self.startTimer() cur = self.getCursor() cur.execute( "SELECT `target` FROM `%s` " % self.getName()+ " WHERE `inode_id`=%(inode)s", { "inode": inode } ) item = cur.fetchone() if item: item = item["target"] self.stopTimer('find_by_inode') return item def get_count(self): self.startTimer() cur = self.getCursor() cur.execute("SELECT COUNT(1) as `cnt` FROM `%s`" % self.getName()) item = cur.fetchone() if item: item = item["cnt"] else: item = 0 self.stopTimer('get_count') return item def get_inode_ids(self, start_id, end_id): self.startTimer() cur = self.getCursor() cur.execute("SELECT `inode_id` FROM `%s` " % self.getName()+ " WHERE `inode_id`>=%s AND `inode_id`<%s", (start_id, end_id,)) nameIds = set(str(item["inode_id"]) for item in cur) self.stopTimer('get_inode_ids') return nameIds def remove_by_ids(self, inode_ids): self.startTimer() count = 0 id_str = ",".join(inode_ids) if id_str: cur = self.getCursor() cur.execute("DELETE FROM `%s` " % self.getName()+ " WHERE `inode_id` IN (%s)" % (id_str,)) count = cur.rowcount self.stopTimer('remove_by_ids') return count pass
Find out more about our organization, mission, methods, and the results of our SSRL Team. Intrigued and want to help us take the next step? You can become a contributor to our cause through a partnership or donations.
#!/usr/bin/python
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#


from rbuild_test import rbuildhelp

from rbuild import errors
from rbuild import pluginapi


class MyPlugin(pluginapi.Plugin):
    """Minimal plugin exposing one API call for the hook tests below."""
    foo = 'bar'

    def myApiCall(self, *args, **kw):
        # Prints its arguments so tests can assert on captured output.
        print 'api call: %s, %s' % (args, kw)
        return 'return value'


class PluginTest(rbuildhelp.RbuildHelper):
    """Tests for plugin pre-/post-hook installation and error handling."""

    def myHook(self, *args, **kw):
        # Prehook: replaces the first positional arg and renames kw -> newkw.
        args = ('foo', ) + args[1:]
        return args, {'newkw' : kw['kw']}

    def myHook2(self, *args, **kw):
        # Second prehook: prepends 'barz ' and renames newkw -> newkw2.
        # Installed after myHook, so it receives myHook's output.
        args = ('barz ' + args[0], ) + args[1:]
        return args, {'newkw2' : kw['newkw']}

    def myPostHook(self, rv, *args, **kw):
        # Posthook: rewrites the api call's return value.
        return rv+' augmented'

    def myPostHookError(self, rv, *args, **kw):
        raise KeyError

    def brokenHook(self, *args, **kw):
        # Invalid prehook: must return (args, kwargs), not an int.
        return 3

    def testPrehooks(self):
        """Prehooks chain in install order and may rewrite args/kwargs."""
        plugin = MyPlugin('plugin', 'path', None)
        rc, txt = self.captureOutput(plugin.myApiCall, 'arg1', kw='kw1')
        assert(rc == 'return value')
        self.assertEquals(txt, "api call: ('arg1',), {'kw': 'kw1'}\n")

        plugin._installPrehook('myApiCall', self.myHook)
        rc, txt = self.captureOutput(plugin.myApiCall, 'arg1', kw='kw1')
        assert(rc == 'return value')
        self.assertEquals(txt, "api call: ('foo',), {'newkw': 'kw1'}\n")
        plugin._installPrehook('myApiCall', self.myHook2)
        rc, txt = self.captureOutput(plugin.myApiCall, 'arg1', kw='kw1')
        self.assertEquals(txt, "api call: ('barz foo',), {'newkw2': 'kw1'}\n")
        # A hook returning the wrong shape raises InvalidHookReturnError.
        plugin._installPrehook('myApiCall', self.brokenHook)
        err = self.discardOutput(
            self.assertRaises, errors.InvalidHookReturnError,
            plugin.myApiCall, 'arg1', kw='kw1')
        self.assertEquals(err.hook, self.brokenHook)
        # after removing the broken hook this should work.
        plugin._getPrehooks('myApiCall').remove(self.brokenHook)
        rc, txt = self.captureOutput(plugin.myApiCall, 'arg1', kw='kw1')

    def testPrehookErrors(self):
        """Installing or fetching prehooks for unknown methods raises."""
        plugin = MyPlugin('plugin', 'path', None)
        err = self.assertRaises(errors.InvalidAPIMethodError,
                plugin._installPrehook, 'nosuchApi', self.myHook)
        self.assertEquals(err.method, 'nosuchApi')
        err = self.assertRaises(errors.InvalidAPIMethodError,
                plugin._getPrehooks, 'nosuchApi')
        self.assertEquals(err.method, 'nosuchApi')

    def testPosthooks(self):
        """Posthooks can rewrite the api call's return value."""
        plugin = MyPlugin('plugin', 'path', None)
        plugin._installPosthook('myApiCall', self.myPostHook)
        rc, txt = self.captureOutput(plugin.myApiCall, 'arg1', kw='kw1')
        assert(rc == 'return value augmented')

    def testPosthookErrors(self):
        """A raising posthook propagates; removing it restores the call."""
        plugin = MyPlugin('plugin', 'path', None)
        plugin._installPosthook('myApiCall', self.myPostHookError)
        err = self.discardOutput(
            self.assertRaises, KeyError,
            plugin.myApiCall, 'arg1', kw='kw1')
        # after removing the broken hook this should work.
        plugin._getPosthooks('myApiCall').remove(self.myPostHookError)
        rc, txt = self.captureOutput(plugin.myApiCall, 'arg1', kw='kw1')
        assert(rc == 'return value')

        err = self.assertRaises(errors.InvalidAPIMethodError,
                plugin._installPosthook, 'nosuchApi', self.myPostHook)
        self.assertEquals(err.method, 'nosuchApi')
        err = self.assertRaises(errors.InvalidAPIMethodError,
                plugin._getPosthooks, 'nosuchApi')
        self.assertEquals(err.method, 'nosuchApi')
Car accidents are the No. 1 cause of occupational fatalities and are in the middle of the pack on most top 10 lists of occupational injuries. These often are severe injuries with life-altering consequences. Adding insult to injury is the fact that one of these consequences often is a diminished ability to earn a living – or in some cases the complete inability to bring home a paycheck. While accidents that happen during the commute to and from work are generally not covered by workers’ compensation, crashes that occur while on the job generally are covered. If you have been hurt in a work-related car accident, you should talk to an experienced attorney to learn more about your legal rights and options. Remember that workers’ compensation is a no-fault system, which means you could still recover workers’ compensation benefits even if the crash was your fault. The legal team at the Kenton Koszdin Law Office concentrates on helping injured and disabled workers pursue the compensation they deserve after work-related car accidents. Let us guide you through the process, handle all your paperwork, and fight for the compensation you deserve. Call or contact us online today to schedule a free consultation. When Is a Car Accident Work-Related? Statisticians have affirmed that car accidents are the leading cause of occupational injuries, but what qualifies as a work-related auto accident is a matter for lawyers and judges, not number crunchers. That distinction makes all the difference when it comes to financial compensation for medical care and lost wages. The employee is executing his or her duties in the course of a workday. The employee is performing a work-related errand while commuting to or from work. Broken bones: Beyond casts, there can be surgeries, often with insertion of hardware to hold bones in place. Recovery can take months and mean lingering pain, nerve or blood vessel damage, and eventually arthritis. 
Blood loss, blood clots, or infection are among causes of death for fracture victims. Back and spinal cord injuries: Degree of injury dictates complexity, length, and effectiveness of treatment. Care can include painful physical and occupational therapy and even vocational rehabilitation. Partial or full paralysis is possible. Crashes also yield potentially crippling tears, sprains, and strains of muscles, tendons, and ligaments in the back. Traumatic head and brain injuries: Survive the emergency care stage and you could face physical therapy, occupational therapy, speech or language therapy, and vocational therapy during an excruciating recovery process that can include multiple surgeries. Loss of cognitive capacity and personality characteristics is likely, along with loss of sensation, motor skills, sight, and hearing. Psychological effects can range from slight depression to a lifetime in a vegetative state. Whiplash: Symptoms can range from neck pain and stiffness to headaches, dizziness, tingling or numbness in the arms, blurred vision, ringing in the ears, trouble sleeping, difficulty concentrating, problems with memory, and depression. Effects can be short-term, but some people suffer long-term chronic pain and other complications. Burns: Along with first-, second-, and third-degree burns, accident victims can suffer smoke and heat inhalation that can cause internal burns, carbon-monoxide poisoning, and damage from inhaled toxins. Treatment can mean months of hospitalization fighting serious infection and undergoing multiple skin grafts and other surgeries, psychological/ psychiatric care, a variety of rehabilitative therapies. Sprains and strains: These are soft-tissue injuries such as whiplash. A sprain is the stressing or tearing of ligaments. Symptoms can be slow to appear, and the pain can be chronic. Strains are the stressing or tearing of tendon and/or muscle. 
Treatments for both run the gamut from ice packs and elevation of the injured area to painkillers, physical therapy, and surgeries. Cuts and bruises: Flesh wounds that don’t require stitches can still cause infection. Scrapes and bruises also can signal serious internal injuries. A medical assessment can identify underlying injuries that could go unnoticed and become more problematic without care. One signature effect of a car crash is that damage done in seconds can last a lifetime – or end one. Partial or full paralysis and death are among potential consequences. Crippling financial damage can result, too, so it helps to know how to position yourself for financial recovery after a crash. The first and most important thing to do after a car accident is to get all necessary medical help. If able, call the police and make sure an accident report is filed. There are other steps that can be taken to help you make a legal case for compensation if it comes to that. Take notes and photos, including details about potential witnesses. Save all paperwork the incident generates, including receipts for related expenses, and keep a record of all lost wages. Be careful what you say, and don’t admit fault. Your employer must give or mail you a workers’ comp benefits application form within a day of you reporting your injury. If your employer does not give you a claim form, get one from a workers’ comp information and assistance officer. Read everything that comes with the claim form. Fill out and sign the employee portion of the form. Describe injuries thoroughly. Include every part of your body affected by the accident. Give the form to your employer. This is called filing the claim form. If you mail the form to your employer, use first-class or certified mail and buy a return receipt. If you are the victim of a crime that happened at work, the employer must give notice of workers’ compensation eligibility within one working day of the crime. 
Injuries from a work-related car accident can alter lives beyond recognition. One way to deal with the financial damage is to get a qualified workers’ compensation law team on your side. The Kenton Koszdin Law Office has put heart, soul, and mind into clients’ battles for workers’ compensation benefits for injuries caused by work-related car crashes. The result has been rewarding for accident victims and a source of invaluable experience for attorneys well-versed in California law. Contact us today for a free, no obligation case evaluation. Unable to leave home? Ask for a free in-home consultation.
__author__ = 'IEUser'

from model.group import Group


class GroupHelper:
    """Page-object helper wrapping every group-related action of the web UI."""

    # Cached result of get_group_list(); any mutating action resets it to None
    # so the next read re-scrapes the page.
    group_cache = None

    def __init__(self, app):
        self.app = app

    def is_groups_page_opened(self):
        """Report whether the browser currently shows the groups page."""
        driver = self.app.wd
        on_groups_url = driver.current_url.endswith("/group.php")
        return on_groups_url and len(driver.find_elements_by_name("new")) > 0

    def return_to_groups_page(self):
        """Follow the 'group page' link unless the page is already open."""
        if self.is_groups_page_opened():
            return
        self.app.wd.find_element_by_link_text("group page").click()

    def _fill_text_field(self, field_name, value):
        """Click, clear and type into the named input; empty values are skipped."""
        if not value:
            return
        field = self.app.wd.find_element_by_name(field_name)
        field.click()
        field.clear()
        field.send_keys(value)

    def fill_in_fields(self, group):
        """Populate the group form from the non-empty attributes of `group`."""
        self._fill_text_field("group_name", group.name)
        self._fill_text_field("group_header", group.header)
        self._fill_text_field("group_footer", group.footer)

    def create(self, group):
        """Create a new group and invalidate the cache."""
        driver = self.app.wd
        self.open_groups_page()
        # Start creation, fill the form, then confirm.
        driver.find_element_by_name("new").click()
        self.fill_in_fields(group)
        driver.find_element_by_name("submit").click()
        self.return_to_groups_page()
        self.group_cache = None

    def open_groups_page(self):
        """Navigate to the groups page unless it is already open."""
        if self.is_groups_page_opened():
            return
        self.app.wd.find_element_by_link_text("groups").click()

    def select_first_group(self):
        self.select_group_by_index(0)

    def select_group_by_index(self, index):
        """Tick the checkbox of the group at the given list position."""
        self.app.wd.find_elements_by_name("selected[]")[index].click()

    def select_group_by_id(self, id):
        """Tick the checkbox of the group with the given database id."""
        self.app.wd.find_element_by_css_selector("input[value='%s']" % id).click()

    def delete_first_group(self):
        self.delete_group_by_index(0)

    def delete_group_by_index(self, index):
        """Delete the group at the given position and invalidate the cache."""
        self.open_groups_page()
        self.select_group_by_index(index)
        self.app.wd.find_element_by_name("delete").click()
        self.return_to_groups_page()
        self.group_cache = None

    def delete_group_by_id(self, id):
        """Delete the group with the given id and invalidate the cache."""
        self.open_groups_page()
        self.select_group_by_id(id)
        self.app.wd.find_element_by_name("delete").click()
        self.return_to_groups_page()
        self.group_cache = None

    def modify_group_by_index(self, group, index):
        """Edit the group at the given position with data from `group`."""
        driver = self.app.wd
        self.open_groups_page()
        self.select_group_by_index(index)
        driver.find_element_by_name("edit").click()
        self.fill_in_fields(group)
        driver.find_element_by_name("update").click()
        self.return_to_groups_page()
        self.group_cache = None

    def modify_group_by_id(self, group, id):
        """Edit the group with the given id with data from `group`."""
        driver = self.app.wd
        self.open_groups_page()
        self.select_group_by_id(id)
        driver.find_element_by_name("edit").click()
        self.fill_in_fields(group)
        driver.find_element_by_name("update").click()
        self.return_to_groups_page()
        self.group_cache = None

    def edit_first_group(self, group):
        self.modify_group_by_index(group, 0)

    def count(self):
        """Return how many groups are listed on the groups page."""
        self.open_groups_page()
        return len(self.app.wd.find_elements_by_name("selected[]"))

    def get_group_list(self):
        """Return the groups as Group(name, id) objects, caching the scrape."""
        if self.group_cache is None:
            driver = self.app.wd
            self.open_groups_page()
            self.group_cache = []
            for span in driver.find_elements_by_css_selector("span.group"):
                group_id = span.find_element_by_name("selected[]").get_attribute("value")
                self.group_cache.append(Group(name=span.text, id=group_id))
        # Hand out a copy so callers cannot mutate the cache.
        return list(self.group_cache)

    def remove_contact_by_id_from_group(self, id):
        """Untie the contact with the given id from the currently shown group."""
        driver = self.app.wd
        driver.find_element_by_xpath("//input[@value='%s']" % id).click()
        driver.find_element_by_xpath("//*[@name='remove']").click()
        self.app.navigation.return_to_home_page()
Wheelwright has a trusted network of over 500 accredited retail partners, wheel specialists, service centres and tyre bays offering our unrivalled selection of top quality Alloy Wheels, Steel Wheels, Tyres & TPMS products. Enter your postcode below to search our network for your nearest stockist. Wheelwright LTD Wholesale/Distributor Steelfields, Owens Way, Gillingham ME7 2RT, United Kingdom Distributor of alloy wheels and accessories. Unfortunately we only sell to the motor trade, not to the public. Cambridge Campervan Retailer/Stockist Sawtry Way, Wyton-on-the-Hill, PE28 2DX Trusted stockist of Calibre, DRC, Dezent, AEZ, Dotz and Mak Alloy Wheels. Prime Vehicle Sales Retailer/Stockist Brookside Industrial Estate, Sawtry, Huntingdon PE28 5SD, UK Trusted stockist of Calibre, DRC, Dezent, AEZ, Dotz and Mak Alloy Wheels.
import logging import logging.handlers import combaine.common.configloader.config __all__ = ["ParsingLogger", "AggregateLogger", "CommonLogger"] def _initLogger(name): try: config = combaine.common.configloader.config.parse_common_cfg("combaine")['cloud_config'] except Exception as err: pass print err else: _format = logging.Formatter("%(levelname)-5s %(asctime)s %(id)s %(message)s", "%Y-%m-%d %H:%M:%S") parsing_log = logging.getLogger('combaine.%s' % name) log_level = eval('logging.' + config['log_level']) fh = logging.handlers.TimedRotatingFileHandler('/var/log/combaine/%s.log' % name, when="midnight", backupCount=3) fh.setFormatter(_format) fh.setLevel(log_level) sh = logging.StreamHandler() sh.setFormatter(_format) sh.setLevel(log_level) parsing_log.addHandler(fh) parsing_log.addHandler(sh) parsing_log.setLevel(log_level) class GlobalLogId(object): def __new__(cls, _id): if not hasattr(cls, "_instanse"): print "INIT GLOBAL LOGGER ID" cls._instanse = super(GlobalLogId, cls).__new__(cls) cls._id = _id @classmethod def get_id(cls): if hasattr(cls, "_id"): return cls._id else: return "DUMMY_ID" class ParsingLogger(object): def __new__(cls, _id): if not hasattr(cls, "_instanse"): cls._instanse = super(ParsingLogger, cls).__new__(cls) _initLogger("parsing") GlobalLogId(_id) return logging.LoggerAdapter(logging.getLogger("combaine.parsing"), {"id" : _id}) class AggregateLogger(object): def __new__(cls, _id): if not hasattr(cls, "_instanse"): cls._instanse = super(AggregateLogger, cls).__new__(cls) _initLogger("aggregate") GlobalLogId(_id) return logging.LoggerAdapter(logging.getLogger("combaine.aggregate"), {"id" : _id}) class DataFetcherLogger(object): def __new__(cls): if not hasattr(cls, "_instanse"): cls._instanse = super(DataFetcherLogger, cls).__new__(cls) _initLogger("datafetcher") return logging.LoggerAdapter(logging.getLogger("combaine.datafetcher"), {"id" : GlobalLogId.get_id()}) class DataGridLogger(object): def __new__(cls): if not hasattr(cls, 
"_instanse"): cls._instanse = super(DataGridLogger, cls).__new__(cls) _initLogger("datagrid") return logging.LoggerAdapter(logging.getLogger("combaine.datagrid"), {"id" : GlobalLogId.get_id()}) class CommonLogger(object): def __new__(cls): if hasattr(ParsingLogger, "_instanse"): return logging.LoggerAdapter(logging.getLogger("combaine.parsing"), {"id" : GlobalLogId.get_id()}) elif hasattr(AggregateLogger, "_instanse"): return logging.LoggerAdapter(logging.getLogger("combaine.aggregate"), {"id" : GlobalLogId.get_id()}) else: return logging.LoggerAdapter(logging.getLogger("combaine"), {"id" : GlobalLogId.get_id()})
First of all, let’s preface this by saying, no, you shouldn’t be getting toasted every night for no good reason. Yes, there are health benefits of alcohol, but in the same way sugar is healthy: it definitely won’t kill you if you partake every now and then, but let’s not go overboard. Wait, is drinking alcohol healthy, or not? The health benefits experts attach to drinking booze is surprising, actually. Here are some ways that a few drinks a week can actually benefit you! You may be asking yourself, “How can drinking help if I’m so used to hearing the negative aspects of alcohol?” Well, again, it’s no miracle medicine, so don’t go crazy with this info. But, studies show that drinking alcohol four days per week can help reduce the risk of diabetes. To be exact, it can reduce the risk 27% in men and 32% in women. Oh wow! What other benefits of alcohol are there? According to the University of Exeter, drinking moderately when you’re with your friends or family can actually improve short term memory. Yeah, moderate drinking—not the thing people do when they drink a lot, embarrass themselves, and need their friends to remind them of the shame they don’t remember. The study also goes on to say that social drinking can encourage learning. Apparently, out of the 88 participants, the ones who drank learned more and remembered almost everything they were told the night before. We had to throw in some anti-benefits of alcohol to sort of balance the scales a bit and also be very clear that you shouldn’t take this as permission to drink every night. As one study found, excessive drinking can lead to obesity. A group of teenagers were anonymously asked about their drinking habits. Evidently, the fat kids were binge drinkers. That’s the study in a nutshell, but there are plenty of risks involved with excessive drinking, as much as we’d love to sit and drink all day. 
It’s just good to know that drinking alcohol won’t kill us overnight—especially if we drink it moderately and responsibly. The most shocking news of all: everyone’s frenemy, tequila, comes out on top as the healthiest alcohol choice. Yes, the one we all have drinking nightmares about actually comes out on top as the healthiest choice of alcohol. With tequila and other spirits, the clearer the alcohol, the most calorie friendly and sugar free you can get. Also, tequila’s from a natural form of a sweet plant called agave, so there’s really no added sugar. It’s even best served plain, without any soda or mixers (with the exception of margarita night). Right up there with wine, doctors may tell you to drink one or the other for a healthy cardiovascular system. One of the other benefits of drinking tequila is that it’s gluten free, so free game for celiacs! The Vegas Pub Crawler leaves every night. The destination? Clean, responsible tomfoolery in DTLV. Book your Downtown Las Vegas drink night today!
"""Log which user modifies resources in additional 'audit' database.""" import substanced.util import transaction from pyramid.i18n import TranslationStringFactory from pyramid.traversal import resource_path from pyramid.request import Request from BTrees.OOBTree import OOBTree from logging import getLogger from adhocracy_core.interfaces import IResource from adhocracy_core.interfaces import SerializedActivity from adhocracy_core.interfaces import Activity logger = getLogger(__name__) _ = TranslationStringFactory('adhocracy') class AuditLog(OOBTree): """An Auditlog composed of audit entries. This is a dictionary (:class:`collections.abc.Mapping`) with key :class:`datetime.datetime` and value :class:`adhocracy_core.interfaces.SerializedActivity`. The methods `items`, `keys`, and `values` have the additional kwargs `max_key` and `min_key` to allow range queries:: january = datetime(2015, 1, 1) february = datetime(2015, 2, 1) audit = get_auditlog(context) audit.items(min=january, max=february) ... 
""" def add(self, activity: Activity) -> None: """Serialize `activity` and store in audit log.""" kwargs = {'object_path': resource_path(activity.object), 'type': activity.type, } if activity.subject: kwargs['subject_path'] = resource_path(activity.subject) if activity.target: kwargs['target_path'] = resource_path(activity.target) if activity.sheet_data: kwargs['sheet_data'] = activity.sheet_data entry = SerializedActivity()._replace(**kwargs) self[activity.published] = entry def get_auditlog(context: IResource) -> AuditLog: """Return the auditlog.""" return substanced.util.get_auditlog(context) def set_auditlog(context: IResource) -> None: """Set an auditlog for the context.""" conn = context._p_jar try: connection = conn.get_connection('audit') except KeyError: return root = connection.root() if 'auditlog' in root: return auditlog = AuditLog() root['auditlog'] = auditlog def add_to_auditlog(activities: [Activity], request: Request) -> None: """Add activities to the audit database. The audit database is created if missing. If the `zodbconn.uri.audit` value is not specified in the config, auditing does not happen. """ auditlog = get_auditlog(request.root) if auditlog is None: return for activity in activities: auditlog.add(activity) transaction.commit()
LONGMONT — One of just two remaining unbeaten teams in Class 4A, the Longmont boys know Colorado is watching and waiting for them to slip up or have an off night. They're well aware how they play each night is interpreted either as a show of dominance or a sign of vulnerability by those who endeavor to knock off the reigning state champions. On Friday night, the Trojans chose dominance. And the signal was clear as ever. Ranked No. 2 in Class 4A, the Trojans used a 78-38 home victory against No. 9 Greeley Central to make it clear that they don't intend to let up until after they've crossed the finish line again at the end of this season. "It just felt like a statement game," said Trojans senior Calvin Seamons, who stamped an exclamation point on his team's statement with an alley-oop dunk in the third quarter. "They're a good team and it was a big conference matchup so we had to come to play. We know Lewis-Palmer (No. 1) is in the gym. We know Holy Family (No. 3) is in the gym. So we just wanted to send a statement out to everybody." With a commanding lead in the Northern League standings, the Trojans (16-0, 7-0) have been incredible at the defensive end of the floor. They continued their defensive dominance against the Wildcats (11-5, 4-3), holding the visiting team's top scorers — Jackson Hayslip (22.1 ppg) and Spencer Conway (13.3 ppg) — to a combined 21 points. Through 16 games, Longmont has held its opponents to an average of 41.25 points. Their defensive efforts have allowed the Trojans to win their first 16 games by an average margin of 27.3 points. "Man, we worked hard tonight," Longmont head coach Jeff Kloster said. "The tone of the game was set at the defensive end. We knew they were a dribble-drive team and we knew numbers (Hayslip and Conway) were their two cogs. We made them work hard and take difficult looks every time down the floor. "The kids really enjoyed tonight. They were very unselfish and played like a typical unselfish team. 
They rewarded each other at the offensive end and we got into a tremendous rhythm once that transition game of ours took effect." Including a season-high 88 points against Northridge on Tuesday, the Trojans are averaging 83 points in their last two games. Longmont's trio of double-figure scorers — Beck Page, Brady Renck and Seamons — combined to score 43 points against the Wildcats. Ten different Trojans scored at least two points, and junior Ryan Schneider scored all 10 of his points to help pace a 28-point Longmont second quarter that gave the Trojans a 43-17 halftime lead. The most points Schneider, who transferred to Longmont from Faith Christian, had scored in an entire game prior to Friday night was 12. "Ryan Schneider really came through tonight," Kloster said. "He's a hard worker and he has just been patiently waiting. He's a coach's dream as far as someone new coming into your program, being all-in and having a great basketball IQ. That's him." Page hit four 3-pointers to score a game-high 18 points for the Trojans, who led by more than 30 points to begin the fourth quarter. At the end of the third quarter Seamons brought the home crowd to its feet with his breakaway alley-oop courtesy of an off-the-backboard pass from Renck, who finished the game with 10 assists. Longmont and Greeley Central will meet again in both team's regular-season finale on Feb. 13 at Greeley Central. Set to begin the second turn through the Northern League schedule, the Trojans will be back in action at Thompson Valley on Tuesday night. Conway 3 0-0 6, Hayslip 4 7-11 15, Blackburn 0 0-0 0, Kingsford 1 0-1 2, Knox 3 0-2 7, Hernandez 0 0-0 0, Arrendondo 3 0-0 8, Munguia 0 0-0 0, Ndayishimiye 0 0-0 0. Totals 14 7-14 38. Schneider 4 1-1 10, Yazzie 0 0-0 0, Derksen 1 2-3 4, Renck 5 2-4 12, Elkins 3 2-2 8, Dye 1 0-0 2, Dehning 2 2-4 7, Rulon 0 2-2 2, Sprecher 1 0-0 2, Page 4 6-7 18, Seamons 5 3-6 13. Totals 26 20-29 78. 
3-point field goals — Greeley Central (3): Arrendondo 2, Knox; Longmont (6): Page 4, Dehning, Schneider. Total fouls — Greeley Central 24, Longmont 17. Fouled out — Arrendondo.
# -*- coding: utf-8 -*-

# This code is part of Qiskit.
#
# (C) Copyright IBM 2017.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.

# pylint: disable=invalid-name

"""
T=sqrt(S) phase gate or its inverse.
"""
import numpy
from qiskit.circuit import Gate
from qiskit.circuit import QuantumCircuit
from qiskit.circuit import QuantumRegister
from qiskit.qasm import pi
from qiskit.extensions.standard.u1 import U1Gate


class TGate(Gate):
    """T Gate: pi/4 rotation around Z axis."""

    def __init__(self, label=None):
        """Create new T gate."""
        super().__init__("t", 1, [], label=label)

    def _define(self):
        """
        gate t a { u1(pi/4) a; }
        """
        definition = []
        q = QuantumRegister(1, "q")
        rule = [
            (U1Gate(pi/4), [q[0]], [])
        ]
        for inst in rule:
            definition.append(inst)
        self.definition = definition

    def inverse(self):
        """Invert this gate (returns a Tdg gate)."""
        return TdgGate()

    def to_matrix(self):
        """Return a numpy.array for the T gate."""
        # diag(1, exp(i*pi/4)); (1+1j)/sqrt(2) == exp(i*pi/4).
        return numpy.array([[1, 0],
                            [0, (1+1j) / numpy.sqrt(2)]], dtype=complex)


class TdgGate(Gate):
    """Tdg Gate: -pi/4 rotation around Z axis."""

    def __init__(self, label=None):
        """Create new Tdg gate."""
        super().__init__("tdg", 1, [], label=label)

    def _define(self):
        """
        gate tdg a { u1(-pi/4) a; }
        """
        definition = []
        q = QuantumRegister(1, "q")
        rule = [
            (U1Gate(-pi/4), [q[0]], [])
        ]
        for inst in rule:
            definition.append(inst)
        self.definition = definition

    def inverse(self):
        """Invert this gate (returns a T gate)."""
        return TGate()

    def to_matrix(self):
        """Return a numpy.array for the Tdg gate."""
        # diag(1, exp(-i*pi/4)); (1-1j)/sqrt(2) == exp(-i*pi/4).
        return numpy.array([[1, 0],
                            [0, (1-1j) / numpy.sqrt(2)]], dtype=complex)


def t(self, q):
    """Apply T to q."""
    return self.append(TGate(), [q], [])


def tdg(self, q):
    """Apply Tdg to q."""
    return self.append(TdgGate(), [q], [])


# Attach the helpers as QuantumCircuit methods.
QuantumCircuit.t = t
QuantumCircuit.tdg = tdg
'Westworld' stars Evan Rachel Wood, Jeffrey Wright see unplanned parallels to Me Too as abused android hosts find their voices in Season 2. LOS ANGELES – Westworld’s first season was filmed long before the Me Too movement came to national prominence, but star Evan Rachel Wood sees parallels between activist victims of sexual misconduct and the abused android hosts rising up against authority in the HBO sci-fi drama. In Season 1, the hosts at the high-tech Westworld theme park were the playthings of wealthy visitors, who could sexually assault, beat and even kill the lifelike beings with impunity. Erasure of memory protected the hosts, by rendering them unaware of countless acts of abuse. It also made them less inclined to fight back. That began to change late in the season as Wood’s Dolores, the oldest park host with the longest history of suffering, and Thandie Newton’s brothel madam Maeve began to remember, feeling pain but also gaining the fortitude to fight back. Sweet Dolores, through her vengeful alter ego, Wyatt, triggers the host revolt by shooting park co-creator Robert Ford (Anthony Hopkins) in the Season 1 finale. Me Too, a rallying cry and unifying hashtag for those subjected to sexual assault and harassment, especially in the workplace, isn't fictional, of course, and the real women and men who were abused have had to carry those memories for years, often in silence. Their decision to speak out against disgraced movie mogul Harvey Weinstein and others, figuratively finding their voices, has toppled powerful men and led to change, at least temporarily, in a Hollywood culture that permitted such abuse. “We are a little creeped out by (comparisons) actually, because I don’t think that was on purpose. We must just kind of be in tune with what’s going on,” says Wood, who has spoken out in different ways against abusers. Last month, she told Congress about being raped in testimony supporting a bill of rights for sexual assault survivors. 
Wood also appeared in a musical Funny or Die, Guess Who: #MeToo Edition, whose lighthearted tone identifying famous abusers leads to a serious message of support for Rise, which is pushing for a survivors' bill of rights. Jeffrey Wright, whose theme-park programming chief Bernard was revealed to be an android, finds the robot hosts “a clever metaphor” with many applications. “Humans are like hosts in that … we struggle to be free of constraints. We ask questions about whether or not we’re in control, or whether external forces are in control,” he says. “More specifically to the Me Too movement, the hosts are a group that has been oppressed, abused and harmed, so they’re responding to that and finding a voice in opposition, and also trying to free themselves, to take charge of their own security,” he says. Executive producers Jonathan Nolan and Lisa Joy, who mapped the Westworld story five years ago, see a universal comparison that goes beyond specific movements such as Me Too, Time’s Up and Black Lives Matter. The Me Too and Time’s Up movements may add symbolic weight to Westworld’s Season 2 drama, which continues the story of hosts rebelling against their masters. Wood appreciates the connection to the zeitgeist, but isn’t entirely comfortable with it.
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.conf import settings from django.db import models from django.utils.encoding import python_2_unicode_compatible from django.utils.translation import ugettext_lazy as _ from machina.core.loading import get_class ForumReadTrackManager = get_class('forum_tracking.managers', 'ForumReadTrackManager') @python_2_unicode_compatible class AbstractForumReadTrack(models.Model): """ Represents a track which records which forums have been read by a given user. """ user = models.ForeignKey( settings.AUTH_USER_MODEL, related_name='forum_tracks', on_delete=models.CASCADE, verbose_name=_('User')) forum = models.ForeignKey( 'forum.Forum', related_name='tracks', on_delete=models.CASCADE, verbose_name=_('Forum')) mark_time = models.DateTimeField(auto_now=True, db_index=True) objects = ForumReadTrackManager() class Meta: abstract = True app_label = 'forum_tracking' unique_together = ['user', 'forum', ] verbose_name = _('Forum track') verbose_name_plural = _('Forum tracks') def __str__(self): return '{} - {}'.format(self.user, self.forum) @python_2_unicode_compatible class AbstractTopicReadTrack(models.Model): """ Represents a track which records which topics have been read by a given user. """ user = models.ForeignKey( settings.AUTH_USER_MODEL, related_name='topic_tracks', on_delete=models.CASCADE, verbose_name=_('User')) topic = models.ForeignKey( 'forum_conversation.Topic', related_name='tracks', on_delete=models.CASCADE, verbose_name=_('Topic')) mark_time = models.DateTimeField(auto_now=True, db_index=True) class Meta: abstract = True app_label = 'forum_tracking' unique_together = ['user', 'topic', ] verbose_name = _('Topic track') verbose_name_plural = _('Topic tracks') def __str__(self): return '{} - {}'.format(self.user, self.topic)
ОАО "United Aircraft Corporation" (UAC, ОАО "UAC") — Russian aircraft manufacturing company, uniting the largest Russian aircraft manufacturers. ОАО "Sukhoi Company" — Russias biggest aircraft holding with over 26 thousand employees. The "Sukhoi" Company is a member of the United Aircraft Corporation (UAC). The "Sukhoi" Group comprises leading Russian design bureaus and serial aircraft manufacturers. The company provides for complete process cycle in the aircraft manufacturing, from design to effective after-sale service. The Group manufactures — "Su" combat jets being the most up-to-date products in the world armament market and the basic tactical aircraft of Russia and many other countries. The Company is the largest Russian aircraft exporter, being the 3st in the world sales of modern fighter planes. For the time being, the Company is realizing prospective programs in the spheres of civil and military aircraft manufacturing. "Irkut" Corporation has leading positions among Russian aircraft manufacturers, being a vertically integrated Group of companies with activities in design, production, sales and after-sale civil and military aircraft manufacturing. ZАО "Aviastar-SP" — aircraft works in Ulyanovsk. The company is specialized on manufacturing of modern passenger and cargo aircrafts Tu-204, civil cargo planes Аn-124-100 "Ruslan" and military cargo planes Il-476. The production facilities of "Aviastar" allow for production of up to 50 aircrafts annually. ОАО "United Shipbuilding Corporation" (USC) — Russian state shipbuilding group of companies uniting the largest Russian shipyards. ОАО "PO SEVMASH" – Russias largest shipyard, the only shipyard in Russia building nuclear-powered submarines for the Navy. The shipyard has premises over 300 ha and over 100 subdivisions. ОАО "Admiralty Shipyards" – a fundamental shipbuilding enterprise, Russias non-nuclear submarine building shipyard. 
During over 307 years of activities the enterprise has built over 2600 ships and vessels of different types and classes: first Russian steamers, battleships and cruisers, the world's first nuclear-powered icebreaker, unique research and deep-sea submerged vessels, tankers of different types and classes, including strengthened ones for ice navigation, over 300 unique submarines of different projects. In the meantime, Admiralty Shipyards is a joint venture actively participating in the development of the domestic shipbuilding and the rebirth of Russia's maritime glory. For the time being, the capacities of the enterprise are completely occupied, the shipyard is successfully realizing a series of contracts for home and foreign customers. ОАО "Baltic Shipyard "YANTAR" – since 1945, former shipyard of German company "Schihau". The history of the enterprise is closely related with the origin and development of the youngest Russian region, the Kaliningrad Region. The only Russian shipbuilding enterprise located in the non-freezing South-West part of the Baltic Sea, in the vicinity of the largest industrial centers of Europe. ОАО "Almaz-Antey Air Defense Company" — Russian group of companies developing and manufacturing armaments (Air and Anti-Missile Defense). The enterprises united in the group develop, manufacture and modernize air defense missile and radar equipment and its components (air defense is the main sphere of activities of the group). Besides that, the mission of the group of companies comprises operation monitoring, repair and utilization — for federal state needs and for foreign customers — of systems, facilities and means of air defense and non-strategic anti-missile defense. ОАО "Gazprom" – Russian fuel company for exploration, extraction, transportation, storage, processing and sales of natural gas, gas condensate and oil, as well as generation and supplies of thermal and electric power.
ОАО "LUKOIL" — one of the biggest international vertically integrated group of oil & gas companies with 2,1% share of the worlds crude oil production. ОАО "NK "Rosneft" – Leader of Russian oil branch and one of the biggest public oil & gas companies in the world. The main activities of "Rosneft" are exploration and extraction of oil and gas, manufacturing of oil products and petrochemical products, as well as the distribution of these products. ОАО "Atomredmetzoloto" (Urianium Holding "ARMZ") – a uranium manufacturing company, in accordance with its internal data it has the 5th position in the world in uranium extraction and the 2nd position as to the discovered reserve. Open Joint-Stock Company "Priargunskoye proizvodstvennoye gorno-khimicheskoye obyedineniye" (ОАО "PPGHO") – the biggest uranium mining enterprise in Russia including: a uranium ore mining department, a hydrometallurgical plant, Mining Department "Urtuyiskoye", a central scientific research laboratory, a central laboratory of instrumentation and automation, an independent mine-rescue militia detachment, a thermal power station, a specialized automotive department, a railway fleet and a mechanical repair works.
# Copyright 2016 Carlos Dauden <[email protected]>
# Copyright 2016 Pedro M. Baeza <[email protected]>
# Copyright 2017 David Vidal <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).

from odoo import _, api, models
from odoo.exceptions import ValidationError
from lxml import etree


class StockScrap(models.Model):
    _inherit = 'stock.scrap'

    @api.multi
    def action_validate(self):
        """Validate the scrap and leave a trace on the lot's chatter."""
        self.ensure_one()
        self.lot_id.message_post(
            body=_("Lot was scrapped by <b>%s</b>.") % self.env.user.name)
        return super(StockScrap, self).action_validate()


class StockProductionLot(models.Model):
    _inherit = 'stock.production.lot'

    @api.model
    def fields_view_get(self, view_id=None, view_type='form', toolbar=False,
                        submenu=False):  # pragma: no cover
        """Inject the button here to avoid conflicts with other modules
        that add a header element in the main view.
        """
        res = super(StockProductionLot, self).fields_view_get(
            view_id=view_id, view_type=view_type, toolbar=toolbar,
            submenu=submenu)
        eview = etree.fromstring(res['arch'])
        xml_header = eview.xpath("//header")
        if not xml_header:
            # Create a header
            header_element = etree.Element('header')
            # Append it to the view
            forms = eview.xpath("//form")
            if forms:
                forms[0].insert(0, header_element)
        else:
            header_element = xml_header[0]
        # 'confirm' makes the client show a confirmation dialog before
        # calling action_scrap_lot.
        button_element = etree.Element(
            'button', {'type': 'object',
                       'name': 'action_scrap_lot',
                       'confirm': _('This will scrap the whole lot. Are you'
                                    ' sure you want to continue?'),
                       'string': _('Scrap')})
        header_element.append(button_element)
        res['arch'] = etree.tostring(eview)
        return res

    def _prepare_scrap_vals(self, quant, scrap_location_id):
        """Return the stock.scrap values to scrap one quant of this lot."""
        self.ensure_one()
        return {
            'origin': quant.lot_id.name,
            'product_id': quant.product_id.id,
            'product_uom_id': quant.product_id.uom_id.id,
            'scrap_qty': quant.quantity,
            'location_id': quant.location_id.id,
            'scrap_location_id': scrap_location_id,
            'lot_id': self.id,
            'package_id': quant.package_id.id,
        }

    @api.multi
    def action_scrap_lot(self):
        """Create one stock.scrap per internal quant of this lot and open
        the resulting record(s) in the scrap action window.
        """
        self.ensure_one()
        # Only quants sitting in internal locations can be scrapped.
        quants = self.quant_ids.filtered(
            lambda x: x.location_id.usage == 'internal',
        )
        if not quants:
            raise ValidationError(
                _("This lot doesn't contain any quant in internal location."),
            )
        scrap_obj = self.env['stock.scrap']
        scraps = scrap_obj.browse()
        scrap_location_id = self.env.ref('stock.stock_location_scrapped').id
        for quant in quants:
            scrap = scrap_obj.create(
                self._prepare_scrap_vals(quant, scrap_location_id),
            )
            scraps |= scrap
        result = self.env.ref('stock.action_stock_scrap').read()[0]
        result['context'] = self.env.context
        if len(scraps) != 1:
            # Several scraps: show them as a filtered list.
            result['domain'] = "[('id', 'in', %s)]" % scraps.ids
        else:  # pragma: no cover
            # Single scrap: open it directly in form view.
            res = self.env.ref('stock.stock_scrap_form_view', False)
            result['views'] = [(res and res.id or False, 'form')]
            result['res_id'] = scraps.id
        return result
We are an EXPERIENCED company located in Piraeus, Greece. Our MISSION is to improve your yachting life. POWERFUL AND SAFE UNDERWATER LIGHTING PROJECTS. Do you need some underwater lights to boost your fishing successes or the absolute solution to enjoy the beauty of the underwater world and become more classy? It’s our mission to identify your needs and offer you options and solutions. From our very first underwater lighting project (back to 2006) until today, we chose to collaborate with and distribute only the best manufacturers of underwater lights, in the world. Using the best products and materials, following the international rules of safety in technical applications, we take no risks with your safety and pleasure. The best part of our job is seeing the happy faces of those we helped to make their yachting life more… brightened. Our customers enjoy the underwater beauty, while their yacht attracts the eye of people who admire the successful result. It’s time for you to be one of those classy yacht owners, don’t you think? Nature gives us wonders. We help you explore them! Our services are delivered by our team with years of experience are passionate about underwater lighting projects. YOU’RE JUST 3 STEPS AWAY FROM YOUR NEW UNDERWATER LIGHTS. Let’s have a cup of coffee in our offices and discuss your needs further. Are you an effective fisherman or a proud yacht owner? Surface mount or thru-hull underwater lights and why? What about the light colour? Where to place the lights? What about the power consumption? All these questions -and many more- are the first step. This step will lead us to choose the right underwater light model for you and secure the technical parameters of the installation. Next stop: the vessel! Now we know everything we need to know about YOU and your VESSEL. It’s time to find the right underwater light for both of you. 
It’s true that there are a lot of options, however we will guide you and help you find the best combination between your needs, your budget and the desired result. There is always a way to do that if you really appreciate your customers. And we do! Welcome to the final step! The final days (maybe hours) before you enjoy your best decision ever. Our experienced tech crew will handle the installation of the underwater lights according to the international standards. Our first and foremost priority is your and your vessels’ safety. After all, this is a job for skilled and experienced technicians and our team has proven successful in every single project all these years. Additionally, the installation method we follow is approved by the manufacturers of the underwater lights, as they trust us for many years in such exacting projects. Keep in mind that the best kind of yachting is the safe one. And we guarantee that! ENJOY the underwater world beauty. ATTRACT fish and boost your fishing skills. CREATE the scene and let the people around you enjoy it. WANT TO KNOW MORE ABOUT UNDERWATER LIGHTING & OUR PROJECTS? We'd really love to hear from you so why not drop us an email and we'll get back to you!
import xbmc, xbmcgui, xbmcaddon, xbmcplugin
import urllib, re, datetime
import thesportsdb, feedparser
from random import randint
from centerutils.common_variables import *
from centerutils.tweet import *


def tweets(tweeter_user):
    """Open the tweet dialog.

    `tweeter_user` is the str() of a (mode, value) pair, where mode is
    'user' or a hashtag marker — see dialog_tweet.__init__.
    """
    window = dialog_tweet('DialogTweeter.xml', addonpath, 'Default', str(tweeter_user))
    window.doModal()


class dialog_tweet(xbmcgui.WindowXMLDialog):
    """XML dialog listing tweets for a user ('user' mode) or a hashtag."""

    def __init__(self, *args, **kwargs):
        # BUGFIX: the original called xbmcgui.WindowXML.__init__ — the
        # grandparent class — from a WindowXMLDialog subclass.  Initialise
        # the actual base class instead.
        xbmcgui.WindowXMLDialog.__init__(self)
        # args[3] is the str() of a (mode, value) pair; evaluate it once
        # instead of three times as the original did.
        mode_value = eval(args[3])
        self.mode = mode_value[0]
        self.twitter_var = mode_value[1]
        if self.mode == 'user':
            self.twitter_list = get_tweets(self.twitter_var)
        else:
            self.twitter_list = get_hashtag_tweets(self.twitter_var)

    def onInit(self):
        # set twitter logo
        self.getControl(3).setImage(os.path.join(addonpath, 'resources', 'img', 'twitter.png'))
        # set dialog heading: '@user' or '#hashtag' (strip any '#' already present)
        if self.mode == 'user':
            self.getControl(1).setLabel('@' + self.twitter_var)
        else:
            self.getControl(1).setLabel('#' + self.twitter_var.replace('#', ''))
        for tweet_item, tweet_item_date in self.twitter_list:
            tweet = xbmcgui.ListItem(tweet_item)
            # Keep only the part of the date string before the '+0000'
            # timezone offset, e.g. 'Wed Jan 01 12:00:00'.
            match = re.compile('(.+?) \+').findall(tweet_item_date)
            if match:
                tweet.setProperty('tweet_date', match[0])
            self.getControl(6).addItem(tweet)
        self.setFocusId(6)
        self.getControl(6).selectItem(0)
We are always looking for new ways to get involved in our local community and this year was no exception. The Give and Gain Day 2013 brought together 687 business volunteers from 31 companies to support 23 community groups and schools within Wales. The success of this day resulted in almost 90% of volunteers saying they felt more committed to their employer by volunteering, while 97% said that it was important to them that their employer supported volunteering projects. We’re very proud that our own Danielle and Felicity feature on the front page of this year’s report! Construction has started on the new clubhouse for our client South Bucks Council which when complete at the start of 2014 will provide facilities to complement the current 18 hole course. The clubhouse has been sited adjacent to the 18th with function rooms and bars taking full advantage of views across the finishing hole. The £2 million project has been supported by Sport England. On the 10th July we successfully secured full planning consent for new build student accommodation at Egham Hill, adjacent to Royal Holloway University. The scheme for Danehurst Developments Limited comprises 100 studios alongside their current development. Construction is scheduled to commence 2014. At the end of June, HLN celebrated its 25th annual charity golf day at Royal Porthcawl Golf Club. 80 golfers took on the challenge of this championship golf course in aid of local charities and through their efforts we successfully raised over £2000. Our congratulations to the winning team of Vear Group from Southampton.
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 13 06:41:54 2016

@author: piotr at nicecircuits.com
"""
from libraryManager.library import libraryClass
from libraryManager.part import part
from footprints.footprintSmdQuad import *
from footprints.footprintSmdDualRow import *
from libraryManager.footprintPrimitive import *
from libraryManager.defaults import *
from symbols.symbolsIC import symbolIC
from libraryManager.symbolPrimitive import *
from parts.icGenerator import icGenerator
from libraryManager.footprint import footprint
import os.path
from libraryManager.generateLibraries import generateLibraries
from libraries.libraryOpamps import symbolOpamp


class libraryTest(libraryClass):
    """Library of test/experimental parts used to exercise part generation."""

    def __init__(self):
        super().__init__("Test")
        # ============== R7F7010343AFP ==============
        # 176-pin, 0.5 mm pitch quad package footprint.
        footprints = [footprintSmdQuad("R7F7010343AFP", "niceSemiconductors",
            176, 0.5, [25.4, 25.4], [1.3, 0.3], [24.1, 24.1, 1.7], defaults.court["N"],
            [1.0, 0.25, 1.7 / 2])]
        # Pin names/numbers are read from the spreadsheet next to this file.
        path = os.path.join(os.path.dirname(__file__), "R7F7010343AFP.ods")
        # generate quad pin-by-pin symbols
        self.parts.extend(icGenerator.generate(path, pinNames=None,
            footprints=footprints, symbolType="quad", namePosfix="", size=5600))
        # ============== AXK5S60047YG ==============
        # added as footprints only (attached to the part generated above)
        self.parts[0].footprints.append(footprintAXK5S60047YG())
        self.parts[0].footprints.append(footprintAXK6S60447YG())
        # ============== Dummy semiconductor for package generation ==============
        self.parts.append(partDummySemiconductor())


class partDummySemiconductor(part):
    """
    Dummy part
    """

    def __init__(self, name="Dummy", refDes=defaults.icRefDes):
        super().__init__(name, refDes)
        self.symbols.append(symbolOpamp())
        # Attach generic SOIC / SOT23 / SC70 footprints in all courtyard
        # densities so the dummy part exercises every package generator.
        for density in ["N", "L", "M"]:
            for pinCount in [8, 14, 16]:
                self.footprints.append(footprintSoic(pinCount=pinCount, density=density))
            for pinCount in [3, 5, 6, 8]:
                self.footprints.append(footprintSot23(pinCount=pinCount, density=density))
                self.footprints.append(footprintSc70(pinCount=pinCount, density=density))


class footprintAXK5S60047YG(footprintSmdDualRow):
    """
    Panasonic Narrow Pitch Connector P5KS Socket 60 pin
    For mated height 4.0 mm, 5.0 mm and 6.0 mm
    Without positioning boss/with direction for protection from reverse mating
    """

    def __init__(self, name="AXK5S60047YG", alternativeLibName="niceConectorsOther",
                 density="N", wide=False):
        super().__init__(name, alternativeLibName,
            pinCount=60, pitch=0.5,
            padSpan=4.6, padDimensions=[0.25, 2.2],
            bodyDimensions=[18.40, 4.8, 3.05],
            leadDimensions=[0.4, 0.2, 0.18],
            court=defaults.court[density],
            leadStyle="cube_metal")
        # Odd pin numbers down one row (1,3,...,59), even numbers descending
        # down the other (60,58,...,2) — matches the connector's numbering.
        pads = ["%d" % (2 * i + 1) for i in range(30)] + ["%d" % (60 - 2 * i) for i in range(30)]
        self.renamePads(pads)


class footprintAXK6S60447YG(footprintSmdDualRow):
    """
    Panasonic Narrow Pitch Connector P5KS Header 60 pin
    For mated height 4.0 mm, 4.5 mm and 7.0 mm
    Without positioning boss/with direction for protection from reverse mating
    """

    def __init__(self, name="AXK6S60447YG", alternativeLibName="niceConectorsOther",
                 density="N", wide=False):
        super().__init__(name, alternativeLibName,
            pinCount=60, pitch=0.5,
            padSpan=4.2, padDimensions=[0.25, 2.2],
            bodyDimensions=[18.40, 4.4, 0.95],
            leadDimensions=[0.4, 0.2, 0.23],
            court=defaults.court[density],
            leadStyle="cube_metal")
        # Same zig-zag numbering as the mating socket above.
        pads = ["%d" % (2 * i + 1) for i in range(30)] + ["%d" % (60 - 2 * i) for i in range(30)]
        self.renamePads(pads)
        # Simple 3D body of the mating area.
        self.addSimple3Dbody([0, 0], [16.5, 1.72, 3.3])


class pogoPin(footprint):
    """
    """
    # NOTE(review): this looks like an unfinished copy of footprintAXK5S60047YG —
    # it subclasses `footprint` but passes footprintSmdDualRow-style keyword
    # arguments to super().__init__(); confirm the intended base class and
    # signature before using this class.

    def __init__(self, name="AXK5S60047YG", alternativeLibName="niceConectorsOther",
                 density="N", wide=False):
        super().__init__(name, alternativeLibName,
            pinCount=60, pitch=0.5,
            padSpan=4.6, padDimensions=[0.25, 2.2],
            bodyDimensions=[18.40, 4.8, 3.05],
            leadDimensions=[0.4, 0.2, 0.18],
            court=defaults.court[density],
            leadStyle="cube_metal")
        pads = ["%d" % (2 * i + 1) for i in range(30)] + ["%d" % (60 - 2 * i) for i in range(30)]
        self.renamePads(pads)


if __name__ == "__main__":
    generateLibraries([libraryTest()])
Located very much in the centre of the action in Prenzlauerberg, you'll find lots to do in the area, great places to eat and drink and heaps of shopping opportunities. You can get anywhere you like quite easily with a U2 train station just minutes away, plus tram or S-Bahn stations. Prices range from €35 for a single to up to €71 for a double. No restaurant or bar on the premises but this accommodation does sit above their very own bike rental shop where you can get good deals on bike hire for your stay in Berlin. Nice and handy. All rooms here have Cable TV, their own bathroom and come complete with linen and towels. Double rooms also have a fridge and coffee machine. Extra beds are available on request, or if there are more of you, perhaps go for the East Side Pension Apartment on the first floor.
# -*- coding: utf-8 -*-
import os, math
from qgis.core import NULL
from mole import oeq_global
from mole.project import config
from mole.extensions import OeQExtension
from mole.stat_corr import contemporary_base_uvalue_by_building_age_lookup


def calculation(self=None, parameters={}, feature=None):
    """Compute the present transmission heat loss through the walls.

    WL_QTP [kWh/a] = wall area (WL_AR) * wall U-value (WL_UP)
                     * heating hours (HHRS) / 1000

    Returns NULL for WL_QTP when any input attribute is missing.
    NOTE: `parameters={}` is a mutable default, but it is only read here,
    so the shared-default pitfall does not apply; kept for interface
    compatibility with the extension framework.
    """
    # BUGFIX: removed an unused function-level `from math import floor, ceil`.
    from PyQt4.QtCore import QVariant
    wl_qtp = NULL
    if not oeq_global.isnull([parameters['WL_AR'], parameters['WL_UP'], parameters['HHRS']]):
        wl_qtp = float(parameters['WL_AR']) * float(parameters['WL_UP']) * float(parameters['HHRS']) / 1000
    return {'WL_QTP': {'type': QVariant.Double, 'value': wl_qtp}}


extension = OeQExtension(
    extension_id=__name__,
    category='Evaluation',
    subcategory='Present Transm. Heat Loss',
    extension_name='Wall Quality (QT, Present)',
    layer_name='QT Wall Present',
    extension_filepath=os.path.join(__file__),
    colortable=os.path.join(os.path.splitext(__file__)[0] + '.qml'),
    field_id='WL_QTP',
    source_type='none',
    par_in=['WL_AR', 'WL_UP', 'HHRS'],
    sourcelayer_name=config.data_layer_name,
    targetlayer_name=config.data_layer_name,
    active=True,
    show_results=['WL_QTP'],
    description=u"Calculate the present Transmission Heat Loss of the Building's Walls",
    evaluation_method=calculation)

extension.registerExtension(default=True)
What services does UWA offer to Indigenous students? The School of Indigenous Studies (SIS) supports Indigenous students at UWA. Contact SIS if you would like more information.
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
import getpass
import os
import warnings

import paramiko
from paramiko.config import SSH_PORT
from sshtunnel import SSHTunnelForwarder

from airflow.exceptions import AirflowException
from airflow.hooks.base_hook import BaseHook
from airflow.utils.log.logging_mixin import LoggingMixin


class SSHHook(BaseHook, LoggingMixin):
    """
    Hook for ssh remote execution using Paramiko.
    ref: https://github.com/paramiko/paramiko
    This hook also lets you create ssh tunnel and serve as basis for
    SFTP file transfer

    :param ssh_conn_id: connection id from airflow Connections from where all
        the required parameters can be fetched like username, password or key_file.
        Thought the priority is given to the param passed during init
    :type ssh_conn_id: str
    :param remote_host: remote host to connect
    :type remote_host: str
    :param username: username to connect to the remote_host
    :type username: str
    :param password: password of the username to connect to the remote_host
    :type password: str
    :param key_file: key file to use to connect to the remote_host.
    :type key_file: str
    :param port: port of remote host to connect (Default is paramiko SSH_PORT)
    :type port: int
    :param timeout: timeout for the attempt to connect to the remote_host.
    :type timeout: int
    :param keepalive_interval: send a keepalive packet to remote host every
        keepalive_interval seconds
    :type keepalive_interval: int
    """

    def __init__(self,
                 ssh_conn_id=None,
                 remote_host=None,
                 username=None,
                 password=None,
                 key_file=None,
                 port=None,
                 timeout=10,
                 keepalive_interval=30
                 ):
        super(SSHHook, self).__init__(ssh_conn_id)
        self.ssh_conn_id = ssh_conn_id
        self.remote_host = remote_host
        self.username = username
        self.password = password
        self.key_file = key_file
        self.port = port
        self.timeout = timeout
        self.keepalive_interval = keepalive_interval
        # Default values, overridable from Connection
        self.compress = True
        self.no_host_key_check = True
        self.allow_host_key_change = False
        self.host_proxy = None
        # Placeholder for deprecated __enter__
        self.client = None
        # Use connection to override defaults: explicit constructor args win,
        # then Connection fields, then the Connection's JSON "extra" options.
        if self.ssh_conn_id is not None:
            conn = self.get_connection(self.ssh_conn_id)
            if self.username is None:
                self.username = conn.login
            if self.password is None:
                self.password = conn.password
            if self.remote_host is None:
                self.remote_host = conn.host
            if self.port is None:
                self.port = conn.port
            if conn.extra is not None:
                extra_options = conn.extra_dejson
                # NOTE(review): unconditionally overwrites any key_file passed
                # to __init__ (with None if the extra has no "key_file") —
                # inconsistent with the "init params take priority" docstring.
                self.key_file = extra_options.get("key_file")
                if "timeout" in extra_options:
                    # NOTE(review): int(x, 10) raises TypeError if the extra
                    # value is already an int — assumes extras are strings.
                    self.timeout = int(extra_options["timeout"], 10)
                if "compress" in extra_options \
                        and str(extra_options["compress"]).lower() == 'false':
                    self.compress = False
                if "no_host_key_check" in extra_options \
                        and \
                        str(extra_options["no_host_key_check"]).lower() == 'false':
                    self.no_host_key_check = False
                if "allow_host_key_change" in extra_options \
                        and \
                        str(extra_options["allow_host_key_change"]).lower() == 'true':
                    self.allow_host_key_change = True
        if not self.remote_host:
            raise AirflowException("Missing required param: remote_host")
        # Auto detecting username values from system
        if not self.username:
            self.log.debug(
                "username to ssh to host: %s is not specified for connection id"
                " %s. Using system's default provided by getpass.getuser()",
                self.remote_host, self.ssh_conn_id
            )
            self.username = getpass.getuser()
        # Honour the user's ~/.ssh/config for proxy command and identity file.
        user_ssh_config_filename = os.path.expanduser('~/.ssh/config')
        if os.path.isfile(user_ssh_config_filename):
            ssh_conf = paramiko.SSHConfig()
            ssh_conf.parse(open(user_ssh_config_filename))
            host_info = ssh_conf.lookup(self.remote_host)
            if host_info and host_info.get('proxycommand'):
                self.host_proxy = paramiko.ProxyCommand(host_info.get('proxycommand'))
            # Fall back to the config's identity file only when neither a
            # password nor a key file was provided explicitly.
            if not (self.password or self.key_file):
                if host_info and host_info.get('identityfile'):
                    self.key_file = host_info.get('identityfile')[0]
        self.port = self.port or SSH_PORT

    def get_conn(self):
        """
        Opens a ssh connection to the remote host.

        :return: paramiko.SSHClient object
        """
        self.log.debug('Creating SSH client for conn_id: %s', self.ssh_conn_id)
        client = paramiko.SSHClient()
        if not self.allow_host_key_change:
            self.log.warning('Remote Identification Change is not verified. '
                             'This wont protect against Man-In-The-Middle attacks')
            client.load_system_host_keys()
        if self.no_host_key_check:
            self.log.warning('No Host Key Verification. This wont protect '
                             'against Man-In-The-Middle attacks')
            # Default is RejectPolicy
            client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        # Password authentication (non-empty password) vs key-based auth.
        if self.password and self.password.strip():
            client.connect(hostname=self.remote_host,
                           username=self.username,
                           password=self.password,
                           key_filename=self.key_file,
                           timeout=self.timeout,
                           compress=self.compress,
                           port=self.port,
                           sock=self.host_proxy)
        else:
            client.connect(hostname=self.remote_host,
                           username=self.username,
                           key_filename=self.key_file,
                           timeout=self.timeout,
                           compress=self.compress,
                           port=self.port,
                           sock=self.host_proxy)
        if self.keepalive_interval:
            client.get_transport().set_keepalive(self.keepalive_interval)
        # Kept for the deprecated context-manager protocol (__enter__/__exit__).
        self.client = client
        return client

    def __enter__(self):
        warnings.warn('The contextmanager of SSHHook is deprecated.'
                      'Please use get_conn() as a contextmanager instead.'
                      'This method will be removed in Airflow 2.0',
                      category=DeprecationWarning)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.client is not None:
            self.client.close()
            self.client = None

    def get_tunnel(self, remote_port, remote_host="localhost", local_port=None):
        """
        Creates a tunnel between two hosts. Like ssh -L <LOCAL_PORT>:host:<REMOTE_PORT>.

        :param remote_port: The remote port to create a tunnel to
        :type remote_port: int
        :param remote_host: The remote host to create a tunnel to (default localhost)
        :type remote_host: str
        :param local_port:  The local port to attach the tunnel to
        :type local_port: int

        :return: sshtunnel.SSHTunnelForwarder object
        """
        if local_port:
            local_bind_address = ('localhost', local_port)
        else:
            # Let sshtunnel pick a free local port.
            local_bind_address = ('localhost',)
        if self.password and self.password.strip():
            client = SSHTunnelForwarder(self.remote_host,
                                        ssh_port=self.port,
                                        ssh_username=self.username,
                                        ssh_password=self.password,
                                        ssh_pkey=self.key_file,
                                        ssh_proxy=self.host_proxy,
                                        local_bind_address=local_bind_address,
                                        remote_bind_address=(remote_host, remote_port),
                                        logger=self.log)
        else:
            # host_pkey_directories=[] stops sshtunnel from scanning ~/.ssh
            # for keys; only the explicit ssh_pkey is used.
            client = SSHTunnelForwarder(self.remote_host,
                                        ssh_port=self.port,
                                        ssh_username=self.username,
                                        ssh_pkey=self.key_file,
                                        ssh_proxy=self.host_proxy,
                                        local_bind_address=local_bind_address,
                                        remote_bind_address=(remote_host, remote_port),
                                        host_pkey_directories=[],
                                        logger=self.log)
        return client

    def create_tunnel(self, local_port, remote_port=None, remote_host="localhost"):
        # Deprecated alias for get_tunnel(); note the parameter order differs.
        warnings.warn('SSHHook.create_tunnel is deprecated, Please'
                      'use get_tunnel() instead. But please note that the'
                      'order of the parameters have changed'
                      'This method will be removed in Airflow 2.0',
                      category=DeprecationWarning)
        return self.get_tunnel(remote_port, remote_host, local_port)
We’re receiving rave reviews about our new and improved PICS Help tool. It’s chock-full of helpful information for new and practiced Produce Inventory Control System (PICS) software users. Concerned About Giving Internet Access to Staff? One of the concerns we’ve heard is about allowing internet access to get to PICS Help Online and the risk that viruses or ransomware may be accidentally downloaded from unknown sources. This is a valid concern – and one that we would like to address today. Problem: We have seen about a quarter of our customers have their systems destroyed by ransomware in the past 2-3 years (2 in the last 2 weeks) because of faulty security protocols set up, or no set up, on servers and workstations. Solution: We strongly urge you to consider setting up ALL servers and workstations (not just the PICS server) to flow through a central hub where access to all internet sites would be blocked by default. Then a list of allowed sites would be added. PICS Help would go on the “approved” list, along with other websites your staff need to access, such as vendors, customers, banking, etc.). That way, when a user on any system requests something on the internet, this central spot would see if it is allowed and let them through if it is, or block them if it is not. The master “allowed” list could easily be reviewed by management from time to time to see if there is anything on it which shouldn’t be. The chances of a user downloading/installing anything harmful would be greatly reduced by setting up a process like this. If you would like assistance with server and workstation security processes, please let us know. We will put you in touch with WaudWare’s preferred IT Specialists, System Lifeline. We care about your business.
import oauth2 as oauth import urllib2 as urllib # See https://dev.twitter.com/oauth/overview/application-owner-access-tokens for how to get these credentials access_token_key = "type in your credentials" access_token_secret = "type in your credentials" consumer_key = "type in your credentials" consumer_secret = "type in your credentials" _debug = 0 oauth_token = oauth.Token(key=access_token_key, secret=access_token_secret) oauth_consumer = oauth.Consumer(key=consumer_key, secret=consumer_secret) signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1() http_method = "GET" http_handler = urllib.HTTPHandler(debuglevel=_debug) https_handler = urllib.HTTPSHandler(debuglevel=_debug) ''' Construct, sign, and open a twitter request using the hard-coded credentials above. ''' def twitterreq(url, method, parameters): req = oauth.Request.from_consumer_and_token(oauth_consumer, token=oauth_token, http_method=http_method, http_url=url, parameters=parameters) req.sign_request(signature_method_hmac_sha1, oauth_consumer, oauth_token) headers = req.to_header() if http_method == "POST": encoded_post_data = req.to_postdata() else: encoded_post_data = None url = req.to_url() opener = urllib.OpenerDirector() opener.add_handler(http_handler) opener.add_handler(https_handler) response = opener.open(url, encoded_post_data) return response def fetchsamples(): url = "https://stream.twitter.com/1/statuses/sample.json" parameters = [] response = twitterreq(url, "GET", parameters) for line in response: print line.strip() if __name__ == '__main__': fetchsamples()
Right, we are tired of all the hit and runners, who download as much as they can then bugger off, open a new account and do it all again. Well, I have good news and bad news. On the 2nd of August all low-ratio members will be deleted from the site; once you have been deleted there will be no way to come back. If you don't mind, could you tell me what ACN stands for?
"""Tests for cmake database generation.""" import imp from os import path from unittest import TestCase from EasyClangComplete.plugin.flags_sources import flags_source from EasyClangComplete.plugin.utils import flag imp.reload(flags_source) imp.reload(flag) FlagsSource = flags_source.FlagsSource Flag = flag.Flag class TestFlagsSource(TestCase): """Test getting flags from a list of chunks.""" def test_init(self): """Initialization test.""" include_prefixes = ["-I", "-isystem"] flags_source = FlagsSource(include_prefixes) self.assertEqual(flags_source._include_prefixes, include_prefixes) def test_parse_flags(self): """Test that the flags are parsed correctly.""" from os import listdir current_folder = path.dirname(__file__) folder_to_expand = path.join(current_folder, '*') initial_str_flags = ["-I", current_folder, "-I" + current_folder, "-isystem", current_folder, "-std=c++11", "#simulate a comment", "-Iblah\n", "-I", "blah", "-I" + folder_to_expand] flags = Flag.tokenize_list(initial_str_flags, current_folder) expected_blah_path = path.join(current_folder, "blah") self.assertIn(Flag("-I", current_folder, " "), flags) self.assertIn(Flag("-I", current_folder), flags) self.assertIn(Flag("-isystem", current_folder, " "), flags) self.assertIn(Flag("-I", expected_blah_path, " "), flags) self.assertIn(Flag("-I", expected_blah_path), flags) self.assertIn(Flag("", "-std=c++11"), flags) # Check star expansion for a flags source for child in listdir(current_folder): child = path.join(current_folder, child) if path.isdir(child): self.assertIn(Flag("-I", child), flags) self.assertNotIn(Flag("", "-Iblah"), flags) self.assertNotIn(Flag("-I", "blah", " "), flags) self.assertNotIn(Flag("", "-isystem" + current_folder), flags)
When I am changing the orientation of a device, the master page is behaving very strange. You can see that the white space on the left is appearing. Is it a known bug in Xamarin Forms? Does anyone know some workaround to prevent this strange behavior? I reproduced this issue on my side and reported it in Github. Set MasterBehavior = MasterBehavior.Popover; in the MainPage.xaml.cs constructor. The issue seems to be with Xamarin.Forms (Ver.3.0.XXXXXX). 5. Clean and rebuild your project.
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Functional tests for BiasAdd.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import test_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import gradient_checker from tensorflow.python.ops import gradients_impl from tensorflow.python.ops import nn_ops import tensorflow.python.ops.nn_grad # pylint: disable=unused-import from tensorflow.python.platform import test class BiasAddTest(test.TestCase): def _npBias(self, inputs, bias): assert len(bias.shape) == 1 print(inputs.shape) print(bias.shape) assert inputs.shape[-1] == bias.shape[0] return inputs + bias.reshape(([1] * (len(inputs.shape) - 1)) + [bias.shape[0]]) def testNpBias(self): self.assertAllClose( np.array([[11, 22, 33], [41, 52, 63]]), self._npBias( np.array([[10, 20, 30], [40, 50, 60]]), np.array([1, 2, 3]))) def _testBias(self, np_inputs, np_bias, use_gpu=False): np_val = self._npBias(np_inputs, np_bias) with self.cached_session(use_gpu=use_gpu): tf_val = nn_ops.bias_add(np_inputs, np_bias).eval() self.assertAllCloseAccordingToType(np_val, tf_val) def _AtLeast3d(self, np_value): # fill the 
input value to at least 3-dimension if np_value.ndim < 3: return np.reshape(np_value, (1,) * (3 - np_value.ndim) + np_value.shape) return np_value def _NHWCToNCHW(self, np_value): # fill the input value to at least 3-dimension np_value = self._AtLeast3d(np_value) # move the last dimension to second np_dim = list(range(np_value.ndim)) np_dim_new = list(np_dim[0:1]) + list(np_dim[-1:]) + list(np_dim[1:-1]) return np.transpose(np_value, np_dim_new) def _NCHWToNHWC(self, np_value): assert len(np_value.shape) >= 3 np_dim = list(range(np_value.ndim)) # move the second dimension to the last np_dim_new = list(np_dim[0:1]) + list(np_dim[2:]) + list(np_dim[1:2]) return np.transpose(np_value, np_dim_new) def _testBiasNCHW(self, np_inputs, np_bias, use_gpu): np_val = self._npBias(np_inputs, np_bias) np_inputs = self._NHWCToNCHW(np_inputs) with self.cached_session(use_gpu=use_gpu): tf_val = nn_ops.bias_add(np_inputs, np_bias, data_format="NCHW").eval() tf_val = self._NCHWToNHWC(tf_val) self.assertAllCloseAccordingToType(self._AtLeast3d(np_val), tf_val) def _testAll(self, np_inputs, np_bias): self._testBias(np_inputs, np_bias, use_gpu=False) self._testBiasNCHW(np_inputs, np_bias, use_gpu=False) if np_inputs.dtype in [np.float16, np.float32, np.float64]: self._testBias(np_inputs, np_bias, use_gpu=True) self._testBiasNCHW(np_inputs, np_bias, use_gpu=True) @test_util.run_deprecated_v1 def testInputDims(self): with self.assertRaises(ValueError): nn_ops.bias_add([1, 2], [1]) @test_util.run_deprecated_v1 def testBiasVec(self): with self.assertRaises(ValueError): nn_ops.bias_add( array_ops.reshape( [1, 2], shape=[1, 2]), array_ops.reshape( [1, 2], shape=[1, 2])) @test_util.run_deprecated_v1 def testBiasInputsMatch(self): with self.assertRaises(ValueError): nn_ops.bias_add( array_ops.reshape( [1, 2], shape=[1, 2]), array_ops.reshape( [1], shape=[1])) @test_util.run_deprecated_v1 def testIntTypes(self): for t in [np.int8, np.int16, np.int32, np.int64]: self._testAll( np.array([[10, 20, 
30], [40, 50, 60]]).astype(t), np.array([1, 2, 3]).astype(t)) @test_util.run_deprecated_v1 def testFloatTypes(self): for t in [np.float16, np.float32, np.float64]: self._testAll( np.random.rand(4, 3, 3).astype(t), np.random.rand(3).astype(t)) @test_util.run_deprecated_v1 def test4DFloatTypes(self): for t in [np.float16, np.float32, np.float64]: self._testAll( np.random.rand(4, 3, 2, 3).astype(t), np.random.rand(3).astype(t)) @test_util.run_deprecated_v1 def test5DFloatTypes(self): for t in [np.float16, np.float32, np.float64]: self._testAll( np.random.rand(4, 3, 2, 3, 4).astype(t), np.random.rand(4).astype(t)) def _testGradient(self, np_input, bias, dtype, data_format, use_gpu): with self.cached_session(use_gpu=use_gpu): if data_format == "NCHW": np_input = self._NHWCToNCHW(np_input) input_tensor = constant_op.constant( np_input, shape=np_input.shape, dtype=dtype) bias_tensor = constant_op.constant(bias, shape=bias.shape, dtype=dtype) output_tensor = nn_ops.bias_add( input_tensor, bias_tensor, data_format=data_format) tensor_jacob_t, tensor_jacob_n = gradient_checker.compute_gradient( input_tensor, np_input.shape, output_tensor, np_input.shape) bias_jacob_t, bias_jacob_n = gradient_checker.compute_gradient( bias_tensor, bias.shape, output_tensor, np_input.shape) # Test gradient of BiasAddGrad bias_add_grad = gradients_impl.gradients( nn_ops.l2_loss(output_tensor), bias_tensor)[0] grad_jacob_t, grad_jacob_n = gradient_checker.compute_gradient( output_tensor, np_input.shape, bias_add_grad, bias.shape) if dtype == np.float16: # Compare fp16 theoretical gradients to fp32 numerical gradients, # since fp16 numerical gradients are too imprecise unless great # care is taken with choosing the inputs and the delta. This is # a weaker check (in particular, it does not test the op itself, # only its gradient), but it's much better than nothing. 
input_tensor = constant_op.constant( np_input, shape=np_input.shape, dtype=np.float32) bias_tensor = constant_op.constant( bias, shape=bias.shape, dtype=np.float32) output_tensor = nn_ops.bias_add( input_tensor, bias_tensor, data_format=data_format) _, tensor_jacob_n = gradient_checker.compute_gradient(input_tensor, np_input.shape, output_tensor, np_input.shape) _, bias_jacob_n = gradient_checker.compute_gradient(bias_tensor, bias.shape, output_tensor, np_input.shape) bias_add_grad = gradients_impl.gradients( nn_ops.l2_loss(output_tensor), bias_tensor)[0] _, grad_jacob_n = gradient_checker.compute_gradient(output_tensor, np_input.shape, bias_add_grad, bias.shape) threshold = 2e-3 if dtype == dtypes.float64: threshold = 1e-10 self.assertAllClose(tensor_jacob_t, tensor_jacob_n, threshold, threshold) self.assertAllClose(bias_jacob_t, bias_jacob_n, threshold, threshold) self.assertAllClose(grad_jacob_t, grad_jacob_n, threshold, threshold) @test_util.run_deprecated_v1 def testGradientTensor2D(self): for (data_format, use_gpu) in ("NHWC", False), ("NHWC", True): for dtype in (dtypes.float16, dtypes.float32, dtypes.float64): np_input = np.array( [1.0, 2.0, 3.0, 4.0, 5.0, 6.0], dtype=dtype.as_numpy_dtype).reshape(3, 2) bias = np.array([1.3, 2.4], dtype=dtype.as_numpy_dtype) self._testGradient(np_input, bias, dtype, data_format, use_gpu) @test_util.run_deprecated_v1 def testGradientTensor3D(self): for (data_format, use_gpu) in [("NHWC", False), ("NHWC", True), ("NCHW", False), ("NCHW", True)]: for dtype in (dtypes.float16, dtypes.float32, dtypes.float64): np_input = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], dtype=dtype.as_numpy_dtype).reshape(1, 3, 2) bias = np.array([1.3, 2.4], dtype=dtype.as_numpy_dtype) self._testGradient(np_input, bias, dtype, data_format, use_gpu) @test_util.run_deprecated_v1 def testGradientTensor4D(self): for (data_format, use_gpu) in [("NHWC", False), ("NHWC", True), ("NCHW", False), ("NCHW", True)]: for dtype in (dtypes.float16, dtypes.float32, 
dtypes.float64): np_input = np.arange( 1.0, 49.0, dtype=dtype.as_numpy_dtype).reshape( [2, 3, 4, 2]).astype(np.float32) bias = np.array([1.3, 2.4], dtype=dtype.as_numpy_dtype) self._testGradient(np_input, bias, dtype, data_format, use_gpu) @test_util.run_deprecated_v1 def testGradientTensor5D(self): for (data_format, use_gpu) in [("NHWC", False), ("NHWC", True), ("NCHW", False), ("NCHW", True)]: for dtype in (dtypes.float16, dtypes.float32, dtypes.float64): np_input = np.arange( 1.0, 49.0, dtype=dtype.as_numpy_dtype).reshape( [1, 2, 3, 4, 2]).astype(np.float32) bias = np.array([1.3, 2.4], dtype=dtype.as_numpy_dtype) self._testGradient(np_input, bias, dtype, data_format, use_gpu) @test_util.run_deprecated_v1 def testEmpty(self): np.random.seed(7) for shape in (0, 0), (2, 0), (0, 2), (4, 3, 0), (4, 0, 3), (0, 4, 3): self._testAll(np.random.randn(*shape), np.random.randn(shape[-1])) @test_util.run_deprecated_v1 def testEmptyGradient(self): for (data_format, use_gpu) in ("NHWC", False), ("NHWC", True): for shape in (0, 0), (2, 0), (0, 2): self._testGradient( np.random.randn(*shape), np.random.randn(shape[-1]), dtypes.float64, data_format, use_gpu) for (data_format, use_gpu) in [("NHWC", False), ("NHWC", True), ("NCHW", False), ("NCHW", True)]: for shape in (4, 3, 0), (4, 0, 3), (0, 4, 3): self._testGradient( np.random.randn(*shape), np.random.randn(shape[-1]), dtypes.float64, data_format, use_gpu) if __name__ == "__main__": test.main()
The Bluetooth function of the camera is exclusive to Sena and creates a whole new experience for the users by providing unique Bluetooth capability such as remote voice recording, remote control, and useful voice feedback from the operations. For motorcycle riders, Sena provides unique mounting accessories, the QRM™ (Quick Release Mount) system, which enables riders to quickly attach and detach the camera to change the view through the use of a variety of mounting options such as the windshield mount, fog lamp type mount, and side mirror mount. The Sena Prism is the perfect addition for every adventurer with Bluetooth technology in action.